diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index 88c683ef44e93..8c1833a0c44df 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -143,12 +143,12 @@ impl<'a, S: Str> StrVector for &'a [S] { } // `len` calculation may overflow but push_str will check boundaries - let len = self.iter().map(|s| s.as_slice().len()).sum(); + let len = self.iter().map(|s| s.as_str().len()).sum(); let mut result = String::with_capacity(len); for s in self.iter() { - result.push_str(s.as_slice()) + result.push_str(s.as_str()) } result @@ -167,7 +167,7 @@ impl<'a, S: Str> StrVector for &'a [S] { // this is wrong without the guarantee that `self` is non-empty // `len` calculation may overflow but push_str but will check boundaries let len = sep.len() * (self.len() - 1) - + self.iter().map(|s| s.as_slice().len()).sum(); + + self.iter().map(|s| s.as_str().len()).sum(); let mut result = String::with_capacity(len); let mut first = true; @@ -177,7 +177,7 @@ impl<'a, S: Str> StrVector for &'a [S] { } else { result.push_str(sep); } - result.push_str(s.as_slice()); + result.push_str(s.as_str()); } result } @@ -408,7 +408,7 @@ impl<'a> Iterator for Recompositions<'a> { /// use std::str; /// let string = "orange"; /// let new_string = str::replace(string, "or", "str"); -/// assert_eq!(new_string.as_slice(), "strange"); +/// assert_eq!(new_string.as_str(), "strange"); /// ``` pub fn replace(s: &str, from: &str, to: &str) -> String { let mut result = String::new(); @@ -497,7 +497,7 @@ impl<'a> MaybeOwned<'a> { /// /// ```rust /// let string = "orange"; - /// let maybe_owned_string = string.as_slice().into_maybe_owned(); + /// let maybe_owned_string = string.as_str().into_maybe_owned(); /// assert_eq!(true, maybe_owned_string.is_slice()); /// ``` #[inline] @@ -534,7 +534,7 @@ impl<'a> IntoMaybeOwned<'a> for &'a str { /// /// ```rust /// let string = "orange"; - /// let maybe_owned_str = string.as_slice().into_maybe_owned(); + /// let maybe_owned_str = string.as_str().into_maybe_owned(); /// assert_eq!(false, maybe_owned_str.is_owned()); /// ``` #[inline] @@ -546,7 +546,7 @@ impl<'a> IntoMaybeOwned<'a> for MaybeOwned<'a> { /// /// ```rust /// let str = "orange"; - /// let maybe_owned_str = str.as_slice().into_maybe_owned(); + /// let maybe_owned_str = str.as_str().into_maybe_owned(); /// let maybe_maybe_owned_str = maybe_owned_str.into_maybe_owned(); /// assert_eq!(false, maybe_maybe_owned_str.is_owned()); /// ``` @@ -557,7 +557,7 @@ impl<'a> IntoMaybeOwned<'a> for MaybeOwned<'a> { impl<'a> PartialEq for MaybeOwned<'a> { #[inline] fn eq(&self, other: &MaybeOwned) -> bool { - self.as_slice() == other.as_slice() + self.as_str() == other.as_str() } } @@ -573,23 +573,23 @@ impl<'a> PartialOrd for MaybeOwned<'a> { impl<'a> Ord for MaybeOwned<'a> { #[inline] fn cmp(&self, other: &MaybeOwned) -> Ordering { - self.as_slice().cmp(&other.as_slice()) + self.as_str().cmp(&other.as_str()) } } impl<'a, S: Str> Equiv for MaybeOwned<'a> { #[inline] fn equiv(&self, other: &S) -> bool { - self.as_slice() == other.as_slice() + self.as_str() == other.as_str() } } impl<'a> Str for MaybeOwned<'a> { #[inline] - fn as_slice<'b>(&'b self) -> &'b str { + fn as_str<'b>(&'b self) -> &'b str { match *self { Slice(s) => s, - Owned(ref s) => s.as_slice() + Owned(ref s) => s.as_str() } } } @@ -606,7 +606,7 @@ impl<'a> StrAllocating for MaybeOwned<'a> { impl<'a> Collection for MaybeOwned<'a> { #[inline] - fn len(&self) -> uint { self.as_slice().len() } + fn len(&self) -> uint { self.as_str().len() } } impl<'a> Clone 
for MaybeOwned<'a> { @@ -614,7 +614,7 @@ impl<'a> Clone for MaybeOwned<'a> { fn clone(&self) -> MaybeOwned<'a> { match *self { Slice(s) => Slice(s), - Owned(ref s) => Owned(String::from_str(s.as_slice())) + Owned(ref s) => Owned(String::from_str(s.as_str())) } } } @@ -627,7 +627,7 @@ impl<'a> Default for MaybeOwned<'a> { impl<'a, H: hash::Writer> hash::Hash for MaybeOwned<'a> { #[inline] fn hash(&self, hasher: &mut H) { - self.as_slice().hash(hasher) + self.as_str().hash(hasher) } } @@ -694,7 +694,7 @@ pub trait StrAllocating: Str { /// Escapes each char in `s` with `char::escape_default`. fn escape_default(&self) -> String { - let me = self.as_slice(); + let me = self.as_str(); let mut out = String::with_capacity(me.len()); for c in me.chars() { c.escape_default(|c| out.push_char(c)); @@ -704,7 +704,7 @@ pub trait StrAllocating: Str { /// Escapes each char in `s` with `char::escape_unicode`. fn escape_unicode(&self) -> String { - let me = self.as_slice(); + let me = self.as_str(); let mut out = String::with_capacity(me.len()); for c in me.chars() { c.escape_unicode(|c| out.push_char(c)); @@ -737,7 +737,7 @@ pub trait StrAllocating: Str { /// assert_eq!(s.replace("cookie monster", "little lamb"), s); /// ``` fn replace(&self, from: &str, to: &str) -> String { - let me = self.as_slice(); + let me = self.as_str(); let mut result = String::new(); let mut last_end = 0; for (start, end) in me.match_indices(from) { @@ -754,19 +754,19 @@ pub trait StrAllocating: Str { #[inline] fn to_owned(&self) -> String { unsafe { - mem::transmute(Vec::from_slice(self.as_slice().as_bytes())) + mem::transmute(Vec::from_slice(self.as_str().as_bytes())) } } /// Converts to a vector of `u16` encoded as UTF-16. #[deprecated = "use `utf16_units` instead"] fn to_utf16(&self) -> Vec { - self.as_slice().utf16_units().collect::>() + self.as_str().utf16_units().collect::>() } /// Given a string, makes a new string with repeated copies of it. fn repeat(&self, nn: uint) -> String { - let me = self.as_slice(); + let me = self.as_str(); let mut ret = String::with_capacity(nn * me.len()); for _ in range(0, nn) { ret.push_str(me); @@ -776,7 +776,7 @@ pub trait StrAllocating: Str { /// Returns the Levenshtein Distance between two strings. 
fn lev_distance(&self, t: &str) -> uint { - let me = self.as_slice(); + let me = self.as_str(); let slen = me.len(); let tlen = t.len(); @@ -814,7 +814,7 @@ pub trait StrAllocating: Str { #[inline] fn nfd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self.as_slice().chars(), + iter: self.as_str().chars(), buffer: Vec::new(), sorted: false, kind: Canonical @@ -826,7 +826,7 @@ pub trait StrAllocating: Str { #[inline] fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self.as_slice().chars(), + iter: self.as_str().chars(), buffer: Vec::new(), sorted: false, kind: Compatible @@ -954,10 +954,10 @@ mod tests { #[test] fn test_collect() { let empty = String::from_str(""); - let s: String = empty.as_slice().chars().collect(); + let s: String = empty.as_str().chars().collect(); assert_eq!(empty, s); let data = String::from_str("ประเทศไทย中"); - let s: String = data.as_slice().chars().collect(); + let s: String = data.as_str().chars().collect(); assert_eq!(data, s); } @@ -965,7 +965,7 @@ mod tests { fn test_into_bytes() { let data = String::from_str("asdf"); let buf = data.into_bytes(); - assert_eq!(b"asdf", buf.as_slice()); + assert_eq!(b"asdf", buf.as_str()); } #[test] @@ -982,21 +982,21 @@ mod tests { let string = "ประเทศไทย中华Việt Nam"; let mut data = String::from_str(string); data.push_str(string); - assert!(data.as_slice().find_str("ไท华").is_none()); - assert_eq!(data.as_slice().slice(0u, 43u).find_str(""), Some(0u)); - assert_eq!(data.as_slice().slice(6u, 43u).find_str(""), Some(6u - 6u)); + assert!(data.as_str().find_str("ไท华").is_none()); + assert_eq!(data.as_str().slice(0u, 43u).find_str(""), Some(0u)); + assert_eq!(data.as_str().slice(6u, 43u).find_str(""), Some(6u - 6u)); - assert_eq!(data.as_slice().slice(0u, 43u).find_str("ประ"), Some( 0u)); - assert_eq!(data.as_slice().slice(0u, 43u).find_str("ทศไ"), Some(12u)); - assert_eq!(data.as_slice().slice(0u, 43u).find_str("ย中"), Some(24u)); - assert_eq!(data.as_slice().slice(0u, 43u).find_str("iệt"), Some(34u)); - assert_eq!(data.as_slice().slice(0u, 43u).find_str("Nam"), Some(40u)); + assert_eq!(data.as_str().slice(0u, 43u).find_str("ประ"), Some( 0u)); + assert_eq!(data.as_str().slice(0u, 43u).find_str("ทศไ"), Some(12u)); + assert_eq!(data.as_str().slice(0u, 43u).find_str("ย中"), Some(24u)); + assert_eq!(data.as_str().slice(0u, 43u).find_str("iệt"), Some(34u)); + assert_eq!(data.as_str().slice(0u, 43u).find_str("Nam"), Some(40u)); - assert_eq!(data.as_slice().slice(43u, 86u).find_str("ประ"), Some(43u - 43u)); - assert_eq!(data.as_slice().slice(43u, 86u).find_str("ทศไ"), Some(55u - 43u)); - assert_eq!(data.as_slice().slice(43u, 86u).find_str("ย中"), Some(67u - 43u)); - assert_eq!(data.as_slice().slice(43u, 86u).find_str("iệt"), Some(77u - 43u)); - assert_eq!(data.as_slice().slice(43u, 86u).find_str("Nam"), Some(83u - 43u)); + assert_eq!(data.as_str().slice(43u, 86u).find_str("ประ"), Some(43u - 43u)); + assert_eq!(data.as_str().slice(43u, 86u).find_str("ทศไ"), Some(55u - 43u)); + assert_eq!(data.as_str().slice(43u, 86u).find_str("ย中"), Some(67u - 43u)); + assert_eq!(data.as_str().slice(43u, 86u).find_str("iệt"), Some(77u - 43u)); + assert_eq!(data.as_str().slice(43u, 86u).find_str("Nam"), Some(83u - 43u)); } #[test] @@ -1015,7 +1015,7 @@ mod tests { #[test] fn test_concat() { fn t(v: &[String], s: &str) { - assert_eq!(v.concat().as_slice(), s); + assert_eq!(v.concat().as_str(), s); } t([String::from_str("you"), String::from_str("know"), String::from_str("I'm"), @@ -1029,7 +1029,7 @@ mod tests { 
#[test] fn test_connect() { fn t(v: &[String], sep: &str, s: &str) { - assert_eq!(v.connect(sep).as_slice(), s); + assert_eq!(v.connect(sep).as_str(), s); } t([String::from_str("you"), String::from_str("know"), String::from_str("I'm"), @@ -1043,7 +1043,7 @@ mod tests { #[test] fn test_concat_slices() { fn t(v: &[&str], s: &str) { - assert_eq!(v.concat().as_slice(), s); + assert_eq!(v.concat().as_str(), s); } t(["you", "know", "I'm", "no", "good"], "youknowI'mnogood"); let v: &[&str] = []; @@ -1054,7 +1054,7 @@ mod tests { #[test] fn test_connect_slices() { fn t(v: &[&str], sep: &str, s: &str) { - assert_eq!(v.connect(sep).as_slice(), s); + assert_eq!(v.connect(sep).as_str(), s); } t(["you", "know", "I'm", "no", "good"], " ", "you know I'm no good"); @@ -1096,7 +1096,7 @@ mod tests { } let letters = a_million_letter_a(); assert!(half_a_million_letter_a() == - unsafe {String::from_str(raw::slice_bytes(letters.as_slice(), + unsafe {String::from_str(raw::slice_bytes(letters.as_str(), 0u, 500000))}); } @@ -1148,7 +1148,7 @@ mod tests { let a = "ประเ"; let a2 = "دولة الكويتทศไทย中华"; - assert_eq!(data.replace(a, repl).as_slice(), a2); + assert_eq!(data.replace(a, repl).as_str(), a2); } #[test] @@ -1158,7 +1158,7 @@ mod tests { let b = "ะเ"; let b2 = "ปรدولة الكويتทศไทย中华"; - assert_eq!(data.replace(b, repl).as_slice(), b2); + assert_eq!(data.replace(b, repl).as_str(), b2); } #[test] @@ -1168,7 +1168,7 @@ mod tests { let c = "中华"; let c2 = "ประเทศไทยدولة الكويت"; - assert_eq!(data.replace(c, repl).as_slice(), c2); + assert_eq!(data.replace(c, repl).as_str(), c2); } #[test] @@ -1177,7 +1177,7 @@ mod tests { let repl = "دولة الكويت"; let d = "ไท华"; - assert_eq!(data.replace(d, repl).as_slice(), data); + assert_eq!(data.replace(d, repl).as_str(), data); } #[test] @@ -1213,7 +1213,7 @@ mod tests { } let letters = a_million_letter_x(); assert!(half_a_million_letter_x() == - String::from_str(letters.as_slice().slice(0u, 3u * 500000u))); + String::from_str(letters.as_str().slice(0u, 3u * 500000u))); } #[test] @@ -1452,7 +1452,7 @@ mod tests { let b: &[u8] = &[]; assert_eq!("".as_bytes(), b); assert_eq!("abc".as_bytes(), b"abc"); - assert_eq!("ศไทย中华Việt Nam".as_bytes(), v.as_slice()); + assert_eq!("ศไทย中华Việt Nam".as_bytes(), v.as_str()); } #[test] @@ -1487,7 +1487,7 @@ mod tests { let string = "a\nb\nc"; let lines: Vec<&str> = string.lines().collect(); - let lines = lines.as_slice(); + let lines = lines.as_str(); assert_eq!(string.subslice_offset(lines[0]), 0); assert_eq!(string.subslice_offset(lines[1]), 2); assert_eq!(string.subslice_offset(lines[2]), 4); @@ -1506,7 +1506,7 @@ mod tests { let s1: String = String::from_str("All mimsy were the borogoves"); let v: Vec = Vec::from_slice(s1.as_bytes()); - let s2: String = String::from_str(from_utf8(v.as_slice()).unwrap()); + let s2: String = String::from_str(from_utf8(v.as_str()).unwrap()); let mut i: uint = 0u; let n1: uint = s1.len(); let n2: uint = v.len(); @@ -2230,10 +2230,10 @@ mod tests { let s = "a̐éö̲\r\n"; let gr_inds = s.grapheme_indices(true).collect::>(); let b: &[_] = &[(0u, "a̐"), (3, "é"), (6, "ö̲"), (11, "\r\n")]; - assert_eq!(gr_inds.as_slice(), b); + assert_eq!(gr_inds.as_str(), b); let gr_inds = s.grapheme_indices(true).rev().collect::>(); let b: &[_] = &[(11, "\r\n"), (6, "ö̲"), (3, "é"), (0u, "a̐")]; - assert_eq!(gr_inds.as_slice(), b); + assert_eq!(gr_inds.as_str(), b); let mut gr_inds = s.grapheme_indices(true); let e1 = gr_inds.size_hint(); assert_eq!(e1, (1, Some(13))); @@ -2246,14 +2246,14 @@ mod tests { let s = "\n\r\n\r"; let 
gr = s.graphemes(true).rev().collect::>(); let b: &[_] = &["\r", "\r\n", "\n"]; - assert_eq!(gr.as_slice(), b); + assert_eq!(gr.as_str(), b); } #[test] fn test_split_strator() { fn t(s: &str, sep: &str, u: &[&str]) { let v: Vec<&str> = s.split_str(sep).collect(); - assert_eq!(v.as_slice(), u.as_slice()); + assert_eq!(v.as_str(), u.as_str()); } t("--1233345--", "12345", ["--1233345--"]); t("abc::hello::there", "::", ["abc", "hello", "there"]); @@ -2276,7 +2276,7 @@ mod tests { use std::default::Default; fn t() { let s: S = Default::default(); - assert_eq!(s.as_slice(), ""); + assert_eq!(s.as_str(), ""); } t::<&str>(); @@ -2293,7 +2293,7 @@ mod tests { assert_eq!(5, sum_len(["012", "", "34"])); assert_eq!(5, sum_len([String::from_str("01"), String::from_str("2"), String::from_str("34"), String::from_str("")])); - assert_eq!(5, sum_len([s.as_slice()])); + assert_eq!(5, sum_len([s.as_str()])); } #[test] @@ -2312,17 +2312,17 @@ mod tests { fn test_maybe_owned_traits() { let s = Slice("abcde"); assert_eq!(s.len(), 5); - assert_eq!(s.as_slice(), "abcde"); - assert_eq!(String::from_str(s.as_slice()).as_slice(), "abcde"); - assert_eq!(format!("{}", s).as_slice(), "abcde"); + assert_eq!(s.as_str(), "abcde"); + assert_eq!(String::from_str(s.as_str()).as_str(), "abcde"); + assert_eq!(format!("{}", s).as_str(), "abcde"); assert!(s.lt(&Owned(String::from_str("bcdef")))); assert_eq!(Slice(""), Default::default()); let o = Owned(String::from_str("abcde")); assert_eq!(o.len(), 5); - assert_eq!(o.as_slice(), "abcde"); - assert_eq!(String::from_str(o.as_slice()).as_slice(), "abcde"); - assert_eq!(format!("{}", o).as_slice(), "abcde"); + assert_eq!(o.as_str(), "abcde"); + assert_eq!(String::from_str(o.as_str()).as_str(), "abcde"); + assert_eq!(format!("{}", o).as_str(), "abcde"); assert!(o.lt(&Slice("bcdef"))); assert_eq!(Owned(String::from_str("")), Default::default()); diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 05d91a7504150..e54909a7c2e3f 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -577,7 +577,7 @@ impl String { /// ``` #[inline] pub fn truncate(&mut self, len: uint) { - assert!(self.as_slice().is_char_boundary(len)); + assert!(self.as_str().is_char_boundary(len)); self.vec.truncate(len) } @@ -648,7 +648,7 @@ impl String { return None } - let CharRange {ch, next} = self.as_slice().char_range_at_reverse(len); + let CharRange {ch, next} = self.as_str().char_range_at_reverse(len); unsafe { self.vec.set_len(next); } @@ -698,7 +698,7 @@ impl String { return None } - let CharRange {ch, next} = self.as_slice().char_range_at(0); + let CharRange {ch, next} = self.as_str().char_range_at(0); let new_len = len - next; unsafe { ptr::copy_memory(self.vec.as_mut_ptr(), self.vec.as_ptr().offset(next as int), new_len); @@ -760,7 +760,7 @@ impl Extendable for String { impl Str for String { #[inline] - fn as_slice<'a>(&'a self) -> &'a str { + fn as_str<'a>(&'a self) -> &'a str { unsafe { mem::transmute(self.vec.as_slice()) } @@ -782,28 +782,28 @@ impl Default for String { impl fmt::Show for String { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.as_slice().fmt(f) + self.as_str().fmt(f) } } impl hash::Hash for String { #[inline] fn hash(&self, hasher: &mut H) { - self.as_slice().hash(hasher) + self.as_str().hash(hasher) } } impl<'a, S: Str> Equiv for String { #[inline] fn equiv(&self, other: &S) -> bool { - self.as_slice() == other.as_slice() + self.as_str() == other.as_str() } } impl Add for String { fn add(&self, other: &S) -> String { - 
let mut s = String::from_str(self.as_slice()); - s.push_str(other.as_slice()); + let mut s = String::from_str(self.as_str()); + s.push_str(other.as_str()); return s; } } diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index be75bfec32c86..85b1fd4c75947 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -662,7 +662,7 @@ impl Bool for bool { impl<'a, T: str::Str> String for T { fn fmt(&self, f: &mut Formatter) -> Result { - f.pad(self.as_slice()) + f.pad(self.as_str()) } } diff --git a/src/libcore/str.rs b/src/libcore/str.rs index d6f35b0dcc650..01a9c882e168f 100644 --- a/src/libcore/str.rs +++ b/src/libcore/str.rs @@ -1077,19 +1077,19 @@ pub mod traits { impl<'a, S: Str> Equiv for &'a str { #[inline] - fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) } + fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_str()) } } } /// Any string that can be represented as a slice pub trait Str { - /// Work with `self` as a slice. - fn as_slice<'a>(&'a self) -> &'a str; + /// Work with `self` as a &str + fn as_str<'a>(&'a self) -> &'a str; } impl<'a> Str for &'a str { #[inline] - fn as_slice<'a>(&'a self) -> &'a str { *self } + fn as_str<'a>(&'a self) -> &'a str { *self } } impl<'a> Collection for &'a str { diff --git a/src/libdebug/fmt.rs b/src/libdebug/fmt.rs index 0b04a07ea888a..0e3ebd783006a 100644 --- a/src/libdebug/fmt.rs +++ b/src/libdebug/fmt.rs @@ -46,7 +46,7 @@ impl Poly for T { // this allocation of a new string _ => { let s = repr::repr_to_string(self); - f.pad(s.as_slice()) + f.pad(s.as_str()) } } } diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index 9272369f73cf6..459ee2a52f7c8 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -201,11 +201,11 @@ impl<'a> Parser<'a> { Some((_, other)) => { self.err(format!("expected `{}`, found `{}`", c, - other).as_slice()); + other).as_str()); } None => { self.err(format!("expected `{}` but string was terminated", - c).as_slice()); + c).as_str()); } } } diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs index 3ffd39a0065e4..27dda94a71c5b 100644 --- a/src/libgetopts/lib.rs +++ b/src/libgetopts/lib.rs @@ -252,7 +252,7 @@ impl OptGroup { aliases: Vec::new() }, (1,0) => Opt { - name: Short(short_name.as_slice().char_at(0)), + name: Short(short_name.as_str().char_at(0)), hasarg: hasarg, occur: occur, aliases: Vec::new() @@ -263,7 +263,7 @@ impl OptGroup { occur: occur, aliases: vec!( Opt { - name: Short(short_name.as_slice().char_at(0)), + name: Short(short_name.as_str().char_at(0)), hasarg: hasarg, occur: occur, aliases: Vec::new() @@ -306,7 +306,7 @@ impl Matches { pub fn opts_present(&self, names: &[String]) -> bool { for nm in names.iter() { match find_opt(self.opts.as_slice(), - Name::from_str(nm.as_slice())) { + Name::from_str(nm.as_str())) { Some(id) if !self.vals[id].is_empty() => return true, _ => (), }; @@ -317,7 +317,7 @@ impl Matches { /// Returns the string argument supplied to one of several matching options or `None`. 
pub fn opts_str(&self, names: &[String]) -> Option { for nm in names.iter() { - match self.opt_val(nm.as_slice()) { + match self.opt_val(nm.as_str()) { Some(Val(ref s)) => return Some(s.clone()), _ => () } @@ -547,9 +547,9 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { while i < l { let cur = args[i].clone(); let curlen = cur.len(); - if !is_arg(cur.as_slice()) { + if !is_arg(cur.as_str()) { free.push(cur); - } else if cur.as_slice() == "--" { + } else if cur.as_str() == "--" { let mut j = i + 1; while j < l { free.push(args[j].clone()); j += 1; } break; @@ -557,7 +557,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { let mut names; let mut i_arg = None; if cur.as_bytes()[1] == b'-' { - let tail = cur.as_slice().slice(2, curlen); + let tail = cur.as_str().slice(2, curlen); let tail_eq: Vec<&str> = tail.split('=').collect(); if tail_eq.len() <= 1 { names = vec!(Long(tail.to_string())); @@ -570,7 +570,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { let mut j = 1; names = Vec::new(); while j < curlen { - let range = cur.as_slice().char_range_at(j); + let range = cur.as_str().char_range_at(j); let opt = Short(range.ch); /* In a series of potential options (eg. -aheJ), if we @@ -593,7 +593,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { }; if arg_follows && range.next < curlen { - i_arg = Some(cur.as_slice() + i_arg = Some(cur.as_str() .slice(range.next, curlen).to_string()); break; } @@ -621,7 +621,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { .push(Val((i_arg.clone()) .unwrap())); } else if name_pos < names.len() || i + 1 == l || - is_arg(args[i + 1].as_slice()) { + is_arg(args[i + 1].as_str()) { vals.get_mut(optid).push(Given); } else { i += 1; @@ -686,7 +686,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} 1 => { row.push_char('-'); - row.push_str(short_name.as_slice()); + row.push_str(short_name.as_str()); row.push_char(' '); } _ => fail!("the short name should only be 1 ascii char long"), @@ -697,7 +697,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} _ => { row.push_str("--"); - row.push_str(long_name.as_slice()); + row.push_str(long_name.as_str()); row.push_char(' '); } } @@ -705,35 +705,35 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // arg match hasarg { No => {} - Yes => row.push_str(hint.as_slice()), + Yes => row.push_str(hint.as_str()), Maybe => { row.push_char('['); - row.push_str(hint.as_slice()); + row.push_str(hint.as_str()); row.push_char(']'); } } // FIXME: #5516 should be graphemes not codepoints // here we just need to indent the start of the description - let rowlen = row.as_slice().char_len(); + let rowlen = row.as_str().char_len(); if rowlen < 24 { for _ in range(0, 24 - rowlen) { row.push_char(' '); } } else { - row.push_str(desc_sep.as_slice()) + row.push_str(desc_sep.as_str()) } // Normalize desc to contain words separated by one space character let mut desc_normalized_whitespace = String::new(); - for word in desc.as_slice().words() { + for word in desc.as_str().words() { desc_normalized_whitespace.push_str(word); desc_normalized_whitespace.push_char(' '); } // FIXME: #5516 should be graphemes not codepoints let mut desc_rows = Vec::new(); - each_split_within(desc_normalized_whitespace.as_slice(), + each_split_within(desc_normalized_whitespace.as_str(), 54, |substr| { desc_rows.push(substr.to_string()); @@ -742,7 +742,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // FIXME: #5516 
should be graphemes not codepoints // wrapped description - row.push_str(desc_rows.connect(desc_sep.as_slice()).as_slice()); + row.push_str(desc_rows.connect(desc_sep.as_str()).as_str()); row }); @@ -761,10 +761,10 @@ fn format_option(opt: &OptGroup) -> String { // Use short_name is possible, but fallback to long_name. if opt.short_name.len() > 0 { line.push_char('-'); - line.push_str(opt.short_name.as_slice()); + line.push_str(opt.short_name.as_str()); } else { line.push_str("--"); - line.push_str(opt.long_name.as_slice()); + line.push_str(opt.long_name.as_str()); } if opt.hasarg != No { @@ -772,7 +772,7 @@ fn format_option(opt: &OptGroup) -> String { if opt.hasarg == Maybe { line.push_char('['); } - line.push_str(opt.hint.as_slice()); + line.push_str(opt.hint.as_str()); if opt.hasarg == Maybe { line.push_char(']'); } @@ -795,7 +795,7 @@ pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String { .map(format_option) .collect::>() .connect(" ") - .as_slice()); + .as_str()); line } diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index ea298f5e05f71..d5ba013118864 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -354,7 +354,7 @@ impl<'a> Id<'a> { pub fn new>(name: Name) -> Id<'a> { let name = name.into_maybe_owned(); { - let mut chars = name.as_slice().chars(); + let mut chars = name.as_str().chars(); assert!(is_letter_or_underscore(chars.next().unwrap())); assert!(chars.all(is_constituent)); } @@ -372,7 +372,7 @@ impl<'a> Id<'a> { } pub fn as_slice(&'a self) -> &'a str { - self.name.as_slice() + self.name.as_str() } pub fn name(self) -> str::MaybeOwned<'a> { @@ -434,8 +434,8 @@ impl<'a> LabelText<'a> { /// Renders text as string suitable for a label in a .dot file. pub fn escape(&self) -> String { match self { - &LabelStr(ref s) => s.as_slice().escape_default(), - &EscStr(ref s) => LabelText::escape_str(s.as_slice()), + &LabelStr(ref s) => s.as_str().escape_default(), + &EscStr(ref s) => LabelText::escape_str(s.as_str()), } } @@ -446,8 +446,8 @@ impl<'a> LabelText<'a> { fn pre_escaped_content(self) -> str::MaybeOwned<'a> { match self { EscStr(s) => s, - LabelStr(s) => if s.as_slice().contains_char('\\') { - str::Owned(s.as_slice().escape_default()) + LabelStr(s) => if s.as_str().contains_char('\\') { + str::Owned(s.as_str().escape_default()) } else { s }, @@ -463,7 +463,7 @@ impl<'a> LabelText<'a> { pub fn suffix_line(self, suffix: LabelText) -> LabelText<'static> { let prefix = self.pre_escaped_content().into_string(); let suffix = suffix.pre_escaped_content(); - EscStr(str::Owned(prefix.append(r"\n\n").append(suffix.as_slice()))) + EscStr(str::Owned(prefix.append(r"\n\n").append(suffix.as_str()))) } } @@ -518,7 +518,7 @@ pub fn render<'a, N:'a, E:'a, G:Labeller<'a,N,E>+GraphWalk<'a,N,E>, W:Writer>( let id = g.node_id(n); let escaped = g.node_label(n).escape(); try!(writeln(w, [id.as_slice(), - "[label=\"", escaped.as_slice(), "\"];"])); + "[label=\"", escaped.as_str(), "\"];"])); } for e in g.edges().iter() { @@ -529,7 +529,7 @@ pub fn render<'a, N:'a, E:'a, G:Labeller<'a,N,E>+GraphWalk<'a,N,E>, W:Writer>( let source_id = g.node_id(&source); let target_id = g.node_id(&target); try!(writeln(w, [source_id.as_slice(), " -> ", target_id.as_slice(), - "[label=\"", escaped_label.as_slice(), "\"];"])); + "[label=\"", escaped_label.as_str(), "\"];"])); } writeln(w, ["}"]) diff --git a/src/libgreen/macros.rs b/src/libgreen/macros.rs index 4cce430d88a8d..21a2ccf8ce643 100644 --- a/src/libgreen/macros.rs +++ b/src/libgreen/macros.rs @@ -47,7 +47,7 @@ 
macro_rules! rtassert ( macro_rules! rtabort ( ($($arg:tt)*) => ( { - ::macros::abort(format!($($arg)*).as_slice()); + ::macros::abort(format!($($arg)*).as_str()); } ) ) diff --git a/src/libgreen/stack.rs b/src/libgreen/stack.rs index 4673e7b3ba209..91a658f2c2fea 100644 --- a/src/libgreen/stack.rs +++ b/src/libgreen/stack.rs @@ -164,7 +164,7 @@ fn max_cached_stacks() -> uint { 0 => {} n => return n - 1, } - let amt = getenv("RUST_MAX_CACHED_STACKS").and_then(|s| from_str(s.as_slice())); + let amt = getenv("RUST_MAX_CACHED_STACKS").and_then(|s| from_str(s.as_str())); // This default corresponds to 20M of cache per scheduler (at the // default size). let amt = amt.unwrap_or(10); diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs index 6461a6cf402a0..f8282b5924ce6 100644 --- a/src/liblog/lib.rs +++ b/src/liblog/lib.rs @@ -283,7 +283,7 @@ pub fn log(level: u32, loc: &'static LogLocation, args: &fmt::Arguments) { // Test the literal string from args against the current filter, if there // is one. match unsafe { FILTER.as_ref() } { - Some(filter) if filter.is_match(args.to_string().as_slice()) => return, + Some(filter) if filter.is_match(args.to_string().as_str()) => return, _ => {} } @@ -372,7 +372,7 @@ fn enabled(level: u32, // Search for the longest match, the vector is assumed to be pre-sorted. for directive in iter.rev() { match directive.name { - Some(ref name) if !module.starts_with(name.as_slice()) => {}, + Some(ref name) if !module.starts_with(name.as_str()) => {}, Some(..) | None => { return level <= directive.level } @@ -387,7 +387,7 @@ fn enabled(level: u32, /// `Once` primitive (and this function is called from that primitive). fn init() { let (mut directives, filter) = match os::getenv("RUST_LOG") { - Some(spec) => directive::parse_logging_spec(spec.as_slice()), + Some(spec) => directive::parse_logging_spec(spec.as_str()), None => (Vec::new(), None), }; diff --git a/src/libregex/parse.rs b/src/libregex/parse.rs index c3ce7bbd9f2ca..79cdd6e1ce402 100644 --- a/src/libregex/parse.rs +++ b/src/libregex/parse.rs @@ -281,7 +281,7 @@ impl<'a> Parser<'a> { true => Ok(()), false => { self.err(format!("Expected {} but got EOF.", - expected).as_slice()) + expected).as_str()) } } } @@ -290,10 +290,10 @@ impl<'a> Parser<'a> { match self.next_char() { true if self.cur() == expected => Ok(()), true => self.err(format!("Expected '{}' but got '{}'.", - expected, self.cur()).as_slice()), + expected, self.cur()).as_str()), false => { self.err(format!("Expected '{}' but got EOF.", - expected).as_slice()) + expected).as_str()) } } } @@ -440,7 +440,7 @@ impl<'a> Parser<'a> { return self.err(format!("Invalid character class \ range '{}-{}'", c, - c2).as_slice()) + c2).as_str()) } ranges.push((c, self.cur())) } else { @@ -480,7 +480,7 @@ impl<'a> Parser<'a> { FLAG_EMPTY }; let name = self.slice(name_start, closer - 1); - match find_class(ASCII_CLASSES, name.as_slice()) { + match find_class(ASCII_CLASSES, name.as_str()) { None => None, Some(ranges) => { self.chari = closer; @@ -505,7 +505,7 @@ impl<'a> Parser<'a> { return self.err(format!("No closing brace for counted \ repetition starting at position \ {}.", - start).as_slice()) + start).as_str()) } }; self.chari = closer; @@ -515,11 +515,11 @@ impl<'a> Parser<'a> { // Parse the min and max values from the regex. 
let (mut min, mut max): (uint, Option); - if !inner.as_slice().contains(",") { - min = try!(self.parse_uint(inner.as_slice())); + if !inner.as_str().contains(",") { + min = try!(self.parse_uint(inner.as_str())); max = Some(min); } else { - let pieces: Vec<&str> = inner.as_slice().splitn(1, ',').collect(); + let pieces: Vec<&str> = inner.as_str().splitn(1, ',').collect(); let (smin, smax) = (pieces[0], pieces[1]); if smin.len() == 0 { return self.err("Max repetitions cannot be specified \ @@ -538,19 +538,19 @@ impl<'a> Parser<'a> { if min > MAX_REPEAT { return self.err(format!( "{} exceeds maximum allowed repetitions ({})", - min, MAX_REPEAT).as_slice()); + min, MAX_REPEAT).as_str()); } if max.is_some() { let m = max.unwrap(); if m > MAX_REPEAT { return self.err(format!( "{} exceeds maximum allowed repetitions ({})", - m, MAX_REPEAT).as_slice()); + m, MAX_REPEAT).as_str()); } if m < min { return self.err(format!( "Max repetitions ({}) cannot be smaller than min \ - repetitions ({}).", m, min).as_slice()); + repetitions ({}).", m, min).as_str()); } } @@ -615,7 +615,7 @@ impl<'a> Parser<'a> { } _ => { self.err(format!("Invalid escape sequence '\\\\{}'", - c).as_slice()) + c).as_str()) } } } @@ -635,7 +635,7 @@ impl<'a> Parser<'a> { Some(i) => i, None => return self.err(format!( "Missing '}}' for unclosed '{{' at position {}", - self.chari).as_slice()), + self.chari).as_str()), }; if closer - self.chari + 1 == 0 { return self.err("No Unicode class name found.") @@ -649,10 +649,10 @@ impl<'a> Parser<'a> { name = self.slice(self.chari + 1, self.chari + 2); self.chari += 1; } - match find_class(UNICODE_CLASSES, name.as_slice()) { + match find_class(UNICODE_CLASSES, name.as_str()) { None => { return self.err(format!("Could not find Unicode class '{}'", - name).as_slice()) + name).as_str()) } Some(ranges) => { Ok(Class(ranges, negated | (self.flags & FLAG_NOCASE))) @@ -675,11 +675,11 @@ impl<'a> Parser<'a> { } } let s = self.slice(start, end); - match num::from_str_radix::(s.as_slice(), 8) { + match num::from_str_radix::(s.as_str(), 8) { Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { self.err(format!("Could not parse '{}' as octal number.", - s).as_slice()) + s).as_str()) } } } @@ -697,12 +697,12 @@ impl<'a> Parser<'a> { None => { return self.err(format!("Missing '}}' for unclosed \ '{{' at position {}", - start).as_slice()) + start).as_str()) } Some(i) => i, }; self.chari = closer; - self.parse_hex_digits(self.slice(start, closer).as_slice()) + self.parse_hex_digits(self.slice(start, closer).as_str()) } // Parses a two-digit hex number. @@ -713,8 +713,8 @@ impl<'a> Parser<'a> { let (start, end) = (self.chari, self.chari + 2); let bad = self.slice(start - 2, self.chars.len()); try!(self.noteof(format!("Invalid hex escape sequence '{}'", - bad).as_slice())) - self.parse_hex_digits(self.slice(start, end).as_slice()) + bad).as_str())) + self.parse_hex_digits(self.slice(start, end).as_str()) } // Parses `s` as a hexadecimal number. 
@@ -723,7 +723,7 @@ impl<'a> Parser<'a> { Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { self.err(format!("Could not parse '{}' as hex number.", - s).as_slice()) + s).as_str()) } } } @@ -743,13 +743,13 @@ impl<'a> Parser<'a> { return self.err("Capture names must have at least 1 character.") } let name = self.slice(self.chari, closer); - if !name.as_slice().chars().all(is_valid_cap) { + if !name.as_str().chars().all(is_valid_cap) { return self.err( "Capture names can only have underscores, letters and digits.") } if self.names.contains(&name) { return self.err(format!("Duplicate capture group name '{}'.", - name).as_slice()) + name).as_str()) } self.names.push(name.clone()); self.chari = closer; @@ -781,7 +781,7 @@ impl<'a> Parser<'a> { if sign < 0 { return self.err(format!( "Cannot negate flags twice in '{}'.", - self.slice(start, self.chari + 1)).as_slice()) + self.slice(start, self.chari + 1)).as_str()) } sign = -1; saw_flag = false; @@ -792,7 +792,7 @@ impl<'a> Parser<'a> { if !saw_flag { return self.err(format!( "A valid flag does not follow negation in '{}'", - self.slice(start, self.chari + 1)).as_slice()) + self.slice(start, self.chari + 1)).as_str()) } flags = flags ^ flags; } @@ -804,7 +804,7 @@ impl<'a> Parser<'a> { return Ok(()) } _ => return self.err(format!( - "Unrecognized flag '{}'.", self.cur()).as_slice()), + "Unrecognized flag '{}'.", self.cur()).as_str()), } } } @@ -900,7 +900,7 @@ impl<'a> Parser<'a> { Some(i) => Ok(i), None => { self.err(format!("Expected an unsigned integer but got '{}'.", - s).as_slice()) + s).as_str()) } } } @@ -911,7 +911,7 @@ impl<'a> Parser<'a> { None => { self.err(format!("Could not decode '{}' to unicode \ character.", - n).as_slice()) + n).as_str()) } } } diff --git a/src/libregex/re.rs b/src/libregex/re.rs index 8e4145b2a3198..a70ca7b31e6dc 100644 --- a/src/libregex/re.rs +++ b/src/libregex/re.rs @@ -501,7 +501,7 @@ impl Regex { let (s, e) = cap.pos(0).unwrap(); // captures only reports matches new.push_str(text.slice(last_match, s)); - new.push_str(rep.reg_replace(&cap).as_slice()); + new.push_str(rep.reg_replace(&cap).as_str()); last_match = e; } new.append(text.slice(last_match, text.len())) @@ -510,8 +510,8 @@ impl Regex { /// Returns the original string of this regex. pub fn as_str<'a>(&'a self) -> &'a str { match *self { - Dynamic(Dynamic { ref original, .. }) => original.as_slice(), - Native(Native { ref original, .. }) => original.as_slice(), + Dynamic(Dynamic { ref original, .. }) => original.as_str(), + Native(Native { ref original, .. }) => original.as_str(), } } @@ -765,13 +765,13 @@ impl<'t> Captures<'t> { let text = re.replace_all(text, |refs: &Captures| -> String { let (pre, name) = (refs.at(1), refs.at(2)); format!("{}{}", pre, - match from_str::(name.as_slice()) { + match from_str::(name.as_str()) { None => self.name(name).to_string(), Some(i) => self.at(i).to_string(), }) }); let re = Regex::new(r"\$\$").unwrap(); - re.replace_all(text.as_slice(), NoExpand("$")) + re.replace_all(text.as_str(), NoExpand("$")) } } diff --git a/src/libregex/vm.rs b/src/libregex/vm.rs index 1adaf9c92a6ac..d08a92616b0c2 100644 --- a/src/libregex/vm.rs +++ b/src/libregex/vm.rs @@ -144,7 +144,7 @@ impl<'r, 't> Nfa<'r, 't> { // jump ahead quickly. If it can't be found, then we can bail // out early. 
if self.prog.prefix.len() > 0 && clist.size == 0 { - let needle = self.prog.prefix.as_slice().as_bytes(); + let needle = self.prog.prefix.as_str().as_bytes(); let haystack = self.input.as_bytes().slice_from(self.ic); match find_prefix(needle, haystack) { None => break, diff --git a/src/librustc/back/link.rs b/src/librustc/back/link.rs index bf026560c6afd..209afb7003eee 100644 --- a/src/librustc/back/link.rs +++ b/src/librustc/back/link.rs @@ -131,7 +131,7 @@ pub fn find_crate_name(sess: Option<&Session>, use syntax::crateid::CrateId; let validate = |s: String, span: Option| { - creader::validate_crate_name(sess, s.as_slice(), span); + creader::validate_crate_name(sess, s.as_str(), span); s }; @@ -147,11 +147,11 @@ pub fn find_crate_name(sess: Option<&Session>, match sess.opts.crate_name { Some(ref s) => { match attr_crate_name { - Some((attr, ref name)) if s.as_slice() != name.get() => { + Some((attr, ref name)) if s.as_str() != name.get() => { let msg = format!("--crate-name and #[crate_name] \ are required to match, but `{}` \ != `{}`", s, name); - sess.span_err(attr.span, msg.as_slice()); + sess.span_err(attr.span, msg.as_str()); } _ => {}, } @@ -226,17 +226,17 @@ fn symbol_hash(tcx: &ty::ctxt, // to be independent of one another in the crate. symbol_hasher.reset(); - symbol_hasher.input_str(link_meta.crate_name.as_slice()); + symbol_hasher.input_str(link_meta.crate_name.as_str()); symbol_hasher.input_str("-"); symbol_hasher.input_str(link_meta.crate_hash.as_str()); for meta in tcx.sess.crate_metadata.borrow().iter() { - symbol_hasher.input_str(meta.as_slice()); + symbol_hasher.input_str(meta.as_str()); } symbol_hasher.input_str("-"); - symbol_hasher.input_str(encoder::encoded_ty(tcx, t).as_slice()); + symbol_hasher.input_str(encoder::encoded_ty(tcx, t).as_str()); // Prefix with 'h' so that it never blends into adjacent digits let mut hash = String::from_str("h"); - hash.push_str(truncated_hash_result(symbol_hasher).as_slice()); + hash.push_str(truncated_hash_result(symbol_hasher).as_str()); hash } @@ -285,7 +285,7 @@ pub fn sanitize(s: &str) -> String { let mut tstr = String::new(); char::escape_unicode(c, |c| tstr.push_char(c)); result.push_char('$'); - result.push_str(tstr.as_slice().slice_from(1)); + result.push_str(tstr.as_str().slice_from(1)); } } } @@ -294,7 +294,7 @@ pub fn sanitize(s: &str) -> String { if result.len() > 0u && result.as_bytes()[0] != '_' as u8 && ! char::is_XID_start(result.as_bytes()[0] as char) { - return format!("_{}", result.as_slice()); + return format!("_{}", result.as_str()); } return result; @@ -320,12 +320,12 @@ pub fn mangle>(mut path: PI, fn push(n: &mut String, s: &str) { let sani = sanitize(s); - n.push_str(format!("{}{}", sani.len(), sani).as_slice()); + n.push_str(format!("{}{}", sani.len(), sani).as_str()); } // First, connect each component with pairs. 
for e in path { - push(&mut n, token::get_name(e.name()).get().as_slice()) + push(&mut n, token::get_name(e.name()).get().as_str()) } match hash { @@ -363,17 +363,17 @@ pub fn mangle_exported_name(ccx: &CrateContext, path: PathElems, hash.push_char(EXTRA_CHARS.as_bytes()[extra2] as char); hash.push_char(EXTRA_CHARS.as_bytes()[extra3] as char); - exported_name(path, hash.as_slice()) + exported_name(path, hash.as_str()) } pub fn mangle_internal_name_by_type_and_seq(ccx: &CrateContext, t: ty::t, name: &str) -> String { let s = ppaux::ty_to_string(ccx.tcx(), t); - let path = [PathName(token::intern(s.as_slice())), + let path = [PathName(token::intern(s.as_str())), gensym_name(name)]; let hash = get_symbol_hash(ccx, t); - mangle(ast_map::Values(path.iter()), Some(hash.as_slice())) + mangle(ast_map::Values(path.iter()), Some(hash.as_str())) } pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String { @@ -409,7 +409,7 @@ pub fn remove(sess: &Session, path: &Path) { Err(e) => { sess.err(format!("failed to remove {}: {}", path.display(), - e).as_slice()); + e).as_str()); } } } @@ -424,7 +424,7 @@ pub fn link_binary(sess: &Session, for &crate_type in sess.crate_types.borrow().iter() { if invalid_output_for_target(sess, crate_type) { sess.bug(format!("invalid output type `{}` for target os `{}`", - crate_type, sess.targ_cfg.os).as_slice()); + crate_type, sess.targ_cfg.os).as_str()); } let out_file = link_binary_output(sess, trans, crate_type, outputs, crate_name); @@ -540,12 +540,12 @@ fn link_binary_output(sess: &Session, if !out_is_writeable { sess.fatal(format!("output file {} is not writeable -- check its \ permissions.", - out_filename.display()).as_slice()); + out_filename.display()).as_str()); } else if !obj_is_writeable { sess.fatal(format!("object file {} is not writeable -- check its \ permissions.", - obj_filename.display()).as_slice()); + obj_filename.display()).as_str()); } match crate_type { @@ -598,7 +598,7 @@ fn link_rlib<'a>(sess: &'a Session, for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() { match kind { cstore::NativeStatic => { - ab.add_native_library(l.as_slice()).unwrap(); + ab.add_native_library(l.as_str()).unwrap(); } cstore::NativeFramework | cstore::NativeUnknown => {} } @@ -651,7 +651,7 @@ fn link_rlib<'a>(sess: &'a Session, Err(e) => { sess.err(format!("failed to write {}: {}", metadata.display(), - e).as_slice()); + e).as_str()); sess.abort_if_errors(); } } @@ -675,20 +675,20 @@ fn link_rlib<'a>(sess: &'a Session, let bc_data = match fs::File::open(&bc_filename).read_to_end() { Ok(buffer) => buffer, Err(e) => sess.fatal(format!("failed to read bytecode: {}", - e).as_slice()) + e).as_str()) }; let bc_data_deflated = match flate::deflate_bytes(bc_data.as_slice()) { Some(compressed) => compressed, None => sess.fatal(format!("failed to compress bytecode from {}", - bc_filename.display()).as_slice()) + bc_filename.display()).as_str()) }; let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) { Ok(file) => file, Err(e) => { sess.fatal(format!("failed to create compressed bytecode \ - file: {}", e).as_slice()) + file: {}", e).as_str()) } }; @@ -697,7 +697,7 @@ fn link_rlib<'a>(sess: &'a Session, Ok(()) => {} Err(e) => { sess.err(format!("failed to write compressed bytecode: \ - {}", e).as_slice()); + {}", e).as_str()); sess.abort_if_errors() } }; @@ -772,11 +772,11 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { let p = match *path { Some(ref p) => p.clone(), None => { 
sess.err(format!("could not find rlib for: `{}`", - name).as_slice()); + name).as_str()); continue } }; - ab.add_rlib(&p, name.as_slice(), sess.lto()).unwrap(); + ab.add_rlib(&p, name.as_str(), sess.lto()).unwrap(); let native_libs = csearch::get_native_libraries(&sess.cstore, cnum); all_native_libs.extend(native_libs.move_iter()); @@ -798,7 +798,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { cstore::NativeUnknown => "library", cstore::NativeFramework => "framework", }; - sess.note(format!("{}: {}", name, *lib).as_slice()); + sess.note(format!("{}: {}", name, *lib).as_str()); } } @@ -812,7 +812,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, // The invocations of cc share some flags across platforms let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname.as_slice()); + let mut cmd = Command::new(pname.as_str()); cmd.args(sess.targ_cfg.target_strs.cc_args.as_slice()); link_args(&mut cmd, sess, dylib, tmpdir.path(), @@ -833,8 +833,8 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, if !prog.status.success() { sess.err(format!("linking with `{}` failed: {}", pname, - prog.status).as_slice()); - sess.note(format!("{}", &cmd).as_slice()); + prog.status).as_str()); + sess.note(format!("{}", &cmd).as_str()); let mut output = prog.error.clone(); output.push_all(prog.output.as_slice()); sess.note(str::from_utf8(output.as_slice()).unwrap()); @@ -846,7 +846,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e).as_slice()); + e).as_str()); sess.abort_if_errors(); } } @@ -859,7 +859,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, match Command::new("dsymutil").arg(out_filename).output() { Ok(..) => {} Err(e) => { - sess.err(format!("failed to run dsymutil: {}", e).as_slice()); + sess.err(format!("failed to run dsymutil: {}", e).as_str()); sess.abort_if_errors(); } } @@ -945,8 +945,8 @@ fn link_args(cmd: &mut Command, // be used during an exploit of a vulnerability in any code. if sess.targ_cfg.os == abi::OsLinux { let mut args = sess.opts.cg.link_args.iter().chain(used_link_args.iter()); - if !dylib && sess.opts.cg.relocation_model.as_slice() == "pic" && - !args.any(|x| x.as_slice() == "-static") { + if !dylib && sess.opts.cg.relocation_model.as_str() == "pic" && + !args.any(|x| x.as_str() == "-static") { cmd.arg("-pie"); } } @@ -1100,7 +1100,7 @@ fn link_args(cmd: &mut Command, // addl_lib_search_paths if sess.opts.cg.rpath { let sysroot = sess.sysroot(); - let target_triple = sess.opts.target_triple.as_slice(); + let target_triple = sess.opts.target_triple.as_str(); let get_install_prefix_lib_path = || { let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX"); let tlib = filesearch::relative_target_lib_path(sysroot, target_triple); @@ -1190,7 +1190,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { } else { // -force_load is the OSX equivalent of --whole-archive, but it // involves passing the full path to the library to link. 
- let lib = archive::find_library(l.as_slice(), + let lib = archive::find_library(l.as_str(), sess.targ_cfg.os, search_path.as_slice(), &sess.diagnostic().handler); @@ -1209,7 +1209,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { cmd.arg(format!("-l{}", l)); } cstore::NativeFramework => { - cmd.arg("-framework").arg(l.as_slice()); + cmd.arg("-framework").arg(l.as_str()); } cstore::NativeStatic => unreachable!(), } @@ -1292,7 +1292,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, let name = cratepath.filename_str().unwrap(); let name = name.slice(3, name.len() - 5); // chop off lib/.rlib time(sess.time_passes(), - format!("altering {}.rlib", name).as_slice(), + format!("altering {}.rlib", name).as_str(), (), |()| { let dst = tmpdir.join(cratepath.filename().unwrap()); match fs::copy(&cratepath, &dst) { @@ -1301,7 +1301,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, sess.err(format!("failed to copy {} to {}: {}", cratepath.display(), dst.display(), - e).as_slice()); + e).as_str()); sess.abort_if_errors(); } } @@ -1314,9 +1314,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, maybe_ar_prog: sess.opts.cg.ar.clone() }; let mut archive = Archive::open(config); - archive.remove_file(format!("{}.o", name).as_slice()); + archive.remove_file(format!("{}.o", name).as_str()); let files = archive.files(); - if files.iter().any(|s| s.as_slice().ends_with(".o")) { + if files.iter().any(|s| s.as_str().ends_with(".o")) { cmd.arg(dst); } }); @@ -1380,7 +1380,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) { } cstore::NativeFramework => { cmd.arg("-framework"); - cmd.arg(lib.as_slice()); + cmd.arg(lib.as_str()); } cstore::NativeStatic => { sess.bug("statics shouldn't be propagated"); diff --git a/src/librustc/back/lto.rs b/src/librustc/back/lto.rs index d7f183faa0192..c5a3323dadbdc 100644 --- a/src/librustc/back/lto.rs +++ b/src/librustc/back/lto.rs @@ -52,7 +52,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(p) => p, None => { sess.fatal(format!("could not find rlib for: `{}`", - name).as_slice()); + name).as_str()); } }; @@ -61,18 +61,18 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, let file = file.slice(3, file.len() - 5); // chop off lib/.rlib debug!("reading {}", file); let bc_encoded = time(sess.time_passes(), - format!("read {}.bytecode.deflate", name).as_slice(), + format!("read {}.bytecode.deflate", name).as_str(), (), |_| { archive.read(format!("{}.bytecode.deflate", - file).as_slice()) + file).as_str()) }); let bc_encoded = match bc_encoded { Some(data) => data, None => { sess.fatal(format!("missing compressed bytecode in {} \ (perhaps it was compiled with -C codegen-units > 1)", - path.display()).as_slice()); + path.display()).as_str()); }, }; let bc_extractor = if is_versioned_bytecode_format(bc_encoded) { @@ -91,12 +91,12 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(inflated) => inflated, None => { sess.fatal(format!("failed to decompress bc of `{}`", - name).as_slice()) + name).as_str()) } } } else { sess.fatal(format!("Unsupported bytecode format version {}", - version).as_slice()) + version).as_str()) } } } else { @@ -107,21 +107,21 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(bc) => bc, None => { sess.fatal(format!("failed to decompress bc of `{}`", - name).as_slice()) + name).as_str()) } } } }; let bc_decoded = time(sess.time_passes(), - format!("decode {}.bc", file).as_slice(), + format!("decode {}.bc", file).as_str(), (), 
bc_extractor); let ptr = bc_decoded.as_slice().as_ptr(); debug!("linking {}", name); time(sess.time_passes(), - format!("ll link {}", name).as_slice(), + format!("ll link {}", name).as_str(), (), |()| unsafe { if !llvm::LLVMRustLinkInExternalBitcode(llmod, @@ -129,14 +129,14 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, bc_decoded.len() as libc::size_t) { write::llvm_err(sess.diagnostic().handler(), format!("failed to load bc of `{}`", - name.as_slice())); + name.as_str())); } }); } // Internalize everything but the reachable symbols of the current module let cstrs: Vec<::std::c_str::CString> = - reachable.iter().map(|s| s.as_slice().to_c_str()).collect(); + reachable.iter().map(|s| s.as_str().to_c_str()).collect(); let arr: Vec<*const i8> = cstrs.iter().map(|c| c.as_ptr()).collect(); let ptr = arr.as_ptr(); unsafe { diff --git a/src/librustc/back/write.rs b/src/librustc/back/write.rs index 627d455f06e11..de578cbaf5795 100644 --- a/src/librustc/back/write.rs +++ b/src/librustc/back/write.rs @@ -47,13 +47,13 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! { unsafe { let cstr = llvm::LLVMRustGetLastError(); if cstr == ptr::null() { - handler.fatal(msg.as_slice()); + handler.fatal(msg.as_str()); } else { let err = CString::new(cstr, true); let err = String::from_utf8_lossy(err.as_bytes()); handler.fatal(format!("{}: {}", - msg.as_slice(), - err.as_slice()).as_slice()); + msg.as_str(), + err.as_str()).as_str()); } } } @@ -104,13 +104,13 @@ impl SharedEmitter { match diag.code { Some(ref code) => { handler.emit_with_code(None, - diag.msg.as_slice(), - code.as_slice(), + diag.msg.as_str(), + code.as_str(), diag.lvl); }, None => { handler.emit(None, - diag.msg.as_slice(), + diag.msg.as_str(), diag.lvl); }, } @@ -153,16 +153,16 @@ impl Emitter for SharedEmitter { fn target_feature<'a>(sess: &'a Session) -> &'a str { match sess.targ_cfg.os { abi::OsAndroid => { - if "" == sess.opts.cg.target_feature.as_slice() { + if "" == sess.opts.cg.target_feature.as_str() { "+v7" } else { - sess.opts.cg.target_feature.as_slice() + sess.opts.cg.target_feature.as_str() } }, abi::OsiOS if sess.targ_cfg.arch == abi::Arm => { "+v7,+thumb2,+vfp3,+neon" }, - _ => sess.opts.cg.target_feature.as_slice() + _ => sess.opts.cg.target_feature.as_str() } } @@ -176,7 +176,7 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel { } fn create_target_machine(sess: &Session) -> TargetMachineRef { - let reloc_model = match sess.opts.cg.relocation_model.as_slice() { + let reloc_model = match sess.opts.cg.relocation_model.as_str() { "pic" => llvm::RelocPIC, "static" => llvm::RelocStatic, "default" => llvm::RelocDefault, @@ -185,7 +185,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { sess.err(format!("{} is not a valid relocation mode", sess.opts .cg - .relocation_model).as_slice()); + .relocation_model).as_str()); sess.abort_if_errors(); unreachable!(); } @@ -206,7 +206,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { sess.targ_cfg.os != abi::OsWindows; let fdata_sections = ffunction_sections; - let code_model = match sess.opts.cg.code_model.as_slice() { + let code_model = match sess.opts.cg.code_model.as_str() { "default" => llvm::CodeModelDefault, "small" => llvm::CodeModelSmall, "kernel" => llvm::CodeModelKernel, @@ -216,7 +216,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { sess.err(format!("{} is not a valid code model", sess.opts .cg - .code_model).as_slice()); + .code_model).as_str()); sess.abort_if_errors(); 
unreachable!(); } @@ -226,9 +226,9 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { sess.targ_cfg .target_strs .target_triple - .as_slice() + .as_str() .with_c_str(|t| { - sess.opts.cg.target_cpu.as_slice().with_c_str(|cpu| { + sess.opts.cg.target_cpu.as_str().with_c_str(|cpu| { target_feature(sess).with_c_str(|features| { llvm::LLVMRustCreateTargetMachine( t, cpu, features, @@ -340,7 +340,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, if config.emit_no_opt_bc { let ext = format!("{}.no-opt.bc", name_extra); - output_names.with_extension(ext.as_slice()).with_c_str(|buf| { + output_names.with_extension(ext.as_str()).with_c_str(|buf| { llvm::LLVMWriteBitcodeToFile(llmod, buf); }) } @@ -357,7 +357,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, // If we're verifying or linting, add them to the function pass // manager. let addpass = |pass: &str| { - pass.as_slice().with_c_str(|s| llvm::LLVMRustAddPass(fpm, s)) + pass.as_str().with_c_str(|s| llvm::LLVMRustAddPass(fpm, s)) }; if !config.no_verify { assert!(addpass("verify")); } @@ -369,10 +369,10 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, } for pass in config.passes.iter() { - pass.as_slice().with_c_str(|s| { + pass.as_str().with_c_str(|s| { if !llvm::LLVMRustAddPass(mpm, s) { cgcx.handler.warn(format!("unknown pass {}, ignoring", - *pass).as_slice()); + *pass).as_str()); } }) } @@ -394,7 +394,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, if config.emit_lto_bc { let name = format!("{}.lto.bc", name_extra); - output_names.with_extension(name.as_slice()).with_c_str(|buf| { + output_names.with_extension(name.as_str()).with_c_str(|buf| { llvm::LLVMWriteBitcodeToFile(llmod, buf); }) } @@ -424,7 +424,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, if config.emit_bc { let ext = format!("{}.bc", name_extra); - output_names.with_extension(ext.as_slice()).with_c_str(|buf| { + output_names.with_extension(ext.as_str()).with_c_str(|buf| { llvm::LLVMWriteBitcodeToFile(llmod, buf); }) } @@ -432,7 +432,7 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, time(config.time_passes, "codegen passes", (), |()| { if config.emit_ir { let ext = format!("{}.ll", name_extra); - output_names.with_extension(ext.as_slice()).with_c_str(|output| { + output_names.with_extension(ext.as_str()).with_c_str(|output| { with_codegen(tm, llmod, config.no_builtins, |cpm| { llvm::LLVMRustPrintModule(cpm, llmod, output); }) @@ -440,14 +440,14 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, } if config.emit_asm { - let path = output_names.with_extension(format!("{}.s", name_extra).as_slice()); + let path = output_names.with_extension(format!("{}.s", name_extra).as_str()); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::AssemblyFile); }); } if config.emit_obj { - let path = output_names.with_extension(format!("{}.o", name_extra).as_slice()); + let path = output_names.with_extension(format!("{}.o", name_extra).as_str()); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::ObjectFile); }); @@ -600,7 +600,7 @@ pub fn run_passes(sess: &Session, // 2) Multiple codegen units, with `-o some_name`. We have // no good solution for this case, so warn the user. sess.warn(format!("ignoring -o because multiple .{} files were produced", - ext).as_slice()); + ext).as_str()); } else { // 3) Multiple codegen units, but no `-o some_name`. We // just leave the `foo.0.x` files in place. 
@@ -633,13 +633,13 @@ pub fn run_passes(sess: &Session, }; let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname.as_slice()); + let mut cmd = Command::new(pname.as_str()); cmd.args(sess.targ_cfg.target_strs.cc_args.as_slice()); cmd.arg("-nostdlib"); for index in range(0, trans.modules.len()) { - cmd.arg(crate_output.with_extension(format!("{}.o", index).as_slice())); + cmd.arg(crate_output.with_extension(format!("{}.o", index).as_str())); } cmd.arg("-r") @@ -658,7 +658,7 @@ pub fn run_passes(sess: &Session, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e).as_slice()); + e).as_str()); sess.abort_if_errors(); }, } @@ -721,12 +721,12 @@ pub fn run_passes(sess: &Session, for i in range(0, trans.modules.len()) { if modules_config.emit_obj { let ext = format!("{}.o", i); - remove(sess, &crate_output.with_extension(ext.as_slice())); + remove(sess, &crate_output.with_extension(ext.as_str())); } if modules_config.emit_bc && !save_bitcode { let ext = format!("{}.bc", i); - remove(sess, &crate_output.with_extension(ext.as_slice())); + remove(sess, &crate_output.with_extension(ext.as_str())); } } @@ -829,7 +829,7 @@ fn run_work_multithreaded(sess: &Session, pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname.as_slice()); + let mut cmd = Command::new(pname.as_str()); cmd.arg("-c").arg("-o").arg(outputs.path(OutputTypeObject)) .arg(outputs.temp_path(OutputTypeAssembly)); @@ -840,8 +840,8 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { if !prog.status.success() { sess.err(format!("linking with `{}` failed: {}", pname, - prog.status).as_slice()); - sess.note(format!("{}", &cmd).as_slice()); + prog.status).as_str()); + sess.note(format!("{}", &cmd).as_str()); let mut note = prog.error.clone(); note.push_all(prog.output.as_slice()); sess.note(str::from_utf8(note.as_slice()).unwrap()); @@ -851,7 +851,7 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e).as_slice()); + e).as_str()); sess.abort_if_errors(); } } @@ -884,7 +884,7 @@ unsafe fn configure_llvm(sess: &Session) { if sess.print_llvm_passes() { add("-debug-pass=Structure"); } for arg in sess.opts.cg.llvm_args.iter() { - add((*arg).as_slice()); + add((*arg).as_str()); } } diff --git a/src/librustc/driver/config.rs b/src/librustc/driver/config.rs index 8f4f54ce96735..2c20670949910 100644 --- a/src/librustc/driver/config.rs +++ b/src/librustc/driver/config.rs @@ -362,23 +362,23 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions { let mut cg = basic_codegen_options(); for option in matches.opt_strs("C").move_iter() { - let mut iter = option.as_slice().splitn(1, '='); + let mut iter = option.as_str().splitn(1, '='); let key = iter.next().unwrap(); let value = iter.next(); let option_to_lookup = key.replace("-", "_"); let mut found = false; for &(candidate, setter, _) in CG_OPTIONS.iter() { - if option_to_lookup.as_slice() != candidate { continue } + if option_to_lookup.as_str() != candidate { continue } if !setter(&mut cg, value) { match value { Some(..) 
=> { early_error(format!("codegen option `{}` takes no \ - value", key).as_slice()) + value", key).as_str()) } None => { early_error(format!("codegen option `{0}` requires \ a value (-C {0}=)", - key).as_slice()) + key).as_str()) } } } @@ -387,7 +387,7 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions } if !found { early_error(format!("unknown codegen option: `{}`", - key).as_slice()); + key).as_str()); } } return cg; @@ -494,15 +494,15 @@ static architecture_abis : &'static [(&'static str, abi::Architecture)] = &[ ("mips", abi::Mips)]; pub fn build_target_config(sopts: &Options) -> Config { - let os = match get_os(sopts.target_triple.as_slice()) { + let os = match get_os(sopts.target_triple.as_str()) { Some(os) => os, None => early_error("unknown operating system") }; - let arch = match get_arch(sopts.target_triple.as_slice()) { + let arch = match get_arch(sopts.target_triple.as_str()) { Some(arch) => arch, None => { early_error(format!("unknown architecture: {}", - sopts.target_triple.as_slice()).as_slice()) + sopts.target_triple.as_str()).as_str()) } }; let (int_type, uint_type) = match arch { @@ -607,7 +607,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let unparsed_crate_types = matches.opt_strs("crate-type"); let crate_types = parse_crate_types_from_list(unparsed_crate_types) - .unwrap_or_else(|e| early_error(e.as_slice())); + .unwrap_or_else(|e| early_error(e.as_str())); let parse_only = matches.opt_present("parse-only"); let no_trans = matches.opt_present("no-trans"); @@ -618,7 +618,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { for &level in [lint::Allow, lint::Warn, lint::Deny, lint::Forbid].iter() { for lint_name in matches.opt_strs(level.as_str()).move_iter() { - if lint_name.as_slice() == "help" { + if lint_name.as_str() == "help" { describe_lints = true; } else { lint_opts.push((lint_name.replace("-", "_").into_string(), level)); @@ -633,14 +633,14 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let mut this_bit = 0; for tuple in debug_map.iter() { let (name, bit) = match *tuple { (ref a, _, b) => (a, b) }; - if *name == debug_flag.as_slice() { + if *name == debug_flag.as_str() { this_bit = bit; break; } } if this_bit == 0 { early_error(format!("unknown debug flag: {}", - *debug_flag).as_slice()) + *debug_flag).as_str()) } debugging_opts |= this_bit; } @@ -653,8 +653,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { if !parse_only && !no_trans { let unparsed_output_types = matches.opt_strs("emit"); for unparsed_output_type in unparsed_output_types.iter() { - for part in unparsed_output_type.as_slice().split(',') { - let output_type = match part.as_slice() { + for part in unparsed_output_type.as_str().split(',') { + let output_type = match part.as_str() { "asm" => write::OutputTypeAssembly, "ir" => write::OutputTypeLlvmAssembly, "bc" => write::OutputTypeBitcode, @@ -662,7 +662,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { "link" => write::OutputTypeExe, _ => { early_error(format!("unknown emission type: `{}`", - part).as_slice()) + part).as_str()) } }; output_types.push(output_type) @@ -687,7 +687,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { } Default } else if matches.opt_present("opt-level") { - match matches.opt_str("opt-level").as_ref().map(|s| s.as_slice()) { + match matches.opt_str("opt-level").as_ref().map(|s| s.as_str()) { None | Some("0") => No, Some("1") => Less, @@ -696,7 
+696,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("optimization level needs to be \ between 0-3 (instead was `{}`)", - arg).as_slice()); + arg).as_str()); } } } else { @@ -710,7 +710,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { } FullDebugInfo } else if matches.opt_present("debuginfo") { - match matches.opt_str("debuginfo").as_ref().map(|s| s.as_slice()) { + match matches.opt_str("debuginfo").as_ref().map(|s| s.as_str()) { Some("0") => NoDebugInfo, Some("1") => LimitedDebugInfo, None | @@ -718,7 +718,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("optimization level needs to be between \ 0-3 (instead was `{}`)", - arg).as_slice()); + arg).as_str()); } } } else { @@ -726,7 +726,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { }; let addl_lib_search_paths = matches.opt_strs("L").iter().map(|s| { - Path::new(s.as_slice()) + Path::new(s.as_str()) }).collect(); let cfg = parse_cfgspecs(matches.opt_strs("cfg")); @@ -744,7 +744,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { } let cg = build_codegen_options(matches); - let color = match matches.opt_str("color").as_ref().map(|s| s.as_slice()) { + let color = match matches.opt_str("color").as_ref().map(|s| s.as_str()) { Some("auto") => Auto, Some("always") => Always, Some("never") => Never, @@ -754,13 +754,13 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("argument for --color must be auto, always \ or never (instead was `{}`)", - arg).as_slice()) + arg).as_str()) } }; let mut externs = HashMap::new(); for arg in matches.opt_strs("extern").iter() { - let mut parts = arg.as_slice().splitn(1, '='); + let mut parts = arg.as_str().splitn(1, '='); let name = match parts.next() { Some(s) => s, None => early_error("--extern value must not be empty"), @@ -806,7 +806,7 @@ pub fn parse_crate_types_from_list(list_list: Vec) -> Result = Vec::new(); for unparsed_crate_type in list_list.iter() { - for part in unparsed_crate_type.as_slice().split(',') { + for part in unparsed_crate_type.as_str().split(',') { let new_part = match part { "lib" => default_lib_output(), "rlib" => CrateTypeRlib, diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs index 4c71c2df44d3e..6d7c12afe3ea3 100644 --- a/src/librustc/driver/driver.rs +++ b/src/librustc/driver/driver.rs @@ -75,7 +75,7 @@ pub fn compile_input(sess: Session, let id = link::find_crate_name(Some(&sess), krate.attrs.as_slice(), input); let (expanded_crate, ast_map) - = match phase_2_configure_and_expand(&sess, krate, id.as_slice(), + = match phase_2_configure_and_expand(&sess, krate, id.as_str(), addl_plugins) { None => return, Some(p) => p, @@ -83,7 +83,7 @@ pub fn compile_input(sess: Session, (outputs, expanded_crate, ast_map, id) }; - write_out_deps(&sess, input, &outputs, id.as_slice()); + write_out_deps(&sess, input, &outputs, id.as_str()); if stop_after_phase_2(&sess) { return; } @@ -513,7 +513,7 @@ pub fn phase_6_link_output(sess: &Session, link::link_binary(sess, trans, outputs, - trans.link.crate_name.as_slice())); + trans.link.crate_name.as_str())); } pub fn stop_after_phase_3(sess: &Session) -> bool { @@ -608,7 +608,7 @@ fn write_out_deps(sess: &Session, Ok(()) => {} Err(e) => { sess.fatal(format!("error writing dependencies to `{}`: {}", - deps_filename.display(), e).as_slice()); + deps_filename.display(), 
e).as_str()); } } } @@ -679,7 +679,7 @@ pub fn collect_crate_types(session: &Session, if !res { session.warn(format!("dropping unsupported crate type `{}` \ for target os `{}`", - *crate_type, session.targ_cfg.os).as_slice()); + *crate_type, session.targ_cfg.os).as_str()); } res diff --git a/src/librustc/driver/mod.rs b/src/librustc/driver/mod.rs index 5e00b9e9e0e05..ededcebb56a6d 100644 --- a/src/librustc/driver/mod.rs +++ b/src/librustc/driver/mod.rs @@ -54,12 +54,12 @@ fn run_compiler(args: &[String]) { let descriptions = diagnostics::registry::Registry::new(super::DIAGNOSTICS); match matches.opt_str("explain") { Some(ref code) => { - match descriptions.find_description(code.as_slice()) { + match descriptions.find_description(code.as_str()) { Some(ref description) => { println!("{}", description); } None => { - early_error(format!("no extended information for {}", code).as_slice()); + early_error(format!("no extended information for {}", code).as_str()); } } return; @@ -79,7 +79,7 @@ fn run_compiler(args: &[String]) { early_error("no input filename given"); } 1u => { - let ifile = matches.free.get(0).as_slice(); + let ifile = matches.free.get(0).as_str(); if ifile == "-" { let contents = io::stdin().read_to_end().unwrap(); let src = String::from_utf8(contents).unwrap(); @@ -97,7 +97,7 @@ fn run_compiler(args: &[String]) { let ofile = matches.opt_str("o").map(|o| Path::new(o)); let pretty = matches.opt_default("pretty", "normal").map(|a| { - pretty::parse_pretty(&sess, a.as_slice()) + pretty::parse_pretty(&sess, a.as_str()) }); match pretty { Some((ppm, opt_uii)) => { @@ -131,7 +131,7 @@ fn run_compiler(args: &[String]) { /// Prints version information and returns None on success or an error /// message on failure. pub fn version(binary: &str, matches: &getopts::Matches) -> Option { - let verbose = match matches.opt_str("version").as_ref().map(|s| s.as_slice()) { + let verbose = match matches.opt_str("version").as_ref().map(|s| s.as_str()) { None => false, Some("verbose") => true, Some(s) => return Some(format!("Unrecognized argument: {}", s)) @@ -155,7 +155,7 @@ Additional help: -C help Print codegen options -W help Print 'lint' options and default settings -Z help Print internal options for debugging rustc\n", - getopts::usage(message.as_slice(), + getopts::usage(message.as_str(), config::optgroups().as_slice())); } @@ -214,7 +214,7 @@ Available lint options: for lint in lints.move_iter() { let name = lint.name_lower().replace("_", "-"); println!(" {} {:7.7s} {}", - padded(name.as_slice()), lint.default_level.as_str(), lint.desc); + padded(name.as_str()), lint.default_level.as_str(), lint.desc); } println!("\n"); }; @@ -240,7 +240,7 @@ Available lint options: .collect::().replace("_", "-"); let desc = to.move_iter().map(|x| x.as_str()).collect::>().connect(", "); println!(" {} {}", - padded(name.as_slice()), desc); + padded(name.as_str()), desc); } println!("\n"); }; @@ -311,7 +311,7 @@ pub fn handle_options(mut args: Vec) -> Option { match getopts::getopts(args.as_slice(), config::optgroups().as_slice()) { Ok(m) => m, Err(f) => { - early_error(f.to_string().as_slice()); + early_error(f.to_string().as_str()); } }; @@ -323,13 +323,13 @@ pub fn handle_options(mut args: Vec) -> Option { // Don't handle -W help here, because we might first load plugins. 
let r = matches.opt_strs("Z"); - if r.iter().any(|x| x.as_slice() == "help") { + if r.iter().any(|x| x.as_str() == "help") { describe_debug_flags(); return None; } let cg_flags = matches.opt_strs("C"); - if cg_flags.iter().any(|x| x.as_slice() == "help") { + if cg_flags.iter().any(|x| x.as_str() == "help") { describe_codegen_flags(); return None; } @@ -341,7 +341,7 @@ pub fn handle_options(mut args: Vec) -> Option { if matches.opt_present("version") { match version("rustc", &matches) { - Some(err) => early_error(err.as_slice()), + Some(err) => early_error(err.as_str()), None => return None } } @@ -373,7 +373,7 @@ fn print_crate_info(sess: &Session, let metadata = driver::collect_crate_metadata(sess, attrs.as_slice()); *sess.crate_metadata.borrow_mut() = metadata; for &style in crate_types.iter() { - let fname = link::filename_for_input(sess, style, id.as_slice(), + let fname = link::filename_for_input(sess, style, id.as_str(), &t_outputs.with_extension("")); println!("{}", fname.filename_display()); } @@ -470,7 +470,7 @@ pub fn monitor(f: proc():Send) { "run with `RUST_BACKTRACE=1` for a backtrace".to_string(), ]; for note in xs.iter() { - emitter.emit(None, note.as_slice(), None, diagnostic::Note) + emitter.emit(None, note.as_str(), None, diagnostic::Note) } match r.read_to_string() { @@ -479,7 +479,7 @@ pub fn monitor(f: proc():Send) { emitter.emit(None, format!("failed to read internal \ stderr: {}", - e).as_slice(), + e).as_str(), None, diagnostic::Error) } diff --git a/src/librustc/driver/pretty.rs b/src/librustc/driver/pretty.rs index a3227e4dbf1d4..70918fb55d6e0 100644 --- a/src/librustc/driver/pretty.rs +++ b/src/librustc/driver/pretty.rs @@ -67,7 +67,7 @@ pub fn parse_pretty(sess: &Session, name: &str) -> (PpMode, Option`, `typed`, `identified`, \ - or `expanded,identified`; got {}", name).as_slice()); + or `expanded,identified`; got {}", name).as_str()); } }; let opt_second = opt_second.and_then::(from_str); @@ -301,7 +301,7 @@ impl<'tcx> pprust::PpAnn for TypedAnnotation<'tcx> { try!(pp::word(&mut s.s, ppaux::ty_to_string( tcx, - ty::expr_ty(tcx, expr)).as_slice())); + ty::expr_ty(tcx, expr)).as_str())); s.pclose() } _ => Ok(()) @@ -386,7 +386,7 @@ impl UserIdentifiedItem { user_option, self.reconstructed_input(), is_wrong_because); - sess.fatal(message.as_slice()) + sess.fatal(message.as_str()) }; let mut saw_node = ast::DUMMY_NODE_ID; @@ -444,7 +444,7 @@ pub fn pretty_print_input(sess: Session, let is_expanded = needs_expansion(&ppm); let (krate, ast_map) = if needs_ast_map(&ppm, &opt_uii) { - let k = driver::phase_2_configure_and_expand(&sess, krate, id.as_slice(), None); + let k = driver::phase_2_configure_and_expand(&sess, krate, id.as_str(), None); let (krate, ast_map) = match k { None => return, Some(p) => p, @@ -456,7 +456,7 @@ pub fn pretty_print_input(sess: Session, let src_name = driver::source_name(input); let src = Vec::from_slice(sess.codemap() - .get_filemap(src_name.as_slice()) + .get_filemap(src_name.as_str()) .src .as_bytes()); let mut rdr = MemReader::new(src); @@ -518,7 +518,7 @@ pub fn pretty_print_input(sess: Session, debug!("pretty printing flow graph for {}", opt_uii); let uii = opt_uii.unwrap_or_else(|| { sess.fatal(format!("`pretty flowgraph=..` needs NodeId (int) or - unique path suffix (b::c::d)").as_slice()) + unique path suffix (b::c::d)").as_str()) }); let ast_map = ast_map.expect("--pretty flowgraph missing ast_map"); @@ -526,7 +526,7 @@ pub fn pretty_print_input(sess: Session, let node = ast_map.find(nodeid).unwrap_or_else(|| { 
sess.fatal(format!("--pretty flowgraph couldn't find id: {}", - nodeid).as_slice()) + nodeid).as_str()) }); let code = blocks::Code::from_node(node); @@ -546,8 +546,8 @@ pub fn pretty_print_input(sess: Session, // point to what was found, if there's an // accessible span. match ast_map.opt_span(nodeid) { - Some(sp) => sess.span_fatal(sp, message.as_slice()), - None => sess.fatal(message.as_slice()) + Some(sp) => sess.span_fatal(sp, message.as_str()), + None => sess.fatal(message.as_str()) } } } diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index 135e21e4e0184..e790283be2adf 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -154,7 +154,7 @@ impl Session { // cases later on pub fn impossible_case(&self, sp: Span, msg: &str) -> ! { self.span_bug(sp, - format!("impossible case reached: {}", msg).as_slice()); + format!("impossible case reached: {}", msg).as_str()); } pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) } pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) } @@ -193,7 +193,7 @@ impl Session { } pub fn target_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> { filesearch::FileSearch::new(self.sysroot(), - self.opts.target_triple.as_slice(), + self.opts.target_triple.as_str(), &self.opts.addl_lib_search_paths) } pub fn host_filesearch<'a>(&'a self) -> filesearch::FileSearch<'a> { diff --git a/src/librustc/front/feature_gate.rs b/src/librustc/front/feature_gate.rs index 225fc28cd6d98..d1642a3f949ca 100644 --- a/src/librustc/front/feature_gate.rs +++ b/src/librustc/front/feature_gate.rs @@ -126,7 +126,7 @@ impl<'a> Context<'a> { self.sess.span_err(span, explain); self.sess.span_note(span, format!("add #![feature({})] to the \ crate attributes to enable", - feature).as_slice()); + feature).as_str()); } } @@ -139,7 +139,7 @@ impl<'a> Context<'a> { } fn has_feature(&self, feature: &str) -> bool { - self.features.iter().any(|n| n.as_slice() == feature) + self.features.iter().any(|n| n.as_str() == feature) } } @@ -293,7 +293,7 @@ impl<'a> Visitor<()> for Context<'a> { if id == token::str_to_ident(quote) { self.gate_feature("quote", path.span, - format!("{}{}", quote, msg).as_slice()); + format!("{}{}", quote, msg).as_str()); } } } diff --git a/src/librustc/front/std_inject.rs b/src/librustc/front/std_inject.rs index 32e0c323d1f94..ccc0e889448a0 100644 --- a/src/librustc/front/std_inject.rs +++ b/src/librustc/front/std_inject.rs @@ -64,7 +64,7 @@ impl<'a> fold::Folder for StandardLibraryInjector<'a> { // The name to use in `extern crate "name" as std;` let actual_crate_name = match self.sess.opts.alt_std_name { - Some(ref s) => token::intern_and_get_ident(s.as_slice()), + Some(ref s) => token::intern_and_get_ident(s.as_str()), None => token::intern_and_get_ident("std"), }; diff --git a/src/librustc/front/test.rs b/src/librustc/front/test.rs index 63e93d266c770..7f110a9394e9d 100644 --- a/src/librustc/front/test.rs +++ b/src/librustc/front/test.rs @@ -478,7 +478,7 @@ fn mk_tests(cx: &TestCtxt) -> Gc { fn is_test_crate(krate: &ast::Crate) -> bool { match attr::find_crate_name(krate.attrs.as_slice()) { - Some(ref s) if "test" == s.get().as_slice() => true, + Some(ref s) if "test" == s.get().as_str() => true, _ => false } } @@ -522,7 +522,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> Gc { // path to the #[test] function: "foo::bar::baz" let path_string = ast_util::path_name_i(path.as_slice()); - let name_expr = ecx.expr_str(span, 
token::intern_and_get_ident(path_string.as_slice())); + let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string.as_str())); // self::test::StaticTestName($name_expr) let name_expr = ecx.expr_call(span, diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 138947e8a873b..c22d4c7732e28 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -446,15 +446,15 @@ impl HeapMemory { if n_uniq > 0 { let s = ty_to_string(cx.tcx, ty); let m = format!("type uses owned (Box type) pointers: {}", s); - cx.span_lint(OWNED_HEAP_MEMORY, span, m.as_slice()); - cx.span_lint(HEAP_MEMORY, span, m.as_slice()); + cx.span_lint(OWNED_HEAP_MEMORY, span, m.as_str()); + cx.span_lint(HEAP_MEMORY, span, m.as_str()); } if n_box > 0 { let s = ty_to_string(cx.tcx, ty); let m = format!("type uses managed (@ type) pointers: {}", s); - cx.span_lint(MANAGED_HEAP_MEMORY, span, m.as_slice()); - cx.span_lint(HEAP_MEMORY, span, m.as_slice()); + cx.span_lint(MANAGED_HEAP_MEMORY, span, m.as_str()); + cx.span_lint(HEAP_MEMORY, span, m.as_str()); } } } @@ -734,7 +734,7 @@ impl LintPass for UnusedResult { msg.push_str(s.get()); } } - cx.span_lint(UNUSED_MUST_USE, sp, msg.as_slice()); + cx.span_lint(UNUSED_MUST_USE, sp, msg.as_str()); return true; } } @@ -776,7 +776,7 @@ impl NonCamelCaseTypes { } else { format!("{} `{}` should have a camel case name such as `{}`", sort, s, c) }; - cx.span_lint(NON_CAMEL_CASE_TYPES, span, m.as_slice()); + cx.span_lint(NON_CAMEL_CASE_TYPES, span, m.as_str()); } } } @@ -881,7 +881,7 @@ impl NonSnakeCase { let mut buf = String::new(); if s.is_empty() { continue; } for ch in s.chars() { - if !buf.is_empty() && buf.as_slice() != "'" && ch.is_uppercase() { + if !buf.is_empty() && buf.as_str() != "'" && ch.is_uppercase() { words.push(buf); buf = String::new(); } @@ -897,7 +897,7 @@ impl NonSnakeCase { if !is_snake_case(ident) { cx.span_lint(NON_SNAKE_CASE, span, format!("{} `{}` should have a snake case name such as `{}`", - sort, s, to_snake_case(s.get())).as_slice()); + sort, s, to_snake_case(s.get())).as_str()); } } } @@ -992,7 +992,7 @@ impl LintPass for NonUppercaseStatics { format!("static constant `{}` should have an uppercase name \ such as `{}`", s.get(), s.get().chars().map(|c| c.to_uppercase()) - .collect::().as_slice()).as_slice()); + .collect::().as_str()).as_str()); } } _ => {} @@ -1009,7 +1009,7 @@ impl LintPass for NonUppercaseStatics { format!("static constant in pattern `{}` should have an uppercase \ name such as `{}`", s.get(), s.get().chars().map(|c| c.to_uppercase()) - .collect::().as_slice()).as_slice()); + .collect::().as_str()).as_str()); } } _ => {} @@ -1031,7 +1031,7 @@ impl UnnecessaryParens { if !necessary { cx.span_lint(UNNECESSARY_PARENS, value.span, format!("unnecessary parentheses around {}", - msg).as_slice()) + msg).as_str()) } } _ => {} @@ -1333,7 +1333,7 @@ impl MissingDoc { }); if !has_doc { cx.span_lint(MISSING_DOC, sp, - format!("missing documentation for {}", desc).as_slice()); + format!("missing documentation for {}", desc).as_str()); } } } @@ -1518,7 +1518,7 @@ impl LintPass for Stability { _ => format!("use of {} item", label) }; - cx.span_lint(lint, e.span, msg.as_slice()); + cx.span_lint(lint, e.span, msg.as_str()); } } diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 26ed5cbfb2cc1..3527dd578088d 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -119,11 +119,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, 
so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg.as_slice()), - (Some(sess), false) => sess.bug(msg.as_slice()), + (None, _) => early_error(msg.as_str()), + (Some(sess), false) => sess.bug(msg.as_str()), // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(msg.as_slice()), + (Some(sess), true) => sess.err(msg.as_str()), } } @@ -144,11 +144,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg.as_slice()), - (Some(sess), false) => sess.bug(msg.as_slice()), + (None, _) => early_error(msg.as_str()), + (Some(sess), false) => sess.bug(msg.as_str()), // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(msg.as_slice()), + (Some(sess), true) => sess.err(msg.as_str()), } } } @@ -209,12 +209,12 @@ impl LintStore { pub fn process_command_line(&mut self, sess: &Session) { for &(ref lint_name, level) in sess.opts.lint_opts.iter() { - match self.by_name.find_equiv(&lint_name.as_slice()) { + match self.by_name.find_equiv(&lint_name.as_str()) { Some(&lint_id) => self.set_level(lint_id, (level, CommandLine)), None => { match self.lint_groups.iter().map(|(&x, pair)| (x, pair.ref0().clone())) .collect::>>() - .find_equiv(&lint_name.as_slice()) { + .find_equiv(&lint_name.as_str()) { Some(v) => { v.iter() .map(|lint_id: &LintId| @@ -222,7 +222,7 @@ impl LintStore { .collect::>(); } None => sess.err(format!("unknown {} flag: {}", - level.as_str(), lint_name).as_slice()), + level.as_str(), lint_name).as_str()), } } } @@ -333,10 +333,10 @@ pub fn raw_emit_lint(sess: &Session, lint: &'static Lint, if level == Forbid { level = Deny; } match (level, span) { - (Warn, Some(sp)) => sess.span_warn(sp, msg.as_slice()), - (Warn, None) => sess.warn(msg.as_slice()), - (Deny, Some(sp)) => sess.span_err(sp, msg.as_slice()), - (Deny, None) => sess.err(msg.as_slice()), + (Warn, Some(sp)) => sess.span_warn(sp, msg.as_str()), + (Warn, None) => sess.warn(msg.as_str()), + (Deny, Some(sp)) => sess.span_err(sp, msg.as_str()), + (Deny, None) => sess.err(msg.as_str()), _ => sess.bug("impossible level in raw_emit_lint"), } @@ -429,7 +429,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { None => { self.span_lint(builtin::UNRECOGNIZED_LINT, span, format!("unknown `{}` attribute: `{}`", - level.as_str(), lint_name).as_slice()); + level.as_str(), lint_name).as_str()); continue; } } @@ -445,7 +445,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { self.tcx.sess.span_err(span, format!("{}({}) overruled by outer forbid({})", level.as_str(), lint_name, - lint_name).as_slice()); + lint_name).as_str()); } else if now != level { let src = self.lints.get_level_source(lint_id).val1(); self.level_stack.push((lint_id, (now, src))); @@ -669,7 +669,7 @@ impl<'a, 'tcx> IdVisitingOperation for Context<'a, 'tcx> { None => {} Some(lints) => { for (lint_id, span, msg) in lints.move_iter() { - self.span_lint(lint_id.lint, span, msg.as_slice()) + self.span_lint(lint_id.lint, span, msg.as_str()) } } } @@ -735,7 +735,7 @@ pub fn check_crate(tcx: &ty::ctxt, for &(lint, span, ref msg) in v.iter() { tcx.sess.span_bug(span, format!("unprocessed lint {} at {}: {}", - lint.as_str(), tcx.map.node_to_string(*id), *msg).as_slice()) + lint.as_str(), tcx.map.node_to_string(*id), *msg).as_str()) } } diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 321eee3d5fcef..87d372179f6d7 
100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -88,11 +88,11 @@ fn warn_if_multiple_versions(diag: &SpanHandler, cstore: &CStore) { for (name, dupes) in map.move_iter() { if dupes.len() == 1 { continue } diag.handler().warn( - format!("using multiple versions of crate `{}`", name).as_slice()); + format!("using multiple versions of crate `{}`", name).as_str()); for dupe in dupes.move_iter() { let data = cstore.get_crate_data(dupe); diag.span_note(data.span, "used here"); - loader::note_crate_name(diag, data.name().as_slice()); + loader::note_crate_name(diag, data.name().as_str()); } } } @@ -124,8 +124,8 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) { Some(info) => { let (cnum, _, _) = resolve_crate(e, &None, - info.ident.as_slice(), - info.name.as_slice(), + info.ident.as_str(), + info.name.as_str(), None, i.span); e.sess.cstore.add_extern_mod_stmt_cnum(info.id, cnum); @@ -150,7 +150,7 @@ fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option { let name = match *path_opt { Some((ref path_str, _)) => { let name = path_str.get().to_string(); - validate_crate_name(Some(e.sess), name.as_slice(), + validate_crate_name(Some(e.sess), name.as_str(), Some(i.span)); name } @@ -181,7 +181,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option) { for c in s.chars() { if c.is_alphanumeric() { continue } if c == '_' || c == '-' { continue } - err(format!("invalid character `{}` in crate name: `{}`", c, s).as_slice()); + err(format!("invalid character `{}` in crate name: `{}`", c, s).as_str()); } match sess { Some(sess) => sess.abort_if_errors(), @@ -241,7 +241,7 @@ fn visit_item(e: &Env, i: &ast::Item) { } else { e.sess.span_err(m.span, format!("unknown kind: `{}`", - k).as_slice()); + k).as_str()); cstore::NativeUnknown } } @@ -281,7 +281,7 @@ fn existing_match(e: &Env, name: &str, hash: Option<&Svh>) -> Option { let mut ret = None; e.sess.cstore.iter_crate_data(|cnum, data| { - if data.name().as_slice() != name { return } + if data.name().as_str() != name { return } match hash { Some(hash) if *hash == data.hash() => { ret = Some(cnum); return } @@ -302,7 +302,7 @@ fn existing_match(e: &Env, name: &str, match e.sess.opts.externs.find_equiv(&name) { Some(locs) => { let found = locs.iter().any(|l| { - let l = fs::realpath(&Path::new(l.as_slice())).ok(); + let l = fs::realpath(&Path::new(l.as_str())).ok(); l == source.dylib || l == source.rlib }); if found { @@ -381,7 +381,7 @@ fn resolve_crate<'a>(e: &mut Env, hash: hash.map(|a| &*a), filesearch: e.sess.target_filesearch(), os: e.sess.targ_cfg.os, - triple: e.sess.targ_cfg.target_strs.target_triple.as_slice(), + triple: e.sess.targ_cfg.target_strs.target_triple.as_str(), root: root, rejected_via_hash: vec!(), rejected_via_triple: vec!(), @@ -407,8 +407,8 @@ fn resolve_crate_deps(e: &mut Env, decoder::get_crate_deps(cdata).iter().map(|dep| { debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash); let (local_cnum, _, _) = resolve_crate(e, root, - dep.name.as_slice(), - dep.name.as_slice(), + dep.name.as_str(), + dep.name.as_str(), Some(&dep.hash), span); (dep.cnum, local_cnum) @@ -431,15 +431,15 @@ impl<'a> PluginMetadataReader<'a> { pub fn read_plugin_metadata(&mut self, krate: &ast::ViewItem) -> PluginMetadata { let info = extract_crate_info(&self.env, krate).unwrap(); - let target_triple = self.env.sess.targ_cfg.target_strs.target_triple.as_slice(); + let target_triple = self.env.sess.targ_cfg.target_strs.target_triple.as_str(); let is_cross = target_triple != 
driver::host_triple(); let mut should_link = info.should_link && !is_cross; let os = config::get_os(driver::host_triple()).unwrap(); let mut load_ctxt = loader::Context { sess: self.env.sess, span: krate.span, - ident: info.ident.as_slice(), - crate_name: info.name.as_slice(), + ident: info.ident.as_str(), + crate_name: info.name.as_str(), hash: None, filesearch: self.env.sess.host_filesearch(), triple: driver::host_triple(), @@ -462,7 +462,7 @@ impl<'a> PluginMetadataReader<'a> { let message = format!("crate `{}` contains a plugin_registrar fn but \ only a version for triple `{}` could be found (need {})", info.ident, target_triple, driver::host_triple()); - self.env.sess.span_err(krate.span, message.as_slice()); + self.env.sess.span_err(krate.span, message.as_str()); // need to abort now because the syntax expansion // code will shortly attempt to load and execute // code from the found library. @@ -481,7 +481,7 @@ impl<'a> PluginMetadataReader<'a> { let message = format!("plugin crate `{}` only found in rlib format, \ but must be available in dylib format", info.ident); - self.env.sess.span_err(krate.span, message.as_slice()); + self.env.sess.span_err(krate.span, message.as_str()); // No need to abort because the loading code will just ignore this // empty dylib. } @@ -490,11 +490,11 @@ impl<'a> PluginMetadataReader<'a> { macros: macros, registrar_symbol: registrar, }; - if should_link && existing_match(&self.env, info.name.as_slice(), + if should_link && existing_match(&self.env, info.name.as_str(), None).is_none() { // register crate now to avoid double-reading metadata - register_crate(&mut self.env, &None, info.ident.as_slice(), - info.name.as_slice(), krate.span, library); + register_crate(&mut self.env, &None, info.ident.as_str(), + info.name.as_str(), krate.span, library); } pc } diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index ac161ef8bdefe..d3c9c27213b68 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -93,7 +93,7 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec // FIXME #1920: This path is not always correct if the crate is not linked // into the root namespace. 
- (vec!(ast_map::PathMod(token::intern(cdata.name.as_slice())))).append( + (vec!(ast_map::PathMod(token::intern(cdata.name.as_str())))).append( path.as_slice()) } diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index 904ca2416e0ff..bb2051374793e 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -1096,7 +1096,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec { } reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| { let name = docstr(depdoc, tag_crate_dep_crate_name); - let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).as_slice()); + let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).as_str()); deps.push(CrateDep { cnum: crate_num, name: name, diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index a105a56a09b15..76f7e3a0d5ff1 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -105,7 +105,7 @@ fn encode_impl_type_basename(rbml_w: &mut Encoder, name: Ident) { } pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) { - rbml_w.wr_tagged_str(tag_def_id, def_to_string(id).as_slice()); + rbml_w.wr_tagged_str(tag_def_id, def_to_string(id).as_str()); } #[deriving(Clone)] @@ -266,7 +266,7 @@ fn encode_symbol(ecx: &EncodeContext, } None => { ecx.diag.handler().bug( - format!("encode_symbol: id not found {}", id).as_slice()); + format!("encode_symbol: id not found {}", id).as_str()); } } rbml_w.end_tag(); @@ -387,12 +387,12 @@ fn encode_reexported_static_method(rbml_w: &mut Encoder, exp.name, token::get_ident(method_ident)); rbml_w.start_tag(tag_items_data_item_reexport); rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(method_def_id).as_slice()); + rbml_w.wr_str(def_to_string(method_def_id).as_str()); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); rbml_w.wr_str(format!("{}::{}", exp.name, - token::get_ident(method_ident)).as_slice()); + token::get_ident(method_ident)).as_str()); rbml_w.end_tag(); rbml_w.end_tag(); } @@ -483,7 +483,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext, // encoded metadata for static methods relative to Bar, // but not yet for Foo. // - if path_differs || original_name.get() != exp.name.as_slice() { + if path_differs || original_name.get() != exp.name.as_str() { if !encode_reexported_static_base_methods(ecx, rbml_w, exp) { if encode_reexported_static_trait_methods(ecx, rbml_w, exp) { debug!("(encode reexported static methods) {} \ @@ -551,10 +551,10 @@ fn encode_reexports(ecx: &EncodeContext, id); rbml_w.start_tag(tag_items_data_item_reexport); rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(exp.def_id).as_slice()); + rbml_w.wr_str(def_to_string(exp.def_id).as_str()); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); - rbml_w.wr_str(exp.name.as_slice()); + rbml_w.wr_str(exp.name.as_str()); rbml_w.end_tag(); rbml_w.end_tag(); encode_reexported_static_methods(ecx, rbml_w, path.clone(), exp); @@ -584,13 +584,13 @@ fn encode_info_for_mod(ecx: &EncodeContext, // Encode info about all the module children. 
for item in md.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id)).as_slice()); + rbml_w.wr_str(def_to_string(local_def(item.id)).as_str()); rbml_w.end_tag(); each_auxiliary_node_id(*item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id)).as_slice()); + auxiliary_node_id)).as_str()); rbml_w.end_tag(); true }); @@ -604,7 +604,7 @@ fn encode_info_for_mod(ecx: &EncodeContext, did, ecx.tcx.map.node_to_string(did)); rbml_w.start_tag(tag_mod_impl); - rbml_w.wr_str(def_to_string(local_def(did)).as_slice()); + rbml_w.wr_str(def_to_string(local_def(did)).as_str()); rbml_w.end_tag(); } _ => {} @@ -641,7 +641,7 @@ fn encode_visibility(rbml_w: &mut Encoder, visibility: Visibility) { Public => 'y', Inherited => 'i', }; - rbml_w.wr_str(ch.to_string().as_slice()); + rbml_w.wr_str(ch.to_string().as_str()); rbml_w.end_tag(); } @@ -653,7 +653,7 @@ fn encode_unboxed_closure_kind(rbml_w: &mut Encoder, ty::FnMutUnboxedClosureKind => 'm', ty::FnOnceUnboxedClosureKind => 'o', }; - rbml_w.wr_str(ch.to_string().as_slice()); + rbml_w.wr_str(ch.to_string().as_str()); rbml_w.end_tag(); } @@ -814,7 +814,7 @@ fn encode_generics(rbml_w: &mut Encoder, rbml_w.end_tag(); rbml_w.wr_tagged_str(tag_region_param_def_def_id, - def_to_string(param.def_id).as_slice()); + def_to_string(param.def_id).as_str()); rbml_w.wr_tagged_u64(tag_region_param_def_space, param.space.to_uint() as u64); @@ -1079,7 +1079,7 @@ fn encode_info_for_item(ecx: &EncodeContext, // Encode all the items in this module. for foreign_item in fm.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(foreign_item.id)).as_slice()); + rbml_w.wr_str(def_to_string(local_def(foreign_item.id)).as_str()); rbml_w.end_tag(); } encode_visibility(rbml_w, vis); @@ -1287,7 +1287,7 @@ fn encode_info_for_item(ecx: &EncodeContext, rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(method_def_id.def_id()).as_slice()); + rbml_w.wr_str(def_to_string(method_def_id.def_id()).as_str()); rbml_w.end_tag(); } encode_path(rbml_w, path.clone()); @@ -1732,7 +1732,7 @@ fn encode_macro_def(ecx: &EncodeContext, let def = ecx.tcx.sess.codemap().span_to_snippet(*span) .expect("Unable to find source for macro"); rbml_w.start_tag(tag_macro_def); - rbml_w.wr_str(def.as_slice()); + rbml_w.wr_str(def.as_str()); rbml_w.end_tag(); } @@ -1854,13 +1854,13 @@ fn encode_misc_info(ecx: &EncodeContext, rbml_w.start_tag(tag_misc_info_crate_items); for &item in krate.module.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id)).as_slice()); + rbml_w.wr_str(def_to_string(local_def(item.id)).as_str()); rbml_w.end_tag(); each_auxiliary_node_id(item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id)).as_slice()); + auxiliary_node_id)).as_str()); rbml_w.end_tag(); true }); @@ -2008,13 +2008,13 @@ fn encode_metadata_inner(wr: &mut SeekableMemWriter, parms: EncodeParams, krate: let mut rbml_w = writer::Encoder::new(wr); - encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name.as_slice()); + encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name.as_str()); encode_crate_triple(&mut rbml_w, tcx.sess .targ_cfg .target_strs .target_triple - .as_slice()); + .as_str()); encode_hash(&mut rbml_w, &ecx.link_meta.crate_hash); encode_dylib_dependency_formats(&mut rbml_w, &ecx); diff --git a/src/librustc/metadata/filesearch.rs 
b/src/librustc/metadata/filesearch.rs index 49c24b190b22b..e03f13ac539ef 100644 --- a/src/librustc/metadata/filesearch.rs +++ b/src/librustc/metadata/filesearch.rs @@ -199,7 +199,7 @@ pub fn rust_path() -> Vec { let mut env_rust_path: Vec = match get_rust_path() { Some(env_path) => { let env_path_components = - env_path.as_slice().split_str(PATH_ENTRY_SEPARATOR); + env_path.as_str().split_str(PATH_ENTRY_SEPARATOR); env_path_components.map(|s| Path::new(s)).collect() } None => Vec::new() diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 5face544c89bf..cf0b1ae908ec7 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -335,17 +335,17 @@ impl<'a> Context<'a> { &Some(ref r) => format!("{} which `{}` depends on", message, r.ident) }; - self.sess.span_err(self.span, message.as_slice()); + self.sess.span_err(self.span, message.as_str()); let mismatches = self.rejected_via_triple.iter(); if self.rejected_via_triple.len() > 0 { self.sess.span_note(self.span, format!("expected triple of {}", - self.triple).as_slice()); + self.triple).as_str()); for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { self.sess.fileline_note(self.span, format!("crate `{}` path {}{}, triple {}: {}", - self.ident, "#", i+1, got, path.display()).as_slice()); + self.ident, "#", i+1, got, path.display()).as_str()); } } if self.rejected_via_hash.len() > 0 { @@ -355,7 +355,7 @@ impl<'a> Context<'a> { for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() { self.sess.fileline_note(self.span, format!("crate `{}` path {}{}: {}", - self.ident, "#", i+1, path.display()).as_slice()); + self.ident, "#", i+1, path.display()).as_str()); } match self.root { &None => {} @@ -363,7 +363,7 @@ impl<'a> Context<'a> { for (i, path) in r.paths().iter().enumerate() { self.sess.fileline_note(self.span, format!("crate `{}` path #{}: {}", - r.ident, i+1, path.display()).as_slice()); + r.ident, i+1, path.display()).as_str()); } } } @@ -411,16 +411,16 @@ impl<'a> Context<'a> { None => return FileDoesntMatch, Some(file) => file, }; - let (hash, rlib) = if file.starts_with(rlib_prefix.as_slice()) && + let (hash, rlib) = if file.starts_with(rlib_prefix.as_str()) && file.ends_with(".rlib") { (file.slice(rlib_prefix.len(), file.len() - ".rlib".len()), true) } else if dypair.map_or(false, |(_, suffix)| { - file.starts_with(dylib_prefix.get_ref().as_slice()) && + file.starts_with(dylib_prefix.get_ref().as_str()) && file.ends_with(suffix) }) { let (_, suffix) = dypair.unwrap(); - let dylib_prefix = dylib_prefix.get_ref().as_slice(); + let dylib_prefix = dylib_prefix.get_ref().as_str(); (file.slice(dylib_prefix.len(), file.len() - suffix.len()), false) } else { @@ -473,26 +473,26 @@ impl<'a> Context<'a> { _ => { self.sess.span_err(self.span, format!("multiple matching crates for `{}`", - self.crate_name).as_slice()); + self.crate_name).as_str()); self.sess.note("candidates:"); for lib in libraries.iter() { match lib.dylib { Some(ref p) => { self.sess.note(format!("path: {}", - p.display()).as_slice()); + p.display()).as_str()); } None => {} } match lib.rlib { Some(ref p) => { self.sess.note(format!("path: {}", - p.display()).as_slice()); + p.display()).as_str()); } None => {} } let data = lib.metadata.as_slice(); let name = decoder::get_crate_name(data); - note_crate_name(self.sess.diagnostic(), name.as_slice()); + note_crate_name(self.sess.diagnostic(), name.as_str()); } None } @@ -545,11 +545,11 @@ impl<'a> Context<'a> { format!("multiple {} candidates for `{}` 
\ found", flavor, - self.crate_name).as_slice()); + self.crate_name).as_str()); self.sess.span_note(self.span, format!(r"candidate #1: {}", ret.get_ref() - .display()).as_slice()); + .display()).as_str()); error = 1; ret = None; } @@ -557,7 +557,7 @@ impl<'a> Context<'a> { error += 1; self.sess.span_note(self.span, format!(r"candidate #{}: {}", error, - lib.display()).as_slice()); + lib.display()).as_str()); continue } *slot = Some(metadata); @@ -569,7 +569,7 @@ impl<'a> Context<'a> { fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool { if self.should_match_name { match decoder::maybe_get_crate_name(crate_data) { - Some(ref name) if self.crate_name == name.as_slice() => {} + Some(ref name) if self.crate_name == name.as_str() => {} _ => { info!("Rejecting via crate name"); return false } } } @@ -584,7 +584,7 @@ impl<'a> Context<'a> { None => { debug!("triple not present"); return false } Some(t) => t, }; - if triple.as_slice() != self.triple { + if triple.as_str() != self.triple { info!("Rejecting via crate triple: expected {} got {}", self.triple, triple); self.rejected_via_triple.push(CrateMismatch { path: libpath.clone(), @@ -636,10 +636,10 @@ impl<'a> Context<'a> { // rlibs/dylibs. let sess = self.sess; let dylibname = self.dylibname(); - let mut locs = locs.iter().map(|l| Path::new(l.as_slice())).filter(|loc| { + let mut locs = locs.iter().map(|l| Path::new(l.as_str())).filter(|loc| { if !loc.exists() { sess.err(format!("extern location does not exist: {}", - loc.display()).as_slice()); + loc.display()).as_str()); return false; } let file = loc.filename_str().unwrap(); @@ -656,7 +656,7 @@ impl<'a> Context<'a> { } } sess.err(format!("extern location is of an unknown type: {}", - loc.display()).as_slice()); + loc.display()).as_str()); false }); @@ -690,7 +690,7 @@ impl<'a> Context<'a> { } pub fn note_crate_name(diag: &SpanHandler, name: &str) { - diag.handler().note(format!("crate name: {}", name).as_slice()); + diag.handler().note(format!("crate name: {}", name).as_str()); } impl ArchiveMetadata { @@ -779,7 +779,7 @@ fn get_metadata_section_imp(os: abi::Os, filename: &Path) -> Result ty::TraitStore { '&' => ty::RegionTraitStore(parse_region(st, conv), parse_mutability(st)), c => { st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'", - c).as_slice()) + c).as_str()) } } } @@ -268,7 +268,7 @@ fn parse_bound_region(st: &mut PState, conv: conv_did) -> ty::BoundRegion { } '[' => { let def = parse_def(st, RegionParameter, |x,y| conv(x,y)); - let ident = token::str_to_ident(parse_str(st, ']').as_slice()); + let ident = token::str_to_ident(parse_str(st, ']').as_str()); ty::BrNamed(def, ident.name) } 'f' => { @@ -298,7 +298,7 @@ fn parse_region(st: &mut PState, conv: conv_did) -> ty::Region { assert_eq!(next(st), '|'); let index = parse_uint(st); assert_eq!(next(st), '|'); - let nm = token::str_to_ident(parse_str(st, ']').as_slice()); + let nm = token::str_to_ident(parse_str(st, ']').as_str()); ty::ReEarlyBound(node_id, space, index, nm.name) } 'f' => { @@ -528,7 +528,7 @@ fn parse_abi_set(st: &mut PState) -> abi::Abi { assert_eq!(next(st), '['); scan(st, |c| c == ']', |bytes| { let abi_str = str::from_utf8(bytes).unwrap(); - abi::lookup(abi_str.as_slice()).expect(abi_str) + abi::lookup(abi_str.as_str()).expect(abi_str) }) } diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index b7597b50b4906..8a7320ac7d7ce 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -123,7 +123,7 @@ pub fn 
decode_inlined_item(cdata: &cstore::crate_metadata, // Do an Option dance to use the path after it is moved below. let s = ast_map::path_to_string(ast_map::Values(path.iter())); path_as_str = Some(s); - path_as_str.as_ref().map(|x| x.as_slice()) + path_as_str.as_ref().map(|x| x.as_str()) }); let mut ast_dsr = reader::Decoder::new(ast_doc); let from_id_range = Decodable::decode(&mut ast_dsr).unwrap(); @@ -1785,7 +1785,7 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext, None => { xcx.dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", - tag).as_slice()); + tag).as_str()); } Some(value) => { let val_doc = entry_doc.get(c::tag_table_val as uint); @@ -1875,7 +1875,7 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext, _ => { xcx.dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", - tag).as_slice()); + tag).as_str()); } } } diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs index df637e7a052dd..ec03f4a7b25cb 100644 --- a/src/librustc/middle/borrowck/check_loans.rs +++ b/src/librustc/middle/borrowck/check_loans.rs @@ -414,7 +414,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { format!("cannot borrow `{}` as mutable \ more than once at a time", self.bccx.loan_path_to_string( - &*new_loan.loan_path)).as_slice()); + &*new_loan.loan_path)).as_str()); } (ty::UniqueImmBorrow, _) => { @@ -423,7 +423,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { format!("closure requires unique access to `{}` \ but {} is already borrowed", self.bccx.loan_path_to_string(&*new_loan.loan_path), - old_pronoun).as_slice()); + old_pronoun).as_str()); } (_, ty::UniqueImmBorrow) => { @@ -432,7 +432,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { format!("cannot borrow `{}` as {} because \ previous closure requires unique access", self.bccx.loan_path_to_string(&*new_loan.loan_path), - new_loan.kind.to_user_str()).as_slice()); + new_loan.kind.to_user_str()).as_str()); } (_, _) => { @@ -443,7 +443,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.loan_path_to_string(&*new_loan.loan_path), new_loan.kind.to_user_str(), old_pronoun, - old_loan.kind.to_user_str()).as_slice()); + old_loan.kind.to_user_str()).as_str()); } } @@ -453,7 +453,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { span, format!("borrow occurs due to use of `{}` in closure", self.bccx.loan_path_to_string( - &*new_loan.loan_path)).as_slice()); + &*new_loan.loan_path)).as_str()); } _ => { } } @@ -502,7 +502,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_note( old_loan.span, - format!("{}; {}", borrow_summary, rule_summary).as_slice()); + format!("{}; {}", borrow_summary, rule_summary).as_str()); let old_loan_span = self.tcx().map.span(old_loan.kill_scope); self.bccx.span_end_note(old_loan_span, @@ -572,13 +572,13 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( span, format!("cannot use `{}` because it was mutably borrowed", - self.bccx.loan_path_to_string(copy_path).as_slice()) - .as_slice()); + self.bccx.loan_path_to_string(copy_path).as_str()) + .as_str()); self.bccx.span_note( loan_span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path).as_slice()) - .as_slice()); + self.bccx.loan_path_to_string(&*loan_path).as_str()) + .as_str()); } } } @@ -597,20 +597,20 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { let err_message = match move_kind { move_data::Captured => format!("cannot move `{}` into closure because it is borrowed", - self.bccx.loan_path_to_string(move_path).as_slice()), + 
self.bccx.loan_path_to_string(move_path).as_str()), move_data::Declared | move_data::MoveExpr | move_data::MovePat => format!("cannot move out of `{}` because it is borrowed", - self.bccx.loan_path_to_string(move_path).as_slice()) + self.bccx.loan_path_to_string(move_path).as_str()) }; - self.bccx.span_err(span, err_message.as_slice()); + self.bccx.span_err(span, err_message.as_str()); self.bccx.span_note( loan_span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path).as_slice()) - .as_slice()); + self.bccx.loan_path_to_string(&*loan_path).as_str()) + .as_str()); } } } @@ -751,14 +751,14 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { format!("cannot assign to {} {} `{}`", assignee_cmt.mutbl.to_user_str(), self.bccx.cmt_to_string(&*assignee_cmt), - self.bccx.loan_path_to_string(&*lp)).as_slice()); + self.bccx.loan_path_to_string(&*lp)).as_str()); } None => { self.bccx.span_err( assignment_span, format!("cannot assign to {} {}", assignee_cmt.mutbl.to_user_str(), - self.bccx.cmt_to_string(&*assignee_cmt)).as_slice()); + self.bccx.cmt_to_string(&*assignee_cmt)).as_str()); } } return; @@ -883,11 +883,11 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( span, format!("cannot assign to `{}` because it is borrowed", - self.bccx.loan_path_to_string(loan_path)).as_slice()); + self.bccx.loan_path_to_string(loan_path)).as_str()); self.bccx.span_note( loan.span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(loan_path)).as_slice()); + self.bccx.loan_path_to_string(loan_path)).as_str()); } } diff --git a/src/librustc/middle/borrowck/gather_loans/mod.rs b/src/librustc/middle/borrowck/gather_loans/mod.rs index 11189390df565..cc00fa9542de1 100644 --- a/src/librustc/middle/borrowck/gather_loans/mod.rs +++ b/src/librustc/middle/borrowck/gather_loans/mod.rs @@ -294,7 +294,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { self.tcx().sess.span_bug( cmt.span, format!("invalid borrow lifetime: {:?}", - loan_region).as_slice()); + loan_region).as_str()); } }; debug!("loan_scope = {:?}", loan_scope); diff --git a/src/librustc/middle/borrowck/gather_loans/move_error.rs b/src/librustc/middle/borrowck/gather_loans/move_error.rs index 4f9e51a64f451..8f4342aec8704 100644 --- a/src/librustc/middle/borrowck/gather_loans/move_error.rs +++ b/src/librustc/middle/borrowck/gather_loans/move_error.rs @@ -121,7 +121,7 @@ fn report_cannot_move_out_of(bccx: &BorrowckCtxt, move_from: mc::cmt) { bccx.span_err( move_from.span, format!("cannot move out of {}", - bccx.cmt_to_string(&*move_from)).as_slice()); + bccx.cmt_to_string(&*move_from)).as_str()); } mc::cat_downcast(ref b) | @@ -133,7 +133,7 @@ fn report_cannot_move_out_of(bccx: &BorrowckCtxt, move_from: mc::cmt) { move_from.span, format!("cannot move out of type `{}`, \ which defines the `Drop` trait", - b.ty.user_string(bccx.tcx)).as_slice()); + b.ty.user_string(bccx.tcx)).as_str()); }, _ => fail!("this path should not cause illegal move") } @@ -153,10 +153,10 @@ fn note_move_destination(bccx: &BorrowckCtxt, format!("attempting to move value to here (to prevent the move, \ use `ref {0}` or `ref mut {0}` to capture value by \ reference)", - pat_name).as_slice()); + pat_name).as_str()); } else { bccx.span_note(move_to_span, format!("and here (use `ref {0}` or `ref mut {0}`)", - pat_name).as_slice()); + pat_name).as_str()); } } diff --git a/src/librustc/middle/borrowck/graphviz.rs b/src/librustc/middle/borrowck/graphviz.rs index e75378de5a5c7..49a911cf4eb55 100644 --- a/src/librustc/middle/borrowck/graphviz.rs +++ 
b/src/librustc/middle/borrowck/graphviz.rs @@ -60,7 +60,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { if seen_one { sets.push_str(" "); } else { seen_one = true; } sets.push_str(variant.short_name()); sets.push_str(": "); - sets.push_str(self.dataflow_for_variant(e, n, variant).as_slice()); + sets.push_str(self.dataflow_for_variant(e, n, variant).as_str()); } sets } @@ -87,7 +87,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { set.push_str(", "); } let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp); - set.push_str(loan_str.as_slice()); + set.push_str(loan_str.as_str()); saw_some = true; true }); diff --git a/src/librustc/middle/borrowck/mod.rs b/src/librustc/middle/borrowck/mod.rs index 5969e7e0c42d5..ef037ba9370b9 100644 --- a/src/librustc/middle/borrowck/mod.rs +++ b/src/librustc/middle/borrowck/mod.rs @@ -493,7 +493,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { pub fn report(&self, err: BckError) { self.span_err( err.span, - self.bckerr_to_string(&err).as_slice()); + self.bckerr_to_string(&err).as_str()); self.note_and_explain_bckerr(err); } @@ -514,7 +514,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { use_span, format!("{} of possibly uninitialized variable: `{}`", verb, - self.loan_path_to_string(lp)).as_slice()); + self.loan_path_to_string(lp)).as_str()); } _ => { let partially = if lp == moved_lp {""} else {"partially "}; @@ -523,7 +523,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { format!("{} of {}moved value: `{}`", verb, partially, - self.loan_path_to_string(lp)).as_slice()); + self.loan_path_to_string(lp)).as_str()); } } @@ -539,7 +539,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.bug(format!("MoveExpr({:?}) maps to \ {:?}, not Expr", move.id, - r).as_slice()) + r).as_str()) } }; let suggestion = move_suggestion(self.tcx, expr_ty, @@ -549,7 +549,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { format!("`{}` moved here because it has type `{}`, which is {}", self.loan_path_to_string(moved_lp), expr_ty.user_string(self.tcx), - suggestion).as_slice()); + suggestion).as_str()); } move_data::MovePat => { @@ -559,7 +559,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { which is moved by default (use `ref` to \ override)", self.loan_path_to_string(moved_lp), - pat_ty.user_string(self.tcx)).as_slice()); + pat_ty.user_string(self.tcx)).as_str()); } move_data::Captured => { @@ -571,7 +571,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.bug(format!("Captured({:?}) maps to \ {:?}, not Expr", move.id, - r).as_slice()) + r).as_str()) } }; let suggestion = move_suggestion(self.tcx, expr_ty, @@ -583,7 +583,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { has type `{}`, which is {}", self.loan_path_to_string(moved_lp), expr_ty.user_string(self.tcx), - suggestion).as_slice()); + suggestion).as_str()); } } @@ -611,7 +611,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.span_err( span, format!("re-assignment of immutable variable `{}`", - self.loan_path_to_string(lp)).as_slice()); + self.loan_path_to_string(lp)).as_str()); self.tcx.sess.span_note(assign.span, "prior assignment occurs here"); } @@ -724,23 +724,23 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.span_err( span, format!("{} in an aliasable location", - prefix).as_slice()); + prefix).as_str()); } mc::AliasableStatic(..) | mc::AliasableStaticMut(..) 
=> { self.tcx.sess.span_err( span, - format!("{} in a static location", prefix).as_slice()); + format!("{} in a static location", prefix).as_str()); } mc::AliasableManaged => { self.tcx.sess.span_err( span, - format!("{} in a `@` pointer", prefix).as_slice()); + format!("{} in a `@` pointer", prefix).as_str()); } mc::AliasableBorrowed => { self.tcx.sess.span_err( span, - format!("{} in a `&` reference", prefix).as_slice()); + format!("{} in a `&` reference", prefix).as_str()); } } } @@ -778,12 +778,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { note_and_explain_region( self.tcx, format!("{} would have to be valid for ", - descr).as_slice(), + descr).as_str(), loan_scope, "..."); note_and_explain_region( self.tcx, - format!("...but {} is only valid for ", descr).as_slice(), + format!("...but {} is only valid for ", descr).as_str(), ptr_scope, ""); } @@ -808,7 +808,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } mc::PositionalField(idx) => { out.push_char('.'); - out.push_str(idx.to_string().as_slice()); + out.push_str(idx.to_string().as_str()); } } } diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs index 6e9b27655af79..76ae401660a25 100644 --- a/src/librustc/middle/cfg/construct.rs +++ b/src/librustc/middle/cfg/construct.rs @@ -611,14 +611,14 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.tcx.sess.span_bug( expr.span, format!("no loop scope for id {:?}", - loop_id).as_slice()); + loop_id).as_str()); } r => { self.tcx.sess.span_bug( expr.span, format!("bad entry `{:?}` in def_map for label", - r).as_slice()); + r).as_str()); } } } diff --git a/src/librustc/middle/cfg/graphviz.rs b/src/librustc/middle/cfg/graphviz.rs index 0cccae8b8c9cd..f98dc73de92a5 100644 --- a/src/librustc/middle/cfg/graphviz.rs +++ b/src/librustc/middle/cfg/graphviz.rs @@ -31,14 +31,14 @@ pub struct LabelledCFG<'a>{ fn replace_newline_with_backslash_l(s: String) -> String { // Replacing newlines with \\l causes each line to be left-aligned, // improving presentation of (long) pretty-printed expressions. - if s.as_slice().contains("\n") { + if s.as_str().contains("\n") { let mut s = s.replace("\n", "\\l"); // Apparently left-alignment applies to the line that precedes // \l, not the line that follows; so, add \l at end of string // if not already present, ensuring last line gets left-aligned // as well. 
let mut last_two: Vec<_> = - s.as_slice().chars().rev().take(2).collect(); + s.as_str().chars().rev().take(2).collect(); last_two.reverse(); if last_two.as_slice() != ['\\', 'l'] { s = s.append("\\l"); @@ -50,7 +50,7 @@ fn replace_newline_with_backslash_l(s: String) -> String { } impl<'a> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a> { - fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.as_slice()) } + fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.as_str()) } fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> { dot::Id::new(format!("N{:u}", i.node_id())) @@ -85,7 +85,7 @@ impl<'a> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a> { let s = replace_newline_with_backslash_l(s); label = label.append(format!("exiting scope_{} {}", i, - s.as_slice()).as_slice()); + s.as_str()).as_str()); } dot::EscStr(label.into_maybe_owned()) } diff --git a/src/librustc/middle/check_loop.rs b/src/librustc/middle/check_loop.rs index a40294328c71b..1a18c6f261cda 100644 --- a/src/librustc/middle/check_loop.rs +++ b/src/librustc/middle/check_loop.rs @@ -65,12 +65,12 @@ impl<'a> CheckLoopVisitor<'a> { Closure => { self.sess.span_err(span, format!("`{}` inside of a closure", - name).as_slice()); + name).as_str()); } Normal => { self.sess.span_err(span, format!("`{}` outside of loop", - name).as_slice()); + name).as_str()); } } } diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index 6676ea9851db0..415ebf56127e0 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -70,7 +70,7 @@ impl fmt::Show for Matrix { for (column, pat_str) in row.move_iter().enumerate() { try!(write!(f, " ")); f.width = Some(*column_widths.get(column)); - try!(f.pad(pat_str.as_slice())); + try!(f.pad(pat_str.as_str())); try!(write!(f, " +")); } try!(write!(f, "\n")); @@ -212,7 +212,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &Expr) { pat.span, format!("refutable pattern in `for` loop binding: \ `{}` not covered", - pat_to_string(&*uncovered_pat)).as_slice()); + pat_to_string(&*uncovered_pat)).as_str()); }, None => {} } @@ -952,7 +952,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, format!("binding pattern {} is not an \ identifier: {:?}", p.id, - p.node).as_slice()); + p.node).as_str()); } } } diff --git a/src/librustc/middle/check_static.rs b/src/librustc/middle/check_static.rs index ca58b4b6e60a4..0da30aaad58bf 100644 --- a/src/librustc/middle/check_static.rs +++ b/src/librustc/middle/check_static.rs @@ -65,7 +65,7 @@ impl<'a, 'tcx> CheckStaticVisitor<'a, 'tcx> { match result { None => { false } Some(msg) => { - self.tcx.sess.span_err(span, msg.as_slice()); + self.tcx.sess.span_err(span, msg.as_str()); true } } diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 2b4b6756f9f85..4b2862af85feb 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -363,7 +363,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: Gc) -> Gc { pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val { match eval_const_expr_partial(tcx, e) { Ok(r) => r, - Err(s) => tcx.sess.span_fatal(e.span, s.as_slice()) + Err(s) => tcx.sess.span_fatal(e.span, s.as_str()) } } diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index c32f8db2380e4..5b116a4e99545 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -551,7 +551,7 @@ fn bits_to_string(words: &[uint]) -> String { let mut v = word; for _ in 
range(0u, uint::BYTES) { result.push_char(sep); - result.push_str(format!("{:02x}", v & 0xFF).as_slice()); + result.push_str(format!("{:02x}", v & 0xFF).as_str()); v >>= 8; sep = '-'; } diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index f275c81871659..ad7ede282ee2a 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -310,7 +310,7 @@ fn has_allow_dead_code_or_lang_attr(attrs: &[ast::Attribute]) -> bool { for attr in lint::gather_attrs(attrs).move_iter() { match attr { Ok((ref name, lint::Allow, _)) - if name.get() == dead_code.as_slice() => return true, + if name.get() == dead_code.as_str() => return true, _ => (), } } diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index 38e4426e4c878..3dfdb286d0287 100644 --- a/src/librustc/middle/dependency_format.rs +++ b/src/librustc/middle/dependency_format.rs @@ -118,7 +118,7 @@ fn calculate_type(sess: &session::Session, let src = sess.cstore.get_used_crate_source(cnum).unwrap(); if src.rlib.is_some() { return } sess.err(format!("dependency `{}` not found in rlib format", - data.name).as_slice()); + data.name).as_str()); }); return Vec::new(); } @@ -187,7 +187,7 @@ fn calculate_type(sess: &session::Session, match kind { cstore::RequireStatic => "rlib", cstore::RequireDynamic => "dylib", - }).as_slice()); + }).as_str()); } } } @@ -212,7 +212,7 @@ fn add_library(sess: &session::Session, let data = sess.cstore.get_crate_data(cnum); sess.err(format!("cannot satisfy dependencies so `{}` only \ shows up once", - data.name).as_slice()); + data.name).as_str()); sess.note("having upstream crates all available in one format \ will likely make this go away"); } diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index d2362b7e9429b..a551c0b478179 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -527,7 +527,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { self.tcx().sess.span_bug( callee.span, format!("unexpected callee type {}", - callee_ty.repr(self.tcx())).as_slice()) + callee_ty.repr(self.tcx())).as_str()) } }; match overloaded_call_type { @@ -721,7 +721,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => self.tcx().sess.span_bug(expr.span, format!("bad overloaded deref type {}", - method_ty.repr(self.tcx())).as_slice()) + method_ty.repr(self.tcx())).as_str()) }; let bk = ty::BorrowKind::from_mutbl(m); self.delegate.borrow(expr.id, expr.span, cmt, diff --git a/src/librustc/middle/kind.rs b/src/librustc/middle/kind.rs index e8b0afa98c2d0..1314be720d2a6 100644 --- a/src/librustc/middle/kind.rs +++ b/src/librustc/middle/kind.rs @@ -332,7 +332,7 @@ fn with_appropriate_checker(cx: &Context, ref s => { cx.tcx.sess.bug(format!("expect fn type in kind checker, not \ {:?}", - s).as_slice()); + s).as_str()); } } } @@ -706,7 +706,7 @@ fn check_bounds_on_structs_or_enums_in_type_if_possible(cx: &mut Context, does not fulfill `{}`", ty_to_string(cx.tcx, ty), missing.user_string( - cx.tcx)).as_slice()); + cx.tcx)).as_str()); }) } _ => {} @@ -767,7 +767,7 @@ fn check_copy(cx: &Context, ty: ty::t, sp: Span, reason: &str) { span_err!(cx.tcx.sess, sp, E0148, "copying a value of non-copyable type `{}`", ty_to_string(cx.tcx, ty)); - span_note!(cx.tcx.sess, sp, "{}", reason.as_slice()); + span_note!(cx.tcx.sess, sp, "{}", reason.as_str()); } } diff --git a/src/librustc/middle/liveness.rs 
b/src/librustc/middle/liveness.rs index 84fc8ff2c38ca..347b60ecadbfb 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -320,7 +320,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { self.tcx .sess .span_bug(span, format!("no variable registered for id {}", - node_id).as_slice()); + node_id).as_str()); } } } @@ -596,7 +596,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.ir.tcx.sess.span_bug( span, format!("no live node registered for node {}", - node_id).as_slice()); + node_id).as_str()); } } } diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 0d3dd8f91d967..cc1c5758dcf2c 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -220,7 +220,7 @@ pub fn deref_kind(tcx: &ty::ctxt, t: ty::t) -> deref_kind { None => { tcx.sess.bug( format!("deref_kind() invoked on non-derefable type {}", - ty_to_string(tcx, t)).as_slice()); + ty_to_string(tcx, t)).as_str()); } } } @@ -626,7 +626,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { span, format!("Upvar of non-closure {} - {}", fn_node_id, - ty.repr(self.tcx())).as_slice()); + ty.repr(self.tcx())).as_str()); } } } @@ -795,7 +795,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { self.tcx().sess.span_bug( node.span(), format!("Explicit deref of non-derefable type: {}", - base_cmt.ty.repr(self.tcx())).as_slice()); + base_cmt.ty.repr(self.tcx())).as_str()); } } } @@ -873,7 +873,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { self.tcx().sess.span_bug( elt.span(), format!("Explicit index of non-index type `{}`", - base_cmt.ty.repr(self.tcx())).as_slice()); + base_cmt.ty.repr(self.tcx())).as_str()); } } } diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index cdb7d114af903..aadb1d3395c8d 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -542,10 +542,10 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { match result { None => true, Some((span, msg, note)) => { - self.tcx.sess.span_err(span, msg.as_slice()); + self.tcx.sess.span_err(span, msg.as_str()); match note { Some((span, msg)) => { - self.tcx.sess.span_note(span, msg.as_slice()) + self.tcx.sess.span_note(span, msg.as_str()) } None => {}, } @@ -658,7 +658,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { UnnamedField(idx) => format!("field #{} of {} is private", idx + 1, struct_desc), }; - self.tcx.sess.span_err(span, msg.as_slice()); + self.tcx.sess.span_err(span, msg.as_str()); } // Given the ID of a method, checks to ensure it's in scope. @@ -679,7 +679,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { method_id, None, format!("method `{}`", - string).as_slice())); + string).as_str())); } // Checks that a path is in scope. 
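Every hunk in this stretch follows one pattern: `format!` allocates an owned `String`, while the session's reporting methods (`span_err`, `span_bug`, `span_note`) borrow a `&str`, so each call site only switches the borrowing method from `as_slice()` to `as_str()`. A minimal standalone sketch of that pattern, written against present-day std Rust with a stand-in `span_err` that is not the compiler's API:

    // Stand-in for a diagnostics sink that, like the session's span_err,
    // borrows its message rather than taking ownership of it.
    fn span_err(sink: &mut Vec<String>, msg: &str) {
        sink.push(msg.to_string());
    }

    fn main() {
        let mut sink = Vec::new();
        let idx = 2u32;
        // format! yields a String; .as_str() borrows it for the call,
        // which is exactly the shape every site in this diff is moved to.
        span_err(&mut sink, format!("field #{} of the struct is private", idx).as_str());
        assert_eq!(sink[0], "field #2 of the struct is private");
    }
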
@@ -698,7 +698,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { Some(origdid), format!("{} `{}`", tyname, - name).as_slice()) + name).as_str()) }; match *self.last_private_map.get(&path_id) { diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index 7ba5144985ef2..2a102c2570061 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -251,7 +251,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { None => { self.tcx.sess.bug(format!("found unmapped ID in worklist: \ {}", - search_item).as_slice()) + search_item).as_str()) } } } @@ -351,7 +351,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { .bug(format!("found unexpected thingy in worklist: {}", self.tcx .map - .node_to_string(search_item)).as_slice()) + .node_to_string(search_item)).as_str()) } } } diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index 854b8b9ba7711..d631971df2f5e 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -1128,14 +1128,14 @@ impl<'a> Resolver<'a> { self.resolve_error(sp, format!("duplicate definition of {} `{}`", namespace_error_to_string(duplicate_type), - token::get_ident(name)).as_slice()); + token::get_ident(name)).as_str()); { let r = child.span_for_namespace(ns); for sp in r.iter() { self.session.span_note(*sp, format!("first definition of {} `{}` here", namespace_error_to_string(duplicate_type), - token::get_ident(name)).as_slice()); + token::get_ident(name)).as_str()); } } } @@ -2173,7 +2173,7 @@ impl<'a> Resolver<'a> { .as_slice(), import_directive.subclass), help); - self.resolve_error(span, msg.as_slice()); + self.resolve_error(span, msg.as_str()); } Indeterminate => break, // Bail out. We'll come around next time. Success(()) => () // Good. Continue. 
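Many of these call sites borrow from a temporary, e.g. `format!(..).as_str()` or `x.to_string().as_str()` passed directly as an argument; the owned `String` is a temporary that lives until the end of the enclosing statement, so the borrowed `&str` stays valid for the whole call. A short std-only sketch of that lifetime detail (the `report` helper is hypothetical):

    // Hypothetical reporting helper: appends one borrowed message per line.
    fn report(out: &mut String, msg: &str) {
        out.push_str(msg);
        out.push('\n');
    }

    fn main() {
        let mut out = String::new();
        let loop_id = 41u32;
        // The String produced by to_string()/format! outlives the call
        // expression it appears in, so borrowing it with .as_str() is safe.
        report(&mut out, loop_id.to_string().as_str());
        report(&mut out, format!("no loop scope for id {}", loop_id).as_str());
        assert!(out.contains("no loop scope for id 41"));
    }
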
@@ -2794,7 +2794,7 @@ impl<'a> Resolver<'a> { ValueNS => "value", }, token::get_name(name).get()); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg.as_str()); } Some(_) | None => {} } @@ -2820,7 +2820,7 @@ impl<'a> Resolver<'a> { let msg = format!("import `{}` conflicts with imported \ crate in this module", token::get_name(name).get()); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg.as_str()); } Some(_) | None => {} } @@ -2844,7 +2844,7 @@ impl<'a> Resolver<'a> { let msg = format!("import `{}` conflicts with value \ in this module", token::get_name(name).get()); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg.as_str()); match value.value_span { None => {} Some(span) => { @@ -2867,7 +2867,7 @@ impl<'a> Resolver<'a> { let msg = format!("import `{}` conflicts with type in \ this module", token::get_name(name).get()); - self.session.span_err(import_span, msg.as_slice()); + self.session.span_err(import_span, msg.as_str()); match ty.type_span { None => {} Some(span) => { @@ -2898,7 +2898,7 @@ impl<'a> Resolver<'a> { .span_err(span, format!("an external crate named `{}` has already \ been imported into this module", - token::get_name(name).get()).as_slice()); + token::get_name(name).get()).as_str()); } } @@ -2917,7 +2917,7 @@ impl<'a> Resolver<'a> { format!("the name `{}` conflicts with an external \ crate that has been imported into this \ module", - token::get_name(name).get()).as_slice()); + token::get_name(name).get()).as_str()); } } @@ -2965,7 +2965,7 @@ impl<'a> Resolver<'a> { let segment_name = token::get_ident(name); let module_name = self.module_to_string(&*search_module); let mut span = span; - let msg = if "???" == module_name.as_slice() { + let msg = if "???" 
== module_name.as_str() { span.hi = span.lo + Pos::from_uint(segment_name.get().len()); match search_parent_externals(name.name, @@ -3095,7 +3095,7 @@ impl<'a> Resolver<'a> { match module_prefix_result { Failed(None) => { let mpath = self.idents_to_string(module_path); - let mpath = mpath.as_slice(); + let mpath = mpath.as_str(); match mpath.rfind(':') { Some(idx) => { let msg = format!("Could not find `{}` in `{}`", @@ -3530,13 +3530,13 @@ impl<'a> Resolver<'a> { .codemap() .span_to_snippet(imports.get(index).span) .unwrap(); - if sn.as_slice().contains("::") { + if sn.as_str().contains("::") { self.resolve_error(imports.get(index).span, "unresolved import"); } else { let err = format!("unresolved import (maybe you meant `{}::*`?)", - sn.as_slice().slice(0, sn.len())); - self.resolve_error(imports.get(index).span, err.as_slice()); + sn.as_str().slice(0, sn.len())); + self.resolve_error(imports.get(index).span, err.as_str()); } } @@ -4258,7 +4258,7 @@ impl<'a> Resolver<'a> { }; let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str); - self.resolve_error(trait_reference.path.span, msg.as_slice()); + self.resolve_error(trait_reference.path.span, msg.as_str()); } Some(def) => { match def { @@ -4279,7 +4279,7 @@ impl<'a> Resolver<'a> { trait_reference.path.span, format!("`type` aliases cannot \ be used for traits") - .as_slice()); + .as_str()); } _ => {} } @@ -4304,7 +4304,7 @@ impl<'a> Resolver<'a> { predicate.span, format!("undeclared type parameter `{}`", token::get_ident( - predicate.ident)).as_slice()); + predicate.ident)).as_str()); } } @@ -4480,7 +4480,7 @@ impl<'a> Resolver<'a> { self.resolve_error(span, format!("method `{}` is not a member of trait `{}`", token::get_name(method_name), - path_str).as_slice()); + path_str).as_str()); } } } @@ -4546,7 +4546,7 @@ impl<'a> Resolver<'a> { format!("variable `{}` from pattern #1 is \ not bound in pattern #{}", token::get_name(key), - i + 1).as_slice()); + i + 1).as_str()); } Some(binding_i) => { if binding_0.binding_mode != binding_i.binding_mode { @@ -4555,7 +4555,7 @@ impl<'a> Resolver<'a> { format!("variable `{}` is bound with different \ mode in pattern #{} than in pattern #1", token::get_name(key), - i + 1).as_slice()); + i + 1).as_str()); } } } @@ -4568,7 +4568,7 @@ impl<'a> Resolver<'a> { format!("variable `{}` from pattern {}{} is \ not bound in pattern {}1", token::get_name(key), - "#", i + 1, "#").as_slice()); + "#", i + 1, "#").as_str()); } } } @@ -4689,7 +4689,7 @@ impl<'a> Resolver<'a> { None => { let msg = format!("use of undeclared type name `{}`", self.path_idents_to_string(path)); - self.resolve_error(ty.span, msg.as_slice()); + self.resolve_error(ty.span, msg.as_str()); } } @@ -4754,7 +4754,7 @@ impl<'a> Resolver<'a> { format!("declaration of `{}` shadows an enum \ variant or unit-like struct in \ scope", - token::get_name(renamed)).as_slice()); + token::get_name(renamed)).as_str()); } FoundConst(def, lp) if mode == RefutableMode => { debug!("(resolving pattern) resolving `{}` to \ @@ -4822,7 +4822,7 @@ impl<'a> Resolver<'a> { format!("identifier `{}` is bound \ more than once in the same \ pattern", - token::get_ident(ident)).as_slice()); + token::get_ident(ident)).as_str()); } // Else, not bound in the same pattern: do // nothing. 
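Besides message formatting, a second recurring shape in these resolver hunks is comparison and slicing: an owned or computed `String` is borrowed so it can be compared against a `&str` or searched with slice methods, as in the `"???" == module_name.as_str()` check just above and the `sn.as_str().contains("::")` suggestion logic. A small sketch of that pattern in current std Rust:

    fn main() {
        let module_name = String::from("???");
        // Borrow the owned String so both sides of the comparison are &str,
        // mirroring the resolver's `"???" == module_name.as_str()` check.
        if "???" == module_name.as_str() {
            println!("module path could not be reconstructed");
        }
        // The same borrow also lets slice methods such as contains/rfind run
        // on the text, as the import-suggestion code above does with `sn`.
        let sn = String::from("foo::bar");
        assert!(sn.as_str().contains("::"));
        assert_eq!(sn.as_str().rfind(':'), Some(4));
    }
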
@@ -4846,7 +4846,7 @@ impl<'a> Resolver<'a> { path.segments .last() .unwrap() - .identifier)).as_slice()); + .identifier)).as_str()); } None => { self.resolve_error(path.span, @@ -4855,7 +4855,7 @@ impl<'a> Resolver<'a> { path.segments .last() .unwrap() - .identifier)).as_slice()); + .identifier)).as_str()); } } @@ -4886,7 +4886,7 @@ impl<'a> Resolver<'a> { def: {:?}", result); let msg = format!("`{}` does not name a structure", self.path_idents_to_string(path)); - self.resolve_error(path.span, msg.as_slice()); + self.resolve_error(path.span, msg.as_str()); } } } @@ -5127,7 +5127,7 @@ impl<'a> Resolver<'a> { }; self.resolve_error(span, format!("failed to resolve. {}", - msg.as_slice())); + msg.as_str())); return None; } Indeterminate => fail!("indeterminate unexpected"), @@ -5203,7 +5203,7 @@ impl<'a> Resolver<'a> { }; self.resolve_error(span, format!("failed to resolve. {}", - msg.as_slice())); + msg.as_str())); return None; } @@ -5319,7 +5319,7 @@ impl<'a> Resolver<'a> { fn resolve_error(&self, span: Span, s: T) { if self.emit_errors { - self.session.span_err(span, s.as_slice()); + self.session.span_err(span, s.as_str()); } } @@ -5529,12 +5529,12 @@ impl<'a> Resolver<'a> { format!("`{}` is a structure name, but \ this expression \ uses it like a function name", - wrong_name).as_slice()); + wrong_name).as_str()); self.session.span_note(expr.span, format!("Did you mean to write: \ `{} {{ /* fields */ }}`?", - wrong_name).as_slice()); + wrong_name).as_str()); } _ => { @@ -5551,7 +5551,7 @@ impl<'a> Resolver<'a> { }); if method_scope && token::get_name(self.self_name).get() - == wrong_name.as_slice() { + == wrong_name.as_str() { self.resolve_error( expr.span, "`self` is not available \ @@ -5563,7 +5563,7 @@ impl<'a> Resolver<'a> { NoSuggestion => { // limit search to 5 to reduce the number // of stupid suggestions - self.find_best_match_for_name(wrong_name.as_slice(), 5) + self.find_best_match_for_name(wrong_name.as_str(), 5) .map_or("".to_string(), |x| format!("`{}`", x)) } @@ -5585,7 +5585,7 @@ impl<'a> Resolver<'a> { expr.span, format!("unresolved name `{}`.{}", wrong_name, - msg).as_slice()); + msg).as_str()); } } } @@ -5614,7 +5614,7 @@ impl<'a> Resolver<'a> { def: {:?}", result); let msg = format!("`{}` does not name a structure", self.path_idents_to_string(path)); - self.resolve_error(path.span, msg.as_slice()); + self.resolve_error(path.span, msg.as_str()); } } @@ -5682,7 +5682,7 @@ impl<'a> Resolver<'a> { self.resolve_error( expr.span, format!("use of undeclared label `{}`", - token::get_ident(label)).as_slice()) + token::get_ident(label)).as_str()) } Some(DlDef(def @ DefLabel(_))) => { // Since this def is a label, it is never read. 
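The `unresolved name` diagnostic above assembles its optional suggestion by limiting the fuzzy search to five candidates and then mapping the best match, if any, into a decorated `String`, defaulting to an empty one. A compact sketch of that shape, with a deliberately toy stand-in for `find_best_match_for_name`:

    // Toy stand-in for the resolver's find_best_match_for_name: the real one
    // ranks candidates by similarity; this one just grabs a same-length name.
    fn find_best_match_for_name<'a>(wrong_name: &str, candidates: &'a [&'a str]) -> Option<&'a str> {
        candidates.iter().copied().find(|c| c.len() == wrong_name.len())
    }

    fn main() {
        let wrong_name = String::from("prinln");
        let candidates = ["printx", "main"];
        // Option<&str> mapped into a decorated suggestion, or an empty String
        // when there is nothing worth suggesting -- the same map_or shape as above.
        let msg = find_best_match_for_name(wrong_name.as_str(), &candidates)
            .map_or(String::new(), |x| format!(" (did you mean `{}`?)", x));
        println!("unresolved name `{}`.{}", wrong_name, msg);
    }
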
@@ -5817,7 +5817,7 @@ impl<'a> Resolver<'a> { then {:?}", node_id, *old_value, - def).as_slice()); + def).as_str()); } }); } @@ -5832,7 +5832,7 @@ impl<'a> Resolver<'a> { self.resolve_error(pat.span, format!("cannot use `ref` binding mode \ with {}", - descr).as_slice()); + descr).as_str()); } } } diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 1bc37e2f1e445..d803e58b9667c 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -331,7 +331,7 @@ impl<'a> LifetimeContext<'a> { self.sess.span_err( lifetime_ref.span, format!("use of undeclared lifetime name `{}`", - token::get_name(lifetime_ref.name)).as_slice()); + token::get_name(lifetime_ref.name)).as_str()); } fn check_lifetime_defs<'b>(&mut self, @@ -347,7 +347,7 @@ impl<'a> LifetimeContext<'a> { lifetime.lifetime.span, format!("illegal lifetime parameter name: `{}`", token::get_name(lifetime.lifetime.name)) - .as_slice()); + .as_str()); } } @@ -360,7 +360,7 @@ impl<'a> LifetimeContext<'a> { format!("lifetime name `{}` declared twice in \ the same scope", token::get_name(lifetime_j.lifetime.name)) - .as_slice()); + .as_str()); } } diff --git a/src/librustc/middle/save/mod.rs b/src/librustc/middle/save/mod.rs index 7350413643c8d..af4e5de43e87b 100644 --- a/src/librustc/middle/save/mod.rs +++ b/src/librustc/middle/save/mod.rs @@ -92,7 +92,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // dump info about all the external crates referenced from this crate self.sess.cstore.iter_crate_data(|n, cmd| { - self.fmt.external_crate_str(krate.span, cmd.name.as_slice(), n); + self.fmt.external_crate_str(krate.span, cmd.name.as_str(), n); }); self.fmt.recorder.record("end_external_crates\n"); } @@ -141,7 +141,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname.as_slice(), + qualname.as_str(), scope_id); } } @@ -159,7 +159,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname.as_slice(), + qualname.as_str(), scope_id); } } @@ -178,7 +178,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let (ref span, ref qualname) = sub_paths[len-2]; self.fmt.sub_type_ref_str(path.span, *span, - qualname.as_slice()); + qualname.as_str()); // write the other sub-paths if len <= 2 { @@ -188,7 +188,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname.as_slice(), + qualname.as_str(), scope_id); } } @@ -197,7 +197,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { fn lookup_type_ref(&self, ref_id: NodeId) -> Option { if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) { self.sess.bug(format!("def_map has no key for {} in lookup_type_ref", - ref_id).as_slice()); + ref_id).as_str()); } let def = *self.analysis.ty_cx.def_map.borrow().get(&ref_id); match def { @@ -210,7 +210,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&ref_id) { self.sess.span_bug(span, format!("def_map has no key for {} in lookup_def_kind", - ref_id).as_slice()); + ref_id).as_str()); } let def = *def_map.get(&ref_id); match def { @@ -238,7 +238,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { def::DefMethod(_, _) | def::DefPrimTy(_) => { self.sess.span_bug(span, format!("lookup_def_kind for unexpected item: {:?}", - def).as_slice()); + def).as_str()); }, } } @@ 
-259,8 +259,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { span_utils.span_for_last_ident(p.span), id, qualname, - path_to_string(p).as_slice(), - typ.as_slice()); + path_to_string(p).as_str(), + typ.as_str()); } self.collected_paths.clear(); } @@ -282,14 +282,14 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { match item.node { ast::ItemImpl(_, _, ty, _) => { let mut result = String::from_str("<"); - result.push_str(ty_to_string(&*ty).as_slice()); + result.push_str(ty_to_string(&*ty).as_str()); match ty::trait_of_item(&self.analysis.ty_cx, ast_util::local_def(method.id)) { Some(def_id) => { result.push_str(" as "); result.push_str( - ty::item_path_str(&self.analysis.ty_cx, def_id).as_slice()); + ty::item_path_str(&self.analysis.ty_cx, def_id).as_str()); }, None => {} } @@ -298,7 +298,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { _ => { self.sess.span_bug(method.span, format!("Container {} for method {} not an impl?", - impl_id.node, method.id).as_slice()); + impl_id.node, method.id).as_str()); }, } }, @@ -308,7 +308,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { impl_id.node, method.id, self.analysis.ty_cx.map.get(impl_id.node) - ).as_slice()); + ).as_str()); }, }, None => match ty::trait_of_item(&self.analysis.ty_cx, @@ -323,20 +323,20 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { _ => { self.sess.span_bug(method.span, format!("Could not find container {} for method {}", - def_id.node, method.id).as_slice()); + def_id.node, method.id).as_str()); } } }, None => { self.sess.span_bug(method.span, format!("Could not find container for method {}", - method.id).as_slice()); + method.id).as_str()); }, }, }; qualname.push_str(get_ident(method.pe_ident()).get()); - let qualname = qualname.as_slice(); + let qualname = qualname.as_str(); // record the decl for this def (if it has one) let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx, @@ -419,13 +419,13 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.field_str(field.span, Some(sub_span), field.node.id, - name.get().as_slice(), - qualname.as_slice(), - typ.as_slice(), + name.get().as_str(), + qualname.as_str(), + typ.as_str(), scope_id), None => self.sess.span_bug(field.span, format!("Could not find sub-span for field {}", - qualname).as_slice()), + qualname).as_str()), } }, _ => (), @@ -453,7 +453,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(full_span, Some(*param_ss), param.id, - name.as_slice(), + name.as_str(), ""); } self.visit_generics(generics, e); @@ -471,10 +471,10 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.fn_str(item.span, sub_span, item.id, - qualname.as_slice(), + qualname.as_str(), e.cur_scope); - self.process_formals(&decl.inputs, qualname.as_slice(), e); + self.process_formals(&decl.inputs, qualname.as_str(), e); // walk arg and return types for arg in decl.inputs.iter() { @@ -485,7 +485,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // walk the body self.visit_block(&*body, DxrVisitorEnv::new_nested(item.id)); - self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id, e); + self.process_generic_params(ty_params, item.span, qualname.as_str(), item.id, e); } fn process_static(&mut self, @@ -508,9 +508,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, get_ident(item.ident).get(), - qualname.as_slice(), - value.as_slice(), - ty_to_string(&*typ).as_slice(), + qualname.as_str(), + value.as_str(), + ty_to_string(&*typ).as_str(), e.cur_scope); // walk type and init value @@ -534,16 +534,16 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, ctor_id, - 
qualname.as_slice(), + qualname.as_str(), e.cur_scope); // fields for field in def.fields.iter() { - self.process_struct_field_def(field, qualname.as_slice(), item.id); + self.process_struct_field_def(field, qualname.as_str(), item.id); self.visit_ty(&*field.node.ty, e); } - self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id, e); + self.process_generic_params(ty_params, item.span, qualname.as_str(), item.id, e); } fn process_enum(&mut self, @@ -556,11 +556,11 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.enum_str(item.span, Some(sub_span), item.id, - qualname.as_slice(), + qualname.as_str(), e.cur_scope), None => self.sess.span_bug(item.span, format!("Could not find subspan for enum {}", - qualname).as_slice()), + qualname).as_str()), } for variant in enum_definition.variants.iter() { let name = get_ident(variant.node.name); @@ -574,8 +574,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, name, - qualname.as_slice(), - val.as_slice(), + qualname.as_str(), + val.as_str(), item.id); for arg in args.iter() { self.visit_ty(&*arg.ty, e); @@ -591,19 +591,19 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, ctor_id, - qualname.as_slice(), - val.as_slice(), + qualname.as_str(), + val.as_str(), item.id); for field in struct_def.fields.iter() { - self.process_struct_field_def(field, qualname.as_slice(), variant.node.id); + self.process_struct_field_def(field, qualname.as_str(), variant.node.id); self.visit_ty(&*field.node.ty, e); } } } } - self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id, e); + self.process_generic_params(ty_params, item.span, qualname.as_str(), item.id, e); } fn process_impl(&mut self, @@ -662,7 +662,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.trait_str(item.span, sub_span, item.id, - qualname.as_slice(), + qualname.as_str(), e.cur_scope); // super-traits @@ -694,7 +694,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { } // walk generics and methods - self.process_generic_params(generics, item.span, qualname.as_slice(), item.id, e); + self.process_generic_params(generics, item.span, qualname.as_str(), item.id, e); for method in methods.iter() { self.visit_trait_item(method, e) } @@ -713,9 +713,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.mod_str(item.span, sub_span, item.id, - qualname.as_slice(), + qualname.as_str(), e.cur_scope, - filename.as_slice()); + filename.as_str()); visit::walk_mod(self, m, DxrVisitorEnv::new_nested(item.id)); } @@ -732,7 +732,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { if !def_map.contains_key(&ex.id) { self.sess.span_bug(ex.span, format!("def_map has no key for {} in visit_expr", - ex.id).as_slice()); + ex.id).as_str()); } let def = def_map.get(&ex.id); let sub_span = self.span.span_for_last_ident(ex.span); @@ -814,7 +814,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { e.cur_scope), _ => self.sess.span_bug(ex.span, format!("Unexpected def kind while looking up path in '{}'", - self.span.snippet(ex.span)).as_slice()), + self.span.snippet(ex.span)).as_str()), } // modules or types in the path prefix match *def { @@ -961,7 +961,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { None => { self.sess.span_bug(p.span, format!("Could not find struct_def for `{}`", - self.span.snippet(p.span)).as_slice()); + self.span.snippet(p.span)).as_str()); } }; // The AST doesn't give us a span for the struct field, so we have @@ -973,7 +973,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> 
{ self.sess.span_bug(p.span, format!("Mismatched field count in '{}', found {}, expected {}", self.span.snippet(p.span), field_spans.len(), fields.len() - ).as_slice()); + ).as_str()); } for (field, &span) in fields.iter().zip(field_spans.iter()) { self.visit_pat(&*field.pat, e); @@ -1057,11 +1057,11 @@ impl<'l, 'tcx> Visitor for DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(item.span, sub_span, item.id, - qualname.as_slice(), - value.as_slice()); + qualname.as_str(), + value.as_str()); self.visit_ty(&*ty, e); - self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id, e); + self.process_generic_params(ty_params, item.span, qualname.as_str(), item.id, e); }, ast::ItemMac(_) => (), _ => visit::walk_item(self, item, e), @@ -1121,12 +1121,12 @@ impl<'l, 'tcx> Visitor for DxrVisitor<'l, 'tcx> { None => { self.sess.span_bug(method_type.span, format!("Could not find trait for method {}", - method_type.id).as_slice()); + method_type.id).as_str()); }, }; qualname.push_str(get_ident(method_type.ident).get()); - let qualname = qualname.as_slice(); + let qualname = qualname.as_str(); let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn); self.fmt.method_decl_str(method_type.span, @@ -1238,7 +1238,7 @@ impl<'l, 'tcx> Visitor for DxrVisitor<'l, 'tcx> { id, cnum, name, - s.as_slice(), + s.as_str(), e.cur_scope); }, } @@ -1347,8 +1347,8 @@ impl<'l, 'tcx> Visitor for DxrVisitor<'l, 'tcx> { return } - let id = String::from_str("$").append(ex.id.to_string().as_slice()); - self.process_formals(&decl.inputs, id.as_slice(), e); + let id = String::from_str("$").append(ex.id.to_string().as_str()); + self.process_formals(&decl.inputs, id.as_str(), e); // walk arg and return types for arg in decl.inputs.iter() { @@ -1397,15 +1397,15 @@ impl<'l, 'tcx> Visitor for DxrVisitor<'l, 'tcx> { if !def_map.contains_key(&id) { self.sess.span_bug(p.span, format!("def_map has no key for {} in visit_arm", - id).as_slice()); + id).as_str()); } let def = def_map.get(&id); match *def { def::DefBinding(id, _) => self.fmt.variable_str(p.span, sub_span, id, - path_to_string(p).as_slice(), - value.as_slice(), + path_to_string(p).as_str(), + value.as_str(), ""), def::DefVariant(_,id,_) => self.fmt.ref_str(ref_kind, p.span, @@ -1455,9 +1455,9 @@ impl<'l, 'tcx> Visitor for DxrVisitor<'l, 'tcx> { self.fmt.variable_str(p.span, sub_span, id, - path_to_string(p).as_slice(), - value.as_slice(), - typ.as_slice()); + path_to_string(p).as_str(), + value.as_str(), + typ.as_str()); } self.collected_paths.clear(); @@ -1510,7 +1510,7 @@ pub fn process_crate(sess: &Session, match fs::mkdir_recursive(&root_path, io::UserRWX) { Err(e) => sess.err(format!("Could not create directory {}: {}", - root_path.display(), e).as_slice()), + root_path.display(), e).as_str()), _ => (), } @@ -1527,7 +1527,7 @@ pub fn process_crate(sess: &Session, Ok(f) => box f, Err(e) => { let disp = root_path.display(); - sess.fatal(format!("Could not open {}: {}", disp, e).as_slice()); + sess.fatal(format!("Could not open {}: {}", disp, e).as_str()); } }; root_path.pop(); @@ -1550,7 +1550,7 @@ pub fn process_crate(sess: &Session, err_count: Cell::new(0) }}; - visitor.dump_crate_info(cratename.as_slice(), krate); + visitor.dump_crate_info(cratename.as_str(), krate); visit::walk_crate(&mut visitor, krate, DxrVisitorEnv::new()); } diff --git a/src/librustc/middle/save/recorder.rs b/src/librustc/middle/save/recorder.rs index 0695b6b360c2c..2d3c64e4a6e69 100644 --- a/src/librustc/middle/save/recorder.rs +++ 
b/src/librustc/middle/save/recorder.rs @@ -39,7 +39,7 @@ impl Recorder { assert!(self.dump_spans); let result = format!("span,kind,{},{},text,\"{}\"\n", kind, su.extent_str(span), escape(su.snippet(span))); - self.record(result.as_slice()); + self.record(result.as_str()); } } @@ -151,14 +151,14 @@ impl<'a> FmtStrs<'a> { if values.len() != fields.len() { self.span.sess.span_bug(span, format!( "Mismatch between length of fields for '{}', expected '{}', found '{}'", - kind, fields.len(), values.len()).as_slice()); + kind, fields.len(), values.len()).as_str()); } let values = values.iter().map(|s| { if s.len() > 1020 { - s.as_slice().slice_to(1020) + s.as_str().slice_to(1020) } else { - s.as_slice() + s.as_str() } }); @@ -170,7 +170,7 @@ impl<'a> FmtStrs<'a> { String::from_str(v) } ))); - Some(strs.fold(String::new(), |s, ss| s.append(ss.as_slice()))) + Some(strs.fold(String::new(), |s, ss| s.append(ss.as_str()))) } pub fn record_without_span(&mut self, @@ -182,7 +182,7 @@ impl<'a> FmtStrs<'a> { if needs_span { self.span.sess.span_bug(span, format!( "Called record_without_span for '{}' which does requires a span", - label).as_slice()); + label).as_str()); } assert!(!dump_spans); @@ -196,7 +196,7 @@ impl<'a> FmtStrs<'a> { }; let result = String::from_str(label); - self.recorder.record(result.append(values_str.as_slice()).append("\n").as_slice()); + self.recorder.record(result.append(values_str.as_str()).append("\n").as_str()); } pub fn record_with_span(&mut self, @@ -216,7 +216,7 @@ impl<'a> FmtStrs<'a> { if !needs_span { self.span.sess.span_bug(span, format!("Called record_with_span for '{}' \ - which does not require a span", label).as_slice()); + which does not require a span", label).as_str()); } let values_str = match self.make_values_str(label, fields, values, span) { @@ -224,7 +224,7 @@ impl<'a> FmtStrs<'a> { None => return, }; let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str); - self.recorder.record(result.as_slice()); + self.recorder.record(result.as_str()); } pub fn check_and_record(&mut self, @@ -252,7 +252,7 @@ impl<'a> FmtStrs<'a> { // the local case they can be overridden in one block and there is no nice way // to refer to such a scope in english, so we just hack it by appending the // variable def's node id - let qualname = String::from_str(name).append("$").append(id.to_string().as_slice()); + let qualname = String::from_str(name).append("$").append(id.to_string().as_str()); self.check_and_record(Variable, span, sub_span, diff --git a/src/librustc/middle/save/span_utils.rs b/src/librustc/middle/save/span_utils.rs index 57006d5e72b9b..706f3c54c12a7 100644 --- a/src/librustc/middle/save/span_utils.rs +++ b/src/librustc/middle/save/span_utils.rs @@ -214,7 +214,7 @@ impl<'a> SpanUtils<'a> { let loc = self.sess.codemap().lookup_char_pos(span.lo); self.sess.span_bug(span, format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}", - self.snippet(span), loc.file.name, loc.line).as_slice()); + self.snippet(span), loc.file.name, loc.line).as_str()); } if result.is_none() && is_ident(&prev.tok) && bracket_count == 0 { return self.make_sub_span(span, Some(prev.sp)); @@ -240,7 +240,7 @@ impl<'a> SpanUtils<'a> { let loc = self.sess.codemap().lookup_char_pos(span.lo); self.sess.span_bug(span, format!( "Mis-counted brackets when breaking path? 
Parsing '{}' in {}, line {}", - self.snippet(span), loc.file.name, loc.line).as_slice()); + self.snippet(span), loc.file.name, loc.line).as_str()); } return result } diff --git a/src/librustc/middle/subst.rs b/src/librustc/middle/subst.rs index c1c23dff98406..edb8539ae78a4 100644 --- a/src/librustc/middle/subst.rs +++ b/src/librustc/middle/subst.rs @@ -588,7 +588,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { (space={}, index={})", region_name.as_str(), self.root_ty.repr(self.tcx()), - space, i).as_slice()); + space, i).as_str()); } } } @@ -641,7 +641,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { when substituting (root type={})", p.repr(this.tcx()), source_ty.repr(this.tcx()), - this.root_ty.repr(this.tcx())).as_slice()); + this.root_ty.repr(this.tcx())).as_str()); } } } diff --git a/src/librustc/middle/trans/_match.rs b/src/librustc/middle/trans/_match.rs index dd0668b8fa0e7..42063f87e9742 100644 --- a/src/librustc/middle/trans/_match.rs +++ b/src/librustc/middle/trans/_match.rs @@ -783,7 +783,7 @@ fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>, let did = langcall(cx, None, format!("comparison of `{}`", - cx.ty_to_string(rhs_t)).as_slice(), + cx.ty_to_string(rhs_t)).as_str(), StrEqFnLangItem); callee::trans_lang_call(cx, did, [lhs, rhs], None) } @@ -1290,7 +1290,7 @@ fn create_bindings_map(bcx: Block, pat: Gc, "__llmatch"); trmode = TrByCopy(alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident).as_slice())); + bcx.ident(ident).as_str())); } ast::BindByValue(_) => { // in this case, the final type of the variable will be T, @@ -1298,13 +1298,13 @@ fn create_bindings_map(bcx: Block, pat: Gc, // above llmatch = alloca_no_lifetime(bcx, llvariable_ty.ptr_to(), - bcx.ident(ident).as_slice()); + bcx.ident(ident).as_str()); trmode = TrByMove; } ast::BindByRef(_) => { llmatch = alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident).as_slice()); + bcx.ident(ident).as_str()); trmode = TrByRef; } }; @@ -1553,7 +1553,7 @@ fn mk_binding_alloca<'blk, 'tcx, A>(bcx: Block<'blk, 'tcx>, let var_ty = node_id_type(bcx, p_id); // Allocate memory on stack for the binding. - let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).as_slice()); + let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).as_str()); // Subtle: be sure that we *populate* the memory *before* // we schedule the cleanup. 
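In the translation hunks above, freshly built names (`bcx.ident(ident)`, `format!("comparison of `{}`", ..)`) are borrowed as `&str` just long enough to label an allocation, block, or lang-item call. A sketch of that naming pattern with a stand-in `alloc_ty` that records names instead of touching LLVM:

    // Stand-in for a trans helper that tags an allocation with a &str name;
    // the real alloc_ty hands the name to LLVM, this one just records it.
    fn alloc_ty(names: &mut Vec<String>, name: &str) -> usize {
        names.push(name.to_string());
        names.len() - 1
    }

    fn main() {
        let mut names = Vec::new();
        for ident in ["x", "y", "z"] {
            // The formatted String is only needed for the duration of the call,
            // so it is borrowed with .as_str(), matching the call sites above.
            let slot = alloc_ty(&mut names, format!("__binding_{}", ident).as_str());
            assert_eq!(names[slot], format!("__binding_{}", ident));
        }
    }
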
@@ -1601,7 +1601,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if bcx.sess().asm_comments() { add_comment(bcx, format!("bind_irrefutable_pat(pat={})", - pat.repr(bcx.tcx())).as_slice()); + pat.repr(bcx.tcx())).as_str()); } let _indenter = indenter(); diff --git a/src/librustc/middle/trans/adt.rs b/src/librustc/middle/trans/adt.rs index e62e3563a0a2c..3951471ff9fb5 100644 --- a/src/librustc/middle/trans/adt.rs +++ b/src/librustc/middle/trans/adt.rs @@ -217,7 +217,7 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr { cx.sess().bug(format!("non-C-like enum {} with specified \ discriminants", ty::item_path_str(cx.tcx(), - def_id)).as_slice()); + def_id)).as_str()); } if cases.len() == 1 { @@ -271,7 +271,7 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr { }).collect(), dtor); } _ => cx.sess().bug(format!("adt::represent_type called on non-ADT type: {}", - ty_to_string(cx.tcx(), t)).as_slice()) + ty_to_string(cx.tcx(), t)).as_str()) } } @@ -843,7 +843,7 @@ pub fn fold_variants<'blk, 'tcx>( for (discr, case) in cases.iter().enumerate() { let mut variant_cx = fcx.new_temp_block( - format!("enum-variant-iter-{}", discr.to_string()).as_slice() + format!("enum-variant-iter-{}", discr.to_string()).as_str() ); let rhs_val = C_integral(ll_inttype(ccx, ity), discr as u64, true); AddCase(llswitch, rhs_val, variant_cx.llbb); diff --git a/src/librustc/middle/trans/asm.rs b/src/librustc/middle/trans/asm.rs index 7fb692c270ee3..7ed26659f337b 100644 --- a/src/librustc/middle/trans/asm.rs +++ b/src/librustc/middle/trans/asm.rs @@ -83,7 +83,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) .chain(ext_constraints.move_iter()) .collect::>() .connect(",") - .as_slice()); + .as_str()); let mut clobbers = get_clobbers(); if !ia.clobbers.get().is_empty() && !clobbers.is_empty() { @@ -95,12 +95,12 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) // Add the clobbers to our constraints list if clobbers.len() != 0 && constraints.len() != 0 { constraints.push_char(','); - constraints.push_str(clobbers.as_slice()); + constraints.push_str(clobbers.as_str()); } else { - constraints.push_str(clobbers.as_slice()); + constraints.push_str(clobbers.as_str()); } - debug!("Asm Constraints: {:?}", constraints.as_slice()); + debug!("Asm Constraints: {:?}", constraints.as_str()); let num_outputs = outputs.len(); @@ -119,7 +119,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) }; let r = ia.asm.get().with_c_str(|a| { - constraints.as_slice().with_c_str(|c| { + constraints.as_str().with_c_str(|c| { InlineAsmCall(bcx, a, c, diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index dc2aa16eb7284..8f7410954a459 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -358,7 +358,7 @@ fn require_alloc_fn(bcx: Block, info_ty: ty::t, it: LangItem) -> ast::DefId { Err(s) => { bcx.sess().fatal(format!("allocation of `{}` {}", bcx.ty_to_string(info_ty), - s).as_slice()); + s).as_str()); } } } @@ -526,7 +526,7 @@ pub fn unset_split_stack(f: ValueRef) { // silently mangles such symbols, breaking our linkage model. 
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: String) { if ccx.all_llvm_symbols().borrow().contains(&sym) { - ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).as_slice()); + ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).as_str()); } ccx.all_llvm_symbols().borrow_mut().insert(sym); } @@ -562,7 +562,7 @@ pub fn get_res_dtor(ccx: &CrateContext, [glue::get_drop_glue_type(ccx, t)], ty::mk_nil()); get_extern_fn(ccx, &mut *ccx.externs().borrow_mut(), - name.as_slice(), + name.as_str(), llvm::CCallConv, llty, dtor_ty) @@ -811,8 +811,8 @@ pub fn iter_structural_ty<'a, 'blk, 'tcx>(cx: Block<'blk, 'tcx>, let variant_cx = fcx.new_temp_block( format!("enum-iter-variant-{}", - variant.disr_val.to_string().as_slice()) - .as_slice()); + variant.disr_val.to_string().as_str()) + .as_str()); match adt::trans_case(cx, &*repr, variant.disr_val) { _match::SingleResult(r) => { AddCase(llswitch, r.val, variant_cx.llbb) @@ -913,7 +913,7 @@ pub fn fail_if_zero_or_overflows<'blk, 'tcx>( } _ => { cx.sess().bug(format!("fail-if-zero on unexpected type: {}", - ty_to_string(cx.tcx(), rhs_t)).as_slice()); + ty_to_string(cx.tcx(), rhs_t)).as_str()); } }; let bcx = with_cond(cx, is_zero, |bcx| { @@ -967,25 +967,25 @@ pub fn trans_external_path(ccx: &CrateContext, did: ast::DefId, t: ty::t) -> Val match fn_ty.abi.for_target(ccx.sess().targ_cfg.os, ccx.sess().targ_cfg.arch) { Some(Rust) | Some(RustCall) => { - get_extern_rust_fn(ccx, t, name.as_slice(), did) + get_extern_rust_fn(ccx, t, name.as_str(), did) } Some(RustIntrinsic) => { ccx.sess().bug("unexpected intrinsic in trans_external_path") } Some(..) | None => { foreign::register_foreign_item_fn(ccx, fn_ty.abi, t, - name.as_slice(), None) + name.as_str(), None) } } } ty::ty_closure(_) => { - get_extern_rust_fn(ccx, t, name.as_slice(), did) + get_extern_rust_fn(ccx, t, name.as_str(), did) } _ => { let llty = type_of(ccx, t); get_extern_const(&mut *ccx.externs().borrow_mut(), ccx.llmod(), - name.as_slice(), + name.as_str(), llty) } } @@ -1417,7 +1417,7 @@ fn has_nested_returns(tcx: &ty::ctxt, id: ast::NodeId) -> bool { None if id == ast::DUMMY_NODE_ID => false, _ => tcx.sess.bug(format!("unexpected variant in has_nested_returns: {}", - tcx.map.path_to_string(id)).as_slice()) + tcx.map.path_to_string(id)).as_str()) } } @@ -1983,7 +1983,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, _ => ccx.sess().bug( format!("trans_enum_variant_constructor: \ unexpected ctor return type {}", - ctor_ty.repr(tcx)).as_slice()) + ctor_ty.repr(tcx)).as_str()) }; // Get location to store the result. 
If the user does not care about @@ -2048,7 +2048,7 @@ fn trans_enum_variant_or_tuple_like_struct(ccx: &CrateContext, _ => ccx.sess().bug( format!("trans_enum_variant_or_tuple_like_struct: \ unexpected ctor return type {}", - ty_to_string(ccx.tcx(), ctor_ty)).as_slice()) + ty_to_string(ccx.tcx(), ctor_ty)).as_str()) }; let arena = TypedArena::new(); @@ -2124,7 +2124,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span, lvlsrc, Some(sp), format!("enum variant is more than three times larger \ ({} bytes) than the next largest (ignoring padding)", - largest).as_slice()); + largest).as_str()); ccx.sess().span_note(enum_def.variants.get(largest_index).span, "this variant is the largest"); @@ -2346,7 +2346,7 @@ fn register_fn(ccx: &CrateContext, _ => fail!("expected bare rust fn") }; - let llfn = decl_rust_fn(ccx, node_type, sym.as_slice()); + let llfn = decl_rust_fn(ccx, node_type, sym.as_str()); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } @@ -2547,7 +2547,7 @@ pub fn register_fn_llvmty(ccx: &CrateContext, llfty: Type) -> ValueRef { debug!("register_fn_llvmty id={} sym={}", node_id, sym); - let llfn = decl_fn(ccx, sym.as_slice(), cc, llfty, ty::mk_nil()); + let llfn = decl_fn(ccx, sym.as_str(), cc, llfty, ty::mk_nil()); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } @@ -2599,7 +2599,7 @@ pub fn create_entry_wrapper(ccx: &CrateContext, let (start_fn, args) = if use_start_lang_item { let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) { Ok(id) => id, - Err(s) => { ccx.sess().fatal(s.as_slice()); } + Err(s) => { ccx.sess().fatal(s.as_str()); } }; let start_fn = if start_def_id.krate == ast::LOCAL_CRATE { get_item_val(ccx, start_def_id.node) @@ -2705,7 +2705,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { unsafe { let llty = llvm::LLVMTypeOf(v); - let g = sym.as_slice().with_c_str(|buf| { + let g = sym.as_str().with_c_str(|buf| { llvm::LLVMAddGlobal(ccx.llmod(), llty, buf) }); @@ -2805,7 +2805,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let ty = ty::node_id_to_type(ccx.tcx(), ni.id); let name = foreign::link_name(&*ni); foreign::register_foreign_item_fn(ccx, abi, ty, - name.get().as_slice(), + name.get().as_str(), Some(ni.span)) } ast::ForeignItemStatic(..) 
=> { @@ -2866,7 +2866,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { ref variant => { ccx.sess().bug(format!("get_item_val(): unexpected variant: {:?}", - variant).as_slice()) + variant).as_str()) } }; @@ -3061,7 +3061,7 @@ pub fn trans_crate(krate: ast::Crate, let link_meta = link::build_link_meta(&tcx.sess, &krate, name); let codegen_units = tcx.sess.opts.cg.codegen_units; - let shared_ccx = SharedCrateContext::new(link_meta.crate_name.as_slice(), + let shared_ccx = SharedCrateContext::new(link_meta.crate_name.as_str(), codegen_units, tcx, exp_map2, diff --git a/src/librustc/middle/trans/builder.rs b/src/librustc/middle/trans/builder.rs index ca7adb97f3d54..6cf18dc766b97 100644 --- a/src/librustc/middle/trans/builder.rs +++ b/src/librustc/middle/trans/builder.rs @@ -762,8 +762,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_string(sp)); - debug!("{}", s.as_slice()); - self.add_comment(s.as_slice()); + debug!("{}", s.as_str()); + self.add_comment(s.as_str()); } } @@ -773,7 +773,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let comment_text = format!("{} {}", "#", sanitized.replace("\n", "\n\t# ")); self.count_insn("inlineasm"); - let asm = comment_text.as_slice().with_c_str(|c| { + let asm = comment_text.as_str().with_c_str(|c| { unsafe { llvm::LLVMConstInlineAsm(Type::func([], &Type::void(self.ccx)).to_ref(), c, noname(), False, False) diff --git a/src/librustc/middle/trans/callee.rs b/src/librustc/middle/trans/callee.rs index 5d66ec0a4b936..da2085b083389 100644 --- a/src/librustc/middle/trans/callee.rs +++ b/src/librustc/middle/trans/callee.rs @@ -125,7 +125,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) expr.span, format!("type of callee is neither bare-fn nor closure: \ {}", - bcx.ty_to_string(datum.ty)).as_slice()); + bcx.ty_to_string(datum.ty)).as_str()); } } } @@ -211,7 +211,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) bcx.tcx().sess.span_bug( ref_expr.span, format!("cannot translate def {:?} \ - to a callable thing!", def).as_slice()); + to a callable thing!", def).as_str()); } } } @@ -324,7 +324,7 @@ pub fn trans_unboxing_shim(bcx: Block, }); let llfn = decl_internal_rust_fn(ccx, boxed_function_type, - function_name.as_slice()); + function_name.as_str()); let block_arena = TypedArena::new(); let empty_param_substs = param_substs::empty(); diff --git a/src/librustc/middle/trans/cleanup.rs b/src/librustc/middle/trans/cleanup.rs index 4d54308031e92..d1491b4eb43a1 100644 --- a/src/librustc/middle/trans/cleanup.rs +++ b/src/librustc/middle/trans/cleanup.rs @@ -395,7 +395,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { self.ccx.sess().bug( format!("no cleanup scope {} found", - self.ccx.tcx().map.node_to_string(cleanup_scope)).as_slice()); + self.ccx.tcx().map.node_to_string(cleanup_scope)).as_str()); } fn schedule_clean_in_custom_scope(&self, @@ -597,7 +597,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx LoopExit(id, _) => { self.ccx.sess().bug(format!( "cannot exit from scope {:?}, \ - not in scope", id).as_slice()); + not in scope", id).as_str()); } } } @@ -666,7 +666,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx let name = scope.block_name("clean"); debug!("generating cleanups for {}", name); let bcx_in = self.new_block(label.is_unwind(), - name.as_slice(), + name.as_str(), None); let mut bcx_out = bcx_in; for cleanup in 
scope.cleanups.iter().rev() { @@ -717,7 +717,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx Some(llbb) => { return llbb; } None => { let name = last_scope.block_name("unwind"); - pad_bcx = self.new_block(true, name.as_slice(), None); + pad_bcx = self.new_block(true, name.as_str(), None); last_scope.cached_landing_pad = Some(pad_bcx.llbb); } } @@ -1005,7 +1005,7 @@ pub fn temporary_scope(tcx: &ty::ctxt, } None => { tcx.sess.bug(format!("no temporary scope available for expr {}", - id).as_slice()) + id).as_str()) } } } diff --git a/src/librustc/middle/trans/closure.rs b/src/librustc/middle/trans/closure.rs index 7bbdf332fe11f..415d131932a09 100644 --- a/src/librustc/middle/trans/closure.rs +++ b/src/librustc/middle/trans/closure.rs @@ -203,7 +203,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if ccx.sess().asm_comments() { add_comment(bcx, format!("Copy {} into closure", - bv.to_string(ccx)).as_slice()); + bv.to_string(ccx)).as_str()); } let bound_data = GEPi(bcx, llbox, [0u, abi::box_field_body, i]); @@ -382,7 +382,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let s = tcx.map.with_path(id, |path| { mangle_internal_name_by_path_and_seq(path, "closure") }); - let llfn = decl_internal_rust_fn(ccx, fty, s.as_slice()); + let llfn = decl_internal_rust_fn(ccx, fty, s.as_str()); // set an inline hint for all closures set_inline_hint(llfn); @@ -441,7 +441,7 @@ pub fn get_or_create_declaration_if_unboxed_closure(ccx: &CrateContext, mangle_internal_name_by_path_and_seq(path, "unboxed_closure") }); - let llfn = decl_internal_rust_fn(ccx, function_type, symbol.as_slice()); + let llfn = decl_internal_rust_fn(ccx, function_type, symbol.as_str()); // set an inline hint for all closures set_inline_hint(llfn); @@ -543,7 +543,7 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext, ccx.sess().bug(format!("get_wrapper_for_bare_fn: \ expected a statically resolved fn, got \ {:?}", - def).as_slice()); + def).as_str()); } }; @@ -561,7 +561,7 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext, _ => { ccx.sess().bug(format!("get_wrapper_for_bare_fn: \ expected a closure ty, got {}", - closure_ty.repr(tcx)).as_slice()); + closure_ty.repr(tcx)).as_str()); } }; @@ -569,9 +569,9 @@ pub fn get_wrapper_for_bare_fn(ccx: &CrateContext, mangle_internal_name_by_path_and_seq(path, "as_closure") }); let llfn = if is_local { - decl_internal_rust_fn(ccx, closure_ty, name.as_slice()) + decl_internal_rust_fn(ccx, closure_ty, name.as_str()) } else { - decl_rust_fn(ccx, closure_ty, name.as_slice()) + decl_rust_fn(ccx, closure_ty, name.as_str()) }; ccx.closure_bare_wrapper_cache().borrow_mut().insert(fn_ptr, llfn); diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index 4ac9ae64d5a08..b4ec5b3f38244 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -117,7 +117,7 @@ pub fn gensym_name(name: &str) -> PathElem { let num = token::gensym(name).uint(); // use one colon which will get translated to a period by the mangler, and // we're guaranteed that `num` is globally unique for this crate. 
- PathName(token::gensym(format!("{}:{}", name, num).as_slice())) + PathName(token::gensym(format!("{}:{}", name, num).as_str())) } pub struct tydesc_info { @@ -475,7 +475,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> { Some(&v) => v, None => { self.tcx().sess.bug(format!( - "no def associated with node id {:?}", nid).as_slice()); + "no def associated with node id {:?}", nid).as_str()); } } } @@ -795,7 +795,7 @@ pub fn node_id_substs(bcx: Block, format!("type parameters for node {:?} include inference types: \ {}", node, - substs.repr(bcx.tcx())).as_slice()); + substs.repr(bcx.tcx())).as_str()); } substs.substp(tcx, bcx.fcx.param_substs) @@ -889,8 +889,8 @@ pub fn langcall(bcx: Block, Err(s) => { let msg = format!("{} {}", msg, s); match span { - Some(span) => bcx.tcx().sess.span_fatal(span, msg.as_slice()), - None => bcx.tcx().sess.fatal(msg.as_slice()), + Some(span) => bcx.tcx().sess.span_fatal(span, msg.as_str()), + None => bcx.tcx().sess.fatal(msg.as_str()), } } } diff --git a/src/librustc/middle/trans/consts.rs b/src/librustc/middle/trans/consts.rs index 8f6a3864b37ea..548c51344d74f 100644 --- a/src/librustc/middle/trans/consts.rs +++ b/src/librustc/middle/trans/consts.rs @@ -63,7 +63,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit) _ => cx.sess().span_bug(lit.span, format!("integer literal has type {} (expected int \ or uint)", - ty_to_string(cx.tcx(), lit_int_ty)).as_slice()) + ty_to_string(cx.tcx(), lit_int_ty)).as_str()) } } ast::LitFloat(ref fs, t) => { @@ -165,13 +165,13 @@ fn const_deref(cx: &CrateContext, v: ValueRef, t: ty::t, explicit: bool) } _ => { cx.sess().bug(format!("unexpected dereferenceable type {}", - ty_to_string(cx.tcx(), t)).as_slice()) + ty_to_string(cx.tcx(), t)).as_str()) } } } None => { cx.sess().bug(format!("cannot dereference const of type {}", - ty_to_string(cx.tcx(), t)).as_slice()) + ty_to_string(cx.tcx(), t)).as_str()) } } } @@ -220,7 +220,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef cx.sess() .span_bug(e.span, format!("unexpected static function: {:?}", - store).as_slice()) + store).as_str()) } ty::AutoDerefRef(ref adj) => { let mut ty = ety; @@ -283,7 +283,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef } _ => cx.sess().span_bug(e.span, format!("unimplemented type in const unsize: {}", - ty_to_string(cx.tcx(), ty)).as_slice()) + ty_to_string(cx.tcx(), ty)).as_str()) } } _ => { @@ -291,7 +291,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef .span_bug(e.span, format!("unimplemented const \ autoref {:?}", - autoref).as_slice()) + autoref).as_str()) } } } @@ -312,7 +312,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef } cx.sess().bug(format!("const {} of type {} has size {} instead of {}", e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety), - csize, tsize).as_slice()); + csize, tsize).as_str()); } (llconst, inlineable, ety_adjusted) } @@ -466,7 +466,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr, _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt)).as_slice()) + ty_to_string(cx.tcx(), bt)).as_str()) }, ty::ty_rptr(_, mt) => match ty::get(mt.ty).sty { ty::ty_vec(_, Some(u)) => { @@ -475,12 +475,12 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr, _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), 
bt)).as_slice()) + ty_to_string(cx.tcx(), bt)).as_str()) }, _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt)).as_slice()) + ty_to_string(cx.tcx(), bt)).as_str()) }; let len = llvm::LLVMConstIntGetZExtValue(len) as u64; diff --git a/src/librustc/middle/trans/context.rs b/src/librustc/middle/trans/context.rs index 3b4b50c7e85ac..a6a1a2a457b3b 100644 --- a/src/librustc/middle/trans/context.rs +++ b/src/librustc/middle/trans/context.rs @@ -220,14 +220,14 @@ unsafe fn create_context_and_module(sess: &Session, mod_name: &str) -> (ContextR sess.targ_cfg .target_strs .data_layout - .as_slice() + .as_str() .with_c_str(|buf| { llvm::LLVMSetDataLayout(llmod, buf); }); sess.targ_cfg .target_strs .target_triple - .as_slice() + .as_str() .with_c_str(|buf| { llvm::LLVMRustSetNormalizedTarget(llmod, buf); }); @@ -286,7 +286,7 @@ impl<'tcx> SharedCrateContext<'tcx> { // such as a function name in the module. // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 let llmod_id = format!("{}.{}.rs", crate_name, i); - let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id.as_slice()); + let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id.as_str()); shared_ccx.local_ccxs.push(local_ccx); } @@ -384,7 +384,7 @@ impl LocalCrateContext { .targ_cfg .target_strs .data_layout - .as_slice()); + .as_str()); let dbg_cx = if shared.tcx.sess.opts.debuginfo != NoDebugInfo { Some(debuginfo::CrateDebugContext::new(llmod)) diff --git a/src/librustc/middle/trans/controlflow.rs b/src/librustc/middle/trans/controlflow.rs index dd9e41a61bf87..de3ae9648be2c 100644 --- a/src/librustc/middle/trans/controlflow.rs +++ b/src/librustc/middle/trans/controlflow.rs @@ -49,7 +49,7 @@ pub fn trans_stmt<'blk, 'tcx>(cx: Block<'blk, 'tcx>, debug!("trans_stmt({})", s.repr(cx.tcx())); if cx.sess().asm_comments() { - add_span_comment(cx, s.span, s.repr(cx.tcx()).as_slice()); + add_span_comment(cx, s.span, s.repr(cx.tcx()).as_str()); } let mut bcx = cx; @@ -177,7 +177,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } let name = format!("then-block-{}-", thn.id); - let then_bcx_in = bcx.fcx.new_id_block(name.as_slice(), thn.id); + let then_bcx_in = bcx.fcx.new_id_block(name.as_str(), thn.id); let then_bcx_out = trans_block(then_bcx_in, &*thn, dest); trans::debuginfo::clear_source_location(bcx.fcx); @@ -425,7 +425,7 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, Some(&def::DefLabel(loop_id)) => loop_id, ref r => { bcx.tcx().sess.bug(format!("{:?} in def-map for label", - r).as_slice()) + r).as_str()) } } } @@ -492,7 +492,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let v_str = C_str_slice(ccx, fail_str); let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name.as_slice()); + let filename = token::intern_and_get_ident(loc.file.name.as_str()); let filename = C_str_slice(ccx, filename); let line = C_int(ccx, loc.line as int); let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false); @@ -517,7 +517,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Extract the file/line from the span let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name.as_slice()); + let filename = token::intern_and_get_ident(loc.file.name.as_str()); // Invoke the lang item let filename = C_str_slice(ccx, filename); diff --git a/src/librustc/middle/trans/datum.rs b/src/librustc/middle/trans/datum.rs 
index 84d9f2cb740a3..c6e97688bcaba 100644 --- a/src/librustc/middle/trans/datum.rs +++ b/src/librustc/middle/trans/datum.rs @@ -526,7 +526,7 @@ impl Datum { } _ => bcx.tcx().sess.bug( format!("Unexpected unsized type in get_element: {}", - bcx.ty_to_string(self.ty)).as_slice()) + bcx.ty_to_string(self.ty)).as_str()) }; Datum { val: val, diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index e1c7ef4d50f0a..ea1ad67b79cc7 100644 --- a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -279,7 +279,7 @@ impl TypeMap { metadata: DIType) { if !self.type_to_metadata.insert(ty::type_id(type_), metadata) { cx.sess().bug(format!("Type metadata for ty::t '{}' is already in the TypeMap!", - ppaux::ty_to_string(cx.tcx(), type_)).as_slice()); + ppaux::ty_to_string(cx.tcx(), type_)).as_str()); } } @@ -292,7 +292,7 @@ impl TypeMap { if !self.unique_id_to_metadata.insert(unique_type_id, metadata) { let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id); cx.sess().bug(format!("Type metadata for unique id '{}' is already in the TypeMap!", - unique_type_id_str.as_slice()).as_slice()); + unique_type_id_str.as_str()).as_str()); } } @@ -371,20 +371,20 @@ impl TypeMap { self.get_unique_type_id_of_type(cx, component_type); let component_type_id = self.get_unique_type_id_as_string(component_type_id); - unique_type_id.push_str(component_type_id.as_slice()); + unique_type_id.push_str(component_type_id.as_str()); } }, ty::ty_box(inner_type) => { unique_type_id.push_char('@'); let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.as_slice()); + unique_type_id.push_str(inner_type_id.as_str()); }, ty::ty_uniq(inner_type) => { unique_type_id.push_char('~'); let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.as_slice()); + unique_type_id.push_str(inner_type_id.as_str()); }, ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => { unique_type_id.push_char('*'); @@ -394,7 +394,7 @@ impl TypeMap { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.as_slice()); + unique_type_id.push_str(inner_type_id.as_str()); }, ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => { unique_type_id.push_char('&'); @@ -404,12 +404,12 @@ impl TypeMap { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.as_slice()); + unique_type_id.push_str(inner_type_id.as_str()); }, ty::ty_vec(inner_type, optional_length) => { match optional_length { Some(len) => { - unique_type_id.push_str(format!("[{}]", len).as_slice()); + unique_type_id.push_str(format!("[{}]", len).as_str()); } None => { unique_type_id.push_str("[]"); @@ -418,7 +418,7 @@ impl TypeMap { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id.as_slice()); + unique_type_id.push_str(inner_type_id.as_str()); }, ty::ty_trait(ref trait_data) => { unique_type_id.push_str("trait "); @@ -443,7 +443,7 @@ impl TypeMap { self.get_unique_type_id_of_type(cx, parameter_type); let 
parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id.as_slice()); + unique_type_id.push_str(parameter_type_id.as_str()); unique_type_id.push_char(','); } @@ -454,7 +454,7 @@ impl TypeMap { unique_type_id.push_str(")->"); let return_type_id = self.get_unique_type_id_of_type(cx, sig.output); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id.as_slice()); + unique_type_id.push_str(return_type_id.as_str()); }, ty::ty_closure(box ty::ClosureTy { fn_style, onceness, @@ -485,7 +485,7 @@ impl TypeMap { self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id.as_slice()); + unique_type_id.push_str(parameter_type_id.as_str()); unique_type_id.push_char(','); } @@ -497,7 +497,7 @@ impl TypeMap { let return_type_id = self.get_unique_type_id_of_type(cx, sig.output); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id.as_slice()); + unique_type_id.push_str(return_type_id.as_str()); unique_type_id.push_char(':'); @@ -513,8 +513,8 @@ impl TypeMap { }, _ => { cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {:?}", - ppaux::ty_to_string(cx.tcx(), type_).as_slice(), - ty::get(type_).sty).as_slice()) + ppaux::ty_to_string(cx.tcx(), type_).as_str(), + ty::get(type_).sty).as_str()) } }; @@ -559,7 +559,7 @@ impl TypeMap { output.push_str(crate_hash.as_str()); output.push_str("/"); - output.push_str(def_id.node.to_str_radix(16).as_slice()); + output.push_str(def_id.node.to_str_radix(16).as_str()); // Maybe check that there is no self type here. @@ -572,7 +572,7 @@ impl TypeMap { type_map.get_unique_type_id_of_type(cx, type_parameter); let param_type_id = type_map.get_unique_type_id_as_string(param_type_id); - output.push_str(param_type_id.as_slice()); + output.push_str(param_type_id.as_str()); output.push_char(','); } @@ -592,7 +592,7 @@ impl TypeMap { let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type); let enum_variant_type_id = format!("{}::{}", self.get_unique_type_id_as_string(enum_type_id) - .as_slice(), + .as_str(), variant_name); let interner_key = self.unique_id_interner.intern(Rc::new(enum_variant_type_id)); UniqueTypeId(interner_key) @@ -605,7 +605,7 @@ impl TypeMap { let element_type_id = self.get_unique_type_id_of_type(cx, element_type); let gc_box_type_id = format!("{{GC_BOX<{}>}}", self.get_unique_type_id_as_string(element_type_id) - .as_slice()); + .as_str()); let interner_key = self.unique_id_interner.intern(Rc::new(gc_box_type_id)); UniqueTypeId(interner_key) } @@ -785,19 +785,19 @@ pub fn create_global_var_metadata(cx: &CrateContext, create_global_var_metadata() - Captured var-id refers to \ unexpected ast_item variant: {:?}", - var_item).as_slice()) + var_item).as_str()) } } }, _ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() \ - Captured var-id refers to unexpected \ ast_map variant: {:?}", - var_item).as_slice()) + var_item).as_str()) }; let (file_metadata, line_number) = if span != codemap::DUMMY_SP { let loc = span_start(cx, span); - (file_metadata(cx, loc.file.name.as_slice()), loc.line as c_uint) + (file_metadata(cx, loc.file.name.as_str()), loc.line as c_uint) } else { (UNKNOWN_FILE_METADATA, UNKNOWN_LINE_NUMBER) }; @@ -808,11 +808,11 @@ pub fn create_global_var_metadata(cx: &CrateContext, let namespace_node = 
namespace_for_item(cx, ast_util::local_def(node_id)); let var_name = token::get_ident(ident).get().to_string(); let linkage_name = - namespace_node.mangled_name_of_contained_item(var_name.as_slice()); + namespace_node.mangled_name_of_contained_item(var_name.as_str()); let var_scope = namespace_node.scope; - var_name.as_slice().with_c_str(|var_name| { - linkage_name.as_slice().with_c_str(|linkage_name| { + var_name.as_str().with_c_str(|var_name| { + linkage_name.as_str().with_c_str(|linkage_name| { unsafe { llvm::LLVMDIBuilderCreateStaticVariable(DIB(cx), var_scope, @@ -848,7 +848,7 @@ pub fn create_local_var_metadata(bcx: Block, local: &ast::Local) { None => { bcx.sess().span_bug(span, format!("no entry in lllocals table for {:?}", - node_id).as_slice()); + node_id).as_str()); } }; @@ -898,7 +898,7 @@ pub fn create_captured_var_metadata(bcx: Block, "debuginfo::create_captured_var_metadata() - \ Captured var-id refers to unexpected \ ast_map variant: {:?}", - ast_item).as_slice()); + ast_item).as_str()); } } } @@ -908,7 +908,7 @@ pub fn create_captured_var_metadata(bcx: Block, format!("debuginfo::create_captured_var_metadata() - \ Captured var-id refers to unexpected \ ast_map variant: {:?}", - ast_item).as_slice()); + ast_item).as_str()); } }; @@ -1011,7 +1011,7 @@ pub fn create_argument_metadata(bcx: Block, arg: &ast::Arg) { None => { bcx.sess().span_bug(span, format!("no entry in llargs table for {:?}", - node_id).as_slice()); + node_id).as_str()); } }; @@ -1162,7 +1162,7 @@ pub fn create_function_debug_context(cx: &CrateContext, ast::ExprProc(fn_decl, top_level_block) | ast::ExprUnboxedFn(_, _, fn_decl, top_level_block) => { let name = format!("fn{}", token::gensym("fn")); - let name = token::str_to_ident(name.as_slice()); + let name = token::str_to_ident(name.as_str()); (name, fn_decl, // This is not quite right. It should actually inherit // the generics of the enclosing function. 
@@ -1196,7 +1196,7 @@ pub fn create_function_debug_context(cx: &CrateContext, cx.sess() .bug(format!("create_function_debug_context: \ unexpected sort of node: {:?}", - fnitem).as_slice()) + fnitem).as_str()) } } } @@ -1207,7 +1207,7 @@ pub fn create_function_debug_context(cx: &CrateContext, } _ => cx.sess().bug(format!("create_function_debug_context: \ unexpected sort of node: {:?}", - fnitem).as_slice()) + fnitem).as_str()) }; // This can be the case for functions inlined from another crate @@ -1216,7 +1216,7 @@ pub fn create_function_debug_context(cx: &CrateContext, } let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name.as_slice()); + let file_metadata = file_metadata(cx, loc.file.name.as_str()); let function_type_metadata = unsafe { let fn_signature = get_function_signature(cx, @@ -1243,11 +1243,11 @@ pub fn create_function_debug_context(cx: &CrateContext, let (linkage_name, containing_scope) = if has_path { let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id)); let linkage_name = namespace_node.mangled_name_of_contained_item( - function_name.as_slice()); + function_name.as_str()); let containing_scope = namespace_node.scope; (linkage_name, containing_scope) } else { - (function_name.as_slice().to_string(), file_metadata) + (function_name.as_str().to_string(), file_metadata) }; // Clang sets this parameter to the opening brace of the function's block, @@ -1256,8 +1256,8 @@ pub fn create_function_debug_context(cx: &CrateContext, let is_local_to_unit = is_node_local_to_unit(cx, fn_ast_id); - let fn_metadata = function_name.as_slice().with_c_str(|function_name| { - linkage_name.as_slice().with_c_str(|linkage_name| { + let fn_metadata = function_name.as_str().with_c_str(|function_name| { + linkage_name.as_str().with_c_str(|linkage_name| { unsafe { llvm::LLVMDIBuilderCreateFunction( DIB(cx), @@ -1362,7 +1362,7 @@ pub fn create_function_debug_context(cx: &CrateContext, actual_self_type, true); - name_to_append_suffix_to.push_str(actual_self_type_name.as_slice()); + name_to_append_suffix_to.push_str(actual_self_type_name.as_str()); if generics.is_type_parameterized() { name_to_append_suffix_to.push_str(","); @@ -1402,7 +1402,7 @@ pub fn create_function_debug_context(cx: &CrateContext, let actual_type_name = compute_debuginfo_type_name(cx, actual_type, true); - name_to_append_suffix_to.push_str(actual_type_name.as_slice()); + name_to_append_suffix_to.push_str(actual_type_name.as_str()); if index != generics.ty_params.len() - 1 { name_to_append_suffix_to.push_str(","); @@ -1514,7 +1514,7 @@ fn compile_unit_metadata(cx: &CrateContext) { }); fn fallback_path(cx: &CrateContext) -> CString { - cx.link_meta().crate_name.as_slice().to_c_str() + cx.link_meta().crate_name.as_str().to_c_str() } } @@ -1528,7 +1528,7 @@ fn declare_local(bcx: Block, let cx: &CrateContext = bcx.ccx(); let filename = span_start(cx, span).file.name.clone(); - let file_metadata = file_metadata(cx, filename.as_slice()); + let file_metadata = file_metadata(cx, filename.as_str()); let name = token::get_ident(variable_ident); let loc = span_start(cx, span); @@ -1647,7 +1647,7 @@ fn scope_metadata(fcx: &FunctionContext, fcx.ccx.sess().span_bug(span, format!("debuginfo: Could not find scope info for node {:?}", - node).as_slice()); + node).as_str()); } } } @@ -1705,7 +1705,7 @@ fn pointer_type_metadata(cx: &CrateContext, let pointer_llvm_type = type_of::type_of(cx, pointer_type); let (pointer_size, pointer_align) = size_and_align_of(cx, pointer_llvm_type); let name = 
compute_debuginfo_type_name(cx, pointer_type, false); - let ptr_metadata = name.as_slice().with_c_str(|name| { + let ptr_metadata = name.as_str().with_c_str(|name| { unsafe { llvm::LLVMDIBuilderCreatePointerType( DIB(cx), @@ -1832,7 +1832,7 @@ impl RecursiveTypeDescription { cx.sess().bug(format!("Forward declaration of potentially recursive type \ '{}' was not found in TypeMap!", ppaux::ty_to_string(cx.tcx(), unfinished_type)) - .as_slice()); + .as_str()); } } @@ -1915,7 +1915,7 @@ fn prepare_struct_metadata(cx: &CrateContext, let struct_metadata_stub = create_struct_stub(cx, struct_llvm_type, - struct_name.as_slice(), + struct_name.as_str(), unique_type_id, containing_scope); @@ -1976,7 +1976,7 @@ fn prepare_tuple_metadata(cx: &CrateContext, unique_type_id, create_struct_stub(cx, tuple_llvm_type, - tuple_name.as_slice(), + tuple_name.as_str(), unique_type_id, UNKNOWN_SCOPE_METADATA), tuple_llvm_type, @@ -2319,7 +2319,7 @@ fn prepare_enum_metadata(cx: &CrateContext, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id); let loc = span_start(cx, definition_span); - let file_metadata = file_metadata(cx, loc.file.name.as_slice()); + let file_metadata = file_metadata(cx, loc.file.name.as_str()); let variants = ty::enum_variants(cx.tcx(), enum_def_id); @@ -2400,8 +2400,8 @@ fn prepare_enum_metadata(cx: &CrateContext, .borrow() .get_unique_type_id_as_string(unique_type_id); - let enum_metadata = enum_name.as_slice().with_c_str(|enum_name| { - unique_type_id_str.as_slice().with_c_str(|unique_type_id_str| { + let enum_metadata = enum_name.as_str().with_c_str(|enum_name| { + unique_type_id_str.as_str().with_c_str(|unique_type_id_str| { unsafe { llvm::LLVMDIBuilderCreateUnionType( DIB(cx), @@ -2508,7 +2508,7 @@ fn set_members_of_composite_type(cx: &CrateContext, Please use a rustc built with anewer \ version of LLVM.", llvm_version_major, - llvm_version_minor).as_slice()); + llvm_version_minor).as_str()); } else { cx.sess().bug("debuginfo::set_members_of_composite_type() - \ Already completed forward declaration re-encountered."); @@ -2528,7 +2528,7 @@ fn set_members_of_composite_type(cx: &CrateContext, ComputedMemberOffset => machine::llelement_offset(cx, composite_llvm_type, i) }; - member_description.name.as_slice().with_c_str(|member_name| { + member_description.name.as_str().with_c_str(|member_name| { unsafe { llvm::LLVMDIBuilderCreateMemberType( DIB(cx), @@ -2568,7 +2568,7 @@ fn create_struct_stub(cx: &CrateContext, .get_unique_type_id_as_string(unique_type_id); let metadata_stub = unsafe { struct_type_name.with_c_str(|name| { - unique_type_id_str.as_slice().with_c_str(|unique_type_id| { + unique_type_id_str.as_str().with_c_str(|unique_type_id| { // LLVMDIBuilderCreateStructType() wants an empty array. A null // pointer will lead to hard to trace and debug LLVM assertions // later on in llvm/lib/IR/Value.cpp. 
@@ -2605,7 +2605,7 @@ fn at_box_metadata(cx: &CrateContext, return_if_metadata_created_in_meantime!(cx, unique_type_id); let content_type_name = compute_debuginfo_type_name(cx, content_type, true); - let content_type_name = content_type_name.as_slice(); + let content_type_name = content_type_name.as_str(); let content_llvm_type = type_of::type_of(cx, content_type); let box_type_name = format!("GcBox<{}>", content_type_name); @@ -2665,7 +2665,7 @@ fn at_box_metadata(cx: &CrateContext, let gc_box_metadata = composite_type_metadata( cx, box_llvm_type, - box_type_name.as_slice(), + box_type_name.as_str(), gc_box_unique_id, member_descriptions, UNKNOWN_SCOPE_METADATA, @@ -2769,11 +2769,11 @@ fn vec_slice_metadata(cx: &CrateContext, assert!(member_descriptions.len() == member_llvm_types.len()); let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name.as_slice()); + let file_metadata = file_metadata(cx, loc.file.name.as_str()); let metadata = composite_type_metadata(cx, slice_llvm_type, - slice_type_name.as_slice(), + slice_type_name.as_str(), unique_type_id, member_descriptions, UNKNOWN_SCOPE_METADATA, @@ -2842,7 +2842,7 @@ fn trait_pointer_metadata(cx: &CrateContext, let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type); cx.sess().bug(format!("debuginfo: Unexpected trait-object type in \ trait_pointer_metadata(): {}", - pp_type_name.as_slice()).as_slice()); + pp_type_name.as_str()).as_str()); } }; @@ -2856,7 +2856,7 @@ fn trait_pointer_metadata(cx: &CrateContext, composite_type_metadata(cx, trait_llvm_type, - trait_type_name.as_slice(), + trait_type_name.as_str(), unique_type_id, [], containing_scope, @@ -2982,7 +2982,7 @@ fn type_metadata(cx: &CrateContext, } _ => { cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {:?}", - sty).as_slice()) + sty).as_str()) } }; @@ -3000,9 +3000,9 @@ fn type_metadata(cx: &CrateContext, type id '{}' to already be in \ the debuginfo::TypeMap but it \ was not. (ty::t = {})", - unique_type_id_str.as_slice(), + unique_type_id_str.as_str(), ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message.as_slice()); + cx.sess().span_bug(usage_site_span, error_message.as_str()); } }; @@ -3015,9 +3015,9 @@ fn type_metadata(cx: &CrateContext, UniqueTypeId maps in \ debuginfo::TypeMap. 
\ UniqueTypeId={}, ty::t={}", - unique_type_id_str.as_slice(), + unique_type_id_str.as_str(), ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message.as_slice()); + cx.sess().span_bug(usage_site_span, error_message.as_str()); } } None => { @@ -3211,7 +3211,7 @@ fn populate_scope_map(cx: &CrateContext, &mut HashMap|) { // Create a new lexical scope and push it onto the stack let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo); - let file_metadata = file_metadata(cx, loc.file.name.as_slice()); + let file_metadata = file_metadata(cx, loc.file.name.as_str()); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3337,7 +3337,7 @@ fn populate_scope_map(cx: &CrateContext, let file_metadata = file_metadata(cx, loc.file .name - .as_slice()); + .as_str()); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3736,7 +3736,7 @@ fn push_debuginfo_type_name(cx: &CrateContext, match optional_length { Some(len) => { - output.push_str(format!(", ..{}", len).as_slice()); + output.push_str(format!(", ..{}", len).as_str()); } None => { /* nothing to do */ } }; @@ -3846,7 +3846,7 @@ fn push_debuginfo_type_name(cx: &CrateContext, ty::ty_open(_) | ty::ty_param(_) => { cx.sess().bug(format!("debuginfo: Trying to create type name for \ - unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)).as_slice()); + unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)).as_str()); } } @@ -3929,13 +3929,13 @@ impl NamespaceTreeNode { None => {} } let string = token::get_name(node.name); - output.push_str(format!("{}", string.get().len()).as_slice()); + output.push_str(format!("{}", string.get().len()).as_str()); output.push_str(string.get()); } let mut name = String::from_str("_ZN"); fill_nested(self, &mut name); - name.push_str(format!("{}", item_name.len()).as_slice()); + name.push_str(format!("{}", item_name.len()).as_str()); name.push_str(item_name); name.push_char('E'); name @@ -3943,7 +3943,7 @@ impl NamespaceTreeNode { } fn crate_root_namespace<'a>(cx: &'a CrateContext) -> &'a str { - cx.link_meta().crate_name.as_slice() + cx.link_meta().crate_name.as_str() } fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc { @@ -4020,7 +4020,7 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc { cx.sess().bug(format!("debuginfo::namespace_for_item(): \ path too short for {:?}", - def_id).as_slice()); + def_id).as_str()); } } }) diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index bfcf4a11bc207..0424f57cdce87 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -318,7 +318,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, unsized_info(bcx, k, id, ty_substs[tp_index], |t| t) } _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}", - bcx.ty_to_string(unsized_ty)).as_slice()) + bcx.ty_to_string(unsized_ty)).as_str()) }, &ty::UnsizeVtable(..) 
=> PointerCast(bcx, @@ -434,7 +434,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let unboxed_ty = match ty::get(datum_ty).sty { ty::ty_uniq(t) => t, _ => bcx.sess().bug(format!("Expected ty_uniq, found {}", - bcx.ty_to_string(datum_ty)).as_slice()) + bcx.ty_to_string(datum_ty)).as_str()) }; let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span)); @@ -646,7 +646,7 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr.span, format!("trans_rvalue_datum_unadjusted reached \ fall-through case: {:?}", - expr.node).as_slice()); + expr.node).as_str()); } } } @@ -857,7 +857,7 @@ fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let symbol = csearch::get_symbol( &bcx.ccx().sess().cstore, did); - let llval = symbol.as_slice().with_c_str(|buf| { + let llval = symbol.as_str().with_c_str(|buf| { llvm::LLVMAddGlobal(bcx.ccx().llmod(), llty.to_ref(), buf) @@ -956,7 +956,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr.span, format!("trans_rvalue_stmt_unadjusted reached \ fall-through case: {:?}", - expr.node).as_slice()); + expr.node).as_str()); } } } @@ -1086,7 +1086,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr.span, format!("trans_rvalue_dps_unadjusted reached fall-through \ case: {:?}", - expr.node).as_slice()); + expr.node).as_str()); } } } @@ -1134,7 +1134,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { bcx.tcx().sess.span_bug(ref_expr.span, format!( "Non-DPS def {:?} referened by {}", - def, bcx.node_id_to_string(ref_expr.id)).as_slice()); + def, bcx.node_id_to_string(ref_expr.id)).as_str()); } } } @@ -1159,7 +1159,7 @@ fn trans_def_fn_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.tcx().sess.span_bug(ref_expr.span, format!( "trans_def_fn_unadjusted invoked on: {:?} for {}", def, - ref_expr.repr(bcx.tcx())).as_slice()); + ref_expr.repr(bcx.tcx())).as_str()); } }; @@ -1186,7 +1186,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => { bcx.sess().bug(format!( "trans_local_var: no llval for upvar {:?} found", - nid).as_slice()); + nid).as_str()); } } } @@ -1199,7 +1199,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { bcx.sess().unimpl(format!( "unsupported def type in trans_local_var: {:?}", - def).as_slice()); + def).as_str()); } }; @@ -1212,7 +1212,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => { bcx.sess().bug(format!( "trans_local_var: no datum for local/arg {:?} found", - nid).as_slice()); + nid).as_str()); } }; debug!("take_local(nid={:?}, v={}, ty={})", @@ -1249,7 +1249,7 @@ pub fn with_field_tys(tcx: &ty::ctxt, tcx.sess.bug(format!( "cannot get field types from the enum type {} \ without a node ID", - ty.repr(tcx)).as_slice()); + ty.repr(tcx)).as_str()); } Some(node_id) => { let def = tcx.def_map.borrow().get_copy(&node_id); @@ -1274,7 +1274,7 @@ pub fn with_field_tys(tcx: &ty::ctxt, _ => { tcx.sess.bug(format!( "cannot get field types from the type {}", - ty.repr(tcx)).as_slice()); + ty.repr(tcx)).as_str()); } } } @@ -1957,7 +1957,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out).as_slice()) + k_out).as_str()) } } } @@ -1966,7 +1966,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out).as_slice()) + k_out).as_str()) }; return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock(); } @@ -2136,7 +2136,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 
'tcx>, bcx.tcx().sess.span_bug( expr.span, format!("deref invoked on expr of illegal type {}", - datum.ty.repr(bcx.tcx())).as_slice()); + datum.ty.repr(bcx.tcx())).as_str()); } }; diff --git a/src/librustc/middle/trans/foreign.rs b/src/librustc/middle/trans/foreign.rs index 2cb8c86094496..7bc453447e82e 100644 --- a/src/librustc/middle/trans/foreign.rs +++ b/src/librustc/middle/trans/foreign.rs @@ -212,13 +212,13 @@ pub fn register_foreign_item_fn(ccx: &CrateContext, abi: Abi, fty: ty::t, ccx.sess().span_fatal(s, format!("ABI `{}` has no suitable calling convention \ for target architecture", - abi.user_string(ccx.tcx())).as_slice()) + abi.user_string(ccx.tcx())).as_str()) } None => { ccx.sess().fatal( format!("ABI `{}` has no suitable calling convention \ for target architecture", - abi.user_string(ccx.tcx())).as_slice()) + abi.user_string(ccx.tcx())).as_str()) } } } @@ -384,7 +384,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ccx.sess().fatal( format!("ABI string `{}` has no suitable ABI \ for target architecture", - fn_abi.user_string(ccx.tcx())).as_slice()); + fn_abi.user_string(ccx.tcx())).as_str()); } }; @@ -495,7 +495,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) { abi => { let ty = ty::node_id_to_type(ccx.tcx(), foreign_item.id); register_foreign_item_fn(ccx, abi, ty, - lname.get().as_slice(), + lname.get().as_str(), Some(foreign_item.span)); // Unlike for other items, we shouldn't call // `base::update_linkage` here. Foreign items have @@ -631,7 +631,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext, ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \ expected a bare fn ty", ccx.tcx().map.path_to_string(id), - t.repr(tcx)).as_slice()); + t.repr(tcx)).as_str()); } }; @@ -639,7 +639,7 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: &CrateContext, ccx.tcx().map.path_to_string(id), id, t.repr(tcx)); - let llfn = base::decl_internal_rust_fn(ccx, t, ps.as_slice()); + let llfn = base::decl_internal_rust_fn(ccx, t, ps.as_str()); base::set_llvm_fn_attrs(attrs, llfn); base::trans_fn(ccx, decl, body, llfn, param_substs, id, []); llfn diff --git a/src/librustc/middle/trans/glue.rs b/src/librustc/middle/trans/glue.rs index 259f85098afee..e53a768dca11b 100644 --- a/src/librustc/middle/trans/glue.rs +++ b/src/librustc/middle/trans/glue.rs @@ -171,7 +171,7 @@ pub fn get_drop_glue(ccx: &CrateContext, t: ty::t) -> ValueRef { let (glue, new_sym) = match ccx.available_drop_glues().borrow().find(&t) { Some(old_sym) => { - let glue = decl_cdecl_fn(ccx, old_sym.as_slice(), llfnty, ty::mk_nil()); + let glue = decl_cdecl_fn(ccx, old_sym.as_str(), llfnty, ty::mk_nil()); (glue, None) }, None => { @@ -208,7 +208,7 @@ pub fn lazily_emit_visit_glue(ccx: &CrateContext, ti: &tydesc_info) -> ValueRef let (glue_fn, new_sym) = match ccx.available_visit_glues().borrow().find(&ti.ty) { Some(old_sym) => { - let glue_fn = decl_cdecl_fn(ccx, old_sym.as_slice(), llfnty, ty::mk_nil()); + let glue_fn = decl_cdecl_fn(ccx, old_sym.as_str(), llfnty, ty::mk_nil()); (glue_fn, None) }, None => { @@ -253,7 +253,7 @@ fn make_visit_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v: ValueRef, t: ty::t) ty::ReStatic) { Ok(pair) => pair, Err(s) => { - bcx.tcx().sess.fatal(s.as_slice()); + bcx.tcx().sess.fatal(s.as_str()); } }; let v = PointerCast(bcx, v, type_of(bcx.ccx(), object_ty).ptr_to()); @@ -307,7 +307,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, f.sig.inputs[0] } _ => bcx.sess().bug(format!("Expected function type, found {}", - 
bcx.ty_to_string(fty)).as_slice()) + bcx.ty_to_string(fty)).as_str()) }; let (struct_data, info) = if ty::type_is_sized(bcx.tcx(), t) { @@ -424,7 +424,7 @@ fn size_and_align_of_dst(bcx: Block, t :ty::t, info: ValueRef) -> (ValueRef, Val (Mul(bcx, info, C_uint(bcx.ccx(), unit_size as uint)), C_uint(bcx.ccx(), 8)) } _ => bcx.sess().bug(format!("Unexpected unsized type, found {}", - bcx.ty_to_string(t)).as_slice()) + bcx.ty_to_string(t)).as_str()) } } @@ -499,7 +499,7 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: ty::t) bcx.sess().warn(format!("Ignoring drop flag in destructor for {}\ because the struct is unsized. See issue\ #16758", - bcx.ty_to_string(t)).as_slice()); + bcx.ty_to_string(t)).as_str()); trans_struct_drop(bcx, t, v0, dtor, did, substs) } } @@ -611,7 +611,7 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> tydesc_info { let llalign = llalign_of(ccx, llty); let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc"); debug!("+++ declare_tydesc {} {}", ppaux::ty_to_string(ccx.tcx(), t), name); - let gvar = name.as_slice().with_c_str(|buf| { + let gvar = name.as_str().with_c_str(|buf| { unsafe { llvm::LLVMAddGlobal(ccx.llmod(), ccx.tydesc_type().to_ref(), buf) } @@ -619,7 +619,7 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> tydesc_info { note_unique_llvm_symbol(ccx, name); let ty_name = token::intern_and_get_ident( - ppaux::ty_to_string(ccx.tcx(), t).as_slice()); + ppaux::ty_to_string(ccx.tcx(), t).as_str()); let ty_name = C_str_slice(ccx, ty_name); debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t)); @@ -639,8 +639,8 @@ fn declare_generic_glue(ccx: &CrateContext, t: ty::t, llfnty: Type, let fn_nm = mangle_internal_name_by_type_and_seq( ccx, t, - format!("glue_{}", name).as_slice()); - let llfn = decl_cdecl_fn(ccx, fn_nm.as_slice(), llfnty, ty::mk_nil()); + format!("glue_{}", name).as_str()); + let llfn = decl_cdecl_fn(ccx, fn_nm.as_str(), llfnty, ty::mk_nil()); note_unique_llvm_symbol(ccx, fn_nm.clone()); return (fn_nm, llfn); } diff --git a/src/librustc/middle/trans/intrinsic.rs b/src/librustc/middle/trans/intrinsic.rs index 36184b2eed37e..cb43f964b5f08 100644 --- a/src/librustc/middle/trans/intrinsic.rs +++ b/src/librustc/middle/trans/intrinsic.rs @@ -117,7 +117,7 @@ pub fn check_intrinsics(ccx: &CrateContext) { "" } else { "s" - }).as_slice()); + }).as_str()); } if ty::type_is_fat_ptr(ccx.tcx(), transmute_restriction.to) || ty::type_is_fat_ptr(ccx.tcx(), transmute_restriction.from) { diff --git a/src/librustc/middle/trans/monomorphize.rs b/src/librustc/middle/trans/monomorphize.rs index 1cf3e55967d4e..fb5ce30edc0c5 100644 --- a/src/librustc/middle/trans/monomorphize.rs +++ b/src/librustc/middle/trans/monomorphize.rs @@ -134,7 +134,7 @@ pub fn monomorphic_fn(ccx: &CrateContext, hash = format!("h{}", state.result()); ccx.tcx().map.with_path(fn_id.node, |path| { - exported_name(path, hash.as_slice()) + exported_name(path, hash.as_str()) }) }; @@ -144,9 +144,9 @@ pub fn monomorphic_fn(ccx: &CrateContext, let mut hash_id = Some(hash_id); let mk_lldecl = |abi: abi::Abi| { let lldecl = if abi != abi::Rust { - foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s.as_slice()) + foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s.as_str()) } else { - decl_internal_rust_fn(ccx, mono_ty, s.as_slice()) + decl_internal_rust_fn(ccx, mono_ty, s.as_str()) }; ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl); @@ -185,7 +185,7 @@ pub fn monomorphic_fn(ccx: &CrateContext, if abi != abi::Rust { 
foreign::trans_rust_fn_with_foreign_abi( ccx, &**decl, &**body, [], d, &psubsts, fn_id.node, - Some(hash.as_slice())); + Some(hash.as_str())); } else { trans_fn(ccx, &**decl, &**body, d, &psubsts, fn_id.node, []); } @@ -250,7 +250,7 @@ pub fn monomorphic_fn(ccx: &CrateContext, } _ => { ccx.sess().bug(format!("can't monomorphize a {:?}", - map_node).as_slice()) + map_node).as_str()) } } } @@ -276,7 +276,7 @@ pub fn monomorphic_fn(ccx: &CrateContext, ast_map::NodePat(..) | ast_map::NodeLocal(..) => { ccx.sess().bug(format!("can't monomorphize a {:?}", - map_node).as_slice()) + map_node).as_str()) } }; diff --git a/src/librustc/middle/trans/reflect.rs b/src/librustc/middle/trans/reflect.rs index 214726edd778d..418efbe273cb3 100644 --- a/src/librustc/middle/trans/reflect.rs +++ b/src/librustc/middle/trans/reflect.rs @@ -87,9 +87,9 @@ impl<'a, 'blk, 'tcx> Reflector<'a, 'blk, 'tcx> { let fcx = self.bcx.fcx; let tcx = self.bcx.tcx(); let mth_idx = ty::impl_or_trait_item_idx(token::str_to_ident(format!( - "visit_{}", ty_name).as_slice()), + "visit_{}", ty_name).as_str()), self.visitor_items.as_slice()).expect( - format!("couldn't find visit method for {}", ty_name).as_slice()); + format!("couldn't find visit method for {}", ty_name).as_str()); let method = match self.visitor_items[mth_idx] { ty::MethodTraitItem(ref method) => (*method).clone(), }; @@ -117,9 +117,9 @@ impl<'a, 'blk, 'tcx> Reflector<'a, 'blk, 'tcx> { bracket_name: &str, extra: &[ValueRef], inner: |&mut Reflector|) { - self.visit(format!("enter_{}", bracket_name).as_slice(), extra); + self.visit(format!("enter_{}", bracket_name).as_str(), extra); inner(self); - self.visit(format!("leave_{}", bracket_name).as_slice(), extra); + self.visit(format!("leave_{}", bracket_name).as_str(), extra); } pub fn leaf(&mut self, name: &str) { @@ -186,7 +186,7 @@ impl<'a, 'blk, 'tcx> Reflector<'a, 'blk, 'tcx> { ty::ty_trait(..) => { let extra = [ self.c_slice(token::intern_and_get_ident( - ty_to_string(tcx, t).as_slice())) + ty_to_string(tcx, t).as_str())) ]; self.visit("trait", extra); } @@ -201,7 +201,7 @@ impl<'a, 'blk, 'tcx> Reflector<'a, 'blk, 'tcx> { ty::ty_trait(..) => { let extra = [ self.c_slice(token::intern_and_get_ident( - ty_to_string(tcx, t).as_slice())) + ty_to_string(tcx, t).as_str())) ]; self.visit("trait", extra); } @@ -230,7 +230,7 @@ impl<'a, 'blk, 'tcx> Reflector<'a, 'blk, 'tcx> { ty::ty_trait(..) 
=> { let extra = [ self.c_slice(token::intern_and_get_ident( - ty_to_string(tcx, t).as_slice())) + ty_to_string(tcx, t).as_str())) ]; self.visit("trait", extra); } @@ -302,7 +302,7 @@ impl<'a, 'blk, 'tcx> Reflector<'a, 'blk, 'tcx> { let extra = (vec!( self.c_slice( token::intern_and_get_ident(ty_to_string(tcx, - t).as_slice())), + t).as_str())), self.c_bool(named_fields), self.c_uint(len) )).append(self.c_size_and_align(t).as_slice()); @@ -339,7 +339,7 @@ impl<'a, 'blk, 'tcx> Reflector<'a, 'blk, 'tcx> { [opaqueptrty], ty::mk_u64()); let llfdecl = decl_internal_rust_fn(ccx, fn_ty, - sym.as_slice()); + sym.as_str()); let arena = TypedArena::new(); let empty_param_substs = param_substs::empty(); let fcx = new_fn_ctxt(ccx, llfdecl, ast::DUMMY_NODE_ID, false, diff --git a/src/librustc/middle/trans/type_of.rs b/src/librustc/middle/trans/type_of.rs index 54f24516867f0..7a591da951b06 100644 --- a/src/librustc/middle/trans/type_of.rs +++ b/src/librustc/middle/trans/type_of.rs @@ -164,7 +164,7 @@ pub fn sizing_type_of(cx: &CrateContext, t: ty::t) -> Type { let llsizingty = match ty::get(t).sty { _ if !ty::lltype_is_sized(cx.tcx(), t) => { cx.sess().bug(format!("trying to take the sizing type of {}, an unsized type", - ppaux::ty_to_string(cx.tcx(), t)).as_slice()) + ppaux::ty_to_string(cx.tcx(), t)).as_str()) } ty::ty_nil | ty::ty_bot => Type::nil(cx), @@ -212,7 +212,7 @@ pub fn sizing_type_of(cx: &CrateContext, t: ty::t) -> Type { ty::ty_infer(..) | ty::ty_param(..) | ty::ty_err(..) => { cx.sess().bug(format!("fictitious type {} in sizing_type_of()", - ppaux::ty_to_string(cx.tcx(), t)).as_slice()) + ppaux::ty_to_string(cx.tcx(), t)).as_str()) } ty::ty_vec(_, None) | ty::ty_trait(..) | ty::ty_str => fail!("unreachable") }; @@ -290,14 +290,14 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, an_enum, did, tps); - adt::incomplete_type_of(cx, &*repr, name.as_slice()) + adt::incomplete_type_of(cx, &*repr, name.as_str()) } ty::ty_unboxed_closure(did, _) => { // Only create the named struct, but don't fill it in. We // fill it in *after* placing it into the type cache. let repr = adt::represent_type(cx, t); let name = llvm_type_name(cx, an_unboxed_closure, did, []); - adt::incomplete_type_of(cx, &*repr, name.as_slice()) + adt::incomplete_type_of(cx, &*repr, name.as_str()) } ty::ty_box(typ) => { Type::at_box(cx, type_of(cx, typ)).ptr_to() @@ -355,7 +355,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, a_struct, did, tps); - adt::incomplete_type_of(cx, &*repr, name.as_slice()) + adt::incomplete_type_of(cx, &*repr, name.as_str()) } } @@ -374,7 +374,7 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type { } ty::ty_trait(..) => Type::opaque_trait(cx), _ => cx.sess().bug(format!("ty_open with sized type: {}", - ppaux::ty_to_string(cx.tcx(), t)).as_slice()) + ppaux::ty_to_string(cx.tcx(), t)).as_str()) }, ty::ty_infer(..) 
=> cx.sess().bug("type_of with ty_infer"), diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index ee59de11fc3ff..0383f89953ba3 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -1309,7 +1309,7 @@ impl ParameterEnvironment { _ => { cx.sess.bug(format!("ParameterEnvironment::from_item(): \ `{}` is not an item", - cx.map.node_to_string(id)).as_slice()) + cx.map.node_to_string(id)).as_str()) } } } @@ -1372,7 +1372,7 @@ impl UnboxedClosureKind { }; match result { Ok(trait_did) => trait_did, - Err(err) => cx.sess.fatal(err.as_slice()), + Err(err) => cx.sess.fatal(err.as_str()), } } } @@ -1952,7 +1952,7 @@ pub fn sequence_element_type(cx: &ctxt, ty: t) -> t { ty_str => mk_mach_uint(ast::TyU8), ty_open(ty) => sequence_element_type(cx, ty), _ => cx.sess.bug(format!("sequence_element_type called on non-sequence value: {}", - ty_to_string(cx, ty)).as_slice()), + ty_to_string(cx, ty)).as_str()), } } @@ -3056,7 +3056,7 @@ pub fn close_type(cx: &ctxt, t: t) -> t { match get(t).sty { ty_open(t) => mk_rptr(cx, ReStatic, mt {ty: t, mutbl:ast::MutImmutable}), _ => cx.sess.bug(format!("Trying to close a non-open type {}", - ty_to_string(cx, t)).as_slice()) + ty_to_string(cx, t)).as_str()) } } @@ -3101,7 +3101,7 @@ pub fn node_id_to_trait_ref(cx: &ctxt, id: ast::NodeId) -> Rc { Some(t) => t.clone(), None => cx.sess.bug( format!("node_id_to_trait_ref: no trait ref for node `{}`", - cx.map.node_to_string(id)).as_slice()) + cx.map.node_to_string(id)).as_str()) } } @@ -3114,7 +3114,7 @@ pub fn node_id_to_type(cx: &ctxt, id: ast::NodeId) -> t { Some(t) => t, None => cx.sess.bug( format!("node_id_to_type: no type for node `{}`", - cx.map.node_to_string(id)).as_slice()) + cx.map.node_to_string(id)).as_str()) } } @@ -3213,7 +3213,7 @@ pub fn ty_region(tcx: &ctxt, tcx.sess.span_bug( span, format!("ty_region() invoked on in appropriate ty: {:?}", - s).as_slice()); + s).as_str()); } } } @@ -3278,11 +3278,11 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span { Some(f) => { cx.sess.bug(format!("Node id {} is not an expr: {:?}", id, - f).as_slice()); + f).as_str()); } None => { cx.sess.bug(format!("Node id {} is not present \ - in the node map", id).as_slice()); + in the node map", id).as_str()); } } } @@ -3298,14 +3298,14 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString { cx.sess.bug( format!("Variable id {} maps to {:?}, not local", id, - pat).as_slice()); + pat).as_str()); } } } r => { cx.sess.bug(format!("Variable id {} maps to {:?}, not local", id, - r).as_slice()); + r).as_str()); } } } @@ -3348,7 +3348,7 @@ pub fn adjust_ty(cx: &ctxt, cx.sess.bug( format!("add_env adjustment on non-bare-fn: \ {:?}", - b).as_slice()); + b).as_str()); } } } @@ -3374,7 +3374,7 @@ pub fn adjust_ty(cx: &ctxt, {}", i, ty_to_string(cx, adjusted_ty)) - .as_slice()); + .as_str()); } } } @@ -3435,7 +3435,7 @@ pub fn unsize_ty(cx: &ctxt, } _ => cx.sess.span_bug(span, format!("UnsizeLength with bad sty: {}", - ty_to_string(cx, ty)).as_slice()) + ty_to_string(cx, ty)).as_str()) }, &UnsizeStruct(box ref k, tp_index) => match get(ty).sty { ty_struct(did, ref substs) => { @@ -3447,7 +3447,7 @@ pub fn unsize_ty(cx: &ctxt, } _ => cx.sess.span_bug(span, format!("UnsizeStruct with bad sty: {}", - ty_to_string(cx, ty)).as_slice()) + ty_to_string(cx, ty)).as_str()) }, &UnsizeVtable(bounds, def_id, ref substs) => { mk_trait(cx, def_id, substs.clone(), bounds) @@ -3508,7 +3508,7 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def { Some(&def) => def, None => { 
tcx.sess.span_bug(expr.span, format!( - "no def-map entry for expr {:?}", expr.id).as_slice()); + "no def-map entry for expr {:?}", expr.id).as_str()); } } } @@ -3592,7 +3592,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { expr.span, format!("uncategorized def for expr {:?}: {:?}", expr.id, - def).as_slice()); + def).as_str()); } } } @@ -3711,7 +3711,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field]) token::get_name(name), fields.iter() .map(|f| token::get_ident(f.ident).get().to_string()) - .collect::>()).as_slice()); + .collect::>()).as_str()); } pub fn impl_or_trait_item_idx(id: ast::Ident, trait_items: &[ImplOrTraitItem]) @@ -3972,14 +3972,14 @@ pub fn provided_trait_methods(cx: &ctxt, id: ast::DefId) -> Vec> { _ => { cx.sess.bug(format!("provided_trait_methods: `{}` is \ not a trait", - id).as_slice()) + id).as_str()) } } } _ => { cx.sess.bug(format!("provided_trait_methods: `{}` is not a \ trait", - id).as_slice()) + id).as_str()) } } } else { @@ -4316,7 +4316,7 @@ pub fn enum_variants(cx: &ctxt, id: ast::DefId) -> Rc>> { cx.sess .span_err(e.span, format!("expected constant: {}", - *err).as_slice()); + *err).as_str()); } }, None => {} @@ -4491,7 +4491,7 @@ fn each_super_struct(cx: &ctxt, mut did: ast::DefId, f: |ast::DefId|) { None => { cx.sess.bug( format!("ID not mapped to super-struct: {}", - cx.map.node_to_string(did.node)).as_slice()); + cx.map.node_to_string(did.node)).as_str()); } } } @@ -4513,7 +4513,7 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec { _ => { cx.sess.bug( format!("ID not mapped to struct fields: {}", - cx.map.node_to_string(did.node)).as_slice()); + cx.map.node_to_string(did.node)).as_str()); } } }); @@ -4566,7 +4566,7 @@ pub fn tup_fields(v: &[t]) -> Vec { v.iter().enumerate().map(|(i, &f)| { field { // FIXME #6993: change type of field to Name and get rid of new() - ident: ast::Ident::new(token::intern(i.to_string().as_slice())), + ident: ast::Ident::new(token::intern(i.to_string().as_str())), mt: mt { ty: f, mutbl: MutImmutable @@ -5383,7 +5383,7 @@ pub fn construct_parameter_environment( format!("push_region_bounds_from_defs: \ non free region: {} / {}", subst_region.repr(tcx), - bound_region.repr(tcx)).as_slice()); + bound_region.repr(tcx)).as_str()); } } } diff --git a/src/librustc/middle/typeck/astconv.rs b/src/librustc/middle/typeck/astconv.rs index f5fa6168a415c..619b24ec7bf91 100644 --- a/src/librustc/middle/typeck/astconv.rs +++ b/src/librustc/middle/typeck/astconv.rs @@ -220,7 +220,7 @@ fn ast_path_substs<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( format!("wrong number of type arguments: {} {}, found {}", expected, required_ty_param_count, - supplied_ty_param_count).as_slice()); + supplied_ty_param_count).as_str()); } else if supplied_ty_param_count > formal_ty_param_count { let expected = if required_ty_param_count < formal_ty_param_count { "expected at most" @@ -231,7 +231,7 @@ fn ast_path_substs<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( format!("wrong number of type arguments: {} {}, found {}", expected, formal_ty_param_count, - supplied_ty_param_count).as_slice()); + supplied_ty_param_count).as_str()); } if supplied_ty_param_count > required_ty_param_count @@ -371,7 +371,7 @@ pub fn ast_ty_to_prim_ty(tcx: &ty::ctxt, ast_ty: &ast::Ty) -> Option { None => { tcx.sess.span_bug(ast_ty.span, format!("unbound path {}", - path.repr(tcx)).as_slice()) + path.repr(tcx)).as_str()) } Some(&d) => d }; @@ -430,7 +430,7 @@ pub fn ast_ty_to_builtin_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( .sess 
.span_bug(ast_ty.span, format!("unbound path {}", - path.repr(this.tcx())).as_slice()) + path.repr(this.tcx())).as_str()) } Some(&d) => d }; @@ -807,7 +807,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( tcx.sess .span_bug(ast_ty.span, format!("unbound path {}", - path.repr(tcx)).as_slice()) + path.repr(tcx)).as_str()) } Some(&d) => d }; @@ -857,7 +857,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( def::DefMod(id) => { tcx.sess.span_fatal(ast_ty.span, format!("found module name used as a type: {}", - tcx.map.node_to_string(id.node)).as_slice()); + tcx.map.node_to_string(id.node)).as_str()); } def::DefPrimTy(_) => { fail!("DefPrimTy arm missed in previous ast_ty_to_prim_ty call"); @@ -866,7 +866,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( tcx.sess.span_fatal(ast_ty.span, format!("found value name used \ as a type: {:?}", - a_def).as_slice()); + a_def).as_str()); } } } @@ -891,7 +891,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( ast_ty.span, format!("expected constant expr for vector \ length: {}", - *r).as_slice()); + *r).as_str()); } } } @@ -1228,14 +1228,14 @@ pub fn conv_existential_bounds<'tcx, AC: AstConv<'tcx>, RS:RegionScope>( this.tcx().sess.span_err( b.path.span, format!("only the builtin traits can be used \ - as closure or object bounds").as_slice()); + as closure or object bounds").as_str()); } if !unboxed_fn_ty_bounds.is_empty() { this.tcx().sess.span_err( span, format!("only the builtin traits can be used \ - as closure or object bounds").as_slice()); + as closure or object bounds").as_str()); } // The "main trait refs", rather annoyingly, have no type @@ -1287,7 +1287,7 @@ pub fn compute_opt_region_bound(tcx: &ty::ctxt, if region_bounds.len() > 1 { tcx.sess.span_err( region_bounds[1].span, - format!("only a single explicit lifetime bound is permitted").as_slice()); + format!("only a single explicit lifetime bound is permitted").as_str()); } if region_bounds.len() != 0 { @@ -1325,7 +1325,7 @@ pub fn compute_opt_region_bound(tcx: &ty::ctxt, tcx.sess.span_err( span, format!("ambiguous lifetime bound, \ - explicit lifetime bound required").as_slice()); + explicit lifetime bound required").as_str()); } return Some(r); } @@ -1355,7 +1355,7 @@ fn compute_region_bound<'tcx, AC: AstConv<'tcx>, RS:RegionScope>( None => { this.tcx().sess.span_err( span, - format!("explicit lifetime bound required").as_slice()); + format!("explicit lifetime bound required").as_str()); ty::ReStatic } } diff --git a/src/librustc/middle/typeck/check/_match.rs b/src/librustc/middle/typeck/check/_match.rs index 247178770d21a..939df1063a2cc 100644 --- a/src/librustc/middle/typeck/check/_match.rs +++ b/src/librustc/middle/typeck/check/_match.rs @@ -524,7 +524,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { tcx.sess.span_bug( path.span, format!("This shouldn't happen: failed to lookup structure. 
\ - item_did = {}", item_did).as_slice()) + item_did = {}", item_did).as_str()) }, } diff --git a/src/librustc/middle/typeck/check/method.rs b/src/librustc/middle/typeck/check/method.rs index 88d9a58ab141a..2a57c8964be52 100644 --- a/src/librustc/middle/typeck/check/method.rs +++ b/src/librustc/middle/typeck/check/method.rs @@ -296,7 +296,7 @@ fn construct_transformed_self_ty_for_object( _ => { tcx.sess.span_bug(span, format!("'impossible' transformed_self_ty: {}", - transformed_self_ty.repr(tcx)).as_slice()); + transformed_self_ty.repr(tcx)).as_str()); } } } @@ -1095,7 +1095,7 @@ impl<'a, 'tcx> LookupContext<'a, 'tcx> { ty_infer(TyVar(_)) => { self.bug(format!("unexpected type: {}", - self.ty_to_string(self_ty)).as_slice()); + self.ty_to_string(self_ty)).as_str()); } } } @@ -1349,7 +1349,7 @@ impl<'a, 'tcx> LookupContext<'a, 'tcx> { self.bug(format!( "{} was a subtype of {} but now is not?", self.ty_to_string(rcvr_ty), - self.ty_to_string(transformed_self_ty)).as_slice()); + self.ty_to_string(transformed_self_ty)).as_str()); } } diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 4f0f6121904a1..131143545ac3a 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -633,7 +633,7 @@ fn span_for_field(tcx: &ty::ctxt, field: &ty::field_ty, struct_id: ast::DefId) - None => { tcx.sess .bug(format!("Could not find field {}", - token::get_name(field.name)).as_slice()) + token::get_name(field.name)).as_str()) } } }, @@ -966,7 +966,7 @@ fn check_impl_items_against_trait(ccx: &CrateCtxt, "method `{}` is not a member of trait `{}`", token::get_ident(impl_item_ty.ident()), pprust::path_to_string( - &ast_trait_ref.path)).as_slice()); + &ast_trait_ref.path)).as_str()); } } } @@ -1046,7 +1046,7 @@ fn compare_impl_method(tcx: &ty::ctxt, but not in the trait", token::get_ident(trait_m.ident), ppaux::explicit_self_category_to_str( - &impl_m.explicit_self)).as_slice()); + &impl_m.explicit_self)).as_str()); return; } (_, &ty::StaticExplicitSelfCategory) => { @@ -1056,7 +1056,7 @@ fn compare_impl_method(tcx: &ty::ctxt, but not in the impl", token::get_ident(trait_m.ident), ppaux::explicit_self_category_to_str( - &trait_m.explicit_self)).as_slice()); + &trait_m.explicit_self)).as_str()); return; } _ => { @@ -1324,7 +1324,7 @@ fn compare_impl_method(tcx: &ty::ctxt, span, format!("lifetime parameters or bounds on method `{}` do \ not match the trait declaration", - token::get_ident(impl_m.ident)).as_slice()); + token::get_ident(impl_m.ident)).as_str()); return false; } @@ -1376,7 +1376,7 @@ fn compare_impl_method(tcx: &ty::ctxt, from its counterpart `{}` \ declared in the trait", impl_param.name.user_string(tcx), - trait_param.name.user_string(tcx)).as_slice()); + trait_param.name.user_string(tcx)).as_str()); true } else { false @@ -1386,14 +1386,14 @@ fn compare_impl_method(tcx: &ty::ctxt, tcx.sess.span_note( span, format!("the impl is missing the following bounds: `{}`", - missing.user_string(tcx)).as_slice()); + missing.user_string(tcx)).as_str()); } if extra.len() != 0 { tcx.sess.span_note( span, format!("the impl has the following extra bounds: `{}`", - extra.user_string(tcx)).as_slice()); + extra.user_string(tcx)).as_str()); } if err { @@ -1614,7 +1614,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx().sess.span_bug( span, format!("no type for local variable {:?}", - nid).as_slice()); + nid).as_str()); } } } @@ -1688,7 +1688,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(&t) => t, None => { 
self.tcx().sess.bug(format!("no type for expr in fcx {}", - self.tag()).as_slice()); + self.tag()).as_str()); } } } @@ -1700,7 +1700,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx().sess.bug( format!("no type for node {}: {} in fcx {}", id, self.tcx().map.node_to_string(id), - self.tag()).as_slice()); + self.tag()).as_str()); } } } @@ -1712,7 +1712,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx().sess.bug( format!("no method entry for node {}: {} in fcx {}", id, self.tcx().map.node_to_string(id), - self.tag()).as_slice()); + self.tag()).as_str()); } } } @@ -2152,7 +2152,7 @@ fn lookup_method_for_for_loop(fcx: &FnCtxt, Ok(trait_did) => trait_did, Err(ref err_string) => { fcx.tcx().sess.span_err(iterator_expr.span, - err_string.as_slice()); + err_string.as_str()); return ty::mk_err() } }; @@ -2180,7 +2180,7 @@ fn lookup_method_for_for_loop(fcx: &FnCtxt, fcx.tcx().sess.span_err(iterator_expr.span, format!("`for` loop expression has type `{}` which does \ not implement the `Iterator` trait", - ty_string).as_slice()); + ty_string).as_str()); } ty::mk_err() } @@ -4128,7 +4128,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, fcx.infcx() .ty_to_string( actual_structure_type), - type_error_description).as_slice()); + type_error_description).as_str()); ty::note_and_explain_type_err(tcx, &type_error); } } @@ -5370,7 +5370,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { "get_tydesc" => { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { Ok(t) => t, - Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); } + Err(s) => { tcx.sess.span_fatal(it.span, s.as_str()); } }; let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt { ty: tydesc_ty, @@ -5386,21 +5386,21 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { ty::mk_struct(ccx.tcx, did, subst::Substs::empty())), Err(msg) => { - tcx.sess.span_fatal(it.span, msg.as_slice()); + tcx.sess.span_fatal(it.span, msg.as_str()); } } }, "visit_tydesc" => { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { Ok(t) => t, - Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); } + Err(s) => { tcx.sess.span_fatal(it.span, s.as_str()); } }; let region0 = ty::ReLateBound(it.id, ty::BrAnon(0)); let region1 = ty::ReLateBound(it.id, ty::BrAnon(1)); let visitor_object_ty = match ty::visitor_object_ty(tcx, region0, region1) { Ok((_, vot)) => vot, - Err(s) => { tcx.sess.span_fatal(it.span, s.as_slice()); } + Err(s) => { tcx.sess.span_fatal(it.span, s.as_str()); } }; let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt { diff --git a/src/librustc/middle/typeck/check/regionck.rs b/src/librustc/middle/typeck/check/regionck.rs index c08401375ca7d..9ff03b9678433 100644 --- a/src/librustc/middle/typeck/check/regionck.rs +++ b/src/librustc/middle/typeck/check/regionck.rs @@ -245,7 +245,7 @@ fn region_of_def(fcx: &FnCtxt, def: def::Def) -> ty::Region { } _ => { tcx.sess.bug(format!("unexpected def in region_of_def: {:?}", - def).as_slice()) + def).as_str()) } } } @@ -340,7 +340,7 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { Some(f) => f, None => { self.tcx().sess.bug( - format!("No fn-sig entry for id={}", id).as_slice()); + format!("No fn-sig entry for id={}", id).as_str()); } }; @@ -1161,7 +1161,7 @@ fn constrain_autoderefs(rcx: &mut Rcx, ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => rcx.tcx().sess.span_bug(deref_expr.span, format!("bad overloaded deref type {}", - method.ty.repr(rcx.tcx())).as_slice()) + method.ty.repr(rcx.tcx())).as_str()) }; { let mc = mc::MemCategorizationContext::new(rcx); @@ -1548,7 +1548,7 @@ fn link_reborrowed_region(rcx: &Rcx, span, 
format!("Illegal upvar id: {}", upvar_id.repr( - rcx.tcx())).as_slice()); + rcx.tcx())).as_str()); } } } diff --git a/src/librustc/middle/typeck/check/regionmanip.rs b/src/librustc/middle/typeck/check/regionmanip.rs index 60e502786ab3d..3fed427191c91 100644 --- a/src/librustc/middle/typeck/check/regionmanip.rs +++ b/src/librustc/middle/typeck/check/regionmanip.rs @@ -155,7 +155,7 @@ impl<'a, 'tcx> Wf<'a, 'tcx> { ty::ty_open(_) => { self.tcx.sess.bug( format!("Unexpected type encountered while doing wf check: {}", - ty.repr(self.tcx)).as_slice()); + ty.repr(self.tcx)).as_str()); } } } diff --git a/src/librustc/middle/typeck/check/vtable.rs b/src/librustc/middle/typeck/check/vtable.rs index 16136fcf3e840..223c4eb264222 100644 --- a/src/librustc/middle/typeck/check/vtable.rs +++ b/src/librustc/middle/typeck/check/vtable.rs @@ -157,7 +157,7 @@ fn lookup_vtables_for_param(vcx: &VtableContext, format!("failed to find an implementation of \ trait {} for {}", vcx.infcx.trait_ref_to_string(&*trait_ref), - vcx.infcx.ty_to_string(ty)).as_slice()); + vcx.infcx.ty_to_string(ty)).as_str()); param_result.push(vtable_error) } } @@ -585,7 +585,7 @@ fn fixup_ty(vcx: &VtableContext, tcx.sess.span_err(span, format!("cannot determine a type for this bounded type \ parameter: {}", - fixup_err_to_string(e)).as_slice()); + fixup_err_to_string(e)).as_str()); Some(ty::mk_err()) } Err(_) => { diff --git a/src/librustc/middle/typeck/collect.rs b/src/librustc/middle/typeck/collect.rs index d1d76734941e8..ff42898a34bd9 100644 --- a/src/librustc/middle/typeck/collect.rs +++ b/src/librustc/middle/typeck/collect.rs @@ -161,7 +161,7 @@ impl<'a, 'tcx> AstConv<'tcx> for CrateCtxt<'a, 'tcx> { x => { self.tcx.sess.bug(format!("unexpected sort of node \ in get_item_ty(): {:?}", - x).as_slice()); + x).as_str()); } } } @@ -791,7 +791,7 @@ pub fn instantiate_trait_ref(ccx: &CrateCtxt, ccx.tcx.sess.span_fatal( ast_trait_ref.path.span, format!("`{}` is not a trait", - path_to_string(&ast_trait_ref.path)).as_slice()); + path_to_string(&ast_trait_ref.path)).as_str()); } } } @@ -818,7 +818,7 @@ fn get_trait_def(ccx: &CrateCtxt, trait_id: ast::DefId) -> Rc { ast_map::NodeItem(item) => trait_def_of_item(ccx, &*item), _ => { ccx.tcx.sess.bug(format!("get_trait_def({}): not an item", - trait_id.node).as_slice()) + trait_id.node).as_str()) } } } @@ -838,7 +838,7 @@ pub fn trait_def_of_item(ccx: &CrateCtxt, it: &ast::Item) -> Rc { ref s => { tcx.sess.span_bug( it.span, - format!("trait_def_of_item invoked on {:?}", s).as_slice()); + format!("trait_def_of_item invoked on {:?}", s).as_str()); } }; @@ -1091,7 +1091,7 @@ fn add_unsized_bound(ccx: &CrateCtxt, nothing because the given \ bound is not a default. 
\ Only `Sized?` is supported.", - desc).as_slice()); + desc).as_str()); ty::try_add_builtin_trait(ccx.tcx, kind_id, bounds); diff --git a/src/librustc/middle/typeck/infer/coercion.rs b/src/librustc/middle/typeck/infer/coercion.rs index 44141f25418e4..084511197fed9 100644 --- a/src/librustc/middle/typeck/infer/coercion.rs +++ b/src/librustc/middle/typeck/infer/coercion.rs @@ -231,7 +231,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { self.get_ref().infcx.tcx.sess.span_bug( self.get_ref().trace.origin.span(), format!("failed to resolve even without \ - any force options: {:?}", e).as_slice()); + any force options: {:?}", e).as_str()); } } } diff --git a/src/librustc/middle/typeck/infer/combine.rs b/src/librustc/middle/typeck/infer/combine.rs index 66caf10cb408c..0b87fb095d101 100644 --- a/src/librustc/middle/typeck/infer/combine.rs +++ b/src/librustc/middle/typeck/infer/combine.rs @@ -388,7 +388,7 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C, a: ty::t, b: ty::t) -> cres ErrorReporting for InferCtxt<'a, 'tcx> { format!("{}: {} ({})", message_root_str, expected_found_str, - ty::type_err_to_str(self.tcx, terr)).as_slice()); + ty::type_err_to_str(self.tcx, terr)).as_str()); match trace.origin { infer::MatchExpressionArm(_, arm_span) => @@ -445,7 +445,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { consider adding an explicit lifetime bound `{}:{}`...", param_ty.user_string(self.tcx), param_ty.user_string(self.tcx), - sub.user_string(self.tcx)).as_slice()); + sub.user_string(self.tcx)).as_str()); } ty::ReStatic => { @@ -456,7 +456,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { "the parameter type `{}` may not live long enough; \ consider adding an explicit lifetime bound `{}:'static`...", param_ty.user_string(self.tcx), - param_ty.user_string(self.tcx)).as_slice()); + param_ty.user_string(self.tcx)).as_str()); } _ => { @@ -467,11 +467,11 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { "the parameter type `{}` may not live long enough; \ consider adding an explicit lifetime bound to `{}`", param_ty.user_string(self.tcx), - param_ty.user_string(self.tcx)).as_slice()); + param_ty.user_string(self.tcx)).as_str()); note_and_explain_region( self.tcx, format!("the parameter type `{}` must be valid for ", - param_ty.user_string(self.tcx)).as_slice(), + param_ty.user_string(self.tcx)).as_str(), sub, "..."); } @@ -513,7 +513,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string()).as_slice()); + .to_string()).as_str()); note_and_explain_region( self.tcx, "...the borrowed pointer is valid for ", @@ -525,7 +525,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string()).as_slice(), + .to_string()).as_str(), sup, ""); } @@ -571,7 +571,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { outlive the enclosing closure", ty::local_var_name_str(self.tcx, id).get() - .to_string()).as_slice()); + .to_string()).as_str()); note_and_explain_region( self.tcx, "captured variable is valid for ", @@ -589,7 +589,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { format!("captured variable `{}` must be 'static \ to be captured in a proc", ty::local_var_name_str(self.tcx, id).get()) - .as_slice()); + .as_str()); note_and_explain_region( self.tcx, "captured variable is only valid for ", @@ -630,7 +630,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { is captured in", self.ty_to_string(ty), 
ty::local_var_name_str(self.tcx, - var_node_id)).as_slice()); + var_node_id)).as_str()); note_and_explain_region( self.tcx, "`proc()` is valid for ", @@ -639,7 +639,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { note_and_explain_region( self.tcx, format!("the type `{}` is only valid for ", - self.ty_to_string(ty)).as_slice(), + self.ty_to_string(ty)).as_str(), sup, ""); } @@ -650,7 +650,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { the parameter `{}`) does not fulfill the \ required lifetime", self.ty_to_string(ty), - param_ty.user_string(self.tcx)).as_slice()); + param_ty.user_string(self.tcx)).as_str()); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -676,7 +676,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { span, format!("the type `{}` (provided as the value of \ a type parameter) is not valid at this point", - self.ty_to_string(ty)).as_slice()); + self.ty_to_string(ty)).as_str()); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -742,7 +742,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { span, format!("type of expression contains references \ that are not valid during the expression: `{}`", - self.ty_to_string(t)).as_slice()); + self.ty_to_string(t)).as_str()); note_and_explain_region( self.tcx, "type is only valid for ", @@ -764,7 +764,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { span, format!("in type `{}`, reference has a longer lifetime \ than the data it references", - self.ty_to_string(ty)).as_slice()); + self.ty_to_string(ty)).as_str()); note_and_explain_region( self.tcx, "the pointer is valid for ", @@ -779,7 +779,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { infer::Managed(span) => { self.tcx.sess.span_err( span, - format!("cannot put borrowed references into managed memory").as_slice()); + format!("cannot put borrowed references into managed memory").as_str()); } } } @@ -993,7 +993,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { names.push(lt_name); } names.sort(); - let name = token::str_to_ident(names.get(0).as_slice()).name; + let name = token::str_to_ident(names.get(0).as_str()).name; return (name_to_dummy_lifetime(name), Kept); } return (self.life_giver.give_lifetime(), Fresh); @@ -1226,7 +1226,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { .sess .fatal(format!( "unbound path {}", - pprust::path_to_string(path)).as_slice()) + pprust::path_to_string(path)).as_str()) } Some(&d) => d }; @@ -1415,7 +1415,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { opt_explicit_self, generics); let msg = format!("consider using an explicit lifetime \ parameter as shown: {}", suggested_fn); - self.tcx.sess.span_note(span, msg.as_slice()); + self.tcx.sess.span_note(span, msg.as_str()); } fn report_inference_failure(&self, @@ -1453,7 +1453,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { var_origin.span(), format!("cannot infer an appropriate lifetime{} \ due to conflicting requirements", - var_description).as_slice()); + var_description).as_str()); } fn note_region_origin(&self, origin: &SubregionOrigin) { @@ -1488,7 +1488,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { self.tcx.sess.span_note( trace.origin.span(), format!("...so that {} ({})", - desc, values_str).as_slice()); + desc, values_str).as_str()); } None => { // Really should avoid printing this error at @@ -1497,7 +1497,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { // doing right now. 
- nmatsakis self.tcx.sess.span_note( trace.origin.span(), - format!("...so that {}", desc).as_slice()); + format!("...so that {}", desc).as_str()); } } } @@ -1514,7 +1514,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { "...so that closure can access `{}`", ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string()).as_slice()) + .to_string()).as_str()) } infer::InfStackClosure(span) => { self.tcx.sess.span_note( @@ -1539,7 +1539,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { does not outlive the enclosing closure", ty::local_var_name_str( self.tcx, - id).get().to_string()).as_slice()); + id).get().to_string()).as_str()); } infer::ProcCapture(span, id) => { self.tcx.sess.span_note( @@ -1548,7 +1548,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { is 'static", ty::local_var_name_str( self.tcx, - id).get()).as_slice()); + id).get()).as_str()); } infer::IndexSlice(span) => { self.tcx.sess.span_note( @@ -1567,7 +1567,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { "...so that the variable `{}` can be captured \ into a proc", ty::local_var_name_str(self.tcx, - var_node_id)).as_slice()); + var_node_id)).as_str()); } infer::CallRcvr(span) => { self.tcx.sess.span_note( @@ -1601,7 +1601,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { span, format!("...so type `{}` of expression is valid during the \ expression", - self.ty_to_string(t)).as_slice()); + self.ty_to_string(t)).as_str()); } infer::BindingTypeIsNotValidAtDecl(span) => { self.tcx.sess.span_note( @@ -1613,7 +1613,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { span, format!("...so that the reference type `{}` \ does not outlive the data it points at", - self.ty_to_string(ty)).as_slice()); + self.ty_to_string(ty)).as_str()); } infer::Managed(span) => { self.tcx.sess.span_note( @@ -1627,7 +1627,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { when instantiated with `{}`, \ will meet its declared lifetime bounds.", param_ty.user_string(self.tcx), - self.ty_to_string(t)).as_slice()); + self.ty_to_string(t)).as_str()); } infer::RelateDefaultParamBound(span, t) => { self.tcx.sess.span_note( @@ -1635,13 +1635,13 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { format!("...so that type parameter \ instantiated with `{}`, \ will meet its declared lifetime bounds.", - self.ty_to_string(t)).as_slice()); + self.ty_to_string(t)).as_str()); } infer::RelateRegionParamBound(span) => { self.tcx.sess.span_note( span, format!("...so that the declared lifetime parameter bounds \ - are satisfied").as_slice()); + are satisfied").as_str()); } } } @@ -1745,10 +1745,10 @@ impl LifeGiver { let mut lifetime; loop { let mut s = String::from_str("'"); - s.push_str(num_to_string(self.counter.get()).as_slice()); + s.push_str(num_to_string(self.counter.get()).as_str()); if !self.taken.contains(&s) { lifetime = name_to_dummy_lifetime( - token::str_to_ident(s.as_slice()).name); + token::str_to_ident(s.as_str()).name); self.generated.borrow_mut().push(lifetime); break; } diff --git a/src/librustc/middle/typeck/infer/glb.rs b/src/librustc/middle/typeck/infer/glb.rs index 08d4f9f3a86e5..04f7a764a8272 100644 --- a/src/librustc/middle/typeck/infer/glb.rs +++ b/src/librustc/middle/typeck/infer/glb.rs @@ -255,7 +255,7 @@ impl<'f, 'tcx> Combine<'tcx> for Glb<'f, 'tcx> { this.fields.infcx.tcx.sess.span_bug( this.fields.trace.origin.span(), format!("could not find original bound region for {:?}", - r).as_slice()) + 
r).as_str()) } fn fresh_bound_variable(this: &Glb, binder_id: NodeId) -> ty::Region { diff --git a/src/librustc/middle/typeck/infer/lattice.rs b/src/librustc/middle/typeck/infer/lattice.rs index 6095e5b050423..a8f19e8dde825 100644 --- a/src/librustc/middle/typeck/infer/lattice.rs +++ b/src/librustc/middle/typeck/infer/lattice.rs @@ -123,7 +123,7 @@ pub fn var_ids<'tcx, T: Combine<'tcx>>(this: &T, r => { this.infcx().tcx.sess.span_bug( this.trace().origin.span(), - format!("found non-region-vid: {:?}", r).as_slice()); + format!("found non-region-vid: {:?}", r).as_str()); } }).collect() } diff --git a/src/librustc/middle/typeck/infer/lub.rs b/src/librustc/middle/typeck/infer/lub.rs index 276a226483741..3d3d39fcd1890 100644 --- a/src/librustc/middle/typeck/infer/lub.rs +++ b/src/librustc/middle/typeck/infer/lub.rs @@ -192,7 +192,7 @@ impl<'f, 'tcx> Combine<'tcx> for Lub<'f, 'tcx> { this.fields.trace.origin.span(), format!("region {:?} is not associated with \ any bound region from A!", - r0).as_slice()) + r0).as_str()) } } diff --git a/src/librustc/middle/typeck/infer/mod.rs b/src/librustc/middle/typeck/infer/mod.rs index 44ee7ba2de6e4..e6b9e5cb99329 100644 --- a/src/librustc/middle/typeck/infer/mod.rs +++ b/src/librustc/middle/typeck/infer/mod.rs @@ -752,7 +752,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { format!("resolve_type_vars_if_possible() yielded {} \ when supplied with {}", self.ty_to_string(dummy0), - self.ty_to_string(dummy1)).as_slice()); + self.ty_to_string(dummy1)).as_str()); } } } @@ -800,13 +800,13 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { .span_err(sp, format!("{}{}", mk_msg(None, actual_ty), - error_str).as_slice()) + error_str).as_str()) } Some(e) => { self.tcx.sess.span_err(sp, format!("{}{}", mk_msg(Some(self.ty_to_string(e)), actual_ty), - error_str).as_slice()); + error_str).as_str()); } } for err in err.iter() { diff --git a/src/librustc/middle/typeck/infer/region_inference/mod.rs b/src/librustc/middle/typeck/infer/region_inference/mod.rs index 7e61c254a656b..aeea8aaf96004 100644 --- a/src/librustc/middle/typeck/infer/region_inference/mod.rs +++ b/src/librustc/middle/typeck/infer/region_inference/mod.rs @@ -446,7 +446,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { origin.span(), format!("cannot relate bound region: {} <= {}", sub.repr(self.tcx), - sup.repr(self.tcx)).as_slice()); + sup.repr(self.tcx)).as_str()); } (_, ReStatic) => { // all regions are subregions of static, so we can ignore this @@ -725,7 +725,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.bug( format!("cannot relate bound region: LUB({}, {})", a.repr(self.tcx), - b.repr(self.tcx)).as_slice()); + b.repr(self.tcx)).as_str()); } (ReStatic, _) | (_, ReStatic) => { @@ -742,7 +742,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { format!("lub_concrete_regions invoked with \ non-concrete regions: {}, {}", a, - b).as_slice()); + b).as_str()); } (ReFree(ref fr), ReScope(s_id)) | @@ -829,7 +829,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.bug( format!("cannot relate bound region: GLB({}, {})", a.repr(self.tcx), - b.repr(self.tcx)).as_slice()); + b.repr(self.tcx)).as_str()); } (ReStatic, r) | (r, ReStatic) => { @@ -849,7 +849,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { format!("glb_concrete_regions invoked with \ non-concrete regions: {}, {}", a, - b).as_slice()); + b).as_str()); } (ReFree(ref fr), ReScope(s_id)) | @@ -1397,7 +1397,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { for var {}, lower_bounds={}, upper_bounds={}", node_idx, lower_bounds.repr(self.tcx), - 
upper_bounds.repr(self.tcx)).as_slice()); + upper_bounds.repr(self.tcx)).as_str()); } fn collect_error_for_contracting_node( @@ -1441,7 +1441,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { format!("collect_error_for_contracting_node() could not find error \ for var {}, upper_bounds={}", node_idx, - upper_bounds.repr(self.tcx)).as_slice()); + upper_bounds.repr(self.tcx)).as_str()); } fn collect_concrete_regions(&self, diff --git a/src/librustc/middle/typeck/infer/test.rs b/src/librustc/middle/typeck/infer/test.rs index 198857fca5055..2dd60531ef4ff 100644 --- a/src/librustc/middle/typeck/infer/test.rs +++ b/src/librustc/middle/typeck/infer/test.rs @@ -64,7 +64,7 @@ fn remove_message(e: &mut ExpectErrorEmitter, msg: &str, lvl: Level) { } debug!("Error: {}", msg); - match e.messages.iter().position(|m| msg.contains(m.as_slice())) { + match e.messages.iter().position(|m| msg.contains(m.as_str())) { Some(i) => { e.messages.remove(i); } diff --git a/src/librustc/middle/typeck/mod.rs b/src/librustc/middle/typeck/mod.rs index 7104cb9584444..7c88c31b4495b 100644 --- a/src/librustc/middle/typeck/mod.rs +++ b/src/librustc/middle/typeck/mod.rs @@ -353,7 +353,7 @@ pub fn require_same_types(tcx: &ty::ctxt, format!("{}: {}", msg(), ty::type_err_to_str(tcx, - terr)).as_slice()); + terr)).as_str()); ty::note_and_explain_type_err(tcx, terr); false } @@ -403,7 +403,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt, format!("main has a non-function type: found \ `{}`", ppaux::ty_to_string(tcx, - main_t)).as_slice()); + main_t)).as_str()); } } } @@ -456,7 +456,7 @@ fn check_start_fn_ty(ccx: &CrateCtxt, format!("start has a non-function type: found \ `{}`", ppaux::ty_to_string(tcx, - start_t)).as_slice()); + start_t)).as_str()); } } } diff --git a/src/librustc/middle/typeck/variance.rs b/src/librustc/middle/typeck/variance.rs index 7e8c53159fb8d..eed3f322a8a8c 100644 --- a/src/librustc/middle/typeck/variance.rs +++ b/src/librustc/middle/typeck/variance.rs @@ -557,7 +557,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { None => { self.tcx().sess.bug(format!( "no inferred index entry for {}", - self.tcx().map.node_to_string(param_id)).as_slice()); + self.tcx().map.node_to_string(param_id)).as_str()); } } } @@ -835,7 +835,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.tcx().sess.bug( format!("unexpected type encountered in \ variance inference: {}", - ty.repr(self.tcx())).as_slice()); + ty.repr(self.tcx())).as_str()); } } } @@ -910,7 +910,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { .sess .bug(format!("unexpected region encountered in variance \ inference: {}", - region.repr(self.tcx())).as_slice()); + region.repr(self.tcx())).as_str()); } } } @@ -1048,7 +1048,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { // attribute and report an error with various results if found. 
if ty::has_attr(tcx, item_def_id, "rustc_variance") { let found = item_variances.repr(tcx); - tcx.sess.span_err(tcx.map.span(item_id), found.as_slice()); + tcx.sess.span_err(tcx.map.span(item_id), found.as_str()); } let newly_added = tcx.item_variance_map.borrow_mut() diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index 7a36c423add04..c0ded44ea6c3b 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -85,7 +85,7 @@ fn verify(sess: &Session, items: &lang_items::LanguageItems) { $( if missing.contains(&lang_items::$item) && items.$name().is_none() { sess.err(format!("language item required, but not found: `{}`", - stringify!($name)).as_slice()); + stringify!($name)).as_str()); } )* @@ -100,7 +100,7 @@ impl<'a> Context<'a> { } else)* { self.sess.span_err(span, format!("unknown external lang item: `{}`", - name).as_slice()); + name).as_str()); } } } diff --git a/src/librustc/plugin/load.rs b/src/librustc/plugin/load.rs index 4f38c74893e46..670f7f6477518 100644 --- a/src/librustc/plugin/load.rs +++ b/src/librustc/plugin/load.rs @@ -141,17 +141,17 @@ impl<'a> PluginLoader<'a> { // this is fatal: there are almost certainly macros we need // inside this crate, so continue would spew "macro undefined" // errors - Err(err) => self.sess.span_fatal(vi.span, err.as_slice()) + Err(err) => self.sess.span_fatal(vi.span, err.as_str()) }; unsafe { let registrar = - match lib.symbol(symbol.as_slice()) { + match lib.symbol(symbol.as_str()) { Ok(registrar) => { mem::transmute::<*mut u8,PluginRegistrarFun>(registrar) } // again fatal if we can't register macros - Err(err) => self.sess.span_fatal(vi.span, err.as_slice()) + Err(err) => self.sess.span_fatal(vi.span, err.as_str()) }; self.plugins.registrars.push(registrar); diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 11f16f1ea9511..59c785b4af82d 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -55,11 +55,11 @@ pub fn note_and_explain_region(cx: &ctxt, (ref str, Some(span)) => { cx.sess.span_note( span, - format!("{}{}{}", prefix, *str, suffix).as_slice()); + format!("{}{}{}", prefix, *str, suffix).as_str()); } (ref str, None) => { cx.sess.note( - format!("{}{}{}", prefix, *str, suffix).as_slice()); + format!("{}{}{}", prefix, *str, suffix).as_str()); } } } @@ -268,13 +268,13 @@ pub fn ty_to_string(cx: &ctxt, typ: t) -> String { match fn_style { ast::NormalFn => {} _ => { - s.push_str(fn_style.to_string().as_slice()); + s.push_str(fn_style.to_string().as_str()); s.push_char(' '); } }; if abi != abi::Rust { - s.push_str(format!("extern {} ", abi.to_string()).as_slice()); + s.push_str(format!("extern {} ", abi.to_string()).as_str()); }; s.push_str("fn"); @@ -298,14 +298,14 @@ pub fn ty_to_string(cx: &ctxt, typ: t) -> String { match cty.store { ty::UniqTraitStore => {} ty::RegionTraitStore(region, _) => { - s.push_str(region_to_string(cx, "", true, region).as_slice()); + s.push_str(region_to_string(cx, "", true, region).as_str()); } } match cty.fn_style { ast::NormalFn => {} _ => { - s.push_str(cty.fn_style.to_string().as_slice()); + s.push_str(cty.fn_style.to_string().as_str()); s.push_char(' '); } }; @@ -317,7 +317,7 @@ pub fn ty_to_string(cx: &ctxt, typ: t) -> String { assert_eq!(cty.onceness, ast::Once); s.push_str("proc"); push_sig_to_string(cx, &mut s, '(', ')', &cty.sig, - bounds_str.as_slice()); + bounds_str.as_str()); } ty::RegionTraitStore(..) 
=> { match cty.onceness { @@ -325,7 +325,7 @@ pub fn ty_to_string(cx: &ctxt, typ: t) -> String { ast::Once => s.push_str("once ") } push_sig_to_string(cx, &mut s, '|', '|', &cty.sig, - bounds_str.as_slice()); + bounds_str.as_str()); } } @@ -340,7 +340,7 @@ pub fn ty_to_string(cx: &ctxt, typ: t) -> String { bounds: &str) { s.push_char(bra); let strs: Vec = sig.inputs.iter().map(|a| fn_input_to_string(cx, *a)).collect(); - s.push_str(strs.connect(", ").as_slice()); + s.push_str(strs.connect(", ").as_str()); if sig.variadic { s.push_str(", ..."); } @@ -356,7 +356,7 @@ pub fn ty_to_string(cx: &ctxt, typ: t) -> String { if ty::type_is_bot(sig.output) { s.push_char('!'); } else { - s.push_str(ty_to_string(cx, sig.output).as_slice()); + s.push_str(ty_to_string(cx, sig.output).as_str()); } } } @@ -386,7 +386,7 @@ pub fn ty_to_string(cx: &ctxt, typ: t) -> String { } ty_rptr(r, ref tm) => { let mut buf = region_ptr_to_string(cx, r); - buf.push_str(mt_to_string(cx, tm).as_slice()); + buf.push_str(mt_to_string(cx, tm).as_str()); buf } ty_open(typ) => format!("opened<{}>", ty_to_string(cx, typ)), @@ -408,14 +408,14 @@ pub fn ty_to_string(cx: &ctxt, typ: t) -> String { ty_enum(did, ref substs) | ty_struct(did, ref substs) => { let base = ty::item_path_str(cx, did); let generics = ty::lookup_item_type(cx, did).generics; - parameterized(cx, base.as_slice(), substs, &generics) + parameterized(cx, base.as_str(), substs, &generics) } ty_trait(box ty::TyTrait { def_id: did, ref substs, ref bounds }) => { let base = ty::item_path_str(cx, did); let trait_def = ty::lookup_trait_def(cx, did); - let ty = parameterized(cx, base.as_slice(), + let ty = parameterized(cx, base.as_str(), substs, &trait_def.generics); let bound_str = bounds.user_string(cx); let bound_sep = if bound_str.is_empty() { "" } else { "+" }; @@ -511,7 +511,7 @@ pub fn parameterized(cx: &ctxt, pub fn ty_to_short_str(cx: &ctxt, typ: t) -> String { let mut s = typ.repr(cx).to_string(); if s.len() >= 32u { - s = s.as_slice().slice(0u, 32u).to_string(); + s = s.as_str().slice(0u, 32u).to_string(); } return s; } @@ -1069,7 +1069,7 @@ impl UserString for ty::TraitRef { fn user_string(&self, tcx: &ctxt) -> String { let base = ty::item_path_str(tcx, self.def_id); let trait_def = ty::lookup_trait_def(tcx, self.def_id); - parameterized(tcx, base.as_slice(), &self.substs, &trait_def.generics) + parameterized(tcx, base.as_str(), &self.substs, &trait_def.generics) } } diff --git a/src/librustc_back/archive.rs b/src/librustc_back/archive.rs index bd6770b32561c..d6f621a3ea3e4 100644 --- a/src/librustc_back/archive.rs +++ b/src/librustc_back/archive.rs @@ -51,7 +51,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, args: &str, cwd: Option<&Path>, paths: &[&Path]) -> ProcessOutput { let ar = match *maybe_ar_prog { - Some(ref ar) => ar.as_slice(), + Some(ref ar) => ar.as_str(), None => "ar" }; let mut cmd = Command::new(ar); @@ -73,22 +73,22 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, if !o.status.success() { handler.err(format!("{} failed with: {}", cmd, - o.status).as_slice()); + o.status).as_str()); handler.note(format!("stdout ---\n{}", str::from_utf8(o.output .as_slice()).unwrap()) - .as_slice()); + .as_str()); handler.note(format!("stderr ---\n{}", str::from_utf8(o.error .as_slice()).unwrap()) - .as_slice()); + .as_str()); handler.abort_if_errors(); } o }, Err(e) => { - handler.err(format!("could not exec `{}`: {}", ar.as_slice(), - e).as_slice()); + handler.err(format!("could not exec `{}`: {}", ar.as_str(), + e).as_str()); 
handler.abort_if_errors(); fail!("rustc::back::archive::run_ar() should not reach this point"); } @@ -107,16 +107,16 @@ pub fn find_library(name: &str, os: abi::Os, search_paths: &[Path], for path in search_paths.iter() { debug!("looking for {} inside {}", name, path.display()); - let test = path.join(oslibname.as_slice()); + let test = path.join(oslibname.as_str()); if test.exists() { return test } if oslibname != unixlibname { - let test = path.join(unixlibname.as_slice()); + let test = path.join(unixlibname.as_str()); if test.exists() { return test } } } handler.fatal(format!("could not find native static library `{}`, \ perhaps an -L flag is missing?", - name).as_slice()); + name).as_str()); } impl<'a> Archive<'a> { @@ -192,9 +192,9 @@ impl<'a> ArchiveBuilder<'a> { lto: bool) -> io::IoResult<()> { let object = format!("{}.o", name); let bytecode = format!("{}.bytecode.deflate", name); - let mut ignore = vec!(bytecode.as_slice(), METADATA_FILENAME); + let mut ignore = vec!(bytecode.as_str(), METADATA_FILENAME); if lto { - ignore.push(object.as_slice()); + ignore.push(object.as_str()); } self.add_archive(rlib, name, ignore.as_slice()) } @@ -305,7 +305,7 @@ impl<'a> ArchiveBuilder<'a> { } else { filename }; - let new_filename = self.work_dir.path().join(filename.as_slice()); + let new_filename = self.work_dir.path().join(filename.as_str()); try!(fs::rename(file, &new_filename)); self.members.push(Path::new(filename)); } diff --git a/src/librustc_back/arm.rs b/src/librustc_back/arm.rs index 134f7105ea76e..7857188c66a78 100644 --- a/src/librustc_back/arm.rs +++ b/src/librustc_back/arm.rs @@ -12,7 +12,7 @@ use target_strs; use syntax::abi; pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs::t { - let cc_args = if target_triple.as_slice().contains("thumb") { + let cc_args = if target_triple.as_str().contains("thumb") { vec!("-mthumb".to_string()) } else { vec!("-marm".to_string()) diff --git a/src/librustc_back/rpath.rs b/src/librustc_back/rpath.rs index 98fdd5a7fd7db..437a063828127 100644 --- a/src/librustc_back/rpath.rs +++ b/src/librustc_back/rpath.rs @@ -59,7 +59,7 @@ pub fn get_rpath_flags(config: RPathConfig) -> Vec { fn rpaths_to_flags(rpaths: &[String]) -> Vec { let mut ret = Vec::new(); for rpath in rpaths.iter() { - ret.push(format!("-Wl,-rpath,{}", (*rpath).as_slice())); + ret.push(format!("-Wl,-rpath,{}", (*rpath).as_str())); } return ret; } @@ -140,7 +140,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec { let mut set = HashSet::new(); let mut minimized = Vec::new(); for rpath in rpaths.iter() { - if set.insert(rpath.as_slice()) { + if set.insert(rpath.as_str()) { minimized.push(rpath.clone()); } } diff --git a/src/librustc_back/svh.rs b/src/librustc_back/svh.rs index 8e24fc1ad5bff..7f0ecafad693a 100644 --- a/src/librustc_back/svh.rs +++ b/src/librustc_back/svh.rs @@ -65,7 +65,7 @@ impl Svh { } pub fn as_str<'a>(&'a self) -> &'a str { - self.hash.as_slice() + self.hash.as_str() } pub fn calculate(metadata: &Vec, krate: &ast::Crate) -> Svh { diff --git a/src/librustrt/stack.rs b/src/librustrt/stack.rs index 3190e9f784149..c5234fd0334ac 100644 --- a/src/librustrt/stack.rs +++ b/src/librustrt/stack.rs @@ -107,7 +107,7 @@ extern fn stack_exhausted() { let task: Option> = Local::try_take(); let name = match task { Some(ref task) => { - task.name.as_ref().map(|n| n.as_slice()) + task.name.as_ref().map(|n| n.as_str()) } None => None }; diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index 733bc593922de..963a10e454cc2 100644 --- 
a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -965,7 +965,7 @@ impl Json { /// Returns None otherwise. pub fn as_string<'a>(&'a self) -> Option<&'a str> { match *self { - String(ref s) => Some(s.as_slice()), + String(ref s) => Some(s.as_str()), _ => None } } @@ -1961,7 +1961,7 @@ macro_rules! read_primitive { String(s) => { // re: #12967.. a type w/ numeric keys (ie HashMap etc) // is going to have a string here, as per JSON spec. - match std::from_str::from_str(s.as_slice()) { + match std::from_str::from_str(s.as_str()) { Some(f) => Ok(f), None => Err(ExpectedError("Number".to_string(), s)), } @@ -2000,7 +2000,7 @@ impl ::Decoder for Decoder { String(s) => { // re: #12967.. a type w/ numeric keys (ie HashMap etc) // is going to have a string here, as per JSON spec. - match std::from_str::from_str(s.as_slice()) { + match std::from_str::from_str(s.as_str()) { Some(f) => Ok(f), None => Err(ExpectedError("Number".to_string(), s)), } @@ -2018,7 +2018,7 @@ impl ::Decoder for Decoder { fn read_char(&mut self) -> DecodeResult { let s = try!(self.read_str()); { - let mut it = s.as_slice().chars(); + let mut it = s.as_str().chars(); match (it.next(), it.next()) { // exactly one character (Some(c), None) => return Ok(c), @@ -2077,7 +2077,7 @@ impl ::Decoder for Decoder { } }; let idx = match names.iter() - .position(|n| str::eq_slice(*n, name.as_slice())) { + .position(|n| str::eq_slice(*n, name.as_str())) { Some(idx) => idx, None => return Err(UnknownVariantError(name)) }; diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs index 2cda00ad6c4af..91779f901d655 100644 --- a/src/libserialize/serialize.rs +++ b/src/libserialize/serialize.rs @@ -304,13 +304,13 @@ impl<'a, E, S:Encoder> Encodable for &'a str { impl> Encodable for String { fn encode(&self, s: &mut S) -> Result<(), E> { - s.emit_str(self.as_slice()) + s.emit_str(self.as_str()) } } impl> Decodable for String { fn decode(d: &mut D) -> Result { - Ok(String::from_str(try!(d.read_str()).as_slice())) + Ok(String::from_str(try!(d.read_str()).as_str())) } } diff --git a/src/libstd/ascii.rs b/src/libstd/ascii.rs index f7b23163dfe48..1ab16e2d52474 100644 --- a/src/libstd/ascii.rs +++ b/src/libstd/ascii.rs @@ -292,7 +292,7 @@ pub trait OwnedAsciiCast { impl OwnedAsciiCast for String { #[inline] fn is_ascii(&self) -> bool { - self.as_slice().is_ascii() + self.as_str().is_ascii() } #[inline] diff --git a/src/libstd/failure.rs b/src/libstd/failure.rs index 8d715de16e65a..9b0b2f22a8112 100644 --- a/src/libstd/failure.rs +++ b/src/libstd/failure.rs @@ -41,7 +41,7 @@ pub fn on_fail(obj: &Any + Send, file: &'static str, line: uint) { let msg = match obj.as_ref::<&'static str>() { Some(s) => *s, None => match obj.as_ref::() { - Some(s) => s.as_slice(), + Some(s) => s.as_str(), None => "Box", } }; @@ -72,7 +72,7 @@ pub fn on_fail(obj: &Any + Send, file: &'static str, line: uint) { (t.name.take(), t.unwinder.unwinding()) }; { - let n = name.as_ref().map(|n| n.as_slice()).unwrap_or(""); + let n = name.as_ref().map(|n| n.as_str()).unwrap_or(""); match local_stderr.replace(None) { Some(mut stderr) => { diff --git a/src/libstd/io/mod.rs b/src/libstd/io/mod.rs index 0d0c9e933b0ab..c3f88ba8fd9dc 100644 --- a/src/libstd/io/mod.rs +++ b/src/libstd/io/mod.rs @@ -235,7 +235,7 @@ use os; use boxed::Box; use result::{Ok, Err, Result}; use rt::rtio; -use slice::{Slice, MutableSlice, ImmutableSlice}; +use slice::{MutableSlice, ImmutableSlice}; use str::{Str, StrSlice}; use str; use string::String; @@ -395,7 +395,7 @@ impl IoError { kind: 
kind, desc: desc, detail: if detail && kind == OtherIoError { - Some(os::error_string(errno).as_slice().chars().map(|c| c.to_lowercase()).collect()) + Some(os::error_string(errno).as_str().chars().map(|c| c.to_lowercase()).collect()) } else { None }, diff --git a/src/libstd/os.rs b/src/libstd/os.rs index ea278d55db956..5f2d9d9302612 100644 --- a/src/libstd/os.rs +++ b/src/libstd/os.rs @@ -46,7 +46,7 @@ use ptr::RawPtr; use ptr; use result::{Err, Ok, Result}; use slice::{Slice, ImmutableSlice, MutableSlice, ImmutablePartialEqSlice}; -use str::{Str, StrSlice, StrAllocating}; +use str::{StrSlice, StrAllocating}; use string::String; use sync::atomic::{AtomicInt, INIT_ATOMIC_INT, SeqCst}; use vec::Vec; diff --git a/src/libstd/path/mod.rs b/src/libstd/path/mod.rs index 5a5068f4d01d8..3df021bb2ebeb 100644 --- a/src/libstd/path/mod.rs +++ b/src/libstd/path/mod.rs @@ -833,7 +833,7 @@ pub struct Display<'a, P:'a> { impl<'a, P: GenericPath> fmt::Show for Display<'a, P> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.as_maybe_owned().as_slice().fmt(f) + self.as_maybe_owned().as_str().fmt(f) } } @@ -878,7 +878,7 @@ impl BytesContainer for String { } #[inline] fn container_as_str<'a>(&'a self) -> Option<&'a str> { - Some(self.as_slice()) + Some(self.as_str()) } #[inline] fn is_str(_: Option) -> bool { true } @@ -912,11 +912,11 @@ impl BytesContainer for CString { impl<'a> BytesContainer for str::MaybeOwned<'a> { #[inline] fn container_as_bytes<'b>(&'b self) -> &'b [u8] { - self.as_slice().as_bytes() + self.as_str().as_bytes() } #[inline] fn container_as_str<'b>(&'b self) -> Option<&'b str> { - Some(self.as_slice()) + Some(self.as_str()) } #[inline] fn is_str(_: Option) -> bool { true } diff --git a/src/libstd/path/posix.rs b/src/libstd/path/posix.rs index 06eab31d7bff5..277a27bd1f635 100644 --- a/src/libstd/path/posix.rs +++ b/src/libstd/path/posix.rs @@ -19,7 +19,6 @@ use hash; use io::Writer; use iter::{DoubleEndedIterator, AdditiveIterator, Extendable, Iterator, Map}; use option::{Option, None, Some}; -use str::Str; use str; use slice::{CloneableVector, Splits, Slice, VectorVector, ImmutablePartialEqSlice, ImmutableSlice}; diff --git a/src/libstd/path/windows.rs b/src/libstd/path/windows.rs index d9864cfaa6130..d448b8b2a521f 100644 --- a/src/libstd/path/windows.rs +++ b/src/libstd/path/windows.rs @@ -23,7 +23,7 @@ use io::Writer; use iter::{AdditiveIterator, DoubleEndedIterator, Extendable, Iterator, Map}; use mem; use option::{Option, Some, None}; -use slice::{Slice, ImmutableSlice}; +use slice::ImmutableSlice; use str::{CharSplits, Str, StrAllocating, StrVector, StrSlice}; use string::String; use unicode::char::UnicodeChar; @@ -209,7 +209,7 @@ impl GenericPathUnsafe for Path { unsafe fn set_filename_unchecked(&mut self, filename: T) { let filename = filename.container_as_str().unwrap(); match self.sepidx_or_prefix_len() { - None if ".." == self.repr.as_slice() => { + None if ".." == self.repr.as_str() => { let mut s = String::with_capacity(3 + filename.len()); s.push_str(".."); s.push_char(SEP); @@ -219,22 +219,22 @@ impl GenericPathUnsafe for Path { None => { self.update_normalized(filename); } - Some((_,idxa,end)) if self.repr.as_slice().slice(idxa,end) == ".." => { + Some((_,idxa,end)) if self.repr.as_str().slice(idxa,end) == ".." 
=> { let mut s = String::with_capacity(end + 1 + filename.len()); - s.push_str(self.repr.as_slice().slice_to(end)); + s.push_str(self.repr.as_str().slice_to(end)); s.push_char(SEP); s.push_str(filename); self.update_normalized(s); } Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => { let mut s = String::with_capacity(idxb + filename.len()); - s.push_str(self.repr.as_slice().slice_to(idxb)); + s.push_str(self.repr.as_str().slice_to(idxb)); s.push_str(filename); self.update_normalized(s); } Some((idxb,_,_)) => { let mut s = String::with_capacity(idxb + 1 + filename.len()); - s.push_str(self.repr.as_slice().slice_to(idxb)); + s.push_str(self.repr.as_str().slice_to(idxb)); s.push_char(SEP); s.push_str(filename); self.update_normalized(s); @@ -261,7 +261,7 @@ impl GenericPathUnsafe for Path { } fn shares_volume(me: &Path, path: &str) -> bool { // path is assumed to have a prefix of Some(DiskPrefix) - let repr = me.repr.as_slice(); + let repr = me.repr.as_str(); match me.prefix { Some(DiskPrefix) => { repr.as_bytes()[0] == path.as_bytes()[0].to_ascii().to_upper().to_byte() @@ -293,7 +293,7 @@ impl GenericPathUnsafe for Path { else { None }; let pathlen = path_.as_ref().map_or(path.len(), |p| p.len()); let mut s = String::with_capacity(me.repr.len() + 1 + pathlen); - s.push_str(me.repr.as_slice()); + s.push_str(me.repr.as_str()); let plen = me.prefix_len(); // if me is "C:" we don't want to add a path separator match me.prefix { @@ -305,7 +305,7 @@ impl GenericPathUnsafe for Path { } match path_ { None => s.push_str(path), - Some(p) => s.push_str(p.as_slice()) + Some(p) => s.push_str(p.as_str()) }; me.update_normalized(s) } @@ -361,7 +361,7 @@ impl GenericPath for Path { /// Always returns a `Some` value. #[inline] fn as_str<'a>(&'a self) -> Option<&'a str> { - Some(self.repr.as_slice()) + Some(self.repr.as_str()) } #[inline] @@ -383,21 +383,21 @@ impl GenericPath for Path { /// Always returns a `Some` value. fn dirname_str<'a>(&'a self) -> Option<&'a str> { Some(match self.sepidx_or_prefix_len() { - None if ".." == self.repr.as_slice() => self.repr.as_slice(), + None if ".." == self.repr.as_str() => self.repr.as_str(), None => ".", - Some((_,idxa,end)) if self.repr.as_slice().slice(idxa, end) == ".." => { - self.repr.as_slice() + Some((_,idxa,end)) if self.repr.as_str().slice(idxa, end) == ".." => { + self.repr.as_str() } - Some((idxb,_,end)) if self.repr.as_slice().slice(idxb, end) == "\\" => { - self.repr.as_slice() + Some((idxb,_,end)) if self.repr.as_str().slice(idxb, end) == "\\" => { + self.repr.as_str() } - Some((0,idxa,_)) => self.repr.as_slice().slice_to(idxa), + Some((0,idxa,_)) => self.repr.as_str().slice_to(idxa), Some((idxb,idxa,_)) => { match self.prefix { Some(DiskPrefix) | Some(VerbatimDiskPrefix) if idxb == self.prefix_len() => { - self.repr.as_slice().slice_to(idxa) + self.repr.as_str().slice_to(idxa) } - _ => self.repr.as_slice().slice_to(idxb) + _ => self.repr.as_str().slice_to(idxb) } } }) @@ -411,7 +411,7 @@ impl GenericPath for Path { /// See `GenericPath::filename_str` for info. /// Always returns a `Some` value if `filename` returns a `Some` value. fn filename_str<'a>(&'a self) -> Option<&'a str> { - let repr = self.repr.as_slice(); + let repr = self.repr.as_str(); match self.sepidx_or_prefix_len() { None if "." == repr || ".." == repr => None, None => Some(repr), @@ -442,14 +442,14 @@ impl GenericPath for Path { #[inline] fn pop(&mut self) -> bool { match self.sepidx_or_prefix_len() { - None if "." 
== self.repr.as_slice() => false, + None if "." == self.repr.as_str() => false, None => { self.repr = String::from_str("."); self.sepidx = None; true } Some((idxb,idxa,end)) if idxb == idxa && idxb == end => false, - Some((idxb,_,end)) if self.repr.as_slice().slice(idxb, end) == "\\" => false, + Some((idxb,_,end)) if self.repr.as_str().slice(idxb, end) == "\\" => false, Some((idxb,idxa,_)) => { let trunc = match self.prefix { Some(DiskPrefix) | Some(VerbatimDiskPrefix) | None => { @@ -469,15 +469,15 @@ impl GenericPath for Path { if self.prefix.is_some() { Some(Path::new(match self.prefix { Some(DiskPrefix) if self.is_absolute() => { - self.repr.as_slice().slice_to(self.prefix_len()+1) + self.repr.as_str().slice_to(self.prefix_len()+1) } Some(VerbatimDiskPrefix) => { - self.repr.as_slice().slice_to(self.prefix_len()+1) + self.repr.as_str().slice_to(self.prefix_len()+1) } - _ => self.repr.as_slice().slice_to(self.prefix_len()) + _ => self.repr.as_str().slice_to(self.prefix_len()) })) } else if is_vol_relative(self) { - Some(Path::new(self.repr.as_slice().slice_to(1))) + Some(Path::new(self.repr.as_str().slice_to(1))) } else { None } @@ -496,7 +496,7 @@ impl GenericPath for Path { fn is_absolute(&self) -> bool { match self.prefix { Some(DiskPrefix) => { - let rest = self.repr.as_slice().slice_from(self.prefix_len()); + let rest = self.repr.as_str().slice_from(self.prefix_len()); rest.len() > 0 && rest.as_bytes()[0] == SEP_BYTE } Some(_) => true, @@ -518,7 +518,7 @@ impl GenericPath for Path { } else { let mut ita = self.str_components().map(|x|x.unwrap()); let mut itb = other.str_components().map(|x|x.unwrap()); - if "." == self.repr.as_slice() { + if "." == self.repr.as_str() { return itb.next() != Some(".."); } loop { @@ -671,7 +671,7 @@ impl Path { /// Does not distinguish between absolute and cwd-relative paths, e.g. /// C:\foo and C:foo. pub fn str_components<'a>(&'a self) -> StrComponents<'a> { - let repr = self.repr.as_slice(); + let repr = self.repr.as_str(); let s = match self.prefix { Some(_) => { let plen = self.prefix_len(); @@ -697,8 +697,8 @@ impl Path { } fn equiv_prefix(&self, other: &Path) -> bool { - let s_repr = self.repr.as_slice(); - let o_repr = other.repr.as_slice(); + let s_repr = self.repr.as_str(); + let o_repr = other.repr.as_str(); match (self.prefix, other.prefix) { (Some(DiskPrefix), Some(VerbatimDiskPrefix)) => { self.is_absolute() && @@ -728,8 +728,8 @@ impl Path { fn normalize_(s: S) -> (Option, String) { // make borrowck happy let (prefix, val) = { - let prefix = parse_prefix(s.as_slice()); - let path = Path::normalize__(s.as_slice(), prefix); + let prefix = parse_prefix(s.as_str()); + let path = Path::normalize__(s.as_str(), prefix); (prefix, path) }; (prefix, match val { @@ -852,8 +852,8 @@ impl Path { fn update_sepidx(&mut self) { let s = if self.has_nonsemantic_trailing_slash() { - self.repr.as_slice().slice_to(self.repr.len()-1) - } else { self.repr.as_slice() }; + self.repr.as_str().slice_to(self.repr.len()-1) + } else { self.repr.as_str() }; let idx = s.rfind(if !prefix_is_verbatim(self.prefix) { is_sep } else { is_sep_verbatim }); let prefixlen = self.prefix_len(); @@ -886,7 +886,7 @@ impl Path { } fn update_normalized(&mut self, s: S) { - let (prefix, path) = Path::normalize_(s.as_slice()); + let (prefix, path) = Path::normalize_(s.as_str()); self.repr = path; self.prefix = prefix; self.update_sepidx(); @@ -928,7 +928,7 @@ pub fn is_verbatim(path: &Path) -> bool { /// non-verbatim, the non-verbatim version is returned. 
/// Otherwise, None is returned. pub fn make_non_verbatim(path: &Path) -> Option { - let repr = path.repr.as_slice(); + let repr = path.repr.as_str(); let new_path = match path.prefix { Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None, Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()), @@ -948,7 +948,7 @@ pub fn make_non_verbatim(path: &Path) -> Option { } // now ensure normalization didn't change anything if repr.slice_from(path.prefix_len()) == - new_path.repr.as_slice().slice_from(new_path.prefix_len()) { + new_path.repr.as_str().slice_from(new_path.prefix_len()) { Some(new_path) } else { None diff --git a/src/libstd/rt/util.rs b/src/libstd/rt/util.rs index ed24ed2a569c7..0cae43303ecd8 100644 --- a/src/libstd/rt/util.rs +++ b/src/libstd/rt/util.rs @@ -46,7 +46,7 @@ pub fn min_stack() -> uint { 0 => {} n => return n - 1, } - let amt = os::getenv("RUST_MIN_STACK").and_then(|s| from_str(s.as_slice())); + let amt = os::getenv("RUST_MIN_STACK").and_then(|s| from_str(s.as_str())); let amt = amt.unwrap_or(2 * 1024 * 1024); // 0 is our sentinel value, so ensure that we'll never see 0 after // initialization has run @@ -59,7 +59,7 @@ pub fn min_stack() -> uint { pub fn default_sched_threads() -> uint { match os::getenv("RUST_THREADS") { Some(nstr) => { - let opt_n: Option = FromStr::from_str(nstr.as_slice()); + let opt_n: Option = FromStr::from_str(nstr.as_str()); match opt_n { Some(n) if n > 0 => n, _ => fail!("`RUST_THREADS` is `{}`, should be a positive integer", nstr) diff --git a/src/libstd/task.rs b/src/libstd/task.rs index 9cace9c80ef5a..d19dbf7338393 100644 --- a/src/libstd/task.rs +++ b/src/libstd/task.rs @@ -360,7 +360,7 @@ pub fn with_task_name(blk: |Option<&str>| -> U) -> U { let task = Local::borrow(None::); match task.name { - Some(ref name) => blk(Some(name.as_slice())), + Some(ref name) => blk(Some(name.as_str())), None => blk(None) } } @@ -372,7 +372,7 @@ pub fn name() -> Option { let task = Local::borrow(None::); match task.name { - Some(ref name) => Some(name.as_slice().to_string()), + Some(ref name) => Some(name.as_str().to_string()), None => None } } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 4e65082fe3ad2..2fe9af4e09e40 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -152,7 +152,7 @@ impl, E> Encodable for Ident { impl, E> Decodable for Ident { fn decode(d: &mut D) -> Result { - Ok(str_to_ident(try!(d.read_str()).as_slice())) + Ok(str_to_ident(try!(d.read_str()).as_str())) } } diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs index d1f78c71e19df..74d077bde8f5a 100644 --- a/src/libsyntax/ast_map/mod.rs +++ b/src/libsyntax/ast_map/mod.rs @@ -90,7 +90,7 @@ pub fn path_to_string>(mut path: PI) -> String { if !s.is_empty() { s.push_str("::"); } - s.push_str(e.as_slice()); + s.push_str(e.as_str()); s }).to_string() } @@ -499,7 +499,7 @@ impl<'a,S:Str> NodesMatchingSuffix<'a,S> { None => return false, Some((node_id, name)) => (node_id, name), }; - if part.as_slice() != mod_name.as_str() { + if part.as_str() != mod_name.as_str() { return false; } cursor = self.map.get_parent(mod_id); @@ -537,7 +537,7 @@ impl<'a,S:Str> NodesMatchingSuffix<'a,S> { // We are looking at some node `n` with a given name and parent // id; do their names match what I am seeking? 
fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool { - name.as_str() == self.item_name.as_slice() && + name.as_str() == self.item_name.as_str() && self.suffix_matches(parent_of_n) } } diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 8ef13ef260424..54fd50c897bd1 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -219,11 +219,11 @@ pub fn impl_pretty_name(trait_ref: &Option, ty: &Ty) -> Ident { match *trait_ref { Some(ref trait_ref) => { pretty.push_char('.'); - pretty.push_str(pprust::path_to_string(&trait_ref.path).as_slice()); + pretty.push_str(pprust::path_to_string(&trait_ref.path).as_str()); } None => {} } - token::gensym_ident(pretty.as_slice()) + token::gensym_ident(pretty.as_str()) } pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod { diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index dd422d021493f..cfeee1a49f9a0 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -129,7 +129,7 @@ impl AttributeMethods for Attribute { let meta = mk_name_value_item_str( InternedString::new("doc"), token::intern_and_get_ident(strip_doc_comment_decoration( - comment.get()).as_slice())); + comment.get()).as_str())); if self.node.style == ast::AttrOuter { mk_attr_outer(self.node.id, meta) } else { @@ -420,7 +420,7 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[Gc]) { if !set.insert(name.clone()) { diagnostic.span_fatal(meta.span, format!("duplicate meta item `{}`", - name).as_slice()); + name).as_str()); } } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 2f30108c27bd1..25554a481c6fb 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -276,7 +276,7 @@ impl FileMap { let mut lines = self.lines.borrow_mut(); let begin: BytePos = *lines.get(line as uint) - self.start_pos; let begin = begin.to_uint(); - let slice = self.src.as_slice().slice_from(begin); + let slice = self.src.as_str().slice_from(begin); match slice.find('\n') { Some(e) => slice.slice_to(e).to_string(), None => slice.to_string() @@ -293,8 +293,8 @@ impl FileMap { } pub fn is_real_file(&self) -> bool { - !(self.name.as_slice().starts_with("<") && - self.name.as_slice().ends_with(">")) + !(self.name.as_str().starts_with("<") && + self.name.as_str().ends_with(">")) } } @@ -319,17 +319,17 @@ impl CodeMap { // Remove utf-8 BOM if any. // FIXME #12884: no efficient/safe way to remove from the start of a string // and reuse the allocation. - let mut src = if src.as_slice().starts_with("\ufeff") { - String::from_str(src.as_slice().slice_from(3)) + let mut src = if src.as_str().starts_with("\ufeff") { + String::from_str(src.as_str().slice_from(3)) } else { - String::from_str(src.as_slice()) + String::from_str(src.as_str()) }; // Append '\n' in case it's not already there. // This is a workaround to prevent CodeMap.lookup_filemap_idx from accidentally // overflowing into the next filemap in case the last byte of span is also the last // byte of filemap, which leads to incorrect results from CodeMap.span_to_*. 
- if src.len() > 0 && !src.as_slice().ends_with("\n") { + if src.len() > 0 && !src.as_str().ends_with("\n") { src.push_char('\n'); } @@ -409,14 +409,14 @@ impl CodeMap { if begin.fm.start_pos != end.fm.start_pos { None } else { - Some(begin.fm.src.as_slice().slice(begin.pos.to_uint(), - end.pos.to_uint()).to_string()) + Some(begin.fm.src.as_str().slice(begin.pos.to_uint(), + end.pos.to_uint()).to_string()) } } pub fn get_filemap(&self, filename: &str) -> Rc { for fm in self.files.borrow().iter() { - if filename == fm.name.as_slice() { + if filename == fm.name.as_str() { return fm.clone(); } } diff --git a/src/libsyntax/crateid.rs b/src/libsyntax/crateid.rs index 67605360a48cb..3b1b52eac3f50 100644 --- a/src/libsyntax/crateid.rs +++ b/src/libsyntax/crateid.rs @@ -36,12 +36,12 @@ impl fmt::Show for CrateId { try!(write!(f, "{}", self.path)); let version = match self.version { None => "0.0", - Some(ref version) => version.as_slice(), + Some(ref version) => version.as_str(), }; if self.path == self.name || self.path - .as_slice() - .ends_with(format!("/{}", self.name).as_slice()) { + .as_str() + .ends_with(format!("/{}", self.name).as_str()) { write!(f, "#{}", version) } else { write!(f, "#{}:{}", self.name, version) @@ -54,12 +54,12 @@ impl FromStr for CrateId { let pieces: Vec<&str> = s.splitn(1, '#').collect(); let path = pieces.get(0).to_string(); - if path.as_slice().starts_with("/") || path.as_slice().ends_with("/") || - path.as_slice().starts_with(".") || path.is_empty() { + if path.as_str().starts_with("/") || path.as_str().ends_with("/") || + path.as_str().starts_with(".") || path.is_empty() { return None; } - let path_pieces: Vec<&str> = path.as_slice() + let path_pieces: Vec<&str> = path.as_str() .rsplitn(1, '/') .collect(); let inferred_name = *path_pieces.get(0); @@ -107,7 +107,7 @@ impl CrateId { pub fn version_or_default<'a>(&'a self) -> &'a str { match self.version { None => "0.0", - Some(ref version) => version.as_slice(), + Some(ref version) => version.as_str(), } } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index c026a1c97c139..8000e4eae617b 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -115,7 +115,7 @@ impl SpanHandler { fail!(ExplicitBug); } pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! { - self.span_bug(sp, format!("unimplemented {}", msg).as_slice()); + self.span_bug(sp, format!("unimplemented {}", msg).as_str()); } pub fn handler<'a>(&'a self) -> &'a Handler { &self.handler @@ -158,7 +158,7 @@ impl Handler { self.err_count.get()); } } - self.fatal(s.as_slice()); + self.fatal(s.as_str()); } pub fn warn(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Warning); @@ -171,7 +171,7 @@ impl Handler { fail!(ExplicitBug); } pub fn unimpl(&self, msg: &str) -> ! 
{ - self.bug(format!("unimplemented {}", msg).as_slice()); + self.bug(format!("unimplemented {}", msg).as_str()); } pub fn emit(&self, cmsp: Option<(&codemap::CodeMap, Span)>, @@ -285,16 +285,16 @@ fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level, } try!(print_maybe_styled(dst, - format!("{}: ", lvl.to_string()).as_slice(), + format!("{}: ", lvl.to_string()).as_str(), term::attr::ForegroundColor(lvl.color()))); try!(print_maybe_styled(dst, - format!("{}", msg).as_slice(), + format!("{}", msg).as_str(), term::attr::Bold)); match code { Some(code) => { let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA); - try!(print_maybe_styled(dst, format!(" [{}]", code.clone()).as_slice(), style)); + try!(print_maybe_styled(dst, format!(" [{}]", code.clone()).as_str(), style)); match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) { Some(_) => { try!(write!(&mut dst.dst, @@ -393,12 +393,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, // the span) let span_end = Span { lo: sp.hi, hi: sp.hi, expn_info: sp.expn_info}; let ses = cm.span_to_string(span_end); - try!(print_diagnostic(dst, ses.as_slice(), lvl, msg, code)); + try!(print_diagnostic(dst, ses.as_str(), lvl, msg, code)); if rsp.is_full_span() { try!(custom_highlight_lines(dst, cm, sp, lvl, lines)); } } else { - try!(print_diagnostic(dst, ss.as_slice(), lvl, msg, code)); + try!(print_diagnostic(dst, ss.as_str(), lvl, msg, code)); if rsp.is_full_span() { try!(highlight_lines(dst, cm, sp, lvl, lines)); } @@ -472,7 +472,7 @@ fn highlight_lines(err: &mut EmitterWriter, } } try!(print_maybe_styled(err, - format!("{}\n", s).as_slice(), + format!("{}\n", s).as_str(), term::attr::ForegroundColor(lvl.color()))); } Ok(()) @@ -517,7 +517,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, s.push_char('^'); s.push_char('\n'); print_maybe_styled(w, - s.as_slice(), + s.as_str(), term::attr::ForegroundColor(lvl.color())) } @@ -534,12 +534,12 @@ fn print_macro_backtrace(w: &mut EmitterWriter, codemap::MacroAttribute => ("#[", "]"), codemap::MacroBang => ("", "!") }; - try!(print_diagnostic(w, ss.as_slice(), Note, + try!(print_diagnostic(w, ss.as_str(), Note, format!("in expansion of {}{}{}", pre, ei.callee.name, - post).as_slice(), None)); + post).as_str(), None)); let ss = cm.span_to_string(ei.call_site); - try!(print_diagnostic(w, ss.as_slice(), Note, "expansion site", None)); + try!(print_diagnostic(w, ss.as_str(), Note, "expansion site", None)); try!(print_macro_backtrace(w, cm, ei.call_site)); } Ok(()) @@ -549,6 +549,6 @@ pub fn expect(diag: &SpanHandler, opt: Option, msg: || -> String) -> T { match opt { Some(ref t) => (*t).clone(), - None => diag.handler().bug(msg().as_slice()), + None => diag.handler().bug(msg().as_str()), } } diff --git a/src/libsyntax/diagnostics/macros.rs b/src/libsyntax/diagnostics/macros.rs index c344168b62a28..7846e37328d99 100644 --- a/src/libsyntax/diagnostics/macros.rs +++ b/src/libsyntax/diagnostics/macros.rs @@ -20,7 +20,7 @@ macro_rules! register_diagnostic( macro_rules! span_err( ($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); - $session.span_err_with_code($span, format!($($message)*).as_slice(), stringify!($code)) + $session.span_err_with_code($span, format!($($message)*).as_str(), stringify!($code)) }) ) @@ -28,14 +28,14 @@ macro_rules! span_err( macro_rules! 
span_warn( ($session:expr, $span:expr, $code:ident, $($message:tt)*) => ({ __diagnostic_used!($code); - $session.span_warn_with_code($span, format!($($message)*).as_slice(), stringify!($code)) + $session.span_warn_with_code($span, format!($($message)*).as_str(), stringify!($code)) }) ) #[macro_export] macro_rules! span_note( ($session:expr, $span:expr, $($message:tt)*) => ({ - ($session).span_note($span, format!($($message)*).as_slice()) + ($session).span_note($span, format!($($message)*).as_str()) }) ) diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 25a6a4c01bd47..86088f7fc1395 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -58,7 +58,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, ecx.span_err(span, format!( "unknown diagnostic code {}; add to librustc/diagnostics.rs", token::get_ident(code).get() - ).as_slice()); + ).as_str()); } () }); @@ -67,7 +67,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, Some(previous_span) => { ecx.span_warn(span, format!( "diagnostic code {} already used", token::get_ident(code).get() - ).as_slice()); + ).as_str()); ecx.span_note(previous_span, "previous invocation"); }, None => () @@ -96,12 +96,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, if !diagnostics.insert(code.name, description) { ecx.span_err(span, format!( "diagnostic code {} already registered", token::get_ident(*code).get() - ).as_slice()); + ).as_str()); } }); let sym = Ident::new(token::gensym(( "__register_diagnostic_".to_string() + token::get_ident(*code).get() - ).as_slice())); + ).as_str())); MacItem::new(quote_item!(ecx, mod $sym {}).unwrap()) } @@ -121,7 +121,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, if !diagnostics_in_use.contains_key(code) { ecx.span_warn(span, format!( "diagnostic code {} never used", token::get_name(*code).get() - ).as_slice()); + ).as_str()); } description.map(|description| { ecx.expr_tuple(span, vec![ diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index 8028d51a7b5cf..fec2da9b2860c 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -100,7 +100,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) (Some('+'), operand) => { Some(token::intern_and_get_ident(format!( "={}", - operand).as_slice())) + operand).as_str())) } _ => { cx.span_err(span, "output operand constraint lacks '=' or '+'"); @@ -206,7 +206,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) asm_str_style: asm_str_style.unwrap(), outputs: outputs, inputs: inputs, - clobbers: token::intern_and_get_ident(cons.as_slice()), + clobbers: token::intern_and_get_ident(cons.as_str()), volatile: volatile, alignstack: alignstack, dialect: dialect diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index d59b20dfc4c68..12cea2c474192 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -496,7 +496,7 @@ impl<'a> ExtCtxt<'a> { pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); } pub fn mod_path(&self) -> Vec { let mut v = Vec::new(); - v.push(token::str_to_ident(self.ecfg.crate_name.as_slice())); + v.push(token::str_to_ident(self.ecfg.crate_name.as_str())); v.extend(self.mod_path.iter().map(|a| *a)); return v; } @@ -604,7 +604,7 @@ pub fn check_zero_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], name: &str) { if tts.len() != 0 { - cx.span_err(sp, format!("{} takes no arguments", name).as_slice()); + cx.span_err(sp, 
format!("{} takes no arguments", name).as_str()); } } @@ -616,7 +616,7 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, name: &str) -> Option { if tts.len() != 1 { - cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice()); + cx.span_err(sp, format!("{} takes 1 argument.", name).as_str()); } else { match tts[0] { ast::TTTok(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())), @@ -625,7 +625,7 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, } _ => { cx.span_err(sp, - format!("{} requires a string.", name).as_slice()) + format!("{} requires a string.", name).as_str()) } } } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 6bd1fba4b58a3..43a9b0a56d5b9 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -723,7 +723,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let expr_file = self.expr_str(span, token::intern_and_get_ident(loc.file .name - .as_slice())); + .as_str())); let expr_line = self.expr_uint(span, loc.line); let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line)); let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple); diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index ea7a4d061c0c5..e7162ea1c7000 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -40,15 +40,15 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, ast::LitInt(i, ast::UnsignedIntLit(_)) | ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => { - accumulator.push_str(format!("{}", i).as_slice()); + accumulator.push_str(format!("{}", i).as_str()); } ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => { - accumulator.push_str(format!("-{}", i).as_slice()); + accumulator.push_str(format!("-{}", i).as_str()); } ast::LitNil => {} ast::LitBool(b) => { - accumulator.push_str(format!("{}", b).as_slice()); + accumulator.push_str(format!("{}", b).as_str()); } ast::LitByte(..) | ast::LitBinary(..) => { @@ -63,5 +63,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(accumulator.as_slice()))) + token::intern_and_get_ident(accumulator.as_str()))) } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 0ac26a3a90490..887c79e8a0270 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -42,7 +42,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } } - let res = str_to_ident(res_str.as_slice()); + let res = str_to_ident(res_str.as_str()); let e = box(GC) ast::Expr { id: ast::DUMMY_NODE_ID, diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs index 7cff6e8ff3c01..9559366a1f356 100644 --- a/src/libsyntax/ext/deriving/bounds.rs +++ b/src/libsyntax/ext/deriving/bounds.rs @@ -32,7 +32,7 @@ pub fn expand_deriving_bound(cx: &mut ExtCtxt, cx.span_bug(span, format!("expected built-in trait name but \ found {}", - *tname).as_slice()) + *tname).as_str()) } } }, diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs index bbe96018f4b3d..aff07aba7bba2 100644 --- a/src/libsyntax/ext/deriving/clone.rs +++ b/src/libsyntax/ext/deriving/clone.rs @@ -72,12 +72,12 @@ fn cs_clone( cx.span_bug(trait_span, format!("non-matching enum variants in \ `deriving({})`", - name).as_slice()) + name).as_str()) } StaticEnum(..) | StaticStruct(..) 
=> { cx.span_bug(trait_span, format!("static method in `deriving({})`", - name).as_slice()) + name).as_str()) } } @@ -94,7 +94,7 @@ fn cs_clone( cx.span_bug(trait_span, format!("unnamed field in normal struct in \ `deriving({})`", - name).as_slice()) + name).as_str()) } }; cx.field_imm(field.span, ident, subcall(field)) diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs index d909ffd2b49fb..1fb539c1d42a8 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -169,7 +169,7 @@ fn decode_static_fields(cx: &mut ExtCtxt, let fields = fields.iter().enumerate().map(|(i, &span)| { getarg(cx, span, token::intern_and_get_ident(format!("_field{}", - i).as_slice()), + i).as_str()), i) }).collect(); diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index 02a748eed8e47..3fd102d839cfa 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -153,7 +153,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, Some(id) => token::get_ident(id), None => { token::intern_and_get_ident(format!("_field{}", - i).as_slice()) + i).as_str()) } }; let enc = cx.expr_method_call(span, self_, encode, vec!(blkencoder)); diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index 50bdc296aad76..51a0fd19799a8 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -623,7 +623,7 @@ impl<'a> MethodDef<'a> { for (i, ty) in self.args.iter().enumerate() { let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics); - let ident = cx.ident_of(format!("__arg_{}", i).as_slice()); + let ident = cx.ident_of(format!("__arg_{}", i).as_str()); arg_tys.push((ident, ast_ty)); let arg_expr = cx.expr_ident(trait_.span, ident); @@ -732,7 +732,7 @@ impl<'a> MethodDef<'a> { type_ident, struct_def, format!("__self_{}", - i).as_slice(), + i).as_str(), ast::MutImmutable); patterns.push(pat); raw_fields.push(ident_expr); @@ -892,15 +892,15 @@ impl<'a> MethodDef<'a> { .collect::>(); let self_arg_idents = self_arg_names.iter() - .map(|name|cx.ident_of(name.as_slice())) + .map(|name|cx.ident_of(name.as_str())) .collect::>(); // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each self_arg to a uint // corresponding to its variant index. 
let vi_idents : Vec = self_arg_names.iter() - .map(|name| { let vi_suffix = format!("{:s}_vi", name.as_slice()); - cx.ident_of(vi_suffix.as_slice()) }) + .map(|name| { let vi_suffix = format!("{:s}_vi", name.as_str()); + cx.ident_of(vi_suffix.as_str()) }) .collect::>(); // Builds, via callback to call_substructure_method, the @@ -923,7 +923,7 @@ impl<'a> MethodDef<'a> { self_pats = self_arg_names.iter() .map(|self_arg_name| trait_.create_enum_variant_pattern( - cx, &*variant, self_arg_name.as_slice(), + cx, &*variant, self_arg_name.as_str(), ast::MutImmutable)) .collect(); @@ -1254,7 +1254,7 @@ impl<'a> TraitDef<'a> { cx.span_bug(sp, "a struct with named and unnamed fields in `deriving`"); } }; - let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice()); + let ident = cx.ident_of(format!("{}_{}", prefix, i).as_str()); paths.push(codemap::Spanned{span: sp, node: ident}); let val = cx.expr( sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident))))); @@ -1299,7 +1299,7 @@ impl<'a> TraitDef<'a> { let mut ident_expr = Vec::new(); for (i, va) in variant_args.iter().enumerate() { let sp = self.set_expn_info(cx, va.ty.span); - let ident = cx.ident_of(format!("{}_{}", prefix, i).as_slice()); + let ident = cx.ident_of(format!("{}_{}", prefix, i).as_str()); let path1 = codemap::Spanned{span: sp, node: ident}; paths.push(path1); let expr_path = cx.expr_path(cx.path_ident(sp, ident)); diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index a9b5c8a413463..5c0da78ccdfcd 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -101,7 +101,7 @@ pub fn expand_meta_deriving(cx: &mut ExtCtxt, cx.span_err(titem.span, format!("unknown `deriving` \ trait: `{}`", - *tname).as_slice()); + *tname).as_str()); } }; } diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index e0dfbb232f554..aa4b4310b28cb 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -126,7 +126,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let formatter = substr.nonself_args[0]; let meth = cx.ident_of("write_fmt"); - let s = token::intern_and_get_ident(format_string.as_slice()); + let s = token::intern_and_get_ident(format_string.as_str()); let format_string = cx.expr_str(span, s); // phew, not our responsibility any more! 
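The deriving hunks above all follow one pattern: a name such as `__arg_0`, `__self_1_vi`, or `_field2` is built with `format!` and immediately reborrowed as `&str` to feed an ident or interner call, and the only change in this patch is that the borrow is now spelled `.as_str()` instead of `.as_slice()`. A hedged sketch of that call-site shape in present-day Rust, where `Ident` and `ident_of` are illustrative stand-ins and not the in-tree `ExtCtxt` items:

```rust
// Hedged sketch (modern Rust, hypothetical helpers): the deriving code builds
// numbered names by formatting into a String and reborrowing it as &str.
// Only the borrow call changes in this patch: `.as_slice()` -> `.as_str()`.

#[derive(Debug, PartialEq)]
struct Ident(String);

// Stand-in for the real ident construction; takes a &str like the original.
fn ident_of(name: &str) -> Ident {
    Ident(name.to_string())
}

fn main() {
    // Numbered argument idents, as in the MethodDef hunks above.
    let arg_idents: Vec<Ident> = (0..3)
        .map(|i| ident_of(format!("__arg_{}", i).as_str()))
        .collect();
    assert_eq!(arg_idents[0], Ident("__arg_0".to_string()));

    // Variant-index idents, as in the `{name}_vi` hunk above.
    let name = "__self_0";
    let vi_suffix = format!("{}_vi", name);
    assert_eq!(ident_of(vi_suffix.as_str()).0, "__self_0_vi");
}
```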
diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index aae92ae85fc5b..234eb11a7f012 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT Some(v) => v }; - let e = match os::getenv(var.as_slice()) { + let e = match os::getenv(var.as_str()) { None => { cx.expr_path(cx.path_all(sp, true, @@ -53,7 +53,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT cx.ident_of("Some")), vec!(cx.expr_str(sp, token::intern_and_get_ident( - s.as_slice())))) + s.as_str())))) } }; MacExpr::new(e) @@ -80,7 +80,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) 1 => { token::intern_and_get_ident(format!("environment variable `{}` \ not defined", - var).as_slice()) + var).as_str()) } 2 => { match expr_to_string(cx, *exprs.get(1), "expected string literal") { @@ -99,7 +99,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, msg.get()); cx.expr_uint(sp, 0) } - Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.as_slice())) + Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.as_str())) }; MacExpr::new(e) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index d0f3cf6f9d7ad..44c8091dbb636 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -136,7 +136,7 @@ fn expand_mac_invoc(mac: &ast::Mac, span: &codemap::Span, fld.cx.span_err( pth.span, format!("macro undefined: '{}!'", - extnamestr.get()).as_slice()); + extnamestr.get()).as_str()); // let compilation continue None @@ -172,8 +172,8 @@ fn expand_mac_invoc(mac: &ast::Mac, span: &codemap::Span, fld.cx.span_err( pth.span, format!("non-expression macro in expression position: {}", - extnamestr.get().as_slice() - ).as_slice()); + extnamestr.get().as_str() + ).as_str()); return None; } }; @@ -183,7 +183,7 @@ fn expand_mac_invoc(mac: &ast::Mac, span: &codemap::Span, fld.cx.span_err( pth.span, format!("'{}' is not a tt-style macro", - extnamestr.get()).as_slice()); + extnamestr.get()).as_str()); None } } @@ -392,7 +392,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) None => { fld.cx.span_err(pth.span, format!("macro undefined: '{}!'", - extnamestr).as_slice()); + extnamestr).as_str()); // let compilation continue return SmallVector::zero(); } @@ -405,7 +405,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) format!("macro {}! expects no ident argument, \ given '{}'", extnamestr, - token::get_ident(it.ident)).as_slice()); + token::get_ident(it.ident)).as_str()); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -424,7 +424,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(pth.span, format!("macro {}! expects an ident argument", - extnamestr.get()).as_slice()); + extnamestr.get()).as_str()); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -443,7 +443,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(pth.span, format!("macro {}! expects an ident argument", - extnamestr.get()).as_slice()); + extnamestr.get()).as_str()); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -460,7 +460,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) _ => { fld.cx.span_err(it.span, format!("{}! 
is not legal in item position", - extnamestr.get()).as_slice()); + extnamestr.get()).as_str()); return SmallVector::zero(); } } @@ -478,7 +478,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) // result of expanding a LetSyntaxTT, and thus doesn't // need to be marked. Not that it could be marked anyway. // create issue to recommend refactoring here? - fld.cx.syntax_env.insert(intern(name.as_slice()), ext); + fld.cx.syntax_env.insert(intern(name.as_str()), ext); if attr::contains_name(it.attrs.as_slice(), "macro_export") { fld.cx.exported_macros.push(it); } @@ -493,7 +493,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) Right(None) => { fld.cx.span_err(pth.span, format!("non-item macro in item position: {}", - extnamestr.get()).as_slice()); + extnamestr.get()).as_str()); return SmallVector::zero(); } }; @@ -743,7 +743,7 @@ fn expand_pat(p: Gc, fld: &mut MacroExpander) -> Gc { None => { fld.cx.span_err(pth.span, format!("macro undefined: '{}!'", - extnamestr).as_slice()); + extnamestr).as_str()); // let compilation continue return DummyResult::raw_pat(p.span); } @@ -772,7 +772,7 @@ fn expand_pat(p: Gc, fld: &mut MacroExpander) -> Gc { format!( "non-pattern macro in pattern position: {}", extnamestr.get() - ).as_slice() + ).as_str() ); return DummyResult::raw_pat(p.span); } @@ -784,7 +784,7 @@ fn expand_pat(p: Gc, fld: &mut MacroExpander) -> Gc { _ => { fld.cx.span_err(p.span, format!("{}! is not legal in pattern position", - extnamestr.get()).as_slice()); + extnamestr.get()).as_str()); return DummyResult::raw_pat(p.span); } } diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 0bb32c73ca264..4961d40eb81fa 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -134,7 +134,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool, _ => { ecx.span_err(p.span, format!("expected ident for named argument, found `{}`", - p.this_token_to_string()).as_slice()); + p.this_token_to_string()).as_str()); return (invocation, None); } }; @@ -147,7 +147,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool, Some(prev) => { ecx.span_err(e.span, format!("duplicate argument named `{}`", - name).as_slice()); + name).as_str()); ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here"); continue } @@ -238,7 +238,7 @@ impl<'a, 'b> Context<'a, 'b> { let msg = format!("invalid reference to argument `{}` ({:s})", arg, self.describe_num_args()); - self.ecx.span_err(self.fmtsp, msg.as_slice()); + self.ecx.span_err(self.fmtsp, msg.as_str()); return; } { @@ -258,7 +258,7 @@ impl<'a, 'b> Context<'a, 'b> { Some(e) => e.span, None => { let msg = format!("there is no argument named `{}`", name); - self.ecx.span_err(self.fmtsp, msg.as_slice()); + self.ecx.span_err(self.fmtsp, msg.as_str()); return; } }; @@ -301,19 +301,19 @@ impl<'a, 'b> Context<'a, 'b> { format!("argument redeclared with type `{}` when \ it was previously `{}`", *ty, - *cur).as_slice()); + *cur).as_str()); } (&Known(ref cur), _) => { self.ecx.span_err(sp, format!("argument used to format with `{}` was \ attempted to not be used for formatting", - *cur).as_slice()); + *cur).as_str()); } (_, &Known(ref ty)) => { self.ecx.span_err(sp, format!("argument previously used as a format \ argument attempted to be used as `{}`", - *ty).as_slice()); + *ty).as_str()); } (_, _) => { self.ecx.span_err(sp, "argument declared with multiple formats"); @@ -378,7 +378,7 @@ impl<'a, 'b> Context<'a, 'b> { /// Translate the accumulated string literals to a literal expression fn 
trans_literal_string(&mut self) -> Gc { let sp = self.fmtsp; - let s = token::intern_and_get_ident(self.literal.as_slice()); + let s = token::intern_and_get_ident(self.literal.as_str()); self.literal.clear(); self.ecx.expr_str(sp, s) } @@ -548,7 +548,7 @@ impl<'a, 'b> Context<'a, 'b> { continue // error already generated } - let name = self.ecx.ident_of(format!("__arg{}", i).as_slice()); + let name = self.ecx.ident_of(format!("__arg{}", i).as_str()); pats.push(self.ecx.pat_ident(e.span, name)); heads.push(self.ecx.expr_addr_of(e.span, e)); locals.push(self.format_arg(e.span, Exact(i), @@ -561,7 +561,7 @@ impl<'a, 'b> Context<'a, 'b> { }; let lname = self.ecx.ident_of(format!("__arg{}", - *name).as_slice()); + *name).as_str()); pats.push(self.ecx.pat_ident(e.span, lname)); heads.push(self.ecx.expr_addr_of(e.span, e)); *names.get_mut(*self.name_positions.get(name)) = @@ -664,7 +664,7 @@ impl<'a, 'b> Context<'a, 'b> { let (krate, fmt_fn) = match *ty { Known(ref tyname) => { - match tyname.as_slice() { + match tyname.as_str() { "" => ("std", "secret_show"), "?" => ("debug", "secret_poly"), "b" => ("std", "secret_bool"), @@ -684,7 +684,7 @@ impl<'a, 'b> Context<'a, 'b> { self.ecx .span_err(sp, format!("unknown format trait `{}`", - *tyname).as_slice()); + *tyname).as_str()); ("std", "dummy") } } @@ -798,7 +798,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, Some(error) => { cx.ecx.span_err(efmt.span, format!("invalid format string: {}", - error).as_slice()); + error).as_str()); return DummyResult::raw_expr(sp); } None => {} diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 808e671f868d3..ed5ad4edd6a12 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -483,7 +483,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt, } fn ids_ext(strs: Vec ) -> Vec { - strs.iter().map(|str| str_to_ident((*str).as_slice())).collect() + strs.iter().map(|str| str_to_ident((*str).as_str())).collect() } fn id_ext(str: &str) -> ast::Ident { diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 5cc0ec4a12290..028404418d60f 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let topmost = topmost_expn_info(cx.backtrace().unwrap()); let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo); - let filename = token::intern_and_get_ident(loc.file.name.as_slice()); + let filename = token::intern_and_get_ident(loc.file.name.as_str()); base::MacExpr::new(cx.expr_str(topmost.call_site, filename)) } @@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box { let s = pprust::tts_to_string(tts); base::MacExpr::new(cx.expr_str(sp, - token::intern_and_get_ident(s.as_slice()))) + token::intern_and_get_ident(s.as_str()))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) @@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) .connect("::"); base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(string.as_slice()))) + token::intern_and_get_ident(string.as_str()))) } /// include! 
: parse the given file as an expr @@ -116,7 +116,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, format!("couldn't read {}: {}", file.display(), - e).as_slice()); + e).as_str()); return DummyResult::expr(sp); } Ok(bytes) => bytes, @@ -126,7 +126,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // Add this input file to the code map to make it available as // dependency information let filename = file.display().to_string(); - let interned = token::intern_and_get_ident(src.as_slice()); + let interned = token::intern_and_get_ident(src.as_str()); cx.codemap().new_filemap(filename, src); base::MacExpr::new(cx.expr_str(sp, interned)) @@ -134,7 +134,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Err(_) => { cx.span_err(sp, format!("{} wasn't a utf-8 file", - file.display()).as_slice()); + file.display()).as_str()); return DummyResult::expr(sp); } } @@ -152,7 +152,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, format!("couldn't read {}: {}", file.display(), - e).as_slice()); + e).as_str()); return DummyResult::expr(sp); } Ok(bytes) => { @@ -174,7 +174,7 @@ fn topmost_expn_info(expn_info: Gc) -> Gc .. } => { // Don't recurse into file using "include!" - if "include" == name.as_slice() { + if "include" == name.as_str() { expn_info } else { topmost_expn_info(next_expn_info) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 509d5bd442182..e66a6bf348706 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -189,7 +189,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[Matcher], res: &[Rc]) p_s.span_diagnostic .span_fatal(span, format!("duplicated bind name: {}", - string.get()).as_slice()) + string.get()).as_str()) } ret_val.insert(bind_name, res[idx].clone()); } @@ -214,10 +214,10 @@ pub fn parse_or_else(sess: &ParseSess, match parse(sess, cfg, rdr, ms.as_slice()) { Success(m) => m, Failure(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str.as_slice()) + sess.span_diagnostic.span_fatal(sp, str.as_str()) } Error(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str.as_slice()) + sess.span_diagnostic.span_fatal(sp, str.as_str()) } } } @@ -442,7 +442,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { _ => { let token_str = token::to_string(&p.token); p.fatal((format!("expected ident, found {}", - token_str.as_slice())).as_slice()) + token_str.as_str())).as_str()) } }, "path" => { @@ -458,7 +458,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { "matchers" => token::NtMatchers(p.parse_matchers()), _ => { p.fatal(format!("unsupported builtin nonterminal parser: {}", - name).as_slice()) + name).as_str()) } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index d8f0eb32ad7bf..105526f5143c2 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -52,7 +52,7 @@ impl<'a> ParserAnyMacro<'a> { following", token_str); let span = parser.span; - parser.span_err(span, msg.as_slice()); + parser.span_err(span, msg.as_str()); } } } @@ -200,13 +200,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, best_fail_spot = sp; best_fail_msg = (*msg).clone(); }, - Error(sp, ref msg) => cx.span_fatal(sp, msg.as_slice()) + Error(sp, ref msg) => cx.span_fatal(sp, msg.as_str()) } } _ => cx.bug("non-matcher found in parsed lhses") } } - cx.span_fatal(best_fail_spot, 
best_fail_msg.as_slice()); + cx.span_fatal(best_fail_spot, best_fail_msg.as_str()); } /// This procedure performs the expansion of the diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 726a7315f6991..06cd0195cfd1d 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -92,7 +92,7 @@ fn lookup_cur_matched(r: &TtReader, name: Ident) -> Rc { r.sp_diag .span_fatal(r.cur_span, format!("unknown macro variable `{}`", - token::get_ident(name)).as_slice()); + token::get_ident(name)).as_str()); } } } @@ -224,7 +224,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } LisContradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - r.sp_diag.span_fatal(sp.clone(), msg.as_slice()); + r.sp_diag.span_fatal(sp.clone(), msg.as_str()); } LisConstraint(len, _) => { if len == 0 { @@ -270,7 +270,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { r.sp_diag.span_fatal( r.cur_span, /* blame the macro writer */ format!("variable '{}' is still repeating at this depth", - token::get_ident(ident)).as_slice()); + token::get_ident(ident)).as_str()); } } } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index eca02d06ca9f8..9f14a30818fcd 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -93,7 +93,7 @@ impl<'a> ParserAttr for Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected `#`, found `{}`", - token_str).as_slice()); + token_str).as_str()); } }; diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index c53638ed07d13..49c7335f9cbc5 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -64,21 +64,21 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { let mut j = lines.len(); // first line of all-stars should be omitted if lines.len() > 0 && - lines.get(0).as_slice().chars().all(|c| c == '*') { + lines.get(0).as_str().chars().all(|c| c == '*') { i += 1; } - while i < j && lines.get(i).as_slice().trim().is_empty() { + while i < j && lines.get(i).as_str().trim().is_empty() { i += 1; } // like the first, a last line of all stars should be omitted if j > i && lines.get(j - 1) - .as_slice() + .as_str() .chars() .skip(1) .all(|c| c == '*') { j -= 1; } - while j > i && lines.get(j - 1).as_slice().trim().is_empty() { + while j > i && lines.get(j - 1).as_str().trim().is_empty() { j -= 1; } return lines.slice(i, j).iter().map(|x| (*x).clone()).collect(); @@ -90,7 +90,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { let mut can_trim = true; let mut first = true; for line in lines.iter() { - for (j, c) in line.as_slice().chars().enumerate() { + for (j, c) in line.as_str().chars().enumerate() { if j > i || !"* \t".contains_char(c) { can_trim = false; break; @@ -115,7 +115,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { if can_trim { lines.iter().map(|line| { - line.as_slice().slice(i + 1, line.len()).to_string() + line.as_str().slice(i + 1, line.len()).to_string() }).collect() } else { lines @@ -186,7 +186,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. 
- if is_doc_comment(line.as_slice()) { + if is_doc_comment(line.as_str()) { break; } lines.push(line); @@ -223,10 +223,10 @@ fn all_whitespace(s: &str, col: CharPos) -> Option { fn trim_whitespace_prefix_and_push_line(lines: &mut Vec , s: String, col: CharPos) { let len = s.len(); - let s1 = match all_whitespace(s.as_slice(), col) { + let s1 = match all_whitespace(s.as_str(), col) { Some(col) => { if col < len { - s.as_slice().slice(col, len).to_string() + s.as_str().slice(col, len).to_string() } else { "".to_string() } @@ -260,10 +260,10 @@ fn read_block_comment(rdr: &mut StringReader, rdr.bump(); rdr.bump(); } - if is_block_doc_comment(curr_line.as_slice()) { + if is_block_doc_comment(curr_line.as_str()) { return } - assert!(!curr_line.as_slice().contains_char('\n')); + assert!(!curr_line.as_str().contains_char('\n')); lines.push(curr_line); } else { let mut level: int = 1; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index da43f08a4e5b1..c22a88c2aea5b 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -181,7 +181,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); char::escape_default(c, |c| m.push_char(c)); - self.fatal_span_(from_pos, to_pos, m.as_slice()); + self.fatal_span_(from_pos, to_pos, m.as_str()); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -190,7 +190,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); char::escape_default(c, |c| m.push_char(c)); - self.err_span_(from_pos, to_pos, m.as_slice()); + self.err_span_(from_pos, to_pos, m.as_str()); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -199,8 +199,8 @@ impl<'a> StringReader<'a> { m.push_str(": "); let from = self.byte_offset(from_pos).to_uint(); let to = self.byte_offset(to_pos).to_uint(); - m.push_str(self.filemap.src.as_slice().slice(from, to)); - self.fatal_span_(from_pos, to_pos, m.as_slice()); + m.push_str(self.filemap.src.as_str().slice(from, to)); + self.fatal_span_(from_pos, to_pos, m.as_str()); } /// Advance peek_tok and peek_span to refer to the next token, and @@ -252,7 +252,7 @@ impl<'a> StringReader<'a> { /// Calls `f` with a string slice of the source text spanning from `start` /// up to but excluding `end`. fn with_str_from_to(&self, start: BytePos, end: BytePos, f: |s: &str| -> T) -> T { - f(self.filemap.src.as_slice().slice( + f(self.filemap.src.as_str().slice( self.byte_offset(start).to_uint(), self.byte_offset(end).to_uint())) } @@ -308,7 +308,7 @@ impl<'a> StringReader<'a> { let last_char = self.curr.unwrap(); let next = self.filemap .src - .as_slice() + .as_str() .char_range_at(current_byte_offset); let byte_offset_diff = next.next - current_byte_offset; self.pos = self.pos + Pos::from_uint(byte_offset_diff); @@ -330,7 +330,7 @@ impl<'a> StringReader<'a> { pub fn nextch(&self) -> Option { let offset = self.byte_offset(self.pos).to_uint(); if offset < self.filemap.src.len() { - Some(self.filemap.src.as_slice().char_at(offset)) + Some(self.filemap.src.as_str().char_at(offset)) } else { None } @@ -342,7 +342,7 @@ impl<'a> StringReader<'a> { pub fn nextnextch(&self) -> Option { let offset = self.byte_offset(self.pos).to_uint(); - let s = self.filemap.deref().src.as_slice(); + let s = self.filemap.deref().src.as_str(); if offset >= s.len() { return None } let str::CharRange { next, .. 
} = s.char_range_at(offset); if next < s.len() { @@ -519,7 +519,7 @@ impl<'a> StringReader<'a> { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") } else { string.into_maybe_owned() }; - token::DOC_COMMENT(token::intern(string.as_slice())) + token::DOC_COMMENT(token::intern(string.as_str())) } else { token::COMMENT }; @@ -1046,7 +1046,7 @@ impl<'a> StringReader<'a> { // expansion purposes. See #12512 for the gory details of why // this is necessary. let ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(format!("'{}", lifetime_name).as_slice()) + str_to_ident(format!("'{}", lifetime_name).as_str()) }); // Conjure up a "keyword checking ident" to make sure that diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 585b98925cc58..8e66c16e3d263 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -231,7 +231,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) Err(e) => { err(format!("couldn't read {}: {}", path.display(), - e).as_slice()); + e).as_str()); unreachable!() } }; @@ -241,7 +241,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) path.as_str().unwrap().to_string()) } None => { - err(format!("{} is not UTF-8 encoded", path.display()).as_slice()) + err(format!("{} is not UTF-8 encoded", path.display()).as_str()) } } unreachable!() @@ -371,7 +371,7 @@ pub fn char_lit(lit: &str) -> (char, int) { } let msg = format!("lexer should have rejected a bad character escape {}", lit); - let msg2 = msg.as_slice(); + let msg2 = msg.as_str(); let esc: |uint| -> Option<(char, int)> = |len| num::from_str_radix(lit.slice(2, len), 16) @@ -415,7 +415,7 @@ pub fn str_lit(lit: &str) -> String { match c { '\\' => { let ch = chars.peek().unwrap_or_else(|| { - fail!("{}", error(i).as_slice()) + fail!("{}", error(i).as_str()) }).val1(); if ch == '\n' { @@ -423,7 +423,7 @@ pub fn str_lit(lit: &str) -> String { } else if ch == '\r' { chars.next(); let ch = chars.peek().unwrap_or_else(|| { - fail!("{}", error(i).as_slice()) + fail!("{}", error(i).as_str()) }).val1(); if ch != '\n' { @@ -441,7 +441,7 @@ pub fn str_lit(lit: &str) -> String { }, '\r' => { let ch = chars.peek().unwrap_or_else(|| { - fail!("{}", error(i).as_slice()) + fail!("{}", error(i).as_str()) }).val1(); if ch != '\n' { @@ -495,7 +495,7 @@ pub fn float_lit(s: &str) -> ast::Lit_ { debug!("float_lit: {}", s); // FIXME #2252: bounds checking float literals is defered until trans let s2 = s.chars().filter(|&c| c != '_').collect::(); - let s = s2.as_slice(); + let s = s2.as_str(); let mut ty = None; @@ -570,11 +570,11 @@ pub fn binary_lit(lit: &str) -> Rc> { match chars.next() { Some((i, b'\\')) => { let em = error(i); - match chars.peek().expect(em.as_slice()).val1() { + match chars.peek().expect(em.as_str()).val1() { b'\n' => eat(&mut chars), b'\r' => { chars.next(); - if chars.peek().expect(em.as_slice()).val1() != b'\n' { + if chars.peek().expect(em.as_str()).val1() != b'\n' { fail!("lexer accepted bare CR"); } eat(&mut chars); @@ -592,7 +592,7 @@ pub fn binary_lit(lit: &str) -> Rc> { }, Some((i, b'\r')) => { let em = error(i); - if chars.peek().expect(em.as_slice()).val1() != b'\n' { + if chars.peek().expect(em.as_str()).val1() != b'\n' { fail!("lexer accepted bare CR"); } chars.next(); @@ -610,7 +610,7 @@ pub fn integer_lit(s: &str, sd: &SpanHandler, sp: Span) -> ast::Lit_ { // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::(); - let mut s = s2.as_slice(); + 
let mut s = s2.as_str(); debug!("parse_integer_lit: {}", s); diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 9ed9e626c3d32..da9164aad051b 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -116,13 +116,13 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> { kind_str: &str, desc: &str) { self.span_err(sp, - format!("obsolete syntax: {}", kind_str).as_slice()); + format!("obsolete syntax: {}", kind_str).as_str()); if !self.obsolete_set.contains(&kind) { self.sess .span_diagnostic .handler() - .note(format!("{}", desc).as_slice()); + .note(format!("{}", desc).as_str()); self.obsolete_set.insert(kind); } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 328bdf883356c..5c96bc68092dc 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -400,12 +400,12 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(t); let last_span = self.last_span; self.span_fatal(last_span, format!("unexpected token: `{}`", - token_str).as_slice()); + token_str).as_str()); } pub fn unexpected(&mut self) -> ! { let this_token = self.this_token_to_string(); - self.fatal(format!("unexpected token: `{}`", this_token).as_slice()); + self.fatal(format!("unexpected token: `{}`", this_token).as_str()); } /// Expect and consume the token t. Signal an error if @@ -418,7 +418,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", token_str, - this_token_str).as_slice()) + this_token_str).as_str()) } } @@ -436,7 +436,7 @@ impl<'a> Parser<'a> { i.fold(b, |b,a| { let mut b = b; b.push_str("`, `"); - b.push_str(Parser::token_to_string(a).as_slice()); + b.push_str(Parser::token_to_string(a).as_str()); b }) } @@ -457,7 +457,7 @@ impl<'a> Parser<'a> { (format!("expected `{}`, found `{}`", expect, actual)) - }).as_slice() + }).as_str() ) } } @@ -540,7 +540,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal((format!("expected ident, found `{}`", - token_str)).as_slice()) + token_str)).as_str()) } } } @@ -589,7 +589,7 @@ impl<'a> Parser<'a> { let id_interned_str = token::get_name(kw.to_name()); let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", - id_interned_str, token_str).as_slice()) + id_interned_str, token_str).as_str()) } } @@ -600,7 +600,7 @@ impl<'a> Parser<'a> { let span = self.span; self.span_err(span, format!("expected identifier, found keyword `{}`", - token_str).as_slice()); + token_str).as_str()); } } @@ -609,7 +609,7 @@ impl<'a> Parser<'a> { if token::is_reserved_keyword(&self.token) { let token_str = self.this_token_to_string(); self.fatal(format!("`{}` is a reserved keyword", - token_str).as_slice()) + token_str).as_str()) } } @@ -629,7 +629,7 @@ impl<'a> Parser<'a> { Parser::token_to_string(&token::BINOP(token::AND)); self.fatal(format!("expected `{}`, found `{}`", found_token, - token_str).as_slice()) + token_str).as_str()) } } } @@ -650,7 +650,7 @@ impl<'a> Parser<'a> { Parser::token_to_string(&token::BINOP(token::OR)); self.fatal(format!("expected `{}`, found `{}`", token_str, - found_token).as_slice()) + found_token).as_str()) } } } @@ -702,7 +702,7 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(&token::LT); self.fatal(format!("expected `{}`, found `{}`", token_str, - found_token).as_slice()) + found_token).as_str()) } } @@ -753,7 +753,7 @@ impl<'a> Parser<'a> { let this_token_str = 
self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", gt_str, - this_token_str).as_slice()) + this_token_str).as_str()) } } } @@ -1310,7 +1310,7 @@ impl<'a> Parser<'a> { _ => { let token_str = p.this_token_to_string(); p.fatal((format!("expected `;` or `{{`, found `{}`", - token_str)).as_slice()) + token_str)).as_str()) } } }) @@ -1487,7 +1487,7 @@ impl<'a> Parser<'a> { TyInfer } else { let msg = format!("expected type, found token {:?}", self.token); - self.fatal(msg.as_slice()); + self.fatal(msg.as_str()); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1608,11 +1608,11 @@ impl<'a> Parser<'a> { &self.sess.span_diagnostic, self.span), token::LIT_FLOAT(s) => parse::float_lit(s.as_str()), token::LIT_STR(s) => { - LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_slice()), + LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_str()), ast::CookedStr) } token::LIT_STR_RAW(s, n) => { - LitStr(token::intern_and_get_ident(parse::raw_str_lit(s.as_str()).as_slice()), + LitStr(token::intern_and_get_ident(parse::raw_str_lit(s.as_str()).as_str()), ast::RawStr(n)) } token::LIT_BINARY(i) => @@ -1796,7 +1796,7 @@ impl<'a> Parser<'a> { }; } _ => { - self.fatal(format!("expected a lifetime name").as_slice()); + self.fatal(format!("expected a lifetime name").as_str()); } } } @@ -1835,7 +1835,7 @@ impl<'a> Parser<'a> { let msg = format!("expected `,` or `>` after lifetime \ name, got: {:?}", self.token); - self.fatal(msg.as_slice()); + self.fatal(msg.as_str()); } } } @@ -2320,7 +2320,7 @@ impl<'a> Parser<'a> { self.bump(); let last_span = self.last_span; self.span_err(last_span, - format!("unexpected token: `{}`", n.as_str()).as_slice()); + format!("unexpected token: `{}`", n.as_str()).as_str()); self.span_note(last_span, "try parenthesizing the first index; e.g., `(foo.0).1`"); self.abort_if_errors(); @@ -2415,7 +2415,7 @@ impl<'a> Parser<'a> { }; let token_str = p.this_token_to_string(); p.fatal(format!("incorrect close delimiter: `{}`", - token_str).as_slice()) + token_str).as_str()) }, /* we ought to allow different depths of unquotation */ token::DOLLAR if p.quote_depth > 0u => { @@ -2959,7 +2959,7 @@ impl<'a> Parser<'a> { if self.token != token::RBRACE { let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", "}", - token_str).as_slice()) + token_str).as_str()) } etc = true; break; @@ -2980,7 +2980,7 @@ impl<'a> Parser<'a> { BindByRef(..) 
| BindByValue(MutMutable) => { let token_str = self.this_token_to_string(); self.fatal(format!("unexpected `{}`", - token_str).as_slice()) + token_str).as_str()) } _ => {} } @@ -3396,7 +3396,7 @@ impl<'a> Parser<'a> { let tok_str = self.this_token_to_string(); self.fatal(format!("expected {}`(` or `{{`, found `{}`", ident_str, - tok_str).as_slice()) + tok_str).as_str()) } }; @@ -3924,7 +3924,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected `self`, found `{}`", - token_str).as_slice()) + token_str).as_str()) } } } @@ -4082,7 +4082,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected `,` or `)`, found `{}`", - token_str).as_slice()) + token_str).as_str()) } } } @@ -4403,7 +4403,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name)).as_slice()); + token::get_ident(class_name)).as_str()); } self.bump(); } else if self.token == token::LPAREN { @@ -4427,7 +4427,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name)).as_slice()); + token::get_ident(class_name)).as_str()); } self.expect(&token::SEMI); } else if self.eat(&token::SEMI) { @@ -4438,7 +4438,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, `(`, or `;` after struct \ name, found `{}`", "{", - token_str).as_slice()) + token_str).as_str()) } let _ = ast::DUMMY_NODE_ID; // FIXME: Workaround for crazy bug. @@ -4469,7 +4469,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal(span, format!("expected `,`, or `}}`, found `{}`", - token_str).as_slice()) + token_str).as_str()) } } a_var @@ -4549,7 +4549,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected item, found `{}`", - token_str).as_slice()) + token_str).as_str()) } } } @@ -4634,8 +4634,8 @@ impl<'a> Parser<'a> { let mod_name = mod_string.get().to_string(); let default_path_str = format!("{}.rs", mod_name); let secondary_path_str = format!("{}/mod.rs", mod_name); - let default_path = dir_path.join(default_path_str.as_slice()); - let secondary_path = dir_path.join(secondary_path_str.as_slice()); + let default_path = dir_path.join(default_path_str.as_str()); + let secondary_path = dir_path.join(secondary_path_str.as_str()); let default_exists = default_path.exists(); let secondary_exists = secondary_path.exists(); @@ -4650,13 +4650,13 @@ impl<'a> Parser<'a> { format!("maybe move this module `{0}` \ to its own directory via \ `{0}/mod.rs`", - this_module).as_slice()); + this_module).as_str()); if default_exists || secondary_exists { self.span_note(id_sp, format!("... 
or maybe `use` the module \ `{}` instead of possibly \ redeclaring it", - mod_name).as_slice()); + mod_name).as_str()); } self.abort_if_errors(); } @@ -4668,7 +4668,7 @@ impl<'a> Parser<'a> { self.span_fatal(id_sp, format!("file not found for module \ `{}`", - mod_name).as_slice()); + mod_name).as_str()); } (true, true) => { self.span_fatal( @@ -4677,7 +4677,7 @@ impl<'a> Parser<'a> { and {}", mod_name, default_path_str, - secondary_path_str).as_slice()); + secondary_path_str).as_str()); } } } @@ -4698,11 +4698,11 @@ impl<'a> Parser<'a> { let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); for p in included_mod_stack.slice(i, len).iter() { - err.push_str(p.display().as_maybe_owned().as_slice()); + err.push_str(p.display().as_maybe_owned().as_str()); err.push_str(" -> "); } - err.push_str(path.display().as_maybe_owned().as_slice()); - self.span_fatal(id_sp, err.as_slice()); + err.push_str(path.display().as_maybe_owned().as_str()); + self.span_fatal(id_sp, err.as_str()); } None => () } @@ -4843,7 +4843,7 @@ impl<'a> Parser<'a> { self.span_fatal(span, format!("expected extern crate name but \ found `{}`", - token_str).as_slice()); + token_str).as_str()); } }; @@ -5016,7 +5016,7 @@ impl<'a> Parser<'a> { format!("illegal ABI: expected one of [{}], \ found `{}`", abi::all_names().connect(", "), - the_string).as_slice()); + the_string).as_str()); None } } @@ -5073,7 +5073,7 @@ impl<'a> Parser<'a> { format!("`extern mod` is obsolete, use \ `extern crate` instead \ to refer to external \ - crates.").as_slice()) + crates.").as_str()) } return self.parse_item_extern_crate(lo, visibility, attrs); } @@ -5101,7 +5101,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal(span, format!("expected `{}` or `fn`, found `{}`", "{", - token_str).as_slice()); + token_str).as_str()); } let is_virtual = self.eat_keyword(keywords::Virtual); @@ -5323,7 +5323,7 @@ impl<'a> Parser<'a> { } s.push_char('`'); let last_span = self.last_span; - self.span_fatal(last_span, s.as_slice()); + self.span_fatal(last_span, s.as_str()); } return IoviNone(attrs); } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index cce14be1ba526..8d8d5f5e571c8 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -593,7 +593,7 @@ impl InternedString { #[inline] pub fn get<'a>(&'a self) -> &'a str { - self.string.as_slice() + self.string.as_str() } } @@ -611,26 +611,26 @@ impl BytesContainer for InternedString { impl fmt::Show for InternedString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.string.as_slice()) + write!(f, "{}", self.string.as_str()) } } impl<'a> Equiv<&'a str> for InternedString { fn equiv(&self, other: & &'a str) -> bool { - (*other) == self.string.as_slice() + (*other) == self.string.as_str() } } impl, E> Decodable for InternedString { fn decode(d: &mut D) -> Result { Ok(get_name(get_ident_interner().intern( - try!(d.read_str()).as_slice()))) + try!(d.read_str()).as_str()))) } } impl, E> Encodable for InternedString { fn encode(&self, s: &mut S) -> Result<(), E> { - s.emit_str(self.string.as_slice()) + s.emit_str(self.string.as_str()) } } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 70da4e11961f9..79da1aa15ad08 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -135,7 +135,7 @@ pub fn buf_str(toks: Vec, } s.push_str(format!("{}={}", szs.get(i), - tok_str(toks.get(i).clone())).as_slice()); + 
tok_str(toks.get(i).clone())).as_str()); i += 1u; i %= n; } @@ -597,7 +597,7 @@ impl Printer { assert_eq!(l, len); // assert!(l <= space); self.space -= len; - self.print_str(s.as_slice()) + self.print_str(s.as_str()) } Eof => { // Eof should never get here. diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index a4dff45ad359f..aeb524fe7b4fe 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -460,7 +460,7 @@ impl<'a> State<'a> { pub fn synth_comment(&mut self, text: String) -> IoResult<()> { try!(word(&mut self.s, "/*")); try!(space(&mut self.s)); - try!(word(&mut self.s, text.as_slice())); + try!(word(&mut self.s, text.as_str())); try!(space(&mut self.s)); word(&mut self.s, "*/") } @@ -698,7 +698,7 @@ impl<'a> State<'a> { } ast::ForeignItemStatic(ref t, m) => { try!(self.head(visibility_qualified(item.vis, - "static").as_slice())); + "static").as_str())); if m { try!(self.word_space("mut")); } @@ -721,7 +721,7 @@ impl<'a> State<'a> { match item.node { ast::ItemStatic(ref ty, m, ref expr) => { try!(self.head(visibility_qualified(item.vis, - "static").as_slice())); + "static").as_str())); if m == ast::MutMutable { try!(self.word_space("mut")); } @@ -751,7 +751,7 @@ impl<'a> State<'a> { } ast::ItemMod(ref _mod) => { try!(self.head(visibility_qualified(item.vis, - "mod").as_slice())); + "mod").as_str())); try!(self.print_ident(item.ident)); try!(self.nbsp()); try!(self.bopen()); @@ -760,7 +760,7 @@ impl<'a> State<'a> { } ast::ItemForeignMod(ref nmod) => { try!(self.head("extern")); - try!(self.word_nbsp(nmod.abi.to_string().as_slice())); + try!(self.word_nbsp(nmod.abi.to_string().as_str())); try!(self.bopen()); try!(self.print_foreign_mod(nmod, item.attrs.as_slice())); try!(self.bclose(item.span)); @@ -769,7 +769,7 @@ impl<'a> State<'a> { try!(self.ibox(indent_unit)); try!(self.ibox(0u)); try!(self.word_nbsp(visibility_qualified(item.vis, - "type").as_slice())); + "type").as_str())); try!(self.print_ident(item.ident)); try!(self.print_generics(params)); try!(self.end()); // end the inner ibox @@ -795,7 +795,7 @@ impl<'a> State<'a> { try!(self.word_space("virtual")); } try!(self.head(visibility_qualified(item.vis, - "struct").as_slice())); + "struct").as_str())); try!(self.print_struct(&**struct_def, generics, item.ident, item.span)); } @@ -805,7 +805,7 @@ impl<'a> State<'a> { ref ty, ref impl_items) => { try!(self.head(visibility_qualified(item.vis, - "impl").as_slice())); + "impl").as_str())); if generics.is_parameterized() { try!(self.print_generics(generics)); try!(space(&mut self.s)); @@ -837,7 +837,7 @@ impl<'a> State<'a> { } ast::ItemTrait(ref generics, ref unbound, ref bounds, ref methods) => { try!(self.head(visibility_qualified(item.vis, - "trait").as_slice())); + "trait").as_str())); try!(self.print_ident(item.ident)); try!(self.print_generics(generics)); match unbound { @@ -883,7 +883,7 @@ impl<'a> State<'a> { generics: &ast::Generics, ident: ast::Ident, span: codemap::Span, visibility: ast::Visibility) -> IoResult<()> { - try!(self.head(visibility_qualified(visibility, "enum").as_slice())); + try!(self.head(visibility_qualified(visibility, "enum").as_str())); try!(self.print_ident(ident)); try!(self.print_generics(generics)); try!(self.print_where_clause(generics)); @@ -986,7 +986,7 @@ impl<'a> State<'a> { match *tt { ast::TTDelim(ref tts) => self.print_tts(tts.as_slice()), ast::TTTok(_, ref tk) => { - try!(word(&mut self.s, parse::token::to_string(tk).as_slice())); + try!(word(&mut self.s, 
parse::token::to_string(tk).as_str())); match *tk { parse::token::DOC_COMMENT(..) => { hardbreak(&mut self.s) @@ -1003,7 +1003,7 @@ impl<'a> State<'a> { match *sep { Some(ref tk) => { try!(word(&mut self.s, - parse::token::to_string(tk).as_slice())); + parse::token::to_string(tk).as_str())); } None => () } @@ -1666,7 +1666,7 @@ impl<'a> State<'a> { |s, &(ref co, ref o, is_rw)| { match co.get().slice_shift_char() { (Some('='), operand) if is_rw => { - try!(s.print_string(format!("+{}", operand).as_slice(), + try!(s.print_string(format!("+{}", operand).as_str(), ast::CookedStr)) } _ => try!(s.print_string(co.get(), ast::CookedStr)) @@ -1743,7 +1743,7 @@ impl<'a> State<'a> { pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> { if self.encode_idents_with_hygiene { let encoded = ident.encode_with_hygiene(); - try!(word(&mut self.s, encoded.as_slice())) + try!(word(&mut self.s, encoded.as_str())) } else { try!(word(&mut self.s, token::get_ident(ident).get())) } @@ -1751,7 +1751,7 @@ impl<'a> State<'a> { } pub fn print_uint(&mut self, i: uint) -> IoResult<()> { - word(&mut self.s, i.to_string().as_slice()) + word(&mut self.s, i.to_string().as_str()) } pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> { @@ -2554,7 +2554,7 @@ impl<'a> State<'a> { try!(self.maybe_print_comment(lit.span.lo)); match self.next_lit(lit.span.lo) { Some(ref ltrl) => { - return word(&mut self.s, (*ltrl).lit.as_slice()); + return word(&mut self.s, (*ltrl).lit.as_str()); } _ => () } @@ -2564,32 +2564,32 @@ impl<'a> State<'a> { let mut res = String::from_str("b'"); (byte as char).escape_default(|c| res.push_char(c)); res.push_char('\''); - word(&mut self.s, res.as_slice()) + word(&mut self.s, res.as_str()) } ast::LitChar(ch) => { let mut res = String::from_str("'"); ch.escape_default(|c| res.push_char(c)); res.push_char('\''); - word(&mut self.s, res.as_slice()) + word(&mut self.s, res.as_str()) } ast::LitInt(i, t) => { match t { ast::SignedIntLit(st, ast::Plus) => { word(&mut self.s, - ast_util::int_ty_to_string(st, Some(i as i64)).as_slice()) + ast_util::int_ty_to_string(st, Some(i as i64)).as_str()) } ast::SignedIntLit(st, ast::Minus) => { word(&mut self.s, - ast_util::int_ty_to_string(st, Some(-(i as i64))).as_slice()) + ast_util::int_ty_to_string(st, Some(-(i as i64))).as_str()) } ast::UnsignedIntLit(ut) => { - word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i)).as_slice()) + word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i)).as_str()) } ast::UnsuffixedIntLit(ast::Plus) => { - word(&mut self.s, format!("{}", i).as_slice()) + word(&mut self.s, format!("{}", i).as_str()) } ast::UnsuffixedIntLit(ast::Minus) => { - word(&mut self.s, format!("-{}", i).as_slice()) + word(&mut self.s, format!("-{}", i).as_str()) } } } @@ -2598,7 +2598,7 @@ impl<'a> State<'a> { format!( "{}{}", f.get(), - ast_util::float_ty_to_string(t).as_slice()).as_slice()) + ast_util::float_ty_to_string(t).as_str()).as_str()) } ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()), ast::LitNil => word(&mut self.s, "()"), @@ -2607,7 +2607,7 @@ impl<'a> State<'a> { } ast::LitBinary(ref v) => { let escaped: String = v.iter().map(|&b| b as char).collect(); - word(&mut self.s, format!("b\"{}\"", escaped.escape_default()).as_slice()) + word(&mut self.s, format!("b\"{}\"", escaped.escape_default()).as_str()) } } } @@ -2648,7 +2648,7 @@ impl<'a> State<'a> { comments::Mixed => { assert_eq!(cmnt.lines.len(), 1u); try!(zerobreak(&mut self.s)); - try!(word(&mut self.s, cmnt.lines.get(0).as_slice())); + try!(word(&mut 
self.s, cmnt.lines.get(0).as_str())); zerobreak(&mut self.s) } comments::Isolated => { @@ -2657,7 +2657,7 @@ impl<'a> State<'a> { // Don't print empty lines because they will end up as trailing // whitespace if !line.is_empty() { - try!(word(&mut self.s, line.as_slice())); + try!(word(&mut self.s, line.as_str())); } try!(hardbreak(&mut self.s)); } @@ -2666,13 +2666,13 @@ impl<'a> State<'a> { comments::Trailing => { try!(word(&mut self.s, " ")); if cmnt.lines.len() == 1u { - try!(word(&mut self.s, cmnt.lines.get(0).as_slice())); + try!(word(&mut self.s, cmnt.lines.get(0).as_str())); hardbreak(&mut self.s) } else { try!(self.ibox(0u)); for line in cmnt.lines.iter() { if !line.is_empty() { - try!(word(&mut self.s, line.as_slice())); + try!(word(&mut self.s, line.as_str())); } try!(hardbreak(&mut self.s)); } @@ -2682,7 +2682,7 @@ impl<'a> State<'a> { comments::BlankLine => { // We need to do at least one, possibly two hardbreaks. let is_semi = match self.s.last_token() { - pp::String(s, _) => ";" == s.as_slice(), + pp::String(s, _) => ";" == s.as_str(), _ => false }; if is_semi || self.is_begin() || self.is_end() { @@ -2705,7 +2705,7 @@ impl<'a> State<'a> { string=st)) } }; - word(&mut self.s, st.as_slice()) + word(&mut self.s, st.as_str()) } pub fn next_comment(&mut self) -> Option { @@ -2736,7 +2736,7 @@ impl<'a> State<'a> { Some(abi::Rust) => Ok(()), Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(abi.to_string().as_slice()) + self.word_nbsp(abi.to_string().as_str()) } None => Ok(()) } @@ -2747,7 +2747,7 @@ impl<'a> State<'a> { match opt_abi { Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(abi.to_string().as_slice()) + self.word_nbsp(abi.to_string().as_str()) } None => Ok(()) } @@ -2758,12 +2758,12 @@ impl<'a> State<'a> { opt_fn_style: Option, abi: abi::Abi, vis: ast::Visibility) -> IoResult<()> { - try!(word(&mut self.s, visibility_qualified(vis, "").as_slice())); + try!(word(&mut self.s, visibility_qualified(vis, "").as_str())); try!(self.print_opt_fn_style(opt_fn_style)); if abi != abi::Rust { try!(self.word_nbsp("extern")); - try!(self.word_nbsp(abi.to_string().as_slice())); + try!(self.word_nbsp(abi.to_string().as_str())); } word(&mut self.s, "fn") diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 452b5a5251222..e581144cc92aa 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -99,14 +99,14 @@ impl Eq for RcStr {} impl Ord for RcStr { fn cmp(&self, other: &RcStr) -> Ordering { - self.as_slice().cmp(&other.as_slice()) + self.as_str().cmp(&other.as_str()) } } impl Str for RcStr { #[inline] - fn as_slice<'a>(&'a self) -> &'a str { - let s: &'a str = self.string.as_slice(); + fn as_str<'a>(&'a self) -> &'a str { + let s: &'a str = self.string.as_str(); s } } @@ -114,7 +114,7 @@ impl Str for RcStr { impl fmt::Show for RcStr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::fmt::Show; - self.as_slice().fmt(f) + self.as_str().fmt(f) } } @@ -196,7 +196,7 @@ impl StrInterner { /// strings may never be removed from the interner, this is safe. 
pub fn get_ref<'a>(&'a self, idx: Name) -> &'a str { let vect = self.vect.borrow(); - let s: &str = vect.get(idx.uint()).as_slice(); + let s: &str = vect.get(idx.uint()).as_str(); unsafe { mem::transmute(s) } diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs index 36883c8fcf4f2..156df45637a07 100644 --- a/src/libterm/terminfo/mod.rs +++ b/src/libterm/terminfo/mod.rs @@ -81,10 +81,10 @@ impl Terminal for TerminfoTerminal { } }; - let entry = open(term.as_slice()); + let entry = open(term.as_str()); if entry.is_err() { if os::getenv("MSYSCON").map_or(false, |s| { - "mintty.exe" == s.as_slice() + "mintty.exe" == s.as_str() }) { // msys terminal return Some(TerminfoTerminal {out: out, ti: msys_terminfo(), num_colors: 8}); diff --git a/src/libterm/terminfo/parser/compiled.rs b/src/libterm/terminfo/parser/compiled.rs index 2826ecc1a1222..fa14b3becd702 100644 --- a/src/libterm/terminfo/parser/compiled.rs +++ b/src/libterm/terminfo/parser/compiled.rs @@ -218,7 +218,7 @@ pub fn parse(file: &mut io::Reader, longnames: bool) Err(_) => return Err("input not utf-8".to_string()), }; - let term_names: Vec = names_str.as_slice() + let term_names: Vec = names_str.as_str() .split('|') .map(|s| s.to_string()) .collect(); diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs index ebec59924e8f3..ec7b2cb1dc1fe 100644 --- a/src/libterm/terminfo/searcher.rs +++ b/src/libterm/terminfo/searcher.rs @@ -36,7 +36,7 @@ pub fn get_dbpath_for_term(term: &str) -> Option> { dirs_to_search.push(homedir.unwrap().join(".terminfo")) } match getenv("TERMINFO_DIRS") { - Some(dirs) => for i in dirs.as_slice().split(':') { + Some(dirs) => for i in dirs.as_str().split(':') { if i == "" { dirs_to_search.push(Path::new("/usr/share/terminfo")); } else { @@ -60,13 +60,13 @@ pub fn get_dbpath_for_term(term: &str) -> Option> { for p in dirs_to_search.iter() { if p.exists() { let f = first_char.to_string(); - let newp = p.join_many([f.as_slice(), term]); + let newp = p.join_many([f.as_str(), term]); if newp.exists() { return Some(box newp); } // on some installations the dir is named after the hex of the char (e.g. OS X) let f = format!("{:x}", first_char as uint); - let newp = p.join_many([f.as_slice(), term]); + let newp = p.join_many([f.as_str(), term]); if newp.exists() { return Some(box newp); }
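Taken together, this section is a mechanical rename: the `Str` trait's required method becomes `as_str` (see the `impl Str for RcStr` hunk in `libsyntax/util/interner.rs` above), and every call site that borrows a `String` as `&str`, most often the result of `format!`, switches from `.as_slice()` to `.as_str()`. A minimal sketch of the same shape in present-day Rust, with a hypothetical `StrLike` trait and `RcStrLike` type standing in for the old `Str` trait and `RcStr`:

```rust
// Hedged sketch in modern Rust: a `StrLike` trait standing in for the old
// `Str` trait, renamed method included, plus the dominant call-site pattern
// of the patch (borrow a freshly formatted String with `.as_str()`).
// `StrLike`, `RcStrLike`, and `report` are illustrative, not in-tree items.

use std::rc::Rc;

trait StrLike {
    fn as_str(&self) -> &str;
}

struct RcStrLike {
    string: Rc<String>,
}

impl StrLike for RcStrLike {
    // Same shape as the `impl Str for RcStr` hunk above: borrow the owned
    // buffer behind the Rc and hand out a &str view of it.
    fn as_str(&self) -> &str {
        self.string.as_str()
    }
}

fn report(msg: &str) {
    println!("error: {}", msg);
}

fn main() {
    // format!() yields a String; the &str view is now taken with .as_str()
    // instead of .as_slice(), as in the span_err/span_fatal hunks above.
    let name = "concat_idents";
    report(format!("{} takes no arguments", name).as_str());

    let interned = RcStrLike { string: Rc::new(String::from("orange")) };
    assert_eq!(interned.as_str(), "orange");
}
```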