diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 27174de8e7416..1ac2c9fc6bec6 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -8,10 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -/*! - * Concurrency-enabled mechanisms for sharing mutable and/or immutable state - * between tasks. - */ +#![stable] + +//! Concurrency-enabled mechanisms for sharing mutable and/or immutable state +//! between tasks. use core::atomics; use core::clone::Clone; @@ -51,6 +51,7 @@ use heap::deallocate; /// } /// ``` #[unsafe_no_drop_flag] +#[stable] pub struct Arc { // FIXME #12808: strange name to try to avoid interfering with // field accesses of the contained type via Deref @@ -62,6 +63,7 @@ pub struct Arc { /// Weak pointers will not keep the data inside of the `Arc` alive, and can be /// used to break cycles between `Arc` pointers. #[unsafe_no_drop_flag] +#[experimental = "Weak pointers may not belong in this module."] pub struct Weak { // FIXME #12808: strange name to try to avoid interfering with // field accesses of the contained type via Deref @@ -77,6 +79,7 @@ struct ArcInner { impl Arc { /// Create an atomically reference counted wrapper. #[inline] + #[stable] pub fn new(data: T) -> Arc { // Start the weak pointer count as 1 which is the weak pointer that's // held by all the strong pointers (kinda), see std/rc.rs for more info @@ -103,6 +106,7 @@ impl Arc { /// Weak pointers will not keep the data alive. Once all strong references /// to the underlying data have been dropped, the data itself will be /// destroyed. + #[experimental = "Weak pointers may not belong in this module."] pub fn downgrade(&self) -> Weak { // See the clone() impl for why this is relaxed self.inner().weak.fetch_add(1, atomics::Relaxed); @@ -110,7 +114,7 @@ impl Arc { } } -#[unstable] +#[unstable = "waiting on stability of Clone"] impl Clone for Arc { /// Duplicate an atomically reference counted wrapper. 
/// @@ -135,6 +139,7 @@ impl Clone for Arc { } } +#[experimental = "Deref is experimental."] impl Deref for Arc { #[inline] fn deref<'a>(&'a self) -> &'a T { @@ -169,6 +174,7 @@ impl Arc { } #[unsafe_destructor] +#[experimental = "waiting on stability of Drop"] impl Drop for Arc { fn drop(&mut self) { // This structure has #[unsafe_no_drop_flag], so this drop glue may run @@ -212,6 +218,7 @@ impl Drop for Arc { } } +#[experimental = "Weak pointers may not belong in this module."] impl Weak { /// Attempts to upgrade this weak reference to a strong reference. /// @@ -237,7 +244,7 @@ impl Weak { } } -#[unstable] +#[experimental = "Weak pointers may not belong in this module."] impl Clone for Weak { #[inline] fn clone(&self) -> Weak { @@ -248,6 +255,7 @@ impl Clone for Weak { } #[unsafe_destructor] +#[experimental = "Weak pointers may not belong in this module."] impl Drop for Weak { fn drop(&mut self) { // see comments above for why this check is here diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 89f6e934ad259..58278d5664e5e 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -87,10 +87,12 @@ impl Ord for Box { impl Eq for Box {} /// Extension methods for an owning `Any` trait object -#[unstable = "post-DST, the signature of `downcast` will change to take `Box`"] +#[unstable = "post-DST and coherence changes, this will not be a trait but \ + rather a direct `impl` on `Box`"] pub trait BoxAny { /// Returns the boxed value if it is of type `T`, or /// `Err(Self)` if it isn't. + #[unstable = "naming conventions around accessing innards may change"] fn downcast(self) -> Result, Self>; /// Deprecated; this method has been renamed to `downcast`. 
@@ -100,6 +102,7 @@ pub trait BoxAny { } } +#[stable] impl BoxAny for Box { #[inline] fn downcast(self) -> Result, Box> { diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 8d4e788bc8035..b31931c6de3bc 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -379,7 +379,6 @@ impl Drop for Weak { } } -#[unstable] #[experimental = "Weak pointers may not belong in this module."] impl Clone for Weak { #[inline] diff --git a/src/libcore/any.rs b/src/libcore/any.rs index 297da495799e7..1809988847bc7 100644 --- a/src/libcore/any.rs +++ b/src/libcore/any.rs @@ -44,7 +44,7 @@ //! // try to convert our value to a String. If successful, we want to //! // output the String's length as well as its value. If not, it's a //! // different type: just print it out unadorned. -//! match value_any.as_ref::() { +//! match value_any.downcast_ref::() { //! Some(as_string) => { //! println!("String ({}): {}", as_string.len(), as_string); //! } @@ -69,51 +69,72 @@ //! } //! ``` +#![stable] + use mem::{transmute, transmute_copy}; use option::{Option, Some, None}; use raw::TraitObject; use intrinsics::TypeId; /// A type with no inhabitants +#[deprecated = "this type is being removed, define a type locally if \ + necessary"] pub enum Void { } /////////////////////////////////////////////////////////////////////////////// // Any trait /////////////////////////////////////////////////////////////////////////////// -/// The `Any` trait is implemented by all `'static` types, and can be used for dynamic typing +/// The `Any` trait is implemented by all `'static` types, and can be used for +/// dynamic typing /// -/// Every type with no non-`'static` references implements `Any`, so `Any` can be used as a trait -/// object to emulate the effects dynamic typing. -pub trait Any { +/// Every type with no non-`'static` references implements `Any`, so `Any` can +/// be used as a trait object to emulate the effects of dynamic typing. 
+#[stable] +pub trait Any: AnyPrivate {} + +/// An inner trait to ensure that only this module can call `get_type_id()`. +trait AnyPrivate { /// Get the `TypeId` of `self` fn get_type_id(&self) -> TypeId; } -impl Any for T { - /// Get the `TypeId` of `self` - fn get_type_id(&self) -> TypeId { - TypeId::of::() - } +impl AnyPrivate for T { + fn get_type_id(&self) -> TypeId { TypeId::of::() } } +impl Any for T {} + /////////////////////////////////////////////////////////////////////////////// // Extension methods for Any trait objects. // Implemented as three extension traits so that the methods can be generic. /////////////////////////////////////////////////////////////////////////////// /// Extension methods for a referenced `Any` trait object +#[unstable = "this trait will not be necessary once DST lands, it will be a \ + part of `impl Any`"] pub trait AnyRefExt<'a> { /// Returns true if the boxed type is the same as `T` + #[stable] fn is(self) -> bool; /// Returns some reference to the boxed value if it is of type `T`, or /// `None` if it isn't. - fn as_ref(self) -> Option<&'a T>; + #[unstable = "naming conventions around acquiring references may change"] + fn downcast_ref(self) -> Option<&'a T>; + + /// Returns some reference to the boxed value if it is of type `T`, or + /// `None` if it isn't. 
+ #[deprecated = "this function has been renamed to `downcast_ref`"] + fn as_ref(self) -> Option<&'a T> { + self.downcast_ref::() + } } +#[stable] impl<'a> AnyRefExt<'a> for &'a Any { #[inline] + #[stable] fn is(self) -> bool { // Get TypeId of the type this function is instantiated with let t = TypeId::of::(); @@ -126,7 +147,8 @@ impl<'a> AnyRefExt<'a> for &'a Any { } #[inline] - fn as_ref(self) -> Option<&'a T> { + #[unstable = "naming conventions around acquiring references may change"] + fn downcast_ref(self) -> Option<&'a T> { if self.is::() { unsafe { // Get the raw representation of the trait object @@ -142,15 +164,27 @@ impl<'a> AnyRefExt<'a> for &'a Any { } /// Extension methods for a mutable referenced `Any` trait object +#[unstable = "this trait will not be necessary once DST lands, it will be a \ + part of `impl Any`"] pub trait AnyMutRefExt<'a> { /// Returns some mutable reference to the boxed value if it is of type `T`, or /// `None` if it isn't. - fn as_mut(self) -> Option<&'a mut T>; + #[unstable = "naming conventions around acquiring references may change"] + fn downcast_mut(self) -> Option<&'a mut T>; + + /// Returns some mutable reference to the boxed value if it is of type `T`, or + /// `None` if it isn't. + #[deprecated = "this function has been renamed to `downcast_mut`"] + fn as_mut(self) -> Option<&'a mut T> { + self.downcast_mut::() + } } +#[stable] impl<'a> AnyMutRefExt<'a> for &'a mut Any { #[inline] - fn as_mut(self) -> Option<&'a mut T> { + #[unstable = "naming conventions around acquiring references may change"] + fn downcast_mut(self) -> Option<&'a mut T> { if self.is::() { unsafe { // Get the raw representation of the trait object diff --git a/src/libcore/atomics.rs b/src/libcore/atomics.rs index e022fa2c370f2..466a1738e8288 100644 --- a/src/libcore/atomics.rs +++ b/src/libcore/atomics.rs @@ -12,29 +12,29 @@ use intrinsics; use std::kinds::marker; -use ty::Unsafe; +use cell::UnsafeCell; /// An atomic boolean type. 
pub struct AtomicBool { - v: Unsafe, + v: UnsafeCell, nocopy: marker::NoCopy } /// A signed atomic integer type, supporting basic atomic arithmetic operations pub struct AtomicInt { - v: Unsafe, + v: UnsafeCell, nocopy: marker::NoCopy } /// An unsigned atomic integer type, supporting basic atomic arithmetic operations pub struct AtomicUint { - v: Unsafe, + v: UnsafeCell, nocopy: marker::NoCopy } /// An unsafe atomic pointer. Only supports basic atomic operations pub struct AtomicPtr { - p: Unsafe, + p: UnsafeCell, nocopy: marker::NoCopy } @@ -69,17 +69,14 @@ pub enum Ordering { } /// An `AtomicBool` initialized to `false` -pub static INIT_ATOMIC_BOOL : AtomicBool = AtomicBool { v: Unsafe{value: 0, - marker1: marker::InvariantType}, - nocopy: marker::NoCopy }; +pub static INIT_ATOMIC_BOOL: AtomicBool = + AtomicBool { v: UnsafeCell { value: 0 }, nocopy: marker::NoCopy }; /// An `AtomicInt` initialized to `0` -pub static INIT_ATOMIC_INT : AtomicInt = AtomicInt { v: Unsafe{value: 0, - marker1: marker::InvariantType}, - nocopy: marker::NoCopy }; +pub static INIT_ATOMIC_INT: AtomicInt = + AtomicInt { v: UnsafeCell { value: 0 }, nocopy: marker::NoCopy }; /// An `AtomicUint` initialized to `0` -pub static INIT_ATOMIC_UINT : AtomicUint = AtomicUint { v: Unsafe{value: 0, - marker1: marker::InvariantType}, - nocopy: marker::NoCopy }; +pub static INIT_ATOMIC_UINT: AtomicUint = + AtomicUint { v: UnsafeCell { value: 0, }, nocopy: marker::NoCopy }; // NB: Needs to be -1 (0b11111111...) 
to make fetch_nand work correctly static UINT_TRUE: uint = -1; @@ -88,7 +85,7 @@ impl AtomicBool { /// Create a new `AtomicBool` pub fn new(v: bool) -> AtomicBool { let val = if v { UINT_TRUE } else { 0 }; - AtomicBool { v: Unsafe::new(val), nocopy: marker::NoCopy } + AtomicBool { v: UnsafeCell::new(val), nocopy: marker::NoCopy } } /// Load the value @@ -289,7 +286,7 @@ impl AtomicBool { impl AtomicInt { /// Create a new `AtomicInt` pub fn new(v: int) -> AtomicInt { - AtomicInt {v: Unsafe::new(v), nocopy: marker::NoCopy} + AtomicInt {v: UnsafeCell::new(v), nocopy: marker::NoCopy} } /// Load the value @@ -401,7 +398,7 @@ impl AtomicInt { impl AtomicUint { /// Create a new `AtomicUint` pub fn new(v: uint) -> AtomicUint { - AtomicUint { v: Unsafe::new(v), nocopy: marker::NoCopy } + AtomicUint { v: UnsafeCell::new(v), nocopy: marker::NoCopy } } /// Load the value @@ -513,7 +510,7 @@ impl AtomicUint { impl AtomicPtr { /// Create a new `AtomicPtr` pub fn new(p: *mut T) -> AtomicPtr { - AtomicPtr { p: Unsafe::new(p as uint), nocopy: marker::NoCopy } + AtomicPtr { p: UnsafeCell::new(p as uint), nocopy: marker::NoCopy } } /// Load the value diff --git a/src/libcore/bool.rs b/src/libcore/bool.rs index c523cf7843439..9d2ea816fdfd4 100644 --- a/src/libcore/bool.rs +++ b/src/libcore/bool.rs @@ -11,4 +11,6 @@ //! The boolean type #![doc(primitive = "bool")] +#![unstable = "this module is purely for documentation and it will likely be \ + removed from the public api"] diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs index 51b5d0aded800..24ea3480c4397 100644 --- a/src/libcore/cell.rs +++ b/src/libcore/cell.rs @@ -160,12 +160,11 @@ use cmp::PartialEq; use kinds::{marker, Copy}; use ops::{Deref, DerefMut, Drop}; use option::{None, Option, Some}; -use ty::Unsafe; /// A mutable memory location that admits only `Copy` data. 
#[unstable = "likely to be renamed; otherwise stable"] pub struct Cell { - value: Unsafe, + value: UnsafeCell, noshare: marker::NoShare, } @@ -174,7 +173,7 @@ impl Cell { /// Creates a new `Cell` containing the given value. pub fn new(value: T) -> Cell { Cell { - value: Unsafe::new(value), + value: UnsafeCell::new(value), noshare: marker::NoShare, } } @@ -211,7 +210,7 @@ impl PartialEq for Cell { /// A mutable memory location with dynamically checked borrow rules #[unstable = "likely to be renamed; otherwise stable"] pub struct RefCell { - value: Unsafe, + value: UnsafeCell, borrow: Cell, nocopy: marker::NoCopy, noshare: marker::NoShare, @@ -228,7 +227,7 @@ impl RefCell { #[stable] pub fn new(value: T) -> RefCell { RefCell { - value: Unsafe::new(value), + value: UnsafeCell::new(value), borrow: Cell::new(UNUSED), nocopy: marker::NoCopy, noshare: marker::NoShare, @@ -401,3 +400,81 @@ impl<'b, T> DerefMut for RefMut<'b, T> { unsafe { &mut *self._parent.value.get() } } } + +/// The core primitive for interior mutability in Rust. +/// +/// `UnsafeCell` type that wraps a type T and indicates unsafe interior +/// operations on the wrapped type. Types with an `UnsafeCell` field are +/// considered to have an *unsafe interior*. The `UnsafeCell` type is the only +/// legal way to obtain aliasable data that is considered mutable. In general, +/// transmuting an &T type into an &mut T is considered undefined behavior. +/// +/// Although it is possible to put an `UnsafeCell` into a static item, it is +/// not permitted to take the address of the static item if the item is not +/// declared as mutable. This rule exists because immutable static items are +/// stored in read-only memory, and thus any attempt to mutate their interior +/// can cause segfaults. Immutable static items containing `UnsafeCell` +/// instances are still useful as read-only initializers, however, so we do not +/// forbid them altogether. 
+/// +/// Types like `Cell` and `RefCell` use this type to wrap their internal data. +/// +/// `UnsafeCell` doesn't opt-out from any kind, instead, types with an +/// `UnsafeCell` interior are expected to opt-out from kinds themselves. +/// +/// # Example: +/// +/// ```rust +/// use std::cell::UnsafeCell; +/// use std::kinds::marker; +/// +/// struct NotThreadSafe<T> { +/// value: UnsafeCell<T>, +/// marker: marker::NoShare +/// } +/// ``` +/// +/// **NOTE:** `UnsafeCell` fields are public to allow static initializers. It +/// is not recommended to access its fields directly, `get` should be used +/// instead. +#[lang="unsafe"] +#[unstable = "this type may be renamed in the future"] +pub struct UnsafeCell<T> { + /// Wrapped value + /// + /// This field should not be accessed directly, it is made public for static + /// initializers. + #[unstable] + pub value: T, +} + +impl<T> UnsafeCell<T> { + /// Construct a new instance of `UnsafeCell` which will wrap the specified + /// value. + /// + /// All access to the inner value through methods is `unsafe`, and it is + /// highly discouraged to access the fields directly. + #[stable] + pub fn new(value: T) -> UnsafeCell<T> { + UnsafeCell { value: value } + } + + /// Gets a mutable pointer to the wrapped value. + /// + /// This function is unsafe as the pointer returned is an unsafe pointer and + /// no guarantees are made about the aliasing of the pointers being handed + /// out in this or other tasks. + #[inline] + #[unstable = "conventions around acquiring an inner reference are still \ + under development"] + pub unsafe fn get(&self) -> *mut T { &self.value as *const T as *mut T } + + /// Unwraps the value + /// + /// This function is unsafe because there is no guarantee that this or other + /// tasks are currently inspecting the inner value. 
+ #[inline] + #[unstable = "conventions around the name `unwrap` are still under \ + development"] + pub unsafe fn unwrap(self) -> T { self.value } +} diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 7b84c005db548..5277b473828fc 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -98,13 +98,15 @@ pub struct Formatter<'a> { args: &'a [Argument<'a>], } +enum Void {} + /// This struct represents the generic "argument" which is taken by the Xprintf /// family of functions. It contains a function to format the given value. At /// compile time it is ensured that the function and the value have the correct /// types, and then this struct is used to canonicalize arguments to one type. pub struct Argument<'a> { - formatter: extern "Rust" fn(&any::Void, &mut Formatter) -> Result, - value: &'a any::Void, + formatter: extern "Rust" fn(&Void, &mut Formatter) -> Result, + value: &'a Void, } impl<'a> Arguments<'a> { diff --git a/src/libcore/kinds.rs b/src/libcore/kinds.rs index 9a6cdb1c76976..f6a88b3419607 100644 --- a/src/libcore/kinds.rs +++ b/src/libcore/kinds.rs @@ -79,7 +79,7 @@ pub trait Copy { /// else that is not thread-safe) should use the `NoShare` marker type /// (from `std::kinds::marker`) to ensure that the compiler doesn't /// consider the user-defined type to be `Share`. Any types with -/// interior mutability must also use the `std::ty::Unsafe` wrapper +/// interior mutability must also use the `std::cell::UnsafeCell` wrapper /// around the value(s) which can be mutated when behind a `&` /// reference; not doing this is undefined behaviour (for example, /// `transmute`-ing from `&T` to `&mut T` is illegal). 
diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index faa4b75d7faab..2809bda4f6ed6 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -98,12 +98,18 @@ pub mod ptr; pub mod kinds; pub mod ops; -pub mod ty; pub mod cmp; pub mod clone; pub mod default; pub mod collections; +#[deprecated = "all functionality now lives in `std::cell`"] +/// Deprecated module in favor of `std::cell` +pub mod ty { + #[deprecated = "this type has been renamed to `UnsafeCell`"] + pub use Unsafe = cell::UnsafeCell; +} + /* Core types and methods on primitives */ pub mod any; diff --git a/src/libcore/should_not_exist.rs b/src/libcore/should_not_exist.rs deleted file mode 100644 index ed6b73df38d4d..0000000000000 --- a/src/libcore/should_not_exist.rs +++ /dev/null @@ -1,95 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// As noted by this file name, this file should not exist. This file should not -// exist because it performs allocations which libcore is not allowed to do. The -// reason for this file's existence is that the `~[T]` type is a language- -// defined type. Traits are defined in libcore, such as `Clone`, which these -// types need to implement, but the implementation can only be found in -// libcore. -// -// Plan of attack for solving this problem: -// -// 1. Implement DST -// 2. Make `Box` not a language feature -// 3. Move `Box` to a separate crate, liballoc. -// 4. Implement relevant traits in liballoc, not libcore -// -// Currently, no progress has been made on this list. 
- -use clone::Clone; -use collections::Collection; -use finally::try_finally; -use intrinsics; -use iter::{range, Iterator}; -use mem; -use num::{CheckedMul, CheckedAdd}; -use option::{Some, None}; -use ptr::RawPtr; -use ptr; -use raw::Vec; -use slice::ImmutableVector; - -#[allow(ctypes)] -extern { - fn rust_allocate(size: uint, align: uint) -> *u8; - fn rust_deallocate(ptr: *u8, size: uint, align: uint); -} - -unsafe fn alloc(cap: uint) -> *mut Vec<()> { - let cap = cap.checked_add(&mem::size_of::>()).unwrap(); - // this should use the real alignment, but the new representation will take care of that - let ret = rust_allocate(cap, 8) as *mut Vec<()>; - if ret.is_null() { - intrinsics::abort(); - } - (*ret).fill = 0; - (*ret).alloc = cap; - ret -} - -// Arrays - -impl Clone for ~[A] { - #[inline] - fn clone(&self) -> ~[A] { - let len = self.len(); - let data_size = len.checked_mul(&mem::size_of::()).unwrap(); - let size = mem::size_of::>().checked_add(&data_size).unwrap(); - - unsafe { - let ret = alloc(size) as *mut Vec; - - let a_size = mem::size_of::(); - let a_size = if a_size == 0 {1} else {a_size}; - (*ret).fill = len * a_size; - (*ret).alloc = len * a_size; - - let mut i = 0; - let p = &mut (*ret).data as *mut _ as *mut A; - try_finally( - &mut i, (), - |i, ()| while *i < len { - ptr::write( - &mut(*p.offset(*i as int)), - self.unsafe_ref(*i).clone()); - *i += 1; - }, - |i| if *i < len { - // we must be failing, clean up after ourselves - for j in range(0, *i as int) { - ptr::read(&*p.offset(j)); - } - rust_deallocate(ret as *u8, 0, 8); - }); - mem::transmute(ret) - } - } -} diff --git a/src/libcore/tuple/mod.rs b/src/libcore/tuple/mod.rs index 4f34c64de1ba5..ead3564718018 100644 --- a/src/libcore/tuple/mod.rs +++ b/src/libcore/tuple/mod.rs @@ -60,7 +60,10 @@ //! 
``` #![doc(primitive = "tuple")] +#![stable] +#[unstable = "this is just a documentation module and should not be part \ + of the public api"] pub use unit; use clone::Clone; @@ -79,41 +82,51 @@ macro_rules! tuple_impls { )+) => { $( #[allow(missing_doc)] + #[stable] pub trait $Tuple<$($T),+> { - $(fn $valN(self) -> $T;)+ - $(fn $refN<'a>(&'a self) -> &'a $T;)+ - $(fn $mutN<'a>(&'a mut self) -> &'a mut $T;)+ + $( + #[unstable = "may rename pending accessor naming conventions"] + fn $valN(self) -> $T; + #[unstable = "may rename pending accessor naming conventions"] + fn $refN<'a>(&'a self) -> &'a $T; + #[unstable = "may rename pending accessor naming conventions"] + fn $mutN<'a>(&'a mut self) -> &'a mut $T; + )+ } impl<$($T),+> $Tuple<$($T),+> for ($($T,)+) { $( #[inline] #[allow(unused_variable)] + #[unstable = "may rename pending accessor naming conventions"] fn $valN(self) -> $T { let ($($x,)+) = self; $ret } #[inline] #[allow(unused_variable)] + #[unstable = "may rename pending accessor naming conventions"] fn $refN<'a>(&'a self) -> &'a $T { let ($(ref $x,)+) = *self; $ret } #[inline] #[allow(unused_variable)] + #[unstable = "may rename pending accessor naming conventions"] fn $mutN<'a>(&'a mut self) -> &'a mut $T { let ($(ref mut $x,)+) = *self; $ret } )+ } - #[unstable] + #[unstable = "waiting for Clone to stabilize"] impl<$($T:Clone),+> Clone for ($($T,)+) { fn clone(&self) -> ($($T,)+) { ($(self.$refN().clone(),)+) } } + #[unstable = "waiting for PartialEq to stabilize"] impl<$($T:PartialEq),+> PartialEq for ($($T,)+) { #[inline] fn eq(&self, other: &($($T,)+)) -> bool { @@ -125,8 +138,10 @@ macro_rules! tuple_impls { } } + #[unstable = "waiting for Eq to stabilize"] impl<$($T:Eq),+> Eq for ($($T,)+) {} + #[unstable = "waiting for PartialOrd to stabilize"] impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+) { #[inline] fn partial_cmp(&self, other: &($($T,)+)) -> Option { @@ -150,6 +165,7 @@ macro_rules! 
tuple_impls { } } + #[unstable = "waiting for Ord to stabilize"] impl<$($T:Ord),+> Ord for ($($T,)+) { #[inline] fn cmp(&self, other: &($($T,)+)) -> Ordering { @@ -157,6 +173,7 @@ macro_rules! tuple_impls { } } + #[stable] impl<$($T:Default),+> Default for ($($T,)+) { #[inline] fn default() -> ($($T,)+) { diff --git a/src/libcore/tuple/unit.rs b/src/libcore/tuple/unit.rs index a60b3d098d361..7f89f0e5ae399 100644 --- a/src/libcore/tuple/unit.rs +++ b/src/libcore/tuple/unit.rs @@ -9,6 +9,8 @@ // except according to those terms. #![doc(primitive = "unit")] +#![unstable = "this module is purely for documentation and it will likely be \ + removed from the public api"] //! The `()` type, sometimes called "unit" or "nil". //! diff --git a/src/libcore/ty.rs b/src/libcore/ty.rs index 5bdab6a78ca33..f8e03662b00ec 100644 --- a/src/libcore/ty.rs +++ b/src/libcore/ty.rs @@ -11,60 +11,3 @@ //! Types dealing with unsafe actions. use kinds::marker; - -/// Unsafe type that wraps a type T and indicates unsafe interior operations on the -/// wrapped type. Types with an `Unsafe` field are considered to have an *unsafe -/// interior*. The Unsafe type is the only legal way to obtain aliasable data that is -/// considered mutable. In general, transmuting an &T type into an &mut T is considered -/// undefined behavior. -/// -/// Although it is possible to put an Unsafe into static item, it is not permitted to -/// take the address of the static item if the item is not declared as mutable. This rule -/// exists because immutable static items are stored in read-only memory, and thus any -/// attempt to mutate their interior can cause segfaults. Immutable static items containing -/// Unsafe instances are still useful as read-only initializers, however, so we do not -/// forbid them altogether. -/// -/// Types like `Cell` and `RefCell` use this type to wrap their internal data. 
-/// -/// Unsafe doesn't opt-out from any kind, instead, types with an `Unsafe` interior -/// are expected to opt-out from kinds themselves. -/// -/// # Example: -/// -/// ```rust -/// use std::ty::Unsafe; -/// use std::kinds::marker; -/// -/// struct NotThreadSafe { -/// value: Unsafe, -/// marker1: marker::NoShare -/// } -/// ``` -/// -/// **NOTE:** Unsafe fields are public to allow static initializers. It is not recommended -/// to access its fields directly, `get` should be used instead. -#[lang="unsafe"] -pub struct Unsafe { - /// Wrapped value - pub value: T, - - /// Invariance marker - pub marker1: marker::InvariantType -} - -impl Unsafe { - - /// Static constructor - pub fn new(value: T) -> Unsafe { - Unsafe{value: value, marker1: marker::InvariantType} - } - - /// Gets a mutable pointer to the wrapped value - #[inline] - pub unsafe fn get(&self) -> *mut T { &self.value as *const T as *mut T } - - /// Unwraps the value - #[inline] - pub unsafe fn unwrap(self) -> T { self.value } -} diff --git a/src/libnative/io/helper_thread.rs b/src/libnative/io/helper_thread.rs index d18e92866bf7a..8e92aa56d3c8f 100644 --- a/src/libnative/io/helper_thread.rs +++ b/src/libnative/io/helper_thread.rs @@ -26,7 +26,7 @@ use std::mem; use std::rt::bookkeeping; use std::rt::mutex::StaticNativeMutex; use std::rt; -use std::ty::Unsafe; +use std::cell::UnsafeCell; use task; @@ -41,35 +41,26 @@ pub struct Helper { /// Internal lock which protects the remaining fields pub lock: StaticNativeMutex, - // You'll notice that the remaining fields are Unsafe, and this is + // You'll notice that the remaining fields are UnsafeCell, and this is // because all helper thread operations are done through &self, but we need // these to be mutable (once `lock` is held). /// Lazily allocated channel to send messages to the helper thread. 
- pub chan: Unsafe<*mut Sender>, + pub chan: UnsafeCell<*mut Sender>, /// OS handle used to wake up a blocked helper thread - pub signal: Unsafe, + pub signal: UnsafeCell, /// Flag if this helper thread has booted and been initialized yet. - pub initialized: Unsafe, + pub initialized: UnsafeCell, } macro_rules! helper_init( (static mut $name:ident: Helper<$m:ty>) => ( static mut $name: Helper<$m> = Helper { lock: ::std::rt::mutex::NATIVE_MUTEX_INIT, - chan: ::std::ty::Unsafe { - value: 0 as *mut Sender<$m>, - marker1: ::std::kinds::marker::InvariantType, - }, - signal: ::std::ty::Unsafe { - value: 0, - marker1: ::std::kinds::marker::InvariantType, - }, - initialized: ::std::ty::Unsafe { - value: false, - marker1: ::std::kinds::marker::InvariantType, - }, + chan: ::std::cell::UnsafeCell { value: 0 as *mut Sender<$m> }, + signal: ::std::cell::UnsafeCell { value: 0 }, + initialized: ::std::cell::UnsafeCell { value: false }, }; ) ) diff --git a/src/librustc/middle/typeck/variance.rs b/src/librustc/middle/typeck/variance.rs index d230b08096641..4a8bc97183ace 100644 --- a/src/librustc/middle/typeck/variance.rs +++ b/src/librustc/middle/typeck/variance.rs @@ -268,7 +268,7 @@ struct TermsContext<'a> { inferred_infos: Vec> , } -#[deriving(Show)] +#[deriving(Show, PartialEq)] enum ParamKind { TypeParam, RegionParam @@ -412,6 +412,7 @@ struct ConstraintContext<'a> { invariant_lang_items: [Option, ..2], covariant_lang_items: [Option, ..2], contravariant_lang_items: [Option, ..2], + unsafe_lang_item: Option, // These are pointers to common `ConstantTerm` instances covariant: VarianceTermPtr<'a>, @@ -451,6 +452,8 @@ fn add_constraints_from_crate<'a>(terms_cx: TermsContext<'a>, invariant_lang_items[RegionParam as uint] = terms_cx.tcx.lang_items.invariant_lifetime(); + let unsafe_lang_item = terms_cx.tcx.lang_items.unsafe_type(); + let covariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Covariant)); let contravariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Contravariant)); let 
invariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Invariant)); @@ -461,6 +464,7 @@ fn add_constraints_from_crate<'a>(terms_cx: TermsContext<'a>, invariant_lang_items: invariant_lang_items, covariant_lang_items: covariant_lang_items, contravariant_lang_items: contravariant_lang_items, + unsafe_lang_item: unsafe_lang_item, covariant: covariant, contravariant: contravariant, @@ -637,6 +641,8 @@ impl<'a> ConstraintContext<'a> { self.covariant } else if self.contravariant_lang_items[kind as uint] == Some(item_def_id) { self.contravariant + } else if kind == TypeParam && Some(item_def_id) == self.unsafe_lang_item { + self.invariant } else if param_def_id.krate == ast::LOCAL_CRATE { // Parameter on an item defined within current crate: // variance not yet inferred, so return a symbolic diff --git a/src/librustrt/exclusive.rs b/src/librustrt/exclusive.rs index 62313965768a6..179d050f598bf 100644 --- a/src/librustrt/exclusive.rs +++ b/src/librustrt/exclusive.rs @@ -10,7 +10,7 @@ use core::prelude::*; -use core::ty::Unsafe; +use core::cell::UnsafeCell; use mutex; /// An OS mutex over some data. @@ -23,7 +23,7 @@ use mutex; /// > as part of `libsync` should almost always be favored. 
pub struct Exclusive { lock: mutex::NativeMutex, - data: Unsafe, + data: UnsafeCell, } /// An RAII guard returned via `lock` @@ -39,7 +39,7 @@ impl Exclusive { pub fn new(user_data: T) -> Exclusive { Exclusive { lock: unsafe { mutex::NativeMutex::new() }, - data: Unsafe::new(user_data), + data: UnsafeCell::new(user_data), } } diff --git a/src/librustrt/mutex.rs b/src/librustrt/mutex.rs index 6950d987d2f27..c999a08eb93b6 100644 --- a/src/librustrt/mutex.rs +++ b/src/librustrt/mutex.rs @@ -341,8 +341,7 @@ mod imp { use libc; use self::os::{PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER, pthread_mutex_t, pthread_cond_t}; - use core::ty::Unsafe; - use core::kinds::marker; + use core::cell::UnsafeCell; type pthread_mutexattr_t = libc::c_void; type pthread_condattr_t = libc::c_void; @@ -466,19 +465,13 @@ mod imp { } pub struct Mutex { - lock: Unsafe, - cond: Unsafe, + lock: UnsafeCell, + cond: UnsafeCell, } pub static MUTEX_INIT: Mutex = Mutex { - lock: Unsafe { - value: PTHREAD_MUTEX_INITIALIZER, - marker1: marker::InvariantType, - }, - cond: Unsafe { - value: PTHREAD_COND_INITIALIZER, - marker1: marker::InvariantType, - }, + lock: UnsafeCell { value: PTHREAD_MUTEX_INITIALIZER }, + cond: UnsafeCell { value: PTHREAD_COND_INITIALIZER }, }; impl Mutex { @@ -487,8 +480,8 @@ mod imp { // is better to avoid initialization of potentially // opaque OS data before it landed let m = Mutex { - lock: Unsafe::new(PTHREAD_MUTEX_INITIALIZER), - cond: Unsafe::new(PTHREAD_COND_INITIALIZER), + lock: UnsafeCell::new(PTHREAD_MUTEX_INITIALIZER), + cond: UnsafeCell::new(PTHREAD_COND_INITIALIZER), }; return m; diff --git a/src/librustuv/access.rs b/src/librustuv/access.rs index bcbcde3fba515..9bd8af6419e0b 100644 --- a/src/librustuv/access.rs +++ b/src/librustuv/access.rs @@ -18,12 +18,12 @@ use alloc::arc::Arc; use std::mem; use std::rt::local::Local; use std::rt::task::{BlockedTask, Task}; -use std::ty::Unsafe; +use std::cell::UnsafeCell; use homing::HomingMissile; pub struct Access { 
- inner: Arc>, + inner: Arc>, } pub struct Guard<'a> { @@ -40,7 +40,7 @@ struct Inner { impl Access { pub fn new() -> Access { Access { - inner: Arc::new(Unsafe::new(Inner { + inner: Arc::new(UnsafeCell::new(Inner { queue: vec![], held: false, closed: false, diff --git a/src/librustuv/rc.rs b/src/librustuv/rc.rs index 2a1a6b9f26d47..7016ece642725 100644 --- a/src/librustuv/rc.rs +++ b/src/librustuv/rc.rs @@ -17,16 +17,16 @@ /// should suffice. use alloc::arc::Arc; -use std::ty::Unsafe; +use std::cell::UnsafeCell; pub struct Refcount { - rc: Arc>, + rc: Arc>, } impl Refcount { /// Creates a new refcount of 1 pub fn new() -> Refcount { - Refcount { rc: Arc::new(Unsafe::new(1)) } + Refcount { rc: Arc::new(UnsafeCell::new(1)) } } fn increment(&self) { diff --git a/src/libsync/comm/mod.rs b/src/libsync/comm/mod.rs index 2aec39521255e..eff4cea1c43f0 100644 --- a/src/libsync/comm/mod.rs +++ b/src/libsync/comm/mod.rs @@ -324,7 +324,7 @@ use alloc::boxed::Box; use core::cell::Cell; use core::kinds::marker; use core::mem; -use core::ty::Unsafe; +use core::cell::UnsafeCell; use rustrt::local::Local; use rustrt::task::{Task, BlockedTask}; @@ -372,7 +372,7 @@ static RESCHED_FREQ: int = 256; /// one task #[unstable] pub struct Receiver { - inner: Unsafe>, + inner: UnsafeCell>, receives: Cell, // can't share in an arc marker: marker::NoShare, @@ -390,7 +390,7 @@ pub struct Messages<'a, T> { /// owned by one task, but it can be cloned to send to other tasks. #[unstable] pub struct Sender { - inner: Unsafe>, + inner: UnsafeCell>, sends: Cell, // can't share in an arc marker: marker::NoShare, @@ -400,7 +400,7 @@ pub struct Sender { /// owned by one task, but it can be cloned to send to other tasks. 
#[unstable = "this type may be renamed, but it will always exist"] pub struct SyncSender { - inner: Arc>>, + inner: Arc>>, // can't share in an arc marker: marker::NoShare, } @@ -436,15 +436,15 @@ pub enum TrySendError { } enum Flavor { - Oneshot(Arc>>), - Stream(Arc>>), - Shared(Arc>>), - Sync(Arc>>), + Oneshot(Arc>>), + Stream(Arc>>), + Shared(Arc>>), + Sync(Arc>>), } #[doc(hidden)] trait UnsafeFlavor { - fn inner_unsafe<'a>(&'a self) -> &'a Unsafe>; + fn inner_unsafe<'a>(&'a self) -> &'a UnsafeCell>; unsafe fn mut_inner<'a>(&'a self) -> &'a mut Flavor { &mut *self.inner_unsafe().get() } @@ -453,12 +453,12 @@ trait UnsafeFlavor { } } impl UnsafeFlavor for Sender { - fn inner_unsafe<'a>(&'a self) -> &'a Unsafe> { + fn inner_unsafe<'a>(&'a self) -> &'a UnsafeCell> { &self.inner } } impl UnsafeFlavor for Receiver { - fn inner_unsafe<'a>(&'a self) -> &'a Unsafe> { + fn inner_unsafe<'a>(&'a self) -> &'a UnsafeCell> { &self.inner } } @@ -486,7 +486,7 @@ impl UnsafeFlavor for Receiver { /// ``` #[unstable] pub fn channel() -> (Sender, Receiver) { - let a = Arc::new(Unsafe::new(oneshot::Packet::new())); + let a = Arc::new(UnsafeCell::new(oneshot::Packet::new())); (Sender::new(Oneshot(a.clone())), Receiver::new(Oneshot(a))) } @@ -524,7 +524,7 @@ pub fn channel() -> (Sender, Receiver) { #[unstable = "this function may be renamed to more accurately reflect the type \ of channel that is is creating"] pub fn sync_channel(bound: uint) -> (SyncSender, Receiver) { - let a = Arc::new(Unsafe::new(sync::Packet::new(bound))); + let a = Arc::new(UnsafeCell::new(sync::Packet::new(bound))); (SyncSender::new(a.clone()), Receiver::new(Sync(a))) } @@ -534,7 +534,11 @@ pub fn sync_channel(bound: uint) -> (SyncSender, Receiver) { impl Sender { fn new(inner: Flavor) -> Sender { - Sender { inner: Unsafe::new(inner), sends: Cell::new(0), marker: marker::NoShare } + Sender { + inner: UnsafeCell::new(inner), + sends: Cell::new(0), + marker: marker::NoShare, + } } /// Sends a value along this 
channel to be received by the corresponding @@ -618,7 +622,7 @@ impl Sender { if !(*p).sent() { return (*p).send(t); } else { - let a = Arc::new(Unsafe::new(stream::Packet::new())); + let a = Arc::new(UnsafeCell::new(stream::Packet::new())); match (*p).upgrade(Receiver::new(Stream(a.clone()))) { oneshot::UpSuccess => { let ret = (*a.get()).send(t); @@ -655,7 +659,7 @@ impl Clone for Sender { fn clone(&self) -> Sender { let (packet, sleeper) = match *unsafe { self.inner() } { Oneshot(ref p) => { - let a = Arc::new(Unsafe::new(shared::Packet::new())); + let a = Arc::new(UnsafeCell::new(shared::Packet::new())); unsafe { (*a.get()).postinit_lock(); match (*p.get()).upgrade(Receiver::new(Shared(a.clone()))) { @@ -665,7 +669,7 @@ impl Clone for Sender { } } Stream(ref p) => { - let a = Arc::new(Unsafe::new(shared::Packet::new())); + let a = Arc::new(UnsafeCell::new(shared::Packet::new())); unsafe { (*a.get()).postinit_lock(); match (*p.get()).upgrade(Receiver::new(Shared(a.clone()))) { @@ -708,7 +712,7 @@ impl Drop for Sender { //////////////////////////////////////////////////////////////////////////////// impl SyncSender { - fn new(inner: Arc>>) -> SyncSender { + fn new(inner: Arc>>) -> SyncSender { SyncSender { inner: inner, marker: marker::NoShare } } @@ -797,7 +801,7 @@ impl Drop for SyncSender { impl Receiver { fn new(inner: Flavor) -> Receiver { - Receiver { inner: Unsafe::new(inner), receives: Cell::new(0), marker: marker::NoShare } + Receiver { inner: UnsafeCell::new(inner), receives: Cell::new(0), marker: marker::NoShare } } /// Blocks waiting for a value on this receiver diff --git a/src/libsync/comm/sync.rs b/src/libsync/comm/sync.rs index 1d5a7d6ed9f58..e872952d9ee60 100644 --- a/src/libsync/comm/sync.rs +++ b/src/libsync/comm/sync.rs @@ -39,7 +39,7 @@ use alloc::boxed::Box; use collections::Vec; use collections::Collection; use core::mem; -use core::ty::Unsafe; +use core::cell::UnsafeCell; use rustrt::local::Local; use rustrt::mutex::{NativeMutex, 
LockGuard}; use rustrt::task::{Task, BlockedTask}; @@ -53,7 +53,7 @@ pub struct Packet { /// The state field is protected by this mutex lock: NativeMutex, - state: Unsafe>, + state: UnsafeCell>, } struct State { @@ -133,7 +133,7 @@ impl Packet { Packet { channels: atomics::AtomicUint::new(1), lock: unsafe { NativeMutex::new() }, - state: Unsafe::new(State { + state: UnsafeCell::new(State { disconnected: false, blocker: NoneBlocked, cap: cap, diff --git a/src/libsync/lock.rs b/src/libsync/lock.rs index 1d119bafea199..e8418f9668f2a 100644 --- a/src/libsync/lock.rs +++ b/src/libsync/lock.rs @@ -21,7 +21,7 @@ use core::prelude::*; -use core::ty::Unsafe; +use core::cell::UnsafeCell; use rustrt::local::Local; use rustrt::task::Task; @@ -174,8 +174,8 @@ impl<'a> Condvar<'a> { /// ``` pub struct Mutex { lock: raw::Mutex, - failed: Unsafe, - data: Unsafe, + failed: UnsafeCell, + data: UnsafeCell, } /// An guard which is created by locking a mutex. Through this guard the @@ -203,8 +203,8 @@ impl Mutex { pub fn new_with_condvars(user_data: T, num_condvars: uint) -> Mutex { Mutex { lock: raw::Mutex::new_with_condvars(num_condvars), - failed: Unsafe::new(false), - data: Unsafe::new(user_data), + failed: UnsafeCell::new(false), + data: UnsafeCell::new(user_data), } } @@ -274,8 +274,8 @@ impl<'a, T: Send> DerefMut for MutexGuard<'a, T> { /// ``` pub struct RWLock { lock: raw::RWLock, - failed: Unsafe, - data: Unsafe, + failed: UnsafeCell, + data: UnsafeCell, } /// A guard which is created by locking an rwlock in write mode. 
Through this @@ -309,8 +309,8 @@ impl RWLock { pub fn new_with_condvars(user_data: T, num_condvars: uint) -> RWLock { RWLock { lock: raw::RWLock::new_with_condvars(num_condvars), - failed: Unsafe::new(false), - data: Unsafe::new(user_data), + failed: UnsafeCell::new(false), + data: UnsafeCell::new(user_data), } } diff --git a/src/libsync/mpmc_bounded_queue.rs b/src/libsync/mpmc_bounded_queue.rs index 7343838f19e61..d54186dc22103 100644 --- a/src/libsync/mpmc_bounded_queue.rs +++ b/src/libsync/mpmc_bounded_queue.rs @@ -35,7 +35,7 @@ use core::prelude::*; use alloc::arc::Arc; use collections::Vec; use core::num::next_power_of_two; -use core::ty::Unsafe; +use core::cell::UnsafeCell; use atomics::{AtomicUint,Relaxed,Release,Acquire}; @@ -46,7 +46,7 @@ struct Node { struct State { pad0: [u8, ..64], - buffer: Vec>>, + buffer: Vec>>, mask: uint, pad1: [u8, ..64], enqueue_pos: AtomicUint, @@ -72,7 +72,7 @@ impl State { capacity }; let buffer = Vec::from_fn(capacity, |i| { - Unsafe::new(Node { sequence:AtomicUint::new(i), value: None }) + UnsafeCell::new(Node { sequence:AtomicUint::new(i), value: None }) }); State{ pad0: [0, ..64], diff --git a/src/libsync/mpsc_intrusive.rs b/src/libsync/mpsc_intrusive.rs index 2b6886ab7f434..11f124293b124 100644 --- a/src/libsync/mpsc_intrusive.rs +++ b/src/libsync/mpsc_intrusive.rs @@ -39,7 +39,7 @@ use core::prelude::*; use core::atomics; use core::mem; -use core::ty::Unsafe; +use core::cell::UnsafeCell; // NB: all links are done as AtomicUint instead of AtomicPtr to allow for static // initialization. 
@@ -55,7 +55,7 @@ pub struct DummyNode { pub struct Queue { pub head: atomics::AtomicUint, - pub tail: Unsafe<*mut Node>, + pub tail: UnsafeCell<*mut Node>, pub stub: DummyNode, } @@ -63,7 +63,7 @@ impl Queue { pub fn new() -> Queue { Queue { head: atomics::AtomicUint::new(0), - tail: Unsafe::new(0 as *mut Node), + tail: UnsafeCell::new(0 as *mut Node), stub: DummyNode { next: atomics::AtomicUint::new(0), }, diff --git a/src/libsync/mpsc_queue.rs b/src/libsync/mpsc_queue.rs index 759695fe5b6dd..4f5dd07a6e59e 100644 --- a/src/libsync/mpsc_queue.rs +++ b/src/libsync/mpsc_queue.rs @@ -44,7 +44,7 @@ use core::prelude::*; use alloc::boxed::Box; use core::mem; -use core::ty::Unsafe; +use core::cell::UnsafeCell; use atomics::{AtomicPtr, Release, Acquire, AcqRel, Relaxed}; @@ -71,7 +71,7 @@ struct Node { /// popper at a time (many pushers are allowed). pub struct Queue { head: AtomicPtr>, - tail: Unsafe<*mut Node>, + tail: UnsafeCell<*mut Node>, } impl Node { @@ -90,7 +90,7 @@ impl Queue { let stub = unsafe { Node::new(None) }; Queue { head: AtomicPtr::new(stub), - tail: Unsafe::new(stub), + tail: UnsafeCell::new(stub), } } diff --git a/src/libsync/mutex.rs b/src/libsync/mutex.rs index 990d743465d69..1aa84e8f8d149 100644 --- a/src/libsync/mutex.rs +++ b/src/libsync/mutex.rs @@ -61,9 +61,8 @@ use core::prelude::*; use alloc::boxed::Box; use core::atomics; -use core::kinds::marker; use core::mem; -use core::ty::Unsafe; +use core::cell::UnsafeCell; use rustrt::local::Local; use rustrt::mutex; use rustrt::task::{BlockedTask, Task}; @@ -143,11 +142,11 @@ pub struct StaticMutex { lock: mutex::StaticNativeMutex, /// Type of locking operation currently on this mutex - flavor: Unsafe, + flavor: UnsafeCell, /// uint-cast of the green thread waiting for this mutex - green_blocker: Unsafe, + green_blocker: UnsafeCell, /// uint-cast of the native thread waiting for this mutex - native_blocker: Unsafe, + native_blocker: UnsafeCell, /// A concurrent mpsc queue used by green threads, 
along with a count used /// to figure out when to dequeue and enqueue. @@ -167,16 +166,13 @@ pub struct Guard<'a> { pub static MUTEX_INIT: StaticMutex = StaticMutex { lock: mutex::NATIVE_MUTEX_INIT, state: atomics::INIT_ATOMIC_UINT, - flavor: Unsafe { value: Unlocked, marker1: marker::InvariantType }, - green_blocker: Unsafe { value: 0, marker1: marker::InvariantType }, - native_blocker: Unsafe { value: 0, marker1: marker::InvariantType }, + flavor: UnsafeCell { value: Unlocked }, + green_blocker: UnsafeCell { value: 0 }, + native_blocker: UnsafeCell { value: 0 }, green_cnt: atomics::INIT_ATOMIC_UINT, q: q::Queue { head: atomics::INIT_ATOMIC_UINT, - tail: Unsafe { - value: 0 as *mut q::Node, - marker1: marker::InvariantType, - }, + tail: UnsafeCell { value: 0 as *mut q::Node }, stub: q::DummyNode { next: atomics::INIT_ATOMIC_UINT, } @@ -467,9 +463,9 @@ impl Mutex { Mutex { lock: box StaticMutex { state: atomics::AtomicUint::new(0), - flavor: Unsafe::new(Unlocked), - green_blocker: Unsafe::new(0), - native_blocker: Unsafe::new(0), + flavor: UnsafeCell::new(Unlocked), + green_blocker: UnsafeCell::new(0), + native_blocker: UnsafeCell::new(0), green_cnt: atomics::AtomicUint::new(0), q: q::Queue::new(), lock: unsafe { mutex::StaticNativeMutex::new() }, diff --git a/src/libsync/raw.rs b/src/libsync/raw.rs index cb047798946bd..e7a2d3e063996 100644 --- a/src/libsync/raw.rs +++ b/src/libsync/raw.rs @@ -21,7 +21,7 @@ use core::atomics; use core::finally::Finally; use core::kinds::marker; use core::mem; -use core::ty::Unsafe; +use core::cell::UnsafeCell; use collections::{Vec, MutableSeq}; use mutex; @@ -91,7 +91,7 @@ struct Sem { // (for good reason). We have an internal invariant on this semaphore, // however, that the queue is never accessed outside of a locked // context. 
- inner: Unsafe> + inner: UnsafeCell> } struct SemInner { @@ -113,7 +113,7 @@ impl Sem { "semaphores cannot be initialized with negative values"); Sem { lock: mutex::Mutex::new(), - inner: Unsafe::new(SemInner { + inner: UnsafeCell::new(SemInner { waiters: WaitQueue::new(), count: count, blocked: q, diff --git a/src/libsync/spsc_queue.rs b/src/libsync/spsc_queue.rs index cf4d3222ed0ed..0cda1098ab447 100644 --- a/src/libsync/spsc_queue.rs +++ b/src/libsync/spsc_queue.rs @@ -39,7 +39,7 @@ use core::prelude::*; use alloc::boxed::Box; use core::mem; -use core::ty::Unsafe; +use core::cell::UnsafeCell; use atomics::{AtomicPtr, Relaxed, AtomicUint, Acquire, Release}; @@ -58,13 +58,13 @@ struct Node { /// time. pub struct Queue { // consumer fields - tail: Unsafe<*mut Node>, // where to pop from + tail: UnsafeCell<*mut Node>, // where to pop from tail_prev: AtomicPtr>, // where to pop from // producer fields - head: Unsafe<*mut Node>, // where to push to - first: Unsafe<*mut Node>, // where to get new nodes from - tail_copy: Unsafe<*mut Node>, // between first/tail + head: UnsafeCell<*mut Node>, // where to push to + first: UnsafeCell<*mut Node>, // where to get new nodes from + tail_copy: UnsafeCell<*mut Node>, // between first/tail // Cache maintenance fields. Additions and subtractions are stored // separately in order to allow them to use nonatomic addition/subtraction. 
@@ -103,11 +103,11 @@ impl Queue { let n2 = Node::new(); unsafe { (*n1).next.store(n2, Relaxed) } Queue { - tail: Unsafe::new(n2), + tail: UnsafeCell::new(n2), tail_prev: AtomicPtr::new(n1), - head: Unsafe::new(n2), - first: Unsafe::new(n1), - tail_copy: Unsafe::new(n1), + head: UnsafeCell::new(n2), + first: UnsafeCell::new(n1), + tail_copy: UnsafeCell::new(n1), cache_bound: bound, cache_additions: AtomicUint::new(0), cache_subtractions: AtomicUint::new(0), diff --git a/src/test/compile-fail/borrowck-forbid-static-unsafe-interior.rs b/src/test/compile-fail/borrowck-forbid-static-unsafe-interior.rs index 1c7516ef7e2bf..5928ded1e39c7 100644 --- a/src/test/compile-fail/borrowck-forbid-static-unsafe-interior.rs +++ b/src/test/compile-fail/borrowck-forbid-static-unsafe-interior.rs @@ -12,10 +12,10 @@ // static items with usnafe interior. use std::kinds::marker; -use std::ty::Unsafe; +use std::cell::UnsafeCell; struct MyUnsafe { - value: Unsafe + value: UnsafeCell } impl MyUnsafe { @@ -24,23 +24,23 @@ impl MyUnsafe { enum UnsafeEnum { VariantSafe, - VariantUnsafe(Unsafe) + VariantUnsafe(UnsafeCell) } static STATIC1: UnsafeEnum = VariantSafe; -static STATIC2: Unsafe = Unsafe{value: 1, marker1: marker::InvariantType}; +static STATIC2: UnsafeCell = UnsafeCell { value: 1 }; static STATIC3: MyUnsafe = MyUnsafe{value: STATIC2}; -static STATIC4: &'static Unsafe = &STATIC2; +static STATIC4: &'static UnsafeCell = &STATIC2; //~^ ERROR borrow of immutable static items with unsafe interior is not allowed struct Wrap { value: T } -static UNSAFE: Unsafe = Unsafe{value: 1, marker1: marker::InvariantType}; -static WRAPPED_UNSAFE: Wrap<&'static Unsafe> = Wrap { value: &UNSAFE }; +static UNSAFE: UnsafeCell = UnsafeCell{value: 1}; +static WRAPPED_UNSAFE: Wrap<&'static UnsafeCell> = Wrap { value: &UNSAFE }; //~^ ERROR borrow of immutable static items with unsafe interior is not allowed fn main() { diff --git a/src/test/compile-fail/typeck-unsafe-always-share.rs 
b/src/test/compile-fail/typeck-unsafe-always-share.rs index 72ef4a03eab72..a57654b029d16 100644 --- a/src/test/compile-fail/typeck-unsafe-always-share.rs +++ b/src/test/compile-fail/typeck-unsafe-always-share.rs @@ -8,15 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// Verify that Unsafe is *always* share regardles `T` is share. +// Verify that UnsafeCell is *always* share regardless of whether `T` is share. // ignore-tidy-linelength -use std::ty::Unsafe; +use std::cell::UnsafeCell; use std::kinds::marker; struct MyShare { - u: Unsafe + u: UnsafeCell } struct NoShare { @@ -28,10 +28,10 @@ fn test(s: T){ } fn main() { - let us = Unsafe::new(MyShare{u: Unsafe::new(0i)}); + let us = UnsafeCell::new(MyShare{u: UnsafeCell::new(0i)}); test(us); - let uns = Unsafe::new(NoShare{m: marker::NoShare}); + let uns = UnsafeCell::new(NoShare{m: marker::NoShare}); test(uns); let ms = MyShare{u: uns};