Generalize SizeEq to SizeFrom, support shrinking #2580

Open · wants to merge 2 commits into base: main
64 changes: 38 additions & 26 deletions src/impls.rs
@@ -105,11 +105,13 @@ assert_unaligned!(bool);
// pattern 0x01.
const _: () = unsafe {
unsafe_impl!(=> TryFromBytes for bool; |byte| {
let byte = byte.transmute::<u8, invariant::Valid, _>();
let mut byte = byte;
let byte = byte.reborrow().into_shared().transmute::<u8, invariant::Valid, _>();
*byte.unaligned_as_ref() < 2
})
};
impl_size_eq!(bool, u8);

impl_size_from!(bool, u8);

// SAFETY:
// - `Immutable`: `char` self-evidently does not contain any `UnsafeCell`s.
@@ -134,13 +136,14 @@ const _: () = unsafe { unsafe_impl!(char: Immutable, FromZeros, IntoBytes) };
// `char`.
const _: () = unsafe {
unsafe_impl!(=> TryFromBytes for char; |c| {
let c = c.transmute::<Unalign<u32>, invariant::Valid, _>();
let c = c.read_unaligned().into_inner();
let mut c = c;
let c = c.reborrow().into_shared().transmute::<Unalign<u32>, invariant::Valid, _>();
let c = c.read_unaligned::<BecauseImmutable>().into_inner();
char::from_u32(c).is_some()
});
};

impl_size_eq!(char, Unalign<u32>);
impl_size_from!(char, Unalign<u32>);

// SAFETY: Per the Reference [1], `str` has the same layout as `[u8]`.
// - `Immutable`: `[u8]` does not contain any `UnsafeCell`s.
@@ -167,22 +170,23 @@ const _: () = unsafe { unsafe_impl!(str: Immutable, FromZeros, IntoBytes, Unalig
// Returns `Err` if the slice is not UTF-8.
const _: () = unsafe {
unsafe_impl!(=> TryFromBytes for str; |c| {
let c = c.transmute::<[u8], invariant::Valid, _>();
let mut c = c;
let c = c.reborrow().into_shared().transmute::<[u8], invariant::Valid, _>();
let c = c.unaligned_as_ref();
core::str::from_utf8(c).is_ok()
})
};

impl_size_eq!(str, [u8]);
impl_size_from!(str, [u8]);

macro_rules! unsafe_impl_try_from_bytes_for_nonzero {
($($nonzero:ident[$prim:ty]),*) => {
$(
unsafe_impl!(=> TryFromBytes for $nonzero; |n| {
impl_size_eq!($nonzero, Unalign<$prim>);

let n = n.transmute::<Unalign<$prim>, invariant::Valid, _>();
$nonzero::new(n.read_unaligned().into_inner()).is_some()
impl_size_from!($nonzero, Unalign<$prim>);
let mut n = n;
let n = n.reborrow().into_shared().transmute::<Unalign<$prim>, invariant::Valid, _>();
$nonzero::new(n.read_unaligned::<BecauseImmutable>().into_inner()).is_some()
});
)*
}
@@ -396,53 +400,59 @@ mod atomics {
($($($tyvar:ident)? => $atomic:ty [$prim:ty]),*) => {{
crate::util::macros::__unsafe();

use core::cell::UnsafeCell;
use crate::pointer::{PtrInner, SizeEq, TransmuteFrom, invariant::Valid};
use core::{cell::UnsafeCell};
use crate::pointer::{TransmuteFrom, PtrInner, SizeFrom, invariant::Valid};

$(
// SAFETY: The caller promised that `$atomic` and `$prim` have
// the same size and bit validity.
// the same size and bit validity. As a result of size equality,
// both impls of `SizeFrom::cast_from_raw` preserve referent
// size exactly.
unsafe impl<$($tyvar)?> TransmuteFrom<$atomic, Valid, Valid> for $prim {}
// SAFETY: The caller promised that `$atomic` and `$prim` have
// the same size and bit validity.
// the same size and bit validity. As a result of size equality,
// both impls of `SizeFrom::cast_from_raw` preserve referent
// size exactly.
unsafe impl<$($tyvar)?> TransmuteFrom<$prim, Valid, Valid> for $atomic {}

// SAFETY: The caller promised that `$atomic` and `$prim` have
// the same size.
unsafe impl<$($tyvar)?> SizeEq<$atomic> for $prim {
// SAFETY: See inline safety comment.
unsafe impl<$($tyvar)?> SizeFrom<$atomic> for $prim {
#[inline]
fn cast_from_raw(a: PtrInner<'_, $atomic>) -> PtrInner<'_, $prim> {
// SAFETY: The caller promised that `$atomic` and
// `$prim` have the same size. Thus, this cast preserves
// SAFETY: The caller promised that `$atomic` and `$prim`
// have the same size. Thus, this cast preserves
// address, referent size, and provenance.
unsafe { cast!(a) }
}
}
// SAFETY: See previous safety comment.
unsafe impl<$($tyvar)?> SizeEq<$prim> for $atomic {
unsafe impl<$($tyvar)?> SizeFrom<$prim> for $atomic {
#[inline]
fn cast_from_raw(p: PtrInner<'_, $prim>) -> PtrInner<'_, $atomic> {
// SAFETY: See previous safety comment.
unsafe { cast!(p) }
}
}
// SAFETY: The caller promised that `$atomic` and `$prim` have
// the same size. `UnsafeCell<T>` has the same size as `T` [1].

// SAFETY: The caller promised that `$atomic` and `$prim`
// have the same size. `UnsafeCell<T>` has the same size as
// `T` [1]. Thus, this cast preserves address, referent
// size, and provenance.
//
// [1] Per https://doc.rust-lang.org/1.85.0/std/cell/struct.UnsafeCell.html#memory-layout:
//
// `UnsafeCell<T>` has the same in-memory representation as
// its inner type `T`. A consequence of this guarantee is that
// it is possible to convert between `T` and `UnsafeCell<T>`.
unsafe impl<$($tyvar)?> SizeEq<$atomic> for UnsafeCell<$prim> {
unsafe impl<$($tyvar)?> SizeFrom<$atomic> for UnsafeCell<$prim> {
#[inline]
fn cast_from_raw(a: PtrInner<'_, $atomic>) -> PtrInner<'_, UnsafeCell<$prim>> {
// SAFETY: See previous safety comment.
unsafe { cast!(a) }
}
}
// SAFETY: See previous safety comment.
unsafe impl<$($tyvar)?> SizeEq<UnsafeCell<$prim>> for $atomic {
unsafe impl<$($tyvar)?> SizeFrom<UnsafeCell<$prim>> for $atomic {
#[inline]
fn cast_from_raw(p: PtrInner<'_, UnsafeCell<$prim>>) -> PtrInner<'_, $atomic> {
// SAFETY: See previous safety comment.
@@ -452,7 +462,9 @@

// SAFETY: The caller promised that `$atomic` and `$prim` have
// the same bit validity. `UnsafeCell<T>` has the same bit
// validity as `T` [1].
// validity as `T` [1]. `UnsafeCell<T>` also has the same size
// as `T` [1], and so both impls of `SizeFrom::cast_from_raw`
// preserve referent size exactly.
//
// [1] Per https://doc.rust-lang.org/1.85.0/std/cell/struct.UnsafeCell.html#memory-layout:
//
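The safety comments in the atomics hunk above lean on two layout facts: `UnsafeCell<T>` has the same size as `T`, and each standard atomic has the same size as its primitive. A standalone compile-time sanity check of those guarantees (not part of this diff, using only `core`) might look like:

use core::cell::UnsafeCell;
use core::mem::size_of;
use core::sync::atomic::{AtomicBool, AtomicU32};

// Evaluated at compile time; any mismatch fails the build.
const _: () = {
    // `UnsafeCell<T>` has the same in-memory representation as `T`.
    assert!(size_of::<UnsafeCell<u32>>() == size_of::<u32>());
    // The standard atomics document the same size as their primitive type.
    assert!(size_of::<AtomicU32>() == size_of::<u32>());
    assert!(size_of::<AtomicBool>() == size_of::<bool>());
};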
6 changes: 3 additions & 3 deletions src/layout.rs
@@ -612,15 +612,15 @@ pub(crate) use cast_from_raw::cast_from_raw;
mod cast_from_raw {
use crate::{pointer::PtrInner, *};

/// Implements [`<Dst as SizeEq<Src>>::cast_from_raw`][cast_from_raw].
/// Implements [`<Dst as SizeFrom<Src>>::cast_from_raw`][cast_from_raw].
///
/// # PME
///
/// Generates a post-monomorphization error if it is not possible to satisfy
/// the soundness conditions of [`SizeEq::cast_from_raw`][cast_from_raw]
/// the soundness conditions of [`SizeFrom::cast_from_raw`][cast_from_raw]
/// for `Src` and `Dst`.
///
/// [cast_from_raw]: crate::pointer::SizeEq::cast_from_raw
/// [cast_from_raw]: crate::pointer::SizeFrom::cast_from_raw
//
// FIXME(#1817): Support Sized->Unsized and Unsized->Sized casts
pub(crate) fn cast_from_raw<Src, Dst>(src: PtrInner<'_, Src>) -> PtrInner<'_, Dst>
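The doc comment above refers to a post-monomorphization error (PME). A minimal sketch of that general technique, separate from zerocopy's actual `cast_from_raw` machinery and with hypothetical names, is an associated const whose evaluation is deferred until the type parameters are concrete:

use core::marker::PhantomData;
use core::mem::size_of;

struct ShrinkCheck<Src, Dst>(PhantomData<(Src, Dst)>);

impl<Src, Dst> ShrinkCheck<Src, Dst> {
    // Associated consts are evaluated per concrete instantiation, so this
    // assertion fails at monomorphization time rather than at definition.
    const OK: () = assert!(
        size_of::<Dst>() <= size_of::<Src>(),
        "cannot shrink: `Dst` is larger than `Src`",
    );
}

fn assert_can_shrink<Src, Dst>() {
    // Referencing the const forces its evaluation for this `Src`/`Dst` pair.
    let () = ShrinkCheck::<Src, Dst>::OK;
}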
35 changes: 14 additions & 21 deletions src/lib.rs
@@ -381,7 +381,7 @@ use core::{
#[cfg(feature = "std")]
use std::io;

use crate::pointer::invariant::{self, BecauseExclusive};
use crate::pointer::{invariant, BecauseBidirectional};

#[cfg(any(feature = "alloc", test, kani))]
extern crate alloc;
@@ -1840,7 +1840,7 @@ pub unsafe trait TryFromBytes {
Self: KnownLayout + IntoBytes,
{
static_assert_dst_is_not_zst!(Self);
match Ptr::from_mut(bytes).try_cast_into_no_leftover::<Self, BecauseExclusive>(None) {
match Ptr::from_mut(bytes).try_cast_into_no_leftover(None) {
Ok(source) => {
// This call may panic. If that happens, it doesn't cause any soundness
// issues, as we have not generated any invalid state which we need to
@@ -1852,9 +1852,7 @@
// condition will not happen.
match source.try_into_valid() {
Ok(source) => Ok(source.as_mut()),
Err(e) => {
Err(e.map_src(|src| src.as_bytes::<BecauseExclusive>().as_mut()).into())
}
Err(e) => Err(e.map_src(|src| src.as_bytes().as_mut()).into()),
}
}
Err(e) => Err(e.map_src(Ptr::as_mut).into()),
@@ -2421,8 +2419,7 @@
where
Self: KnownLayout<PointerMetadata = usize> + IntoBytes,
{
match Ptr::from_mut(source).try_cast_into_no_leftover::<Self, BecauseExclusive>(Some(count))
{
match Ptr::from_mut(source).try_cast_into_no_leftover(Some(count)) {
Ok(source) => {
// This call may panic. If that happens, it doesn't cause any soundness
// issues, as we have not generated any invalid state which we need to
@@ -2434,9 +2431,7 @@
// condition will not happen.
match source.try_into_valid() {
Ok(source) => Ok(source.as_mut()),
Err(e) => {
Err(e.map_src(|src| src.as_bytes::<BecauseExclusive>().as_mut()).into())
}
Err(e) => Err(e.map_src(|src| src.as_bytes().as_mut()).into()),
}
}
Err(e) => Err(e.map_src(Ptr::as_mut).into()),
@@ -2844,7 +2839,7 @@ fn try_mut_from_prefix_suffix<T: IntoBytes + TryFromBytes + KnownLayout + ?Sized
cast_type: CastType,
meta: Option<T::PointerMetadata>,
) -> Result<(&mut T, &mut [u8]), TryCastError<&mut [u8], T>> {
match Ptr::from_mut(candidate).try_cast_into::<T, BecauseExclusive>(cast_type, meta) {
match Ptr::from_mut(candidate).try_cast_into(cast_type, meta) {
Ok((candidate, prefix_suffix)) => {
// This call may panic. If that happens, it doesn't cause any soundness
// issues, as we have not generated any invalid state which we need to
@@ -2856,7 +2851,7 @@ fn try_mut_from_prefix_suffix<T: IntoBytes + TryFromBytes + KnownLayout + ?Sized
// condition will not happen.
match candidate.try_into_valid() {
Ok(valid) => Ok((valid.as_mut(), prefix_suffix.as_mut())),
Err(e) => Err(e.map_src(|src| src.as_bytes::<BecauseExclusive>().as_mut()).into()),
Err(e) => Err(e.map_src(|src| src.as_bytes().as_mut()).into()),
}
}
Err(e) => Err(e.map_src(Ptr::as_mut).into()),
@@ -3832,8 +3827,8 @@ pub unsafe trait FromBytes: FromZeros {
Self: IntoBytes + KnownLayout,
{
static_assert_dst_is_not_zst!(Self);
match Ptr::from_mut(source).try_cast_into_no_leftover::<_, BecauseExclusive>(None) {
Ok(ptr) => Ok(ptr.recall_validity::<_, (_, (_, _))>().as_mut()),
match Ptr::from_mut(source).try_cast_into_no_leftover(None) {
Ok(ptr) => Ok(ptr.recall_validity::<_, BecauseBidirectional>().as_mut()),
Err(err) => Err(err.map_src(|src| src.as_mut())),
}
}
@@ -4301,11 +4296,9 @@ pub unsafe trait FromBytes: FromZeros {
Self: IntoBytes + KnownLayout<PointerMetadata = usize> + Immutable,
{
let source = Ptr::from_mut(source);
let maybe_slf = source.try_cast_into_no_leftover::<_, BecauseImmutable>(Some(count));
let maybe_slf = source.try_cast_into_no_leftover(Some(count));
match maybe_slf {
Ok(slf) => Ok(slf
.recall_validity::<_, (_, (_, (BecauseExclusive, BecauseExclusive)))>()
.as_mut()),
Ok(slf) => Ok(slf.recall_validity::<_, BecauseBidirectional>().as_mut()),
Err(err) => Err(err.map_src(|s| s.as_mut())),
}
}
@@ -4662,7 +4655,7 @@ pub unsafe trait FromBytes: FromZeros {
// cannot be violated even though `buf` may have more permissive bit
// validity than `ptr`.
let ptr = unsafe { ptr.assume_validity::<invariant::Initialized>() };
let ptr = ptr.as_bytes::<BecauseExclusive>();
let ptr = ptr.as_bytes();
src.read_exact(ptr.as_mut())?;
// SAFETY: `buf` entirely consists of initialized bytes, and `Self` is
// `FromBytes`.
@@ -4781,9 +4774,9 @@ fn mut_from_prefix_suffix<T: FromBytes + IntoBytes + KnownLayout + ?Sized>(
cast_type: CastType,
) -> Result<(&mut T, &mut [u8]), CastError<&mut [u8], T>> {
let (slf, prefix_suffix) = Ptr::from_mut(source)
.try_cast_into::<_, BecauseExclusive>(cast_type, meta)
.try_cast_into(cast_type, meta)
.map_err(|err| err.map_src(|s| s.as_mut()))?;
Ok((slf.recall_validity::<_, (_, (_, _))>().as_mut(), prefix_suffix.as_mut()))
Ok((slf.recall_validity::<_, BecauseBidirectional>().as_mut(), prefix_suffix.as_mut()))
}

/// Analyzes whether a type is [`IntoBytes`].
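For context on the `lib.rs` call sites above, here is a hedged usage sketch of the public `try_mut_from_bytes` API they back, with a hypothetical `Packet` type (assumes zerocopy's `derive` feature):

use zerocopy::{IntoBytes, KnownLayout, TryFromBytes};

// Hypothetical example type; the `bool` field exercises the `TryFromBytes`
// impl touched in `impls.rs` above.
#[derive(TryFromBytes, IntoBytes, KnownLayout)]
#[repr(C)]
struct Packet {
    kind: u8,
    enabled: bool,
}

fn parse(bytes: &mut [u8]) -> Option<&mut Packet> {
    // Returns `Err` if `bytes` is the wrong length for `Packet` or fails
    // validation (e.g. `enabled` must be 0 or 1).
    Packet::try_mut_from_bytes(bytes).ok()
}

Under those bounds, `try_mut_from_bytes` runs through the `try_cast_into_no_leftover` and `try_into_valid` paths shown in the hunks above.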