Skip to content

Commit b400961

Browse files
committed
[WIP] rewrite TrustedRandomAccess into two directional variants
1 parent ea37e80 commit b400961

File tree

24 files changed

+982
-39
lines changed

24 files changed

+982
-39
lines changed

library/alloc/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -156,6 +156,7 @@
156156
#![feature(str_internals)]
157157
#![feature(strict_provenance)]
158158
#![feature(trusted_fused)]
159+
#![feature(trusted_indexed_access)]
159160
#![feature(trusted_len)]
160161
#![feature(trusted_random_access)]
161162
#![feature(try_trait_v2)]

library/alloc/src/vec/in_place_collect.rs

+84-16
Original file line numberDiff line numberDiff line change
@@ -157,7 +157,8 @@
157157
use crate::alloc::{handle_alloc_error, Global};
158158
use core::alloc::Allocator;
159159
use core::alloc::Layout;
160-
use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce};
160+
use core::iter::UncheckedIndexedIterator;
161+
use core::iter::{InPlaceIterable, SourceIter};
161162
use core::marker::PhantomData;
162163
use core::mem::{self, ManuallyDrop, SizedTypeProperties};
163164
use core::num::NonZeroUsize;
@@ -257,8 +258,9 @@ where
257258
// caveat: if they weren't we might not even make it to this point
258259
debug_assert_eq!(src_buf, src.buf.as_ptr());
259260
// check InPlaceIterable contract. This is only possible if the iterator advanced the
260-
// source pointer at all. If it uses unchecked access via TrustedRandomAccess
261-
// then the source pointer will stay in its initial position and we can't use it as reference
261+
// source pointer at all. If it uses unchecked access via UncheckedIndexedIterator
262+
// and doesn't perform cleanup then the source pointer will stay in its initial position
263+
// and we can't use it as reference.
262264
if src.ptr != src_ptr {
263265
debug_assert!(
264266
unsafe { dst_buf.add(len) as *const _ } <= src.ptr.as_ptr(),
@@ -369,28 +371,94 @@ where
369371
}
370372
}
371373

374+
// impl<T, I> SpecInPlaceCollect<T, I> for I
375+
// where
376+
// I: Iterator<Item = T> + TrustedRandomAccessNoCoerce,
377+
// {
378+
// #[inline]
379+
// unsafe fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
380+
// let len = self.size();
381+
// let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
382+
// for i in 0..len {
383+
// // Safety: InplaceIterable contract guarantees that for every element we read
384+
// // one slot in the underlying storage will have been freed up and we can immediately
385+
// // write back the result.
386+
// unsafe {
387+
// let dst = dst_buf.add(i);
388+
// debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
389+
// ptr::write(dst, self.__iterator_get_unchecked(i));
390+
// // Since this executes user code which can panic we have to bump the pointer
391+
// // after each step.
392+
// drop_guard.dst = dst.add(1);
393+
// }
394+
// }
395+
// mem::forget(drop_guard);
396+
// len
397+
// }
398+
// }
399+
372400
impl<T, I> SpecInPlaceCollect<T, I> for I
373401
where
374-
I: Iterator<Item = T> + TrustedRandomAccessNoCoerce,
402+
I: Iterator<Item = T> + UncheckedIndexedIterator,
375403
{
376404
#[inline]
377405
unsafe fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
378-
let len = self.size();
379-
let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
380-
for i in 0..len {
381-
// Safety: InplaceIterable contract guarantees that for every element we read
382-
// one slot in the underlying storage will have been freed up and we can immediately
383-
// write back the result.
406+
let len = self.size_hint().0;
407+
408+
if len == 0 {
409+
return 0;
410+
}
411+
412+
struct LoopGuard<'a, I>
413+
where
414+
I: Iterator + UncheckedIndexedIterator,
415+
{
416+
it: &'a mut I,
417+
len: usize,
418+
idx: usize,
419+
dst_buf: *mut I::Item,
420+
}
421+
422+
impl<I> Drop for LoopGuard<'_, I>
423+
where
424+
I: Iterator + UncheckedIndexedIterator,
425+
{
426+
#[inline]
427+
fn drop(&mut self) {
428+
unsafe {
429+
let new_len = self.len - self.idx;
430+
self.it.set_front_index_from_end_unchecked(new_len, self.len);
431+
if self.idx != self.len {
432+
let raw_slice =
433+
ptr::slice_from_raw_parts_mut::<I::Item>(self.dst_buf, self.idx);
434+
ptr::drop_in_place(raw_slice);
435+
}
436+
}
437+
}
438+
}
439+
440+
let mut state = LoopGuard { it: self, len, idx: 0, dst_buf };
441+
442+
loop {
384443
unsafe {
385-
let dst = dst_buf.add(i);
444+
let idx = state.idx;
445+
state.idx = idx.unchecked_add(1);
446+
let dst = state.dst_buf.add(idx);
386447
debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
387-
ptr::write(dst, self.__iterator_get_unchecked(i));
388-
// Since this executes user code which can panic we have to bump the pointer
389-
// after each step.
390-
drop_guard.dst = dst.add(1);
448+
dst.write(state.it.index_from_end_unchecked(state.len - idx));
449+
}
450+
if state.idx == len {
451+
break;
391452
}
392453
}
393-
mem::forget(drop_guard);
454+
455+
// disarm guard, we don't want the front elements to get dropped
456+
mem::forget(state);
457+
// since the guard is disarmed, update the iterator state
458+
if Self::CLEANUP_ON_DROP {
459+
unsafe { self.set_front_index_from_end_unchecked(0, len) };
460+
}
461+
394462
len
395463
}
396464
}

library/alloc/src/vec/into_iter.rs

+49-1
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ use core::array;
88
use core::fmt;
99
use core::iter::{
1010
FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
11-
TrustedRandomAccessNoCoerce,
11+
TrustedRandomAccessNoCoerce, UncheckedIndexedIterator,
1212
};
1313
use core::marker::PhantomData;
1414
use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
@@ -307,6 +307,54 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
307307
// them for `Drop`.
308308
unsafe { if T::IS_ZST { mem::zeroed() } else { self.ptr.add(i).read() } }
309309
}
310+
311+
#[inline]
312+
unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item {
313+
if T::IS_ZST {
314+
// SAFETY: conjuring a ZST
315+
unsafe { mem::zeroed() }
316+
} else {
317+
let end = non_null!(self.end, T);
318+
//self.ptr = unsafe { end.sub(idx).add(1) };
319+
unsafe { end.sub(idx).read() }
320+
}
321+
}
322+
323+
#[inline]
324+
unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item {
325+
if T::IS_ZST {
326+
// SAFETY: conjuring a ZST
327+
unsafe { mem::zeroed() }
328+
} else {
329+
//let end = non_null!(mut self.end, T);
330+
//*end = unsafe { self.ptr.add(idx) };
331+
unsafe { self.ptr.add(idx).read() }
332+
}
333+
}
334+
}
335+
336+
#[unstable(feature = "trusted_indexed_access", issue = "none")]
337+
impl<T, A: Allocator> UncheckedIndexedIterator for IntoIter<T, A> {
338+
const MAY_HAVE_SIDE_EFFECT: bool = false;
339+
const CLEANUP_ON_DROP: bool = mem::needs_drop::<T>();
340+
341+
unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, _old_len: usize) {
342+
if T::IS_ZST {
343+
self.end = self.ptr.as_ptr().cast_const().wrapping_byte_add(new_len);
344+
} else {
345+
let end = non_null!(self.end, T);
346+
self.ptr = unsafe { end.sub(new_len) };
347+
}
348+
}
349+
350+
unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, _old_len: usize) {
351+
if T::IS_ZST {
352+
self.end = self.ptr.as_ptr().cast_const().wrapping_byte_add(new_len);
353+
} else {
354+
let end = non_null!(mut self.end, T);
355+
*end = unsafe { self.ptr.add(new_len) };
356+
}
357+
}
310358
}
311359

312360
#[stable(feature = "rust1", since = "1.0.0")]

library/core/benches/iter.rs

+3-2
Original file line numberDiff line numberDiff line change
@@ -457,11 +457,12 @@ fn bench_trusted_random_access_adapters(b: &mut Bencher) {
457457
.map(|(a, b)| a.wrapping_add(b))
458458
.fuse();
459459
let mut acc: usize = 0;
460-
let size = iter.size();
460+
let size = iter.size_hint().0;
461461
for i in 0..size {
462462
// SAFETY: TRA requirements are satisfied by 0..size iteration and then dropping the
463463
// iterator.
464-
acc = acc.wrapping_add(unsafe { iter.__iterator_get_unchecked(i) });
464+
// The iterator is not owning, so we skip cleanup.
465+
acc = acc.wrapping_add(unsafe { iter.index_from_start_unchecked(i) });
465466
}
466467
acc
467468
})

library/core/benches/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
#![feature(flt2dec)]
44
#![feature(test)]
55
#![feature(trusted_random_access)]
6+
#![feature(trusted_indexed_access)]
67
#![feature(iter_array_chunks)]
78
#![feature(iter_next_chunk)]
89
#![feature(iter_advance_by)]

library/core/src/array/iter.rs

+40-2
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,10 @@ use crate::num::NonZeroUsize;
44
use crate::{
55
fmt,
66
intrinsics::transmute_unchecked,
7-
iter::{self, FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce},
8-
mem::MaybeUninit,
7+
iter::{
8+
self, FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce, UncheckedIndexedIterator,
9+
},
10+
mem::{self, MaybeUninit},
911
ops::{IndexRange, Range},
1012
ptr,
1113
};
@@ -300,6 +302,24 @@ impl<T, const N: usize> Iterator for IntoIter<T, N> {
300302
// SAFETY: The caller must provide an idx that is in bound of the remainder.
301303
unsafe { self.data.as_ptr().add(self.alive.start()).add(idx).cast::<T>().read() }
302304
}
305+
306+
#[inline]
307+
unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item
308+
where
309+
Self: UncheckedIndexedIterator,
310+
{
311+
// SAFETY: The caller must provide an idx that is in bound of the remainder.
312+
unsafe { self.data.as_ptr().add(self.alive.end()).sub(idx).cast::<T>().read() }
313+
}
314+
315+
#[inline]
316+
unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item
317+
where
318+
Self: UncheckedIndexedIterator,
319+
{
320+
// SAFETY: The caller must provide an idx that is in bound of the remainder.
321+
unsafe { self.data.as_ptr().add(self.alive.start()).add(idx).cast::<T>().read() }
322+
}
303323
}
304324

305325
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
@@ -400,6 +420,24 @@ where
400420
const MAY_HAVE_SIDE_EFFECT: bool = false;
401421
}
402422

423+
#[unstable(feature = "trusted_indexed_access", issue = "none")]
424+
impl<T, const N: usize> UncheckedIndexedIterator for IntoIter<T, N> {
425+
const MAY_HAVE_SIDE_EFFECT: bool = false;
426+
const CLEANUP_ON_DROP: bool = mem::needs_drop::<T>();
427+
428+
#[inline]
429+
unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, _old_len: usize) {
430+
// SAFETY: ...
431+
unsafe { self.alive.set_start_unchecked(self.alive.end() - new_len) };
432+
}
433+
434+
#[inline]
435+
unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, _old_len: usize) {
436+
// SAFETY: ...
437+
unsafe { self.alive.set_end_unchecked(self.alive.start() + new_len) };
438+
}
439+
}
440+
403441
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
404442
impl<T: Clone, const N: usize> Clone for IntoIter<T, N> {
405443
fn clone(&self) -> Self {

library/core/src/iter/adapters/cloned.rs

+43-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,10 @@
11
use crate::iter::adapters::{
22
zip::try_get_unchecked, SourceIter, TrustedRandomAccess, TrustedRandomAccessNoCoerce,
33
};
4-
use crate::iter::{FusedIterator, InPlaceIterable, TrustedLen, UncheckedIterator};
4+
use crate::iter::traits::SpecIndexedAccess as _;
5+
use crate::iter::{
6+
FusedIterator, InPlaceIterable, TrustedLen, UncheckedIndexedIterator, UncheckedIterator,
7+
};
58
use crate::ops::Try;
69
use core::num::NonZeroUsize;
710

@@ -69,6 +72,24 @@ where
6972
// `Iterator::__iterator_get_unchecked`.
7073
unsafe { try_get_unchecked(&mut self.it, idx).clone() }
7174
}
75+
76+
#[inline]
77+
unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item
78+
where
79+
Self: UncheckedIndexedIterator,
80+
{
81+
// SAFETY: forwarding to unsafe function with the same preconditions
82+
unsafe { self.it.index_from_end_unchecked_inner(idx) }.clone()
83+
}
84+
85+
#[inline]
86+
unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item
87+
where
88+
Self: UncheckedIndexedIterator,
89+
{
90+
// SAFETY: forwarding to unsafe function with the same preconditions
91+
unsafe { self.it.index_from_start_unchecked_inner(idx) }.clone()
92+
}
7293
}
7394

7495
#[stable(feature = "iter_cloned", since = "1.1.0")]
@@ -134,6 +155,27 @@ where
134155
const MAY_HAVE_SIDE_EFFECT: bool = true;
135156
}
136157

158+
#[unstable(feature = "trusted_indexed_access", issue = "none")]
159+
impl<I> UncheckedIndexedIterator for Cloned<I>
160+
where
161+
I: UncheckedIndexedIterator,
162+
{
163+
#[inline]
164+
unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, old_len: usize) {
165+
// SAFETY: forwarding to unsafe function with the same preconditions
166+
unsafe { self.it.set_front_index_from_end_unchecked(new_len, old_len) }
167+
}
168+
169+
#[inline]
170+
unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, old_len: usize) {
171+
// SAFETY: forwarding to unsafe function with the same preconditions
172+
unsafe { self.it.set_end_index_from_start_unchecked(new_len, old_len) }
173+
}
174+
175+
const MAY_HAVE_SIDE_EFFECT: bool = true;
176+
const CLEANUP_ON_DROP: bool = I::CLEANUP_ON_DROP;
177+
}
178+
137179
#[unstable(feature = "trusted_len", issue = "37572")]
138180
unsafe impl<'a, I, T: 'a> TrustedLen for Cloned<I>
139181
where

0 commit comments

Comments (0)