Commit 65717ef

Auto merge of #120682 - the8472:indexed-access, r=<try>
[WIP] rewrite TrustedRandomAccess into two directional variants

r? `@ghost`
2 parents f067fd6 + d642974 · commit 65717ef
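For orientation: the change replaces the single position-agnostic `__iterator_get_unchecked(idx)` entry point with end-relative and start-relative accessors, plus hooks that tell the source iterator how much of it was actually consumed. The trait definitions themselves are not in the files excerpted below, so what follows is only a hedged sketch of the shape they appear to take: the method names are copied from the diff, while the grouping, signatures, and doc text are assumptions.

// Hedged sketch only; not the definitions from this commit.
pub trait UncheckedIndexedIterator: Iterator {
    /// Reads the element `idx` positions before the back of the remaining range.
    /// SAFETY: `idx` must be in bounds and each element may be read at most once.
    unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item;

    /// Reads the element `idx` positions after the front of the remaining range.
    /// SAFETY: same bounds and read-at-most-once requirements as above.
    unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item;

    /// After front-to-back indexed reads, records that `new_len` of the original
    /// `old_len` elements remain, so that `Drop` only releases what was not read.
    unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, old_len: usize);

    /// Mirror hook for back-to-front consumption.
    unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, old_len: usize);
}

The per-file diffs below implement these methods for `vec::IntoIter`, drive them from the in-place collect specialization, and forward them through the `Cloned` and `Copied` adapters.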

21 files changed: +845 −45


library/alloc/src/lib.rs

+1
@@ -156,6 +156,7 @@
 #![feature(str_internals)]
 #![feature(strict_provenance)]
 #![feature(trusted_fused)]
+#![feature(trusted_indexed_access)]
 #![feature(trusted_len)]
 #![feature(trusted_random_access)]
 #![feature(try_trait_v2)]

library/alloc/src/vec/in_place_collect.rs

+86 −24
@@ -157,7 +157,8 @@
 use crate::alloc::{handle_alloc_error, Global};
 use core::alloc::Allocator;
 use core::alloc::Layout;
-use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce};
+use core::iter::UncheckedIndexedIterator;
+use core::iter::{InPlaceIterable, SourceIter};
 use core::marker::PhantomData;
 use core::mem::{self, ManuallyDrop, SizedTypeProperties};
 use core::num::NonZeroUsize;
@@ -237,7 +238,7 @@ where
             return SpecFromIterNested::from_iter(iterator);
         }

-        let (src_buf, src_ptr, src_cap, mut dst_buf, dst_end, dst_cap) = unsafe {
+        let (src_buf, _src_ptr, src_cap, mut dst_buf, dst_end, dst_cap) = unsafe {
             let inner = iterator.as_inner().as_into_iter();
             (
                 inner.buf.as_ptr(),
@@ -256,15 +257,11 @@ where
         // check if SourceIter contract was upheld
         // caveat: if they weren't we might not even make it to this point
         debug_assert_eq!(src_buf, src.buf.as_ptr());
-        // check InPlaceIterable contract. This is only possible if the iterator advanced the
-        // source pointer at all. If it uses unchecked access via TrustedRandomAccess
-        // then the source pointer will stay in its initial position and we can't use it as reference
-        if src.ptr != src_ptr {
-            debug_assert!(
-                unsafe { dst_buf.add(len) as *const _ } <= src.ptr.as_ptr(),
-                "InPlaceIterable contract violation, write pointer advanced beyond read pointer"
-            );
-        }
+        // check InPlaceIterable contract.
+        debug_assert!(
+            unsafe { dst_buf.add(len) as *const _ } <= src.ptr.as_ptr(),
+            "InPlaceIterable contract violation, write pointer advanced beyond read pointer"
+        );

         // The ownership of the source allocation and the new `T` values is temporarily moved into `dst_guard`.
         // This is safe because
@@ -369,28 +366,93 @@ where
     }
 }

+// impl<T, I> SpecInPlaceCollect<T, I> for I
+// where
+//     I: Iterator<Item = T> + TrustedRandomAccessNoCoerce,
+// {
+//     #[inline]
+//     unsafe fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
+//         let len = self.size();
+//         let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
+//         for i in 0..len {
+//             // Safety: InplaceIterable contract guarantees that for every element we read
+//             // one slot in the underlying storage will have been freed up and we can immediately
+//             // write back the result.
+//             unsafe {
+//                 let dst = dst_buf.add(i);
+//                 debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
+//                 ptr::write(dst, self.__iterator_get_unchecked(i));
+//                 // Since this executes user code which can panic we have to bump the pointer
+//                 // after each step.
+//                 drop_guard.dst = dst.add(1);
+//             }
+//         }
+//         mem::forget(drop_guard);
+//         len
+//     }
+// }
+
 impl<T, I> SpecInPlaceCollect<T, I> for I
 where
-    I: Iterator<Item = T> + TrustedRandomAccessNoCoerce,
+    I: Iterator<Item = T> + UncheckedIndexedIterator,
 {
     #[inline]
     unsafe fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
-        let len = self.size();
-        let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
-        for i in 0..len {
-            // Safety: InplaceIterable contract guarantees that for every element we read
-            // one slot in the underlying storage will have been freed up and we can immediately
-            // write back the result.
+        let len = self.size_hint().0;
+
+        if len == 0 {
+            return 0;
+        }
+
+        struct LoopGuard<'a, I>
+        where
+            I: Iterator + UncheckedIndexedIterator,
+        {
+            it: &'a mut I,
+            len: usize,
+            idx: usize,
+            dst_buf: *mut I::Item,
+        }
+
+        impl<I> Drop for LoopGuard<'_, I>
+        where
+            I: Iterator + UncheckedIndexedIterator,
+        {
+            #[inline]
+            fn drop(&mut self) {
+                unsafe {
+                    let new_len = self.len - self.idx;
+                    self.it.set_front_index_from_end_unchecked(new_len, self.len);
+                    if self.idx != self.len {
+                        let raw_slice =
+                            ptr::slice_from_raw_parts_mut::<I::Item>(self.dst_buf, self.idx);
+                        ptr::drop_in_place(raw_slice);
+                    }
+                }
+            }
+        }
+
+        let mut state = LoopGuard { it: self, len, idx: 0, dst_buf };
+
+        loop {
             unsafe {
-                let dst = dst_buf.add(i);
+                let idx = state.idx;
+                state.idx = state.idx.unchecked_add(1);
+                let dst = state.dst_buf.add(idx);
                 debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
-                ptr::write(dst, self.__iterator_get_unchecked(i));
-                // Since this executes user code which can panic we have to bump the pointer
-                // after each step.
-                drop_guard.dst = dst.add(1);
+                dst.write(state.it.index_from_end_unchecked(state.len - idx));
+                //drop_guard.dst = dst_buf.add(i).add(1);
+            }
+            if state.idx == len {
+                break;
             }
         }
-        mem::forget(drop_guard);
+
+        // disarm guard, we don't want the front elements to get dropped
+        mem::forget(state);
+        // since the guard is disarmed, update the iterator state
+        unsafe { self.set_front_index_from_end_unchecked(0, len) };
+
         len
     }
 }
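The rewritten `collect_in_place` above replaces the old per-element `drop_guard.dst` bump with a `LoopGuard` whose `Drop` does two jobs: it reports the unconsumed length back to the source iterator via `set_front_index_from_end_unchecked`, and it drops the prefix already written to `dst_buf` if the loop unwinds. Below is a minimal standalone sketch of the same panic-safety pattern, written against a plain output buffer rather than `vec::IntoIter`; all names are illustrative and the caller must guarantee `dst_buf` is valid for `len` writes.

use std::{mem, ptr};

/// SAFETY: the caller must guarantee `dst_buf` is valid for `len` writes of `T`.
unsafe fn collect_with_guard<T, I: Iterator<Item = T>>(
    mut it: I,
    dst_buf: *mut T,
    len: usize,
) -> usize {
    struct Guard<T> {
        dst_buf: *mut T,
        idx: usize,
    }

    impl<T> Drop for Guard<T> {
        fn drop(&mut self) {
            // Runs only if the guard is not disarmed (i.e. on unwind): drop the
            // prefix that was already written so those elements are neither
            // leaked nor dropped twice.
            unsafe {
                ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.dst_buf, self.idx));
            }
        }
    }

    let mut guard = Guard { dst_buf, idx: 0 };
    while guard.idx < len {
        // `it.next()` runs user code that may panic, so the guard's count is
        // only bumped after the corresponding slot has been initialized.
        let Some(item) = it.next() else { break };
        unsafe { dst_buf.add(guard.idx).write(item) };
        guard.idx += 1;
    }

    let written = guard.idx;
    // Success: disarm the guard; ownership of the written elements passes to the caller.
    mem::forget(guard);
    written
}

On the success path the guard is disarmed with `mem::forget`, exactly as the diff does with `state`, so the written elements are not dropped a second time.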

library/alloc/src/vec/into_iter.rs

+47 −1
@@ -8,7 +8,7 @@ use core::array;
 use core::fmt;
 use core::iter::{
     FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
-    TrustedRandomAccessNoCoerce,
+    TrustedRandomAccessNoCoerce, UncheckedIndexedIterator,
 };
 use core::marker::PhantomData;
 use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
@@ -129,6 +129,7 @@ impl<T, A: Allocator> IntoIter<T, A> {
     /// This method is used by in-place iteration, refer to the vec::in_place_collect
     /// documentation for an overview.
     #[cfg(not(no_global_oom_handling))]
+    #[inline]
     pub(super) fn forget_allocation_drop_remaining(&mut self) {
         let remaining = self.as_raw_mut_slice();

@@ -307,6 +308,51 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // them for `Drop`.
         unsafe { if T::IS_ZST { mem::zeroed() } else { self.ptr.add(i).read() } }
     }
+
+    #[inline]
+    unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item {
+        if T::IS_ZST {
+            // SAFETY: conjuring a ZST
+            unsafe { mem::zeroed() }
+        } else {
+            let end = non_null!(self.end, T);
+            //self.ptr = unsafe { end.sub(idx).add(1) };
+            unsafe { end.sub(idx).read() }
+        }
+    }
+
+    #[inline]
+    unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item {
+        if T::IS_ZST {
+            // SAFETY: conjuring a ZST
+            unsafe { mem::zeroed() }
+        } else {
+            //let end = non_null!(mut self.end, T);
+            //*end = unsafe { self.ptr.add(idx) };
+            unsafe { self.ptr.add(idx).read() }
+        }
+    }
+}
+
+#[unstable(feature = "trusted_indexed_access", issue = "none")]
+impl<T, A: Allocator> UncheckedIndexedIterator for IntoIter<T, A> {
+    unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, _old_len: usize) {
+        if T::IS_ZST {
+            self.end = self.ptr.as_ptr().cast_const().wrapping_byte_add(new_len);
+        } else {
+            let end = non_null!(self.end, T);
+            self.ptr = unsafe { end.sub(new_len) };
+        }
+    }
+
+    unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, _old_len: usize) {
+        if T::IS_ZST {
+            self.end = self.ptr.as_ptr().cast_const().wrapping_byte_add(new_len);
+        } else {
+            let end = non_null!(mut self.end, T);
+            *end = unsafe { self.ptr.add(new_len) };
+        }
+    }
 }

 #[stable(feature = "rust1", since = "1.0.0")]

library/core/benches/iter.rs

+3 −2
@@ -457,11 +457,12 @@ fn bench_trusted_random_access_adapters(b: &mut Bencher) {
             .map(|(a, b)| a.wrapping_add(b))
             .fuse();
         let mut acc: usize = 0;
-        let size = iter.size();
+        let size = iter.size_hint().0;
         for i in 0..size {
             // SAFETY: TRA requirements are satisfied by 0..size iteration and then dropping the
             // iterator.
-            acc = acc.wrapping_add(unsafe { iter.__iterator_get_unchecked(i) });
+            // The iterator is not owning, so we skip cleanup.
+            acc = acc.wrapping_add(unsafe { iter.index_from_start_unchecked(i) });
         }
         acc
     })

library/core/benches/lib.rs

+1
@@ -3,6 +3,7 @@
 #![feature(flt2dec)]
 #![feature(test)]
 #![feature(trusted_random_access)]
+#![feature(trusted_indexed_access)]
 #![feature(iter_array_chunks)]
 #![feature(iter_next_chunk)]
 #![feature(iter_advance_by)]

library/core/src/iter/adapters/cloned.rs

+40 −1
@@ -1,7 +1,10 @@
 use crate::iter::adapters::{
     zip::try_get_unchecked, SourceIter, TrustedRandomAccess, TrustedRandomAccessNoCoerce,
 };
-use crate::iter::{FusedIterator, InPlaceIterable, TrustedLen, UncheckedIterator};
+use crate::iter::traits::SpecIndexedAccess as _;
+use crate::iter::{
+    FusedIterator, InPlaceIterable, TrustedLen, UncheckedIndexedIterator, UncheckedIterator,
+};
 use crate::ops::Try;
 use core::num::NonZeroUsize;

@@ -69,6 +72,24 @@ where
         // `Iterator::__iterator_get_unchecked`.
         unsafe { try_get_unchecked(&mut self.it, idx).clone() }
     }
+
+    #[inline]
+    unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item
+    where
+        Self: UncheckedIndexedIterator,
+    {
+        // SAFETY: forwarding to unsafe function with the same preconditions
+        unsafe { self.it.index_from_end_unchecked_inner(idx) }.clone()
+    }
+
+    #[inline]
+    unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item
+    where
+        Self: UncheckedIndexedIterator,
+    {
+        // SAFETY: forwarding to unsafe function with the same preconditions
+        unsafe { self.it.index_from_start_unchecked_inner(idx) }.clone()
+    }
 }

 #[stable(feature = "iter_cloned", since = "1.1.0")]
@@ -134,6 +155,24 @@ where
     const MAY_HAVE_SIDE_EFFECT: bool = true;
 }

+#[unstable(feature = "trusted_indexed_access", issue = "none")]
+impl<I> UncheckedIndexedIterator for Cloned<I>
+where
+    I: UncheckedIndexedIterator,
+{
+    #[inline]
+    unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, old_len: usize) {
+        // SAFETY: forwarding to unsafe function with the same preconditions
+        unsafe { self.it.set_front_index_from_end_unchecked(new_len, old_len) }
+    }
+
+    #[inline]
+    unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, old_len: usize) {
+        // SAFETY: forwarding to unsafe function with the same preconditions
+        unsafe { self.it.set_end_index_from_start_unchecked(new_len, old_len) }
+    }
+}
+
 #[unstable(feature = "trusted_len", issue = "37572")]
 unsafe impl<'a, I, T: 'a> TrustedLen for Cloned<I>
 where

library/core/src/iter/adapters/copied.rs

+38 −1
@@ -1,7 +1,8 @@
 use crate::iter::adapters::{
     zip::try_get_unchecked, SourceIter, TrustedRandomAccess, TrustedRandomAccessNoCoerce,
 };
-use crate::iter::{FusedIterator, InPlaceIterable, TrustedLen};
+use crate::iter::traits::SpecIndexedAccess;
+use crate::iter::{FusedIterator, InPlaceIterable, TrustedLen, UncheckedIndexedIterator};
 use crate::mem::MaybeUninit;
 use crate::mem::SizedTypeProperties;
 use crate::num::NonZeroUsize;
@@ -102,6 +103,24 @@ where
         // `Iterator::__iterator_get_unchecked`.
         *unsafe { try_get_unchecked(&mut self.it, idx) }
     }
+
+    #[inline]
+    unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item
+    where
+        Self: UncheckedIndexedIterator,
+    {
+        // SAFETY: forwarding to unsafe function with the same preconditions
+        *unsafe { self.it.index_from_end_unchecked_inner(idx) }
+    }
+
+    #[inline]
+    unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item
+    where
+        Self: UncheckedIndexedIterator,
+    {
+        // SAFETY: forwarding to unsafe function with the same preconditions
+        *unsafe { self.it.index_from_start_unchecked_inner(idx) }
+    }
 }

 #[stable(feature = "iter_copied", since = "1.36.0")]
@@ -159,6 +178,24 @@ where
 {
 }

+#[unstable(feature = "trusted_indexed_access", issue = "none")]
+impl<I> UncheckedIndexedIterator for Copied<I>
+where
+    I: UncheckedIndexedIterator,
+{
+    #[inline]
+    unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, old_len: usize) {
+        // SAFETY: forwarding to unsafe function with the same preconditions
+        unsafe { self.it.set_front_index_from_end_unchecked(new_len, old_len) }
+    }
+
+    #[inline]
+    unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, old_len: usize) {
+        // SAFETY: forwarding to unsafe function with the same preconditions
+        unsafe { self.it.set_end_index_from_start_unchecked(new_len, old_len) }
+    }
+}
+
 #[doc(hidden)]
 #[unstable(feature = "trusted_random_access", issue = "none")]
 unsafe impl<I> TrustedRandomAccess for Copied<I> where I: TrustedRandomAccess {}
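Both adapter impls follow the same split: the element accessors go through the adapter's own transformation (`.clone()` in `Cloned`, the deref in `Copied`), while the index-bookkeeping hooks are forwarded untouched so the inner iterator's pointers stay consistent with how many elements were actually read. A hedged, self-contained sketch of that pattern for a hypothetical user adapter, with a local `IndexedAccess` trait standing in for the unstable trait from this branch:

// Illustrative only; `IndexedAccess` and `MyCloned` are stand-ins, not std items.
trait IndexedAccess: Iterator {
    unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item;
    unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, old_len: usize);
}

struct MyCloned<I> {
    it: I,
}

impl<'a, I, T> Iterator for MyCloned<I>
where
    T: Clone + 'a,
    I: Iterator<Item = &'a T>,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        self.it.next().cloned()
    }
}

impl<'a, I, T> IndexedAccess for MyCloned<I>
where
    T: Clone + 'a,
    I: Iterator<Item = &'a T> + IndexedAccess,
{
    unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> T {
        // The element goes through the adapter's transformation, `.clone()` here.
        unsafe { self.it.index_from_start_unchecked(idx) }.clone()
    }

    unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, old_len: usize) {
        // The bookkeeping is forwarded untouched so the inner pointers stay in sync.
        unsafe { self.it.set_front_index_from_end_unchecked(new_len, old_len) }
    }
}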
