Skip to content

Commit 98b04dc

Browse files
committed
Auto merge of #120682 - the8472:indexed-access, r=<try>
[WIP] rewrite TrustedRandomAccess into two directional variants r? `@ghost`
2 parents bfe762e + 36d8523 commit 98b04dc

File tree

25 files changed

+1009
-43
lines changed

25 files changed

+1009
-43
lines changed

library/alloc/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -160,6 +160,7 @@
160160
#![feature(str_internals)]
161161
#![feature(strict_provenance)]
162162
#![feature(trusted_fused)]
163+
#![feature(trusted_indexed_access)]
163164
#![feature(trusted_len)]
164165
#![feature(trusted_random_access)]
165166
#![feature(try_trait_v2)]

library/alloc/src/vec/in_place_collect.rs

+88-17
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@
8080
//! # O(1) collect
8181
//!
8282
//! The main iteration itself is further specialized when the iterator implements
83-
//! [`TrustedRandomAccessNoCoerce`] to let the optimizer see that it is a counted loop with a single
83+
//! [`UncheckedIndexedIterator`] to let the optimizer see that it is a counted loop with a single
8484
//! [induction variable]. This can turn some iterators into a noop, i.e. it reduces them from O(n) to
8585
//! O(1). This particular optimization is quite fickle and doesn't always work, see [#79308]
8686
//!
@@ -157,8 +157,10 @@
157157
use crate::alloc::{handle_alloc_error, Global};
158158
use core::alloc::Allocator;
159159
use core::alloc::Layout;
160-
use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce};
160+
use core::iter::UncheckedIndexedIterator;
161+
use core::iter::{InPlaceIterable, SourceIter};
161162
use core::marker::PhantomData;
163+
use core::mem::needs_drop;
162164
use core::mem::{self, ManuallyDrop, SizedTypeProperties};
163165
use core::num::NonZero;
164166
use core::ptr::{self, NonNull};
@@ -257,8 +259,9 @@ where
257259
// caveat: if they weren't we might not even make it to this point
258260
debug_assert_eq!(src_buf, src.buf.as_ptr());
259261
// check InPlaceIterable contract. This is only possible if the iterator advanced the
260-
// source pointer at all. If it uses unchecked access via TrustedRandomAccess
261-
// then the source pointer will stay in its initial position and we can't use it as reference
262+
// source pointer at all. If it uses unchecked access via UncheckedIndexedIterator
263+
// and doesn't perform cleanup then the source pointer will stay in its initial position
264+
// and we can't use it as reference.
262265
if src.ptr != src_ptr {
263266
debug_assert!(
264267
unsafe { dst_buf.add(len) as *const _ } <= src.ptr.as_ptr(),
@@ -369,28 +372,96 @@ where
369372
}
370373
}
371374

375+
// impl<T, I> SpecInPlaceCollect<T, I> for I
376+
// where
377+
// I: Iterator<Item = T> + TrustedRandomAccessNoCoerce,
378+
// {
379+
// #[inline]
380+
// unsafe fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
381+
// let len = self.size();
382+
// let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
383+
// for i in 0..len {
384+
// // Safety: InPlaceIterable contract guarantees that for every element we read
385+
// // one slot in the underlying storage will have been freed up and we can immediately
386+
// // write back the result.
387+
// unsafe {
388+
// let dst = dst_buf.add(i);
389+
// debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
390+
// ptr::write(dst, self.__iterator_get_unchecked(i));
391+
// // Since this executes user code which can panic we have to bump the pointer
392+
// // after each step.
393+
// drop_guard.dst = dst.add(1);
394+
// }
395+
// }
396+
// mem::forget(drop_guard);
397+
// len
398+
// }
399+
// }
400+
372401
impl<T, I> SpecInPlaceCollect<T, I> for I
373402
where
374-
I: Iterator<Item = T> + TrustedRandomAccessNoCoerce,
403+
I: Iterator<Item = T> + UncheckedIndexedIterator,
375404
{
376405
#[inline]
377406
unsafe fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
378-
let len = self.size();
379-
let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
380-
for i in 0..len {
381-
// Safety: InPlaceIterable contract guarantees that for every element we read
382-
// one slot in the underlying storage will have been freed up and we can immediately
383-
// write back the result.
407+
let len = self.size_hint().0;
408+
409+
if len == 0 {
410+
return 0;
411+
}
412+
413+
struct LoopGuard<'a, I>
414+
where
415+
I: Iterator + UncheckedIndexedIterator,
416+
{
417+
it: &'a mut I,
418+
len: usize,
419+
idx: usize,
420+
dst_buf: *mut I::Item,
421+
}
422+
423+
impl<I> Drop for LoopGuard<'_, I>
424+
where
425+
I: Iterator + UncheckedIndexedIterator,
426+
{
427+
#[inline]
428+
fn drop(&mut self) {
429+
unsafe {
430+
let new_len = self.len - self.idx;
431+
if I::CLEANUP_ON_DROP {
432+
self.it.set_front_index_from_end_unchecked(new_len, self.len);
433+
}
434+
if needs_drop::<I::Item>() && self.idx != self.len {
435+
let raw_slice =
436+
ptr::slice_from_raw_parts_mut::<I::Item>(self.dst_buf, self.idx);
437+
ptr::drop_in_place(raw_slice);
438+
}
439+
}
440+
}
441+
}
442+
443+
let mut state = LoopGuard { it: self, len, idx: 0, dst_buf };
444+
445+
loop {
384446
unsafe {
385-
let dst = dst_buf.add(i);
447+
let idx = state.idx;
448+
state.idx = idx.unchecked_add(1);
449+
let dst = state.dst_buf.add(idx);
386450
debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
387-
ptr::write(dst, self.__iterator_get_unchecked(i));
388-
// Since this executes user code which can panic we have to bump the pointer
389-
// after each step.
390-
drop_guard.dst = dst.add(1);
451+
dst.write(state.it.index_from_end_unchecked(len - idx));
452+
}
453+
if state.idx == len {
454+
break;
391455
}
392456
}
393-
mem::forget(drop_guard);
457+
458+
// disarm guard, we don't want the front elements to get dropped
459+
mem::forget(state);
460+
// since the guard was disarmed, update the iterator state
461+
if Self::CLEANUP_ON_DROP {
462+
unsafe { self.set_front_index_from_end_unchecked(0, len) };
463+
}
464+
394465
len
395466
}
396467
}

library/alloc/src/vec/into_iter.rs

+55-3
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,10 @@ use core::array;
88
use core::fmt;
99
use core::iter::{
1010
FusedIterator, InPlaceIterable, SourceIter, TrustedFused, TrustedLen,
11-
TrustedRandomAccessNoCoerce,
11+
TrustedRandomAccessNoCoerce, UncheckedIndexedIterator,
1212
};
1313
use core::marker::PhantomData;
14-
use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
14+
use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
1515
use core::num::NonZero;
1616
#[cfg(not(no_global_oom_handling))]
1717
use core::ops::Deref;
@@ -224,7 +224,8 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
224224
let exact = if T::IS_ZST {
225225
self.end.addr().wrapping_sub(self.ptr.as_ptr().addr())
226226
} else {
227-
unsafe { non_null!(self.end, T).sub_ptr(self.ptr) }
227+
// FIXME(#121239): this should use sub_ptr but llvm == 18 doesn't optimize as it should
228+
unsafe { non_null!(self.end, T).offset_from(self.ptr) as usize }
228229
};
229230
(exact, Some(exact))
230231
}
@@ -303,6 +304,57 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
303304
// them for `Drop`.
304305
unsafe { self.ptr.add(i).read() }
305306
}
307+
308+
#[inline]
309+
unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item {
310+
if T::IS_ZST {
311+
// SAFETY: conjuring a ZST
312+
unsafe { NonNull::dangling().read() }
313+
} else {
314+
let end = non_null!(self.end, T);
315+
unsafe { end.sub(idx).read() }
316+
//self.ptr = unsafe { end.sub(idx).add(1) };
317+
//unsafe { self.end.sub(idx).read() }
318+
}
319+
}
320+
321+
#[inline]
322+
unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item {
323+
if T::IS_ZST {
324+
// SAFETY: conjuring a ZST
325+
unsafe { NonNull::dangling().read() }
326+
} else {
327+
//let end = non_null!(mut self.end, T);
328+
//*end = unsafe { self.ptr.add(idx) };
329+
unsafe { self.ptr.add(idx).read() }
330+
}
331+
}
332+
}
333+
334+
#[unstable(feature = "trusted_indexed_access", issue = "none")]
335+
impl<T, A: Allocator> UncheckedIndexedIterator for IntoIter<T, A> {
336+
const MAY_HAVE_SIDE_EFFECT: bool = false;
337+
const CLEANUP_ON_DROP: bool = mem::needs_drop::<T>();
338+
339+
#[inline]
340+
unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, _old_len: usize) {
341+
if T::IS_ZST {
342+
self.end = self.ptr.as_ptr().cast_const().wrapping_byte_add(new_len);
343+
} else {
344+
let end = non_null!(self.end, T);
345+
self.ptr = unsafe { end.sub(new_len) };
346+
}
347+
}
348+
349+
#[inline]
350+
unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, _old_len: usize) {
351+
if T::IS_ZST {
352+
self.end = self.ptr.as_ptr().cast_const().wrapping_byte_add(new_len);
353+
} else {
354+
let end = non_null!(mut self.end, T);
355+
*end = unsafe { self.ptr.add(new_len) };
356+
}
357+
}
306358
}
307359

308360
#[stable(feature = "rust1", since = "1.0.0")]

library/core/benches/iter.rs

+3-2
Original file line numberDiff line numberDiff line change
@@ -457,11 +457,12 @@ fn bench_trusted_random_access_adapters(b: &mut Bencher) {
457457
.map(|(a, b)| a.wrapping_add(b))
458458
.fuse();
459459
let mut acc: usize = 0;
460-
let size = iter.size();
460+
let size = iter.size_hint().0;
461461
for i in 0..size {
462462
// SAFETY: TRA requirements are satisfied by 0..size iteration and then dropping the
463463
// iterator.
464-
acc = acc.wrapping_add(unsafe { iter.__iterator_get_unchecked(i) });
464+
// The iterator is not owning, so we skip cleanup.
465+
acc = acc.wrapping_add(unsafe { iter.index_from_start_unchecked(i) });
465466
}
466467
acc
467468
})

library/core/benches/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
#![feature(flt2dec)]
44
#![feature(test)]
55
#![feature(trusted_random_access)]
6+
#![feature(trusted_indexed_access)]
67
#![feature(iter_array_chunks)]
78
#![feature(iter_next_chunk)]
89
#![feature(iter_advance_by)]

library/core/src/array/iter.rs

+40-2
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,10 @@ use crate::num::NonZero;
44
use crate::{
55
fmt,
66
intrinsics::transmute_unchecked,
7-
iter::{self, FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce},
8-
mem::MaybeUninit,
7+
iter::{
8+
self, FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce, UncheckedIndexedIterator,
9+
},
10+
mem::{self, MaybeUninit},
911
ops::{IndexRange, Range},
1012
ptr,
1113
};
@@ -300,6 +302,24 @@ impl<T, const N: usize> Iterator for IntoIter<T, N> {
300302
// SAFETY: The caller must provide an idx that is in bound of the remainder.
301303
unsafe { self.data.as_ptr().add(self.alive.start()).add(idx).cast::<T>().read() }
302304
}
305+
306+
#[inline]
307+
unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item
308+
where
309+
Self: UncheckedIndexedIterator,
310+
{
311+
// SAFETY: The caller must provide an idx that is in bound of the remainder.
312+
unsafe { self.data.as_ptr().add(self.alive.end()).sub(idx).cast::<T>().read() }
313+
}
314+
315+
#[inline]
316+
unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item
317+
where
318+
Self: UncheckedIndexedIterator,
319+
{
320+
// SAFETY: The caller must provide an idx that is in bound of the remainder.
321+
unsafe { self.data.as_ptr().add(self.alive.start()).add(idx).cast::<T>().read() }
322+
}
303323
}
304324

305325
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
@@ -400,6 +420,24 @@ where
400420
const MAY_HAVE_SIDE_EFFECT: bool = false;
401421
}
402422

423+
#[unstable(feature = "trusted_indexed_access", issue = "none")]
424+
impl<T, const N: usize> UncheckedIndexedIterator for IntoIter<T, N> {
425+
const MAY_HAVE_SIDE_EFFECT: bool = false;
426+
const CLEANUP_ON_DROP: bool = mem::needs_drop::<T>();
427+
428+
#[inline]
429+
unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, _old_len: usize) {
430+
// SAFETY: ...
431+
unsafe { self.alive.set_start_unchecked(self.alive.end() - new_len) };
432+
}
433+
434+
#[inline]
435+
unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, _old_len: usize) {
436+
// SAFETY: ...
437+
unsafe { self.alive.set_end_unchecked(self.alive.start() + new_len) };
438+
}
439+
}
440+
403441
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
404442
impl<T: Clone, const N: usize> Clone for IntoIter<T, N> {
405443
fn clone(&self) -> Self {

library/core/src/iter/adapters/cloned.rs

+43-1
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,10 @@
11
use crate::iter::adapters::{
22
zip::try_get_unchecked, SourceIter, TrustedRandomAccess, TrustedRandomAccessNoCoerce,
33
};
4-
use crate::iter::{FusedIterator, InPlaceIterable, TrustedLen, UncheckedIterator};
4+
use crate::iter::traits::SpecIndexedAccess as _;
5+
use crate::iter::{
6+
FusedIterator, InPlaceIterable, TrustedLen, UncheckedIndexedIterator, UncheckedIterator,
7+
};
58
use crate::ops::Try;
69
use core::num::NonZero;
710

@@ -69,6 +72,24 @@ where
6972
// `Iterator::__iterator_get_unchecked`.
7073
unsafe { try_get_unchecked(&mut self.it, idx).clone() }
7174
}
75+
76+
#[inline]
77+
unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item
78+
where
79+
Self: UncheckedIndexedIterator,
80+
{
81+
// SAFETY: forwarding to unsafe function with the same preconditions
82+
unsafe { self.it.index_from_end_unchecked_inner(idx) }.clone()
83+
}
84+
85+
#[inline]
86+
unsafe fn index_from_start_unchecked(&mut self, idx: usize) -> Self::Item
87+
where
88+
Self: UncheckedIndexedIterator,
89+
{
90+
// SAFETY: forwarding to unsafe function with the same preconditions
91+
unsafe { self.it.index_from_start_unchecked_inner(idx) }.clone()
92+
}
7293
}
7394

7495
#[stable(feature = "iter_cloned", since = "1.1.0")]
@@ -134,6 +155,27 @@ where
134155
const MAY_HAVE_SIDE_EFFECT: bool = true;
135156
}
136157

158+
#[unstable(feature = "trusted_indexed_access", issue = "none")]
159+
impl<I> UncheckedIndexedIterator for Cloned<I>
160+
where
161+
I: UncheckedIndexedIterator,
162+
{
163+
#[inline]
164+
unsafe fn set_front_index_from_end_unchecked(&mut self, new_len: usize, old_len: usize) {
165+
// SAFETY: forwarding to unsafe function with the same preconditions
166+
unsafe { self.it.set_front_index_from_end_unchecked(new_len, old_len) }
167+
}
168+
169+
#[inline]
170+
unsafe fn set_end_index_from_start_unchecked(&mut self, new_len: usize, old_len: usize) {
171+
// SAFETY: forwarding to unsafe function with the same preconditions
172+
unsafe { self.it.set_end_index_from_start_unchecked(new_len, old_len) }
173+
}
174+
175+
const MAY_HAVE_SIDE_EFFECT: bool = true;
176+
const CLEANUP_ON_DROP: bool = I::CLEANUP_ON_DROP;
177+
}
178+
137179
#[unstable(feature = "trusted_len", issue = "37572")]
138180
unsafe impl<'a, I, T: 'a> TrustedLen for Cloned<I>
139181
where

0 commit comments

Comments (0)