@@ -157,7 +157,8 @@
 use crate::alloc::{handle_alloc_error, Global};
 use core::alloc::Allocator;
 use core::alloc::Layout;
-use core::iter::{InPlaceIterable, SourceIter, TrustedRandomAccessNoCoerce};
+use core::iter::UncheckedIndexedIterator;
+use core::iter::{InPlaceIterable, SourceIter};
 use core::marker::PhantomData;
 use core::mem::{self, ManuallyDrop, SizedTypeProperties};
 use core::num::NonZeroUsize;
@@ -369,28 +370,91 @@ where
 }
 }
 
+// impl<T, I> SpecInPlaceCollect<T, I> for I
+// where
+//     I: Iterator<Item = T> + TrustedRandomAccessNoCoerce,
+// {
+//     #[inline]
+//     unsafe fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
+//         let len = self.size();
+//         let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
+//         for i in 0..len {
+//             // SAFETY: the InPlaceIterable contract guarantees that for every element we read,
+//             // one slot in the underlying storage will have been freed up and we can immediately
+//             // write back the result.
+//             unsafe {
+//                 let dst = dst_buf.add(i);
+//                 debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
+//                 ptr::write(dst, self.__iterator_get_unchecked(i));
+//                 // Since this executes user code which can panic we have to bump the pointer
+//                 // after each step.
+//                 drop_guard.dst = dst.add(1);
+//             }
+//         }
+//         mem::forget(drop_guard);
+//         len
+//     }
+// }
+
 impl<T, I> SpecInPlaceCollect<T, I> for I
 where
-    I: Iterator<Item = T> + TrustedRandomAccessNoCoerce,
+    I: Iterator<Item = T> + UncheckedIndexedIterator,
 {
     #[inline]
-    unsafe fn collect_in_place(&mut self, dst_buf: *mut T, end: *const T) -> usize {
-        let len = self.size();
-        let mut drop_guard = InPlaceDrop { inner: dst_buf, dst: dst_buf };
-        for i in 0..len {
-            // Safety: InplaceIterable contract guarantees that for every element we read
-            // one slot in the underlying storage will have been freed up and we can immediately
-            // write back the result.
+    unsafe fn collect_in_place(&mut self, dst_buf: *mut T, _end: *const T) -> usize {
+        let len = self.size_hint().0;
+
+        if len == 0 {
+            return 0;
+        }
+
+        // Panic guard: on unwind it records how many elements were consumed
+        // from the source and drops those already written to the destination.
+        struct LoopGuard<'a, I>
+        where
+            I: Iterator + UncheckedIndexedIterator,
+        {
+            it: &'a mut I,
+            len: usize,
+            idx: usize,
+            dst_buf: *mut I::Item,
+        }
+
+        impl<I> Drop for LoopGuard<'_, I>
+        where
+            I: Iterator + UncheckedIndexedIterator,
+        {
+            #[inline]
+            fn drop(&mut self) {
+                unsafe {
+                    self.it.set_front_index_from_end_unchecked(self.len - self.idx);
+                    if self.idx != self.len {
+                        let raw_slice =
+                            ptr::slice_from_raw_parts_mut::<I::Item>(self.dst_buf, self.idx);
+                        ptr::drop_in_place(raw_slice);
+                    }
+                }
+            }
+        }
+
+        let mut state = LoopGuard { it: self, len, idx: 0, dst_buf };
+
+        loop {
             unsafe {
-                let dst = dst_buf.add(i);
-                debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
-                ptr::write(dst, self.__iterator_get_unchecked(i));
-                // Since this executes user code which can panic we have to bump the pointer
-                // after each step.
-                drop_guard.dst = dst.add(1);
+                let dst = state.dst_buf.add(state.idx);
+                dst.write(state.it.index_from_end_unchecked(state.len.unchecked_sub(state.idx)));
+                state.idx = state.idx.unchecked_add(1);
+            }
+            if state.idx == len {
+                break;
             }
         }
-        mem::forget(drop_guard);
+
+        // Disarm the guard; the moved-out front elements must not be dropped.
+        mem::forget(state);
+        // With the guard disarmed, record the iterator's final state directly.
+        unsafe { self.set_front_index_from_end_unchecked(0) };
+
         len
     }
 }
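
Note on the new bound: the loop above relies on two operations from `UncheckedIndexedIterator`, `index_from_end_unchecked` and `set_front_index_from_end_unchecked`. Indices count down from the end of the iterator, so with `len` elements remaining, `index_from_end_unchecked(len - i)` reads the i-th element from the front, and the loop can terminate on a plain `idx == len` comparison instead of bumping a separate destination pointer each step. The sketch below reconstructs that trait surface purely from these two call sites; the real `core::iter::UncheckedIndexedIterator` is an unstable internal trait whose actual definition may differ, and `CopyIter` is a hypothetical adapter introduced only to make the sketch compile and run.

// Sketch only: reconstructed from the call sites in the diff above; the real
// unstable trait in core may have a different shape.
unsafe trait UncheckedIndexedIterator: Iterator {
    /// Returns the element `idx` positions before the end, without bounds
    /// checks. With `remaining` elements left, `idx == remaining` yields the
    /// current front element, so element `i` is read via
    /// `index_from_end_unchecked(len - i)`.
    ///
    /// SAFETY: `1 <= idx <= remaining`, and each position is read at most once.
    unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> Self::Item;

    /// Informs the iterator how many elements remain unconsumed, so that its
    /// own cleanup only touches elements the caller did not read.
    ///
    /// SAFETY: `remaining` must not exceed the number of elements actually left.
    unsafe fn set_front_index_from_end_unchecked(&mut self, remaining: usize);
}

// Hypothetical adapter over a slice of `Copy` elements, present only to make
// the sketch self-contained.
struct CopyIter<'a, T: Copy> {
    slice: &'a [T],
    front: usize, // index of the next element `next()` would yield
}

impl<'a, T: Copy> Iterator for CopyIter<'a, T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        let item = self.slice.get(self.front).copied()?;
        self.front += 1;
        Some(item)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.slice.len() - self.front;
        (remaining, Some(remaining))
    }
}

unsafe impl<'a, T: Copy> UncheckedIndexedIterator for CopyIter<'a, T> {
    unsafe fn index_from_end_unchecked(&mut self, idx: usize) -> T {
        // `idx` counts backwards from the end of the slice.
        unsafe { *self.slice.get_unchecked(self.slice.len() - idx) }
    }

    unsafe fn set_front_index_from_end_unchecked(&mut self, remaining: usize) {
        self.front = self.slice.len() - remaining;
    }
}

fn main() {
    let data = [10, 20, 30];
    let mut it = CopyIter { slice: &data, front: 0 };
    let len = it.size_hint().0;
    for i in 0..len {
        // SAFETY: `0 <= i < len`, so `len - i` lies in `1..=remaining`.
        print!("{} ", unsafe { it.index_from_end_unchecked(len - i) });
    }
    // Mark everything as consumed, mirroring the loop's success path.
    unsafe { it.set_front_index_from_end_unchecked(0) };
    println!("\nremaining: {}", it.size_hint().0); // remaining: 0
}

On the success path the caller hands the iterator its final position exactly once (`set_front_index_from_end_unchecked(0)` in the diff) rather than updating it on every iteration; the guard performs the equivalent bookkeeping only when unwinding.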
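The `LoopGuard` exists because `index_from_end_unchecked` executes user code (e.g. the closure of a `map`) that can panic, and any elements already moved into the destination buffer must still be dropped exactly once during unwinding. Below is a minimal standalone sketch of that drop-guard pattern under simplified assumptions; `PartialInitGuard` and `fill_with` are names invented for this example, and unlike `LoopGuard` it does not rewind a source iterator.

use std::mem;
use std::ptr;

// Hypothetical guard illustrating the pattern only.
struct PartialInitGuard<T> {
    dst: *mut T,
    initialized: usize,
}

impl<T> Drop for PartialInitGuard<T> {
    fn drop(&mut self) {
        // Reached only during unwinding (the success path forgets the guard):
        // drop exactly the prefix that was fully written.
        unsafe {
            ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.dst, self.initialized));
        }
    }
}

/// Fills `dst` from `produce`, which may panic.
///
/// SAFETY: `dst` must point to allocated, writable storage for `len` elements.
unsafe fn fill_with<T>(dst: *mut T, len: usize, mut produce: impl FnMut(usize) -> T) {
    let mut guard = PartialInitGuard { dst, initialized: 0 };
    for i in 0..len {
        // `produce` runs user code that may panic, so the guard's count is
        // bumped only after each element has been fully written.
        unsafe { dst.add(i).write(produce(i)) };
        guard.initialized = i + 1;
    }
    // Success: disarm the guard so the initialized elements are kept alive.
    mem::forget(guard);
}

fn main() {
    let mut buf: Vec<String> = Vec::with_capacity(3);
    // SAFETY: the Vec allocation is valid for 3 writes of `String`.
    unsafe {
        fill_with(buf.as_mut_ptr(), 3, |i| format!("item {i}"));
        buf.set_len(3); // the elements are now initialized
    }
    assert_eq!(buf[2], "item 2");
}

`mem::forget` on the success path is what "disarms" the guard: its `Drop` never runs, so ownership of the initialized elements passes cleanly to the destination.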