@@ -231,97 +231,105 @@ where
     I: Iterator<Item = T> + InPlaceCollect,
     <I as SourceIter>::Source: AsVecIntoIter,
 {
-    default fn from_iter(mut iterator: I) -> Self {
+    default fn from_iter(iterator: I) -> Self {
         // See "Layout constraints" section in the module documentation. We rely on const
         // optimization here since these conditions currently cannot be expressed as trait bounds
-        if const { !in_place_collectible::<T, I::Src>(I::MERGE_BY, I::EXPAND_BY) } {
-            // fallback to more generic implementations
-            return SpecFromIterNested::from_iter(iterator);
-        }
-
-        let (src_buf, src_ptr, src_cap, mut dst_buf, dst_end, dst_cap) = unsafe {
-            let inner = iterator.as_inner().as_into_iter();
-            (
-                inner.buf.as_ptr(),
-                inner.ptr,
-                inner.cap,
-                inner.buf.as_ptr() as *mut T,
-                inner.end as *const T,
-                inner.cap * mem::size_of::<I::Src>() / mem::size_of::<T>(),
-            )
+        let fun = const {
+            if !in_place_collectible::<T, I::Src>(I::MERGE_BY, I::EXPAND_BY) {
+                SpecFromIterNested::<T, I>::from_iter
+            } else {
+                from_iter
+            }
         };

-        // SAFETY: `dst_buf` and `dst_end` are the start and end of the buffer.
-        let len = unsafe { SpecInPlaceCollect::collect_in_place(&mut iterator, dst_buf, dst_end) };
-
-        let src = unsafe { iterator.as_inner().as_into_iter() };
-        // check if SourceIter contract was upheld
-        // caveat: if they weren't we might not even make it to this point
-        debug_assert_eq!(src_buf, src.buf.as_ptr());
-        // check InPlaceIterable contract. This is only possible if the iterator advanced the
-        // source pointer at all. If it uses unchecked access via UncheckedIndexedIterator
-        // and doesn't perform cleanup then the source pointer will stay in its initial position
-        // and we can't use it as reference.
-        if src.ptr != src_ptr {
-            debug_assert!(
-                unsafe { dst_buf.add(len) as *const _ } <= src.ptr.as_ptr(),
-                "InPlaceIterable contract violation, write pointer advanced beyond read pointer"
-            );
-        }
+        fun(iterator)
+    }
+}

-        // The ownership of the source allocation and the new `T` values is temporarily moved into `dst_guard`.
-        // This is safe because
-        // * `forget_allocation_drop_remaining` immediately forgets the allocation
-        //   before any panic can occur in order to avoid any double free, and then proceeds to drop
-        //   any remaining values at the tail of the source.
-        // * the shrink either panics without invalidating the allocation, aborts or
-        //   succeeds. In the last case we disarm the guard.
-        //
-        // Note: This access to the source wouldn't be allowed by the TrustedRandomIteratorNoCoerce
-        // contract (used by SpecInPlaceCollect below). But see the "O(1) collect" section in the
-        // module documentation why this is ok anyway.
-        let dst_guard =
-            InPlaceDstDataSrcBufDrop { ptr: dst_buf, len, src_cap, src: PhantomData::<I::Src> };
-        src.forget_allocation_drop_remaining();
-
-        // Adjust the allocation if the source had a capacity in bytes that wasn't a multiple
-        // of the destination type size.
-        // Since the discrepancy should generally be small this should only result in some
-        // bookkeeping updates and no memmove.
-        if needs_realloc::<I::Src, T>(src_cap, dst_cap) {
-            let alloc = Global;
-            debug_assert_ne!(src_cap, 0);
-            debug_assert_ne!(dst_cap, 0);
-            unsafe {
-                // The old allocation exists, therefore it must have a valid layout.
-                let src_align = mem::align_of::<I::Src>();
-                let src_size = mem::size_of::<I::Src>().unchecked_mul(src_cap);
-                let old_layout = Layout::from_size_align_unchecked(src_size, src_align);
-
-                // The allocation must be equal or smaller for in-place iteration to be possible
-                // therefore the new layout must be ≤ the old one and therefore valid.
-                let dst_align = mem::align_of::<T>();
-                let dst_size = mem::size_of::<T>().unchecked_mul(dst_cap);
-                let new_layout = Layout::from_size_align_unchecked(dst_size, dst_align);
-
-                let result = alloc.shrink(
-                    NonNull::new_unchecked(dst_buf as *mut u8),
-                    old_layout,
-                    new_layout,
-                );
-                let Ok(reallocated) = result else { handle_alloc_error(new_layout) };
-                dst_buf = reallocated.as_ptr() as *mut T;
-            }
-        } else {
-            debug_assert_eq!(src_cap * mem::size_of::<I::Src>(), dst_cap * mem::size_of::<T>());
+fn from_iter<I, T>(mut iterator: I) -> Vec<T>
+where
+    I: Iterator<Item = T> + InPlaceCollect,
+    <I as SourceIter>::Source: AsVecIntoIter,
+{
+    let (src_buf, src_ptr, src_cap, mut dst_buf, dst_end, dst_cap) = unsafe {
+        let inner = iterator.as_inner().as_into_iter();
+        (
+            inner.buf.as_ptr(),
+            inner.ptr,
+            inner.cap,
+            inner.buf.as_ptr() as *mut T,
+            inner.end as *const T,
+            inner.cap * mem::size_of::<I::Src>() / mem::size_of::<T>(),
+        )
+    };
+
+    // SAFETY: `dst_buf` and `dst_end` are the start and end of the buffer.
+    let len = unsafe { SpecInPlaceCollect::collect_in_place(&mut iterator, dst_buf, dst_end) };
+
+    let src = unsafe { iterator.as_inner().as_into_iter() };
+    // check if SourceIter contract was upheld
+    // caveat: if they weren't we might not even make it to this point
+    debug_assert_eq!(src_buf, src.buf.as_ptr());
+    // check InPlaceIterable contract. This is only possible if the iterator advanced the
+    // source pointer at all. If it uses unchecked access via UncheckedIndexedIterator
+    // and doesn't perform cleanup then the source pointer will stay in its initial position
+    // and we can't use it as reference.
+    if src.ptr != src_ptr {
+        debug_assert!(
+            unsafe { dst_buf.add(len) as *const _ } <= src.ptr.as_ptr(),
+            "InPlaceIterable contract violation, write pointer advanced beyond read pointer"
+        );
+    }
+
+    // The ownership of the source allocation and the new `T` values is temporarily moved into `dst_guard`.
+    // This is safe because
+    // * `forget_allocation_drop_remaining` immediately forgets the allocation
+    //   before any panic can occur in order to avoid any double free, and then proceeds to drop
+    //   any remaining values at the tail of the source.
+    // * the shrink either panics without invalidating the allocation, aborts or
+    //   succeeds. In the last case we disarm the guard.
+    //
+    // Note: This access to the source wouldn't be allowed by the TrustedRandomIteratorNoCoerce
+    // contract (used by SpecInPlaceCollect below). But see the "O(1) collect" section in the
+    // module documentation why this is ok anyway.
+    let dst_guard =
+        InPlaceDstDataSrcBufDrop { ptr: dst_buf, len, src_cap, src: PhantomData::<I::Src> };
+    src.forget_allocation_drop_remaining();
+
+    // Adjust the allocation if the source had a capacity in bytes that wasn't a multiple
+    // of the destination type size.
+    // Since the discrepancy should generally be small this should only result in some
+    // bookkeeping updates and no memmove.
+    if needs_realloc::<I::Src, T>(src_cap, dst_cap) {
+        let alloc = Global;
+        debug_assert_ne!(src_cap, 0);
+        debug_assert_ne!(dst_cap, 0);
+        unsafe {
+            // The old allocation exists, therefore it must have a valid layout.
+            let src_align = mem::align_of::<I::Src>();
+            let src_size = mem::size_of::<I::Src>().unchecked_mul(src_cap);
+            let old_layout = Layout::from_size_align_unchecked(src_size, src_align);
+
+            // The allocation must be equal or smaller for in-place iteration to be possible
+            // therefore the new layout must be ≤ the old one and therefore valid.
+            let dst_align = mem::align_of::<T>();
+            let dst_size = mem::size_of::<T>().unchecked_mul(dst_cap);
+            let new_layout = Layout::from_size_align_unchecked(dst_size, dst_align);
+
+            let result =
+                alloc.shrink(NonNull::new_unchecked(dst_buf as *mut u8), old_layout, new_layout);
+            let Ok(reallocated) = result else { handle_alloc_error(new_layout) };
+            dst_buf = reallocated.as_ptr() as *mut T;
         }
+    } else {
+        debug_assert_eq!(src_cap * mem::size_of::<I::Src>(), dst_cap * mem::size_of::<T>());
+    }

-        mem::forget(dst_guard);
+    mem::forget(dst_guard);

-        let vec = unsafe { Vec::from_raw_parts(dst_buf, len, dst_cap) };
+    let vec = unsafe { Vec::from_raw_parts(dst_buf, len, dst_cap) };

-        vec
-    }
+    vec
 }

 fn write_in_place_with_drop<T>(
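Note on the restructuring above: the specialized `default fn from_iter` no longer early-returns into the nested fallback. Instead it evaluates a `const` block to a function pointer, choosing between `SpecFromIterNested::<T, I>::from_iter` and the new free `from_iter` that now holds the in-place body, and then calls whichever was selected. A minimal, self-contained sketch of that dispatch pattern follows; the names and the compile-time condition are illustrative stand-ins, not code from this commit:

```rust
fn specialized(v: Vec<u8>) -> Vec<u8> {
    // stand-in for the in-place fast path
    v
}

fn fallback(v: Vec<u8>) -> Vec<u8> {
    // stand-in for the generic nested implementation
    v.into_iter().collect()
}

fn collect_bytes(v: Vec<u8>) -> Vec<u8> {
    // Both branches coerce to the same `fn` pointer type; the condition is a
    // compile-time constant (a trivial stand-in for `in_place_collectible`),
    // so the choice is made during const evaluation rather than at runtime.
    let fun: fn(Vec<u8>) -> Vec<u8> =
        const { if std::mem::size_of::<u8>() == 1 { specialized } else { fallback } };
    fun(v)
}

fn main() {
    assert_eq!(collect_bytes(vec![1, 2, 3]), vec![1, 2, 3]);
}
```

Since the branch is resolved during const evaluation, only the selected path is ever reached through `fun`; the constraint this pattern imposes is that both target functions must share the exact same signature.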