// allocator_api2/stable/vec/into_iter.rs

use core::fmt;
use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem::{self, size_of, ManuallyDrop};

use core::ptr::{self, NonNull};
use core::slice::{self};

use crate::stable::addr;

use super::{Allocator, Global, RawVec};

#[cfg(not(no_global_oom_handling))]
use super::Vec;

/// An iterator that moves out of a vector.
///
/// This struct is created by the `into_iter` method on `Vec`
/// (provided by the `IntoIterator` trait).
pub struct IntoIter<T, A: Allocator = Global> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // `Drop` reconstructs a `RawVec` from `buf`, `cap`, and `alloc`; wrapping
    // the allocator in `ManuallyDrop` keeps it from being dropped twice.
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: *const T,
    pub(super) end: *const T,
}

impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[inline(always)]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }
}
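
// A hedged usage sketch (added here; not part of the upstream file). It
// assumes this module's `Vec` supports `new`/`push`/`into_iter` like std's,
// which is this crate's stated goal.
#[cfg(all(test, not(no_global_oom_handling)))]
mod slice_view_tests {
    use super::Vec;

    #[test]
    fn as_slice_views_only_unyielded_elements() {
        let mut v = Vec::new();
        v.push('a');
        v.push('b');
        v.push('c');
        let mut it = v.into_iter();
        assert_eq!(it.as_slice(), &['a', 'b', 'c']);
        assert_eq!(it.next(), Some('a'));
        assert_eq!(it.as_slice(), &['b', 'c']);

        // `as_mut_slice` allows editing elements before they are yielded.
        it.as_mut_slice()[0] = 'z';
        assert_eq!(it.next(), Some('z'));
    }
}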

impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}

unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

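// A small compile-time sketch (added; not upstream): the unsafe impls above
// make `IntoIter` `Send`/`Sync` exactly when `T` and `A` are, which this
// helper checks by returning an `impl Send + Sync`.
#[cfg(test)]
#[allow(dead_code)]
fn _assert_into_iter_send_sync<T, A>(it: IntoIter<T, A>) -> impl Send + Sync
where
    T: Send + Sync,
    A: Allocator + Send + Sync,
{
    it
}
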
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline(always)]
    fn next(&mut self) -> Option<T> {
        if self.ptr == self.end {
            None
        } else if size_of::<T>() == 0 {
            // For zero-sized types the buffer holds no data, so the pointer
            // value itself serves as the remaining-length counter. `ptr::add`
            // would not move a ZST pointer, so step the address byte-wise.
            self.ptr = self.ptr.cast::<u8>().wrapping_add(1).cast();

            // Conjure a value of this ZST; all-zero bytes are valid for it.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if size_of::<T>() == 0 {
            // For ZSTs the remaining length is the address difference itself.
            addr(self.end).wrapping_sub(addr(self.ptr))
        } else {
            unsafe { self.end.offset_from(self.ptr) as usize }
        };
        (exact, Some(exact))
    }

    #[inline(always)]
    fn count(self) -> usize {
        self.len()
    }
}

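// A hedged sanity sketch (added; not upstream), again assuming this module's
// `Vec` mirrors std's construction API.
#[cfg(all(test, not(no_global_oom_handling)))]
mod iterator_tests {
    use super::Vec;

    #[test]
    fn size_hint_is_exact() {
        let mut v = Vec::new();
        v.push(10);
        v.push(20);
        v.push(30);
        let mut it = v.into_iter();
        assert_eq!(it.size_hint(), (3, Some(3)));
        assert_eq!(it.next(), Some(10));
        assert_eq!(it.size_hint(), (2, Some(2)));
        // `count` delegates to `len`, so it is O(1).
        assert_eq!(it.count(), 2);
    }

    #[test]
    fn zero_sized_elements_are_counted_by_address() {
        // For ZSTs the remaining length is the `end - ptr` address difference.
        let mut v = Vec::new();
        v.push(());
        v.push(());
        let mut it = v.into_iter();
        assert_eq!(it.size_hint(), (2, Some(2)));
        assert_eq!(it.next(), Some(()));
        assert_eq!(it.size_hint(), (1, Some(1)));
    }
}
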
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline(always)]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if size_of::<T>() == 0 {
            // Mirror of `next`: move `end` one byte *toward* `ptr`. This must
            // be `wrapping_sub`, or `end == ptr` would never become true and
            // the iterator would not terminate.
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast();

            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.sub(1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }
}

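// A hedged sketch of the back-to-front contract (added; not upstream).
#[cfg(all(test, not(no_global_oom_handling)))]
mod double_ended_tests {
    use super::Vec;

    #[test]
    fn next_back_yields_from_the_end() {
        let mut v = Vec::new();
        v.push(1);
        v.push(2);
        v.push(3);
        let mut it = v.into_iter();
        assert_eq!(it.next_back(), Some(3));
        assert_eq!(it.next(), Some(1));
        assert_eq!(it.next_back(), Some(2));
        assert_eq!(it.next_back(), None);
    }

    #[test]
    fn next_back_terminates_for_zsts() {
        // Exercises the byte-wise `wrapping_sub` step in `next_back`.
        let mut v = Vec::new();
        v.push(());
        v.push(());
        let mut it = v.into_iter();
        assert_eq!(it.next_back(), Some(()));
        assert_eq!(it.next_back(), Some(()));
        assert_eq!(it.next_back(), None);
    }
}
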
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {}

impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[cfg(not(no_global_oom_handling))]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    fn clone(&self) -> Self {
        // Clone only the elements that have not been yielded yet, into a
        // fresh buffer allocated from a clone of the allocator.
        let mut vec = Vec::<T, A>::with_capacity_in(self.len(), (*self.alloc).clone());
        vec.extend(self.as_slice().iter().cloned());
        vec.into_iter()
    }
}

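// A hedged sketch (added; not upstream): cloning mid-iteration copies only
// the elements that have not been yielded yet.
#[cfg(all(test, not(no_global_oom_handling)))]
mod clone_tests {
    use super::Vec;

    #[test]
    fn clone_copies_only_remaining_elements() {
        let mut v = Vec::new();
        v.push(1);
        v.push(2);
        v.push(3);
        let mut it = v.into_iter();
        assert_eq!(it.next(), Some(1));
        let cloned = it.clone();
        assert_eq!(cloned.as_slice(), &[2, 3]);
        assert_eq!(it.as_slice(), &[2, 3]);
    }
}
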
impl<T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `alloc` is not used again after this, so taking it out
                    // of the `ManuallyDrop` is sound; the reconstructed
                    // `RawVec` deallocates the buffer (and drops the
                    // allocator) when it goes out of scope here.
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        unsafe {
            // Drop the elements that were never yielded; the guard then frees
            // the allocation even if one of these destructors panics.
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // `guard` is dropped here and does the deallocation.
    }
}
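
// A hedged sketch of the drop contract (added; not upstream): elements the
// caller never consumed are dropped together with the iterator, and the
// buffer is then returned to the allocator by the reconstructed `RawVec`.
// `Counted` is a test-only helper type introduced for this sketch.
#[cfg(all(test, not(no_global_oom_handling)))]
mod drop_tests {
    use super::Vec;
    use core::cell::Cell;

    struct Counted<'a>(&'a Cell<usize>);

    impl Drop for Counted<'_> {
        fn drop(&mut self) {
            self.0.set(self.0.get() + 1);
        }
    }

    #[test]
    fn remaining_elements_are_dropped_with_the_iterator() {
        let drops = Cell::new(0);
        let mut v = Vec::new();
        for _ in 0..3 {
            v.push(Counted(&drops));
        }
        let mut it = v.into_iter();
        drop(it.next()); // the yielded element is dropped by the caller
        assert_eq!(drops.get(), 1);
        drop(it); // `DropGuard` drops the remaining two, then frees the buffer
        assert_eq!(drops.get(), 3);
    }
}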