use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
use crate::trivial_type::TrivialType;
use crate::{DerefWrapper, IoType, IoTypeOptional};
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{ptr, slice};

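/// A variable number of elements of a [`TrivialType`].
///
/// Internally stores a pointer to the elements, a pointer to the size (in bytes) and the capacity
/// (in bytes). `RECOMMENDED_ALLOCATION` is only a hint that is encoded into the generated
/// metadata and doesn't affect the memory layout.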
#[derive(Debug)]
#[repr(C)]
pub struct VariableElements<Element, const RECOMMENDED_ALLOCATION: u32 = 0>
where
    Element: TrivialType,
{
    elements: NonNull<Element>,
    size: NonNull<u32>,
    capacity: u32,
}

unsafe impl<Element, const RECOMMENDED_ALLOCATION: u32> IoType
    for VariableElements<Element, RECOMMENDED_ALLOCATION>
where
    Element: TrivialType,
{
    const METADATA: &[u8] = {
        const fn metadata(
            recommended_allocation: u32,
            inner_metadata: &[u8],
        ) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableElements0 as u8],
                    inner_metadata,
                ]);
            }

            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableElements8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableElements16b, 2)
            } else {
                (IoTypeMetadataKind::VariableElements32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
                inner_metadata,
            ])
        }

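        // `metadata()` is called in place (twice) rather than bound with `let`, so the scratch
        // buffer stays a temporary and the const initializer can extend its lifetime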
        metadata(RECOMMENDED_ALLOCATION, Element::METADATA)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION, Element::METADATA).1)
            .0
    };

    type PointerType = Element;

    #[inline(always)]
    fn size(&self) -> u32 {
        self.size()
    }

    #[inline(always)]
    unsafe fn size_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(self.size)
    }

    #[inline(always)]
    unsafe fn size_mut_ptr(&mut self) -> impl DerefMut<Target = *mut u32> {
        DerefWrapper(self.size.as_ptr())
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    unsafe fn capacity_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(NonNull::from_ref(&self.capacity))
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );
        debug_assert_eq!(
            size % Element::SIZE,
            0,
            "`set_size` called with invalid input {size} for element size {}",
            Element::SIZE
        );

        // SAFETY: `self.size` points to a valid, writable `u32` by construction
        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );
        debug_assert_eq!(
            size % Element::SIZE,
            0,
            "Size {size} is invalid for element size {}",
            Element::SIZE
        );

        DerefWrapper(Self {
            elements: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut *mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(!size.is_null(), "`null` pointer for non-`TrivialType` size");
        // SAFETY: Non-null checked above
        let size = unsafe { NonNull::new_unchecked(*size) };
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        {
            let size = unsafe { size.read() };
            debug_assert!(
                size <= capacity,
                "Size {size} must not exceed capacity {capacity}"
            );
            debug_assert_eq!(
                size % Element::SIZE,
                0,
                "Size {size} is invalid for element size {}",
                Element::SIZE
            );
        }

        DerefWrapper(Self {
            elements: *ptr,
            size,
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.elements
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.elements
    }
}

impl<Element, const RECOMMENDED_ALLOCATION: u32> IoTypeOptional
    for VariableElements<Element, RECOMMENDED_ALLOCATION>
where
    Element: TrivialType,
{
}

impl<Element, const RECOMMENDED_ALLOCATION: u32> VariableElements<Element, RECOMMENDED_ALLOCATION>
where
    Element: TrivialType,
{
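    /// Create a new shared instance from a provided memory buffer.
    ///
    /// # Panics
    /// In debug builds, panics if the buffer length in bytes doesn't match `size`.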
    #[inline(always)]
    #[track_caller]
    pub const fn from_buffer<'a>(
        buffer: &'a [<Self as IoType>::PointerType],
        size: &'a u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(
            buffer.len() * Element::SIZE as usize == *size as usize,
            "Invalid size"
        );

        DerefWrapper(Self {
            elements: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
            size: NonNull::from_ref(size),
            capacity: *size,
        })
    }

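    /// Create a new exclusive instance from a provided memory buffer.
    ///
    /// # Panics
    /// In debug builds, panics if the buffer length in bytes doesn't match `size`.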
    #[inline(always)]
    #[track_caller]
    pub fn from_buffer_mut<'a>(
        buffer: &'a mut [<Self as IoType>::PointerType],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert_eq!(
            buffer.len() * Element::SIZE as usize,
            *size as usize,
            "Invalid size"
        );

        DerefWrapper(Self {
            elements: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity: *size,
        })
    }

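    /// Create a new exclusive instance from a potentially uninitialized memory buffer.
    ///
    /// Only the first `size` bytes of the buffer are treated as initialized.
    ///
    /// # Panics
    /// In debug builds, panics if `size` exceeds the buffer capacity or isn't a multiple of
    /// `Element::SIZE`.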
    #[inline(always)]
    #[track_caller]
    pub fn from_uninit<'a>(
        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        // Capacity is measured in bytes, while `uninit.len()` is a number of elements
        let capacity = uninit.len() * Element::SIZE as usize;
        debug_assert!(
            *size as usize <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );
        debug_assert_eq!(
            *size % Element::SIZE,
            0,
            "Size {size} is invalid for element size {}",
            Element::SIZE
        );
        let capacity = capacity as u32;

        DerefWrapper(Self {
            elements: NonNull::new(MaybeUninit::slice_as_mut_ptr(uninit)).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity,
        })
    }

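    /// Size of the initialized contents in bytes.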
    #[inline(always)]
    pub const fn size(&self) -> u32 {
        // SAFETY: `self.size` points to a valid `u32` by construction
        unsafe { self.size.read() }
    }

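    /// Capacity of the underlying buffer in bytes.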
    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.capacity
    }

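    /// Number of initialized elements.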
    #[inline(always)]
    pub const fn count(&self) -> u32 {
        // `size` is stored in bytes, so convert it to a number of elements
        self.size() / Element::SIZE
    }

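    /// Access the initialized elements as a slice.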
    #[inline(always)]
    pub const fn get_initialized(&self) -> &[Element] {
        let size = self.size();
        let ptr = self.elements.as_ptr();
        // SAFETY: The first `size` bytes are initialized per constructor invariants
        unsafe { slice::from_raw_parts(ptr, (size / Element::SIZE) as usize) }
    }

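    /// Access the initialized elements as a mutable slice.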
    #[inline(always)]
    pub fn get_initialized_mut(&mut self) -> &mut [Element] {
        let size = self.size();
        let ptr = self.elements.as_ptr();
        // SAFETY: The first `size` bytes are initialized per constructor invariants
        unsafe { slice::from_raw_parts_mut(ptr, (size / Element::SIZE) as usize) }
    }

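    /// Append elements into the unused part of the allocation.
    ///
    /// Returns `false` and leaves the contents untouched if there isn't enough spare capacity.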
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn append(&mut self, elements: &[Element]) -> bool {
        let size = self.size();
        if elements.len() * Element::SIZE as usize + size as usize > self.capacity as usize {
            return false;
        }

        // The offset in elements must fit into `isize`
        let Ok(offset) = isize::try_from(size / Element::SIZE) else {
            return false;
        };

        // SAFETY: Offset is within the allocation, as checked above
        let mut start = unsafe { self.elements.offset(offset) };
        // SAFETY: Pointers are correctly aligned and don't overlap, spare capacity was checked
        // above; the stored size is updated to cover the newly written elements
        unsafe {
            ptr::copy_nonoverlapping(elements.as_ptr(), start.as_mut(), elements.len());
            self.size
                .write(size + (elements.len() * Element::SIZE as usize) as u32);
        }

        true
    }

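    /// Truncate the initialized contents down to `new_size` bytes.
    ///
    /// Returns `false` if `new_size` is larger than the current size or isn't a multiple of
    /// `Element::SIZE`.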
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn truncate(&mut self, new_size: u32) -> bool {
        if new_size > self.size() || new_size % Element::SIZE != 0 {
            return false;
        }

        // SAFETY: `self.size` points to a valid, writable `u32` by construction
        unsafe {
            self.size.write(new_size);
        }

        true
    }

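    /// Copy the contents of another instance into this one.
    ///
    /// Returns `false` if the capacity of this instance is too small.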
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn copy_from(&mut self, src: &Self) -> bool {
        let src_size = src.size();
        if src_size > self.capacity {
            return false;
        }

        // SAFETY: Pointers are correctly aligned and can't overlap due to exclusive access to
        // `self`; the destination capacity was checked above. `copy_from_nonoverlapping` counts
        // elements, so the size in bytes is converted accordingly.
        unsafe {
            self.elements
                .copy_from_nonoverlapping(src.elements, (src_size / Element::SIZE) as usize);
            self.size.write(src_size);
        }

        true
    }

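    /// Get exclusive access to the underlying pointer with no checks.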
    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> &mut NonNull<Element> {
        &mut self.elements
    }

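    /// Reinterpret this shared reference with a different `RECOMMENDED_ALLOCATION`, which doesn't
    /// affect the memory layout.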
    #[inline(always)]
    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &self,
    ) -> &VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `RECOMMENDED_ALLOCATION` doesn't affect the layout of the type
        unsafe {
            NonNull::from_ref(self)
                .cast::<VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_ref()
        }
    }

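    /// Reinterpret this exclusive reference with a different `RECOMMENDED_ALLOCATION`, which
    /// doesn't affect the memory layout.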
    #[inline(always)]
    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &mut self,
    ) -> &mut VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `RECOMMENDED_ALLOCATION` doesn't affect the layout of the type
        unsafe {
            NonNull::from_mut(self)
                .cast::<VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_mut()
        }
    }

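    /// Assume that the first `size` bytes of the buffer are initialized and get access to them.
    ///
    /// Returns `None` if `size` exceeds the capacity or isn't a multiple of `Element::SIZE`.
    ///
    /// # Safety
    /// The caller must ensure that the first `size` bytes are actually initialized.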
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [Element]> {
        if size > self.capacity || size % Element::SIZE != 0 {
            return None;
        }

        // SAFETY: `self.size` points to a valid, writable `u32` by construction; the caller
        // guarantees that the first `size` bytes are initialized
        unsafe {
            self.size.write(size);
        }
        Some(self.get_initialized_mut())
    }
}
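
#[cfg(test)]
mod tests {
    // A hypothetical usage sketch (not part of the original file): it assumes that `u16`
    // implements `TrivialType` and that the crate's tests run under the standard test harness.
    use super::*;
    use core::mem::MaybeUninit;

    #[test]
    fn append_and_read_back() {
        // 4 uninitialized `u16` elements, i.e. 8 bytes of capacity
        let mut uninit = [MaybeUninit::<u16>::uninit(); 4];
        let mut size = 0u32;
        let mut elements = VariableElements::<u16>::from_uninit(&mut uninit, &mut size);

        // Three elements (6 bytes) fit into the 8-byte capacity
        assert!(elements.append(&[1u16, 2, 3]));
        assert_eq!(elements.get_initialized(), &[1u16, 2, 3]);
        assert_eq!(elements.size(), 6);
        assert_eq!(elements.count(), 3);

        // Two more elements (4 bytes) would exceed the capacity, so `append` must fail
        assert!(!elements.append(&[4u16, 5]));
    }
}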