use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
use crate::trivial_type::TrivialType;
use crate::{DerefWrapper, IoType, IoTypeOptional};
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{ptr, slice};

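/// Container for a variable number of elements of a [`TrivialType`].
///
/// It stores a pointer to the elements, a pointer to the size of the initialized contents (in
/// bytes) and the capacity of the underlying allocation (in bytes). `RECOMMENDED_ALLOCATION` is
/// only a recommendation for how much to allocate up front and is encoded into the metadata.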
#[derive(Debug)]
#[repr(C)]
pub struct VariableElements<Element, const RECOMMENDED_ALLOCATION: u32 = 0>
where
    Element: TrivialType,
{
    elements: NonNull<Element>,
    size: NonNull<u32>,
    capacity: u32,
}

unsafe impl<Element, const RECOMMENDED_ALLOCATION: u32> IoType
    for VariableElements<Element, RECOMMENDED_ALLOCATION>
where
    Element: TrivialType,
{
    const METADATA: &[u8] = {
        const fn metadata(
            recommended_allocation: u32,
            inner_metadata: &[u8],
        ) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableElements0 as u8],
                    inner_metadata,
                ]);
            }

            // Pick the smallest metadata variant that can hold `recommended_allocation` and only
            // store that many of its little-endian bytes
            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableElements8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableElements16b, 2)
            } else {
                (IoTypeMetadataKind::VariableElements32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
                inner_metadata,
            ])
        }

        // `metadata()` returns a fixed-capacity scratch array together with the number of bytes
        // actually used; expose only that prefix as the final metadata
        metadata(RECOMMENDED_ALLOCATION, Element::METADATA)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION, Element::METADATA).1)
            .0
    };

    type PointerType = Element;

    #[inline(always)]
    fn size(&self) -> u32 {
        self.size()
    }

    #[inline(always)]
    unsafe fn size_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(self.size)
    }

    #[inline(always)]
    unsafe fn size_mut_ptr(&mut self) -> impl DerefMut<Target = *mut u32> {
        DerefWrapper(self.size.as_ptr())
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    unsafe fn capacity_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(NonNull::from_ref(&self.capacity))
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );
        debug_assert_eq!(
            size % Element::SIZE,
            0,
            "`set_size` called with invalid input {size} for element size {}",
            Element::SIZE
        );

        // SAFETY: The new size was validated above; the caller of this `unsafe` fn guarantees
        // that the `size` pointer is valid for writes
        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );
        debug_assert_eq!(
            size % Element::SIZE,
            0,
            "Size {size} is invalid for element size {}",
            Element::SIZE
        );

        DerefWrapper(Self {
            elements: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut *mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(!size.is_null(), "`null` pointer for non-`TrivialType` size");
149 let size = unsafe { NonNull::new_unchecked(*size) };
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        {
            let size = unsafe { size.read() };
            debug_assert!(
                size <= capacity,
                "Size {size} must not exceed capacity {capacity}"
            );
            debug_assert_eq!(
                size % Element::SIZE,
                0,
                "Size {size} is invalid for element size {}",
                Element::SIZE
            );
        }

        DerefWrapper(Self {
            elements: *ptr,
            size,
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.elements
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.elements
    }
}

impl<Element, const RECOMMENDED_ALLOCATION: u32> IoTypeOptional
    for VariableElements<Element, RECOMMENDED_ALLOCATION>
where
    Element: TrivialType,
{
}

impl<Element, const RECOMMENDED_ALLOCATION: u32> VariableElements<Element, RECOMMENDED_ALLOCATION>
where
    Element: TrivialType,
{
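    /// Create a read-only instance from a buffer of initialized elements.
    ///
    /// `size` is the size of the initialized contents in bytes and must be equal to
    /// `buffer.len() * Element::SIZE`.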
    #[track_caller]
    pub const fn from_buffer<'a>(
        buffer: &'a [<Self as IoType>::PointerType],
        size: &'a u32,
    ) -> impl Deref<Target = Self> + 'a {
        // `debug_assert_eq!` is not usable in `const fn` yet, hence a plain `debug_assert!`
        debug_assert!(
            buffer.len() * Element::SIZE as usize == *size as usize,
            "Invalid size"
        );

        DerefWrapper(Self {
            elements: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
            size: NonNull::from_ref(size),
            capacity: *size,
        })
    }

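    /// Create a mutable instance from a buffer of initialized elements.
    ///
    /// `size` is the size of the initialized contents in bytes and must be equal to
    /// `buffer.len() * Element::SIZE`.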
    #[track_caller]
    pub fn from_buffer_mut<'a>(
        buffer: &'a mut [<Self as IoType>::PointerType],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert_eq!(
            buffer.len() * Element::SIZE as usize,
            *size as usize,
            "Invalid size"
        );

        DerefWrapper(Self {
            elements: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity: *size,
        })
    }

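    /// Create a mutable instance from a potentially uninitialized buffer of `CAPACITY` elements.
    ///
    /// `size` is the size of the already initialized prefix in bytes; it must be a multiple of
    /// `Element::SIZE` and must not exceed the capacity of the buffer.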
    #[track_caller]
    pub fn from_uninit<'a, const CAPACITY: usize>(
        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>; CAPACITY],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        // `CAPACITY` is a number of elements, while `size` and `capacity` are measured in bytes
        let capacity = CAPACITY as u32 * Element::SIZE;
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );
        debug_assert_eq!(
            *size % Element::SIZE,
            0,
            "Size {size} is invalid for element size {}",
            Element::SIZE
        );

        DerefWrapper(Self {
            elements: NonNull::new(MaybeUninit::slice_as_mut_ptr(uninit)).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity,
        })
    }

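    /// Size of the initialized contents in bytes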
    #[inline(always)]
    pub const fn size(&self) -> u32 {
        // SAFETY: Guaranteed to be initialized by constructors
        unsafe { self.size.read() }
    }

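    /// Capacity of the underlying allocation in bytes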
    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.capacity
    }

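    /// Number of initialized elements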
    #[inline(always)]
    pub const fn count(&self) -> u32 {
        // `size` is stored in bytes; convert it to the number of elements.
        // SAFETY: Guaranteed to be initialized by constructors
        unsafe { self.size.read() } / Element::SIZE
    }

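    /// Access the initialized contents as a slice of elements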
    #[inline(always)]
    pub const fn get_initialized(&self) -> &[Element] {
        let size = self.size();
        let ptr = self.elements.as_ptr();
        unsafe { slice::from_raw_parts(ptr, (size / Element::SIZE) as usize) }
    }

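    /// Access the initialized contents as a mutable slice of elements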
    #[inline(always)]
    pub fn get_initialized_mut(&mut self) -> &mut [Element] {
        let size = self.size();
        let ptr = self.elements.as_ptr();
        unsafe { slice::from_raw_parts_mut(ptr, (size / Element::SIZE) as usize) }
    }

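    /// Append elements after the currently initialized contents.
    ///
    /// Returns `false` and leaves the contents untouched if there is not enough spare capacity.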
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn append(&mut self, elements: &[Element]) -> bool {
        let size = self.size();
        if elements.len() * Element::SIZE as usize + size as usize > self.capacity as usize {
            return false;
        }

        let Ok(offset) = isize::try_from(size / Element::SIZE) else {
            return false;
        };

        // SAFETY: Offset is within the allocated capacity as checked above
        let mut start = unsafe { self.elements.offset(offset) };
        // SAFETY: There is enough spare capacity for `elements` as checked above
        unsafe { ptr::copy_nonoverlapping(elements.as_ptr(), start.as_mut(), elements.len()) }

        // Grow the stored size to cover the newly appended elements.
        // SAFETY: The new size still fits into the capacity as checked above
        unsafe {
            self.size
                .write(size + (elements.len() * Element::SIZE as usize) as u32);
        }

        true
    }

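    /// Truncate the initialized contents down to `new_size` bytes.
    ///
    /// Returns `false` if `new_size` is larger than the current size or is not a multiple of
    /// `Element::SIZE`.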
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn truncate(&mut self, new_size: u32) -> bool {
        if new_size > self.size() || new_size % Element::SIZE != 0 {
            return false;
        }

        unsafe {
            self.size.write(new_size);
        }

        true
    }

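    /// Copy the contents of another instance into this one.
    ///
    /// Returns `false` if the capacity of this instance is too small for the contents of `src`.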
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn copy_from(&mut self, src: &Self) -> bool {
        let src_size = src.size();
        if src_size > self.capacity {
            return false;
        }

        // `src_size` is in bytes, while the copy count below is in whole elements.
        // SAFETY: Size is checked to fit into the capacity above
        unsafe {
            self.elements
                .copy_from_nonoverlapping(src.elements, (src_size / Element::SIZE) as usize);
            self.size.write(src_size);
        }

        true
    }

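    /// Get exclusive access to the underlying pointer with no checks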
    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> &mut NonNull<Element> {
        &mut self.elements
    }

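    /// Cast a shared reference to this type into a shared reference to the same type with a
    /// different `RECOMMENDED_ALLOCATION`, which does not affect the in-memory representation.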
    #[inline(always)]
    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &self,
    ) -> &VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `RECOMMENDED_ALLOCATION` does not affect the layout of the struct
        unsafe {
            NonNull::from_ref(self)
                .cast::<VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_ref()
        }
    }

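    /// Cast an exclusive reference to this type into an exclusive reference to the same type
    /// with a different `RECOMMENDED_ALLOCATION`, which does not affect the in-memory
    /// representation.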
    #[inline(always)]
    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &mut self,
    ) -> &mut VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `RECOMMENDED_ALLOCATION` does not affect the layout of the struct
        unsafe {
            NonNull::from_mut(self)
                .cast::<VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_mut()
        }
    }

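    /// Assume that the first `size` bytes of the buffer are initialized and get access to them.
    ///
    /// Returns `None` if `size` exceeds the capacity or is not a multiple of `Element::SIZE`.
    ///
    /// # Safety
    /// The caller must ensure that the first `size` bytes are actually initialized.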
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [Element]> {
        if size > self.capacity || size % Element::SIZE != 0 {
            return None;
        }

        // SAFETY: Size is checked against capacity and element size above
        unsafe {
            self.size.write(size);
        }
        Some(self.get_initialized_mut())
    }
}