use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
use crate::trivial_type::TrivialType;
use crate::{DerefWrapper, IoType, IoTypeOptional};
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{ptr, slice};

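/// Container for a variable number of elements, where `Element` is a [`TrivialType`].
///
/// Both the element data and the size (which is tracked in bytes, not in elements) are accessed
/// through pointers, so this type is a lightweight view over externally managed memory.
///
/// `RECOMMENDED_ALLOCATION` is a recommended allocation hint encoded into [`IoType::METADATA`],
/// with `0` meaning no specific recommendation.
///
/// A minimal usage sketch (not compiled here; assumes `u8` implements [`TrivialType`]):
///
/// ```rust,ignore
/// let buffer = [1u8, 2, 3];
/// let size = buffer.len() as u32;
/// let elements = VariableElements::<u8>::from_buffer(&buffer, &size);
/// assert_eq!(elements.get_initialized(), &[1, 2, 3][..]);
/// ```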
#[derive(Debug)]
#[repr(C)]
pub struct VariableElements<Element, const RECOMMENDED_ALLOCATION: u32 = 0>
where
    Element: TrivialType,
{
    elements: NonNull<Element>,
    size: NonNull<u32>,
    capacity: u32,
}

unsafe impl<Element, const RECOMMENDED_ALLOCATION: u32> IoType
    for VariableElements<Element, RECOMMENDED_ALLOCATION>
where
    Element: TrivialType,
{
    const METADATA: &[u8] = {
        const fn metadata(
            recommended_allocation: u32,
            inner_metadata: &[u8],
        ) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableElements0 as u8],
                    inner_metadata,
                ]);
            }

            // Pick the smallest metadata variant that can encode the recommended allocation
            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableElements8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableElements16b, 2)
            } else {
                (IoTypeMetadataKind::VariableElements32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
                inner_metadata,
            ])
        }

        // Keep only the initialized prefix of the metadata scratch buffer
        metadata(RECOMMENDED_ALLOCATION, Element::METADATA)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION, Element::METADATA).1)
            .0
    };

    type PointerType = Element;

    #[inline(always)]
    fn size(&self) -> u32 {
        self.size()
    }

    #[inline(always)]
    unsafe fn size_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(self.size)
    }

    #[inline(always)]
    unsafe fn size_mut_ptr(&mut self) -> impl DerefMut<Target = *mut u32> {
        DerefWrapper(self.size.as_ptr())
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    unsafe fn capacity_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(NonNull::from_ref(&self.capacity))
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );
        debug_assert_eq!(
            size % Element::SIZE,
            0,
            "`set_size` called with invalid input {size} for element size {}",
            Element::SIZE
        );

        // SAFETY: `self.size` points to a valid and correctly aligned `u32`
        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );
        debug_assert_eq!(
            size % Element::SIZE,
            0,
            "Size {size} is invalid for element size {}",
            Element::SIZE
        );

        DerefWrapper(Self {
            elements: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut *mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(!size.is_null(), "`null` pointer for non-`TrivialType` size");
        // SAFETY: Non-null checked above
        let size = unsafe { NonNull::new_unchecked(*size) };
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        {
            // SAFETY: Caller guarantees the size pointer is valid for reads
            let size = unsafe { size.read() };
            debug_assert!(
                size <= capacity,
                "Size {size} must not exceed capacity {capacity}"
            );
            debug_assert_eq!(
                size % Element::SIZE,
                0,
                "Size {size} is invalid for element size {}",
                Element::SIZE
            );
        }

        DerefWrapper(Self {
            elements: *ptr,
            size,
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.elements
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.elements
    }
}

impl<Element, const RECOMMENDED_ALLOCATION: u32> IoTypeOptional
    for VariableElements<Element, RECOMMENDED_ALLOCATION>
where
    Element: TrivialType,
{
}

impl<Element, const RECOMMENDED_ALLOCATION: u32> VariableElements<Element, RECOMMENDED_ALLOCATION>
where
    Element: TrivialType,
{
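    /// Creates an instance from a buffer of already initialized elements.
    ///
    /// `size` must be exactly `buffer.len() * Element::SIZE` bytes (checked with a debug
    /// assertion only).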
    #[inline(always)]
    #[track_caller]
    pub const fn from_buffer<'a>(
        buffer: &'a [<Self as IoType>::PointerType],
        size: &'a u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(
            buffer.len() * Element::SIZE as usize == *size as usize,
            "Invalid size"
        );

        DerefWrapper(Self {
            // Slice pointers are never null, so `expect()` can't panic here
            elements: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
            size: NonNull::from_ref(size),
            capacity: *size,
        })
    }

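    /// Creates a mutable instance from a buffer of already initialized elements.
    ///
    /// `size` must be exactly `buffer.len() * Element::SIZE` bytes (checked with a debug
    /// assertion only).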
    #[inline(always)]
    #[track_caller]
    pub fn from_buffer_mut<'a>(
        buffer: &'a mut [<Self as IoType>::PointerType],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert_eq!(
            buffer.len() * Element::SIZE as usize,
            *size as usize,
            "Invalid size"
        );

        DerefWrapper(Self {
            elements: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity: *size,
        })
    }

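    /// Creates a mutable instance from a potentially uninitialized buffer.
    ///
    /// The first `size` bytes of `uninit` must already be initialized; `size` must be a multiple
    /// of `Element::SIZE` and must not exceed the buffer length in bytes (checked with debug
    /// assertions only). The full buffer length becomes the capacity.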
    #[inline(always)]
    #[track_caller]
    pub fn from_uninit<'a>(
        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        // Capacity is tracked in bytes, just like `size`
        let capacity = uninit.len() * Element::SIZE as usize;
        debug_assert!(
            *size as usize <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );
        debug_assert_eq!(
            *size % Element::SIZE,
            0,
            "Size {size} is invalid for element size {}",
            Element::SIZE
        );
        let capacity = capacity as u32;

        DerefWrapper(Self {
            elements: NonNull::new(MaybeUninit::slice_as_mut_ptr(uninit)).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity,
        })
    }

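    /// Size of the initialized contents in bytes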
    #[inline(always)]
    pub const fn size(&self) -> u32 {
        // SAFETY: `self.size` points to a valid and correctly aligned `u32`
        unsafe { self.size.read() }
    }

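    /// Capacity of the container in bytes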
    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.capacity
    }

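    /// Number of initialized elements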
    #[inline(always)]
    pub const fn count(&self) -> u32 {
        // SAFETY: `self.size` points to a valid and correctly aligned `u32`
        unsafe { self.size.read() } / Element::SIZE
    }

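    /// Access the initialized elements as a slice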
    #[inline(always)]
    pub const fn get_initialized(&self) -> &[Element] {
        let size = self.size();
        let ptr = self.elements.as_ptr();
        // SAFETY: Size is guaranteed to be within the initialized part of the allocation
        unsafe { slice::from_raw_parts(ptr, (size / Element::SIZE) as usize) }
    }

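    /// Access the initialized elements as a mutable slice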
    #[inline(always)]
    pub fn get_initialized_mut(&mut self) -> &mut [Element] {
        let size = self.size();
        let ptr = self.elements.as_ptr();
        // SAFETY: Size is guaranteed to be within the initialized part of the allocation
        unsafe { slice::from_raw_parts_mut(ptr, (size / Element::SIZE) as usize) }
    }

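    /// Appends elements after the currently initialized contents, using spare capacity.
    ///
    /// Returns `true` on success or `false` if there is not enough spare capacity left.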
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn append(&mut self, elements: &[Element]) -> bool {
        let size = self.size();
        if elements.len() * Element::SIZE as usize + size as usize > self.capacity as usize {
            return false;
        }

        // Offset (in elements) of the first unused element
        let Ok(offset) = isize::try_from(size / Element::SIZE) else {
            return false;
        };

        // SAFETY: Offset is within the allocation, checked above
        let mut start = unsafe { self.elements.offset(offset) };
        // SAFETY: Source and destination are valid, properly aligned and do not overlap
        unsafe { ptr::copy_nonoverlapping(elements.as_ptr(), start.as_mut(), elements.len()) }

        // Update the size to include the newly appended elements (verified above to fit)
        unsafe {
            self.size.write(size + (elements.len() * Element::SIZE as usize) as u32);
        }

        true
    }

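    /// Truncates the initialized contents to `new_size` bytes.
    ///
    /// Returns `true` on success or `false` if `new_size` is larger than the current size or is
    /// not a multiple of `Element::SIZE`.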
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn truncate(&mut self, new_size: u32) -> bool {
        if new_size > self.size() || !new_size.is_multiple_of(Element::SIZE) {
            return false;
        }

        // SAFETY: `self.size` points to a valid and correctly aligned `u32`
        unsafe {
            self.size.write(new_size);
        }

        true
    }

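    /// Copies the contents of another instance into this one.
    ///
    /// Returns `true` on success or `false` if the capacity of this instance is not large enough
    /// to fit the contents of `src`.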
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn copy_from(&mut self, src: &Self) -> bool {
        let src_size = src.size();
        if src_size > self.capacity {
            return false;
        }

        // SAFETY: `src` can't overlap with `self` due to the exclusive borrow of `self`, size was
        // checked to be within capacity above; note that `copy_from_nonoverlapping()` takes a
        // count in elements rather than bytes
        unsafe {
            self.elements
                .copy_from_nonoverlapping(src.elements, (src_size / Element::SIZE) as usize);
            self.size.write(src_size);
        }

        true
    }

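    /// Exclusive access to the underlying pointer with no extra checks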
    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> &mut NonNull<Element> {
        &mut self.elements
    }

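    /// Cast a shared reference to this instance into a reference to an instance with a different
    /// recommended allocation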
    #[inline(always)]
    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &self,
    ) -> &VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `RECOMMENDED_ALLOCATION` is only a hint and does not affect the layout
        unsafe {
            NonNull::from_ref(self)
                .cast::<VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_ref()
        }
    }

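    /// Cast an exclusive reference to this instance into a reference to an instance with a
    /// different recommended allocation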
    #[inline(always)]
    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &mut self,
    ) -> &mut VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `RECOMMENDED_ALLOCATION` is only a hint and does not affect the layout
        unsafe {
            NonNull::from_mut(self)
                .cast::<VariableElements<Element, DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_mut()
        }
    }

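    /// Assume that the first `size` bytes of the elements are initialized and can be read.
    ///
    /// Returns a mutable slice over the initialized elements on success or `None` if `size` is
    /// larger than the capacity or is not a multiple of `Element::SIZE`.
    ///
    /// # Safety
    /// The caller must ensure that the first `size` bytes are actually initialized.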
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [Element]> {
        if size > self.capacity || !size.is_multiple_of(Element::SIZE) {
            return None;
        }

        // SAFETY: `self.size` points to a valid and correctly aligned `u32`
        unsafe {
            self.size.write(size);
        }
        Some(self.get_initialized_mut())
    }
}