ab_io_type/variable_bytes.rs

use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
use crate::trivial_type::TrivialType;
use crate::{DerefWrapper, IoType, IoTypeOptional};
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{ptr, slice};

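/// Container for a variable number of bytes, backed by externally provided memory.
///
/// Stores a pointer to the underlying byte buffer, a pointer to its current size and the buffer
/// capacity. `RECOMMENDED_ALLOCATION` is a recommendation for how many bytes to allocate for this
/// type up front.
///
/// A minimal usage sketch (illustrative; not part of the original documentation):
///
/// ```ignore
/// use core::mem::MaybeUninit;
///
/// let mut buffer = [MaybeUninit::<u8>::uninit(); 8];
/// let mut size = 0u32;
/// let mut bytes = VariableBytes::<8>::from_uninit(&mut buffer, &mut size);
/// assert!(bytes.append(b"hi"));
/// assert_eq!(bytes.get_initialized(), b"hi");
/// ```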
#[derive(Debug)]
#[repr(C)]
pub struct VariableBytes<const RECOMMENDED_ALLOCATION: u32 = 0> {
    bytes: NonNull<u8>,
    size: NonNull<u32>,
    capacity: u32,
}

unsafe impl<const RECOMMENDED_ALLOCATION: u32> IoType for VariableBytes<RECOMMENDED_ALLOCATION> {
    const METADATA: &[u8] = {
        const fn metadata(recommended_allocation: u32) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes0 as u8]]);
            } else if recommended_allocation == 512 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes512 as u8]]);
            } else if recommended_allocation == 1024 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes1024 as u8]]);
            } else if recommended_allocation == 2028 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes2028 as u8]]);
            } else if recommended_allocation == 4096 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes4096 as u8]]);
            } else if recommended_allocation == 8192 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes8192 as u8]]);
            } else if recommended_allocation == 16384 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes16384 as u8]]);
            } else if recommended_allocation == 32768 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes32768 as u8]]);
            } else if recommended_allocation == 65536 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes65536 as u8]]);
            } else if recommended_allocation == 131_072 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes131072 as u8],
                ]);
            } else if recommended_allocation == 262_144 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes262144 as u8],
                ]);
            } else if recommended_allocation == 524_288 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes524288 as u8],
                ]);
            } else if recommended_allocation == 1_048_576 {
                return concat_metadata_sources(&[&[
                    IoTypeMetadataKind::VariableBytes1048576 as u8
                ]]);
            }

            // Allocations without a dedicated variant are encoded as a type byte followed by the
            // smallest little-endian representation of the value that fits it
            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableBytes8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableBytes16b, 2)
            } else {
                (IoTypeMetadataKind::VariableBytes32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
            ])
        }

        // `metadata()` returns a fixed-size scratch buffer and the number of bytes actually
        // used, keep only the used prefix
        metadata(RECOMMENDED_ALLOCATION)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION).1)
            .0
    };

    type PointerType = u8;

    #[inline(always)]
    fn size(&self) -> u32 {
        // Delegates to the inherent `size()` method below
        self.size()
    }

    #[inline(always)]
    unsafe fn size_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(self.size)
    }

    #[inline(always)]
    unsafe fn size_mut_ptr(&mut self) -> impl DerefMut<Target = *mut u32> {
        DerefWrapper(self.size.as_ptr())
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    unsafe fn capacity_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(NonNull::from_ref(&self.capacity))
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );

        // SAFETY: `size` pointer is valid for writes as guaranteed by constructors
        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut *mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(!size.is_null(), "`null` pointer for non-`TrivialType` size");
        // SAFETY: Just checked to be non-null
        let size = unsafe { NonNull::new_unchecked(*size) };
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        {
            // SAFETY: Non-null pointer that the caller guarantees to be valid for reads
            let size = unsafe { size.read() };
            debug_assert!(
                size <= capacity,
                "Size {size} must not exceed capacity {capacity}"
            );
        }

        DerefWrapper(Self {
            bytes: *ptr,
            size,
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.bytes
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.bytes
    }
}

impl<const RECOMMENDED_ALLOCATION: u32> IoTypeOptional for VariableBytes<RECOMMENDED_ALLOCATION> {}

impl<const RECOMMENDED_ALLOCATION: u32> VariableBytes<RECOMMENDED_ALLOCATION> {
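    /// Creates a shared instance from the provided buffer and size.
    ///
    /// Both size and capacity are taken from `size`; `buffer.len()` must match `size` exactly
    /// (checked with a debug assertion).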
    #[inline(always)]
    #[track_caller]
    pub const fn from_buffer<'a>(
        buffer: &'a [<Self as IoType>::PointerType],
        size: &'a u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(buffer.len() == *size as usize, "Invalid size");

        DerefWrapper(Self {
            // Pointer created from a reference is never null
            bytes: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
            size: NonNull::from_ref(size),
            capacity: *size,
        })
    }

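    /// Creates an exclusive instance from the provided buffer and size.
    ///
    /// Both size and capacity are taken from `size`; `buffer.len()` must match `size` exactly
    /// (checked with a debug assertion).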
    #[inline(always)]
    #[track_caller]
    pub fn from_buffer_mut<'a>(
        buffer: &'a mut [<Self as IoType>::PointerType],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");

        DerefWrapper(Self {
            bytes: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity: *size,
        })
    }

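    /// Creates an exclusive instance from the provided uninitialized buffer and size.
    ///
    /// The capacity is taken from `uninit.len()`; `size` must not exceed it (checked with a
    /// debug assertion).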
    #[inline(always)]
    #[track_caller]
    pub fn from_uninit<'a>(
        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        let capacity = uninit.len();
        debug_assert!(
            *size as usize <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );
        let capacity = capacity as u32;

        DerefWrapper(Self {
            bytes: NonNull::new(MaybeUninit::slice_as_mut_ptr(uninit)).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity,
        })
    }

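    /// Current number of initialized bytes.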
    #[inline(always)]
    pub const fn size(&self) -> u32 {
        // SAFETY: `size` pointer is valid for reads and initialized as guaranteed by constructors
        unsafe { self.size.read() }
    }

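    /// Capacity of the underlying buffer in bytes.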
    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.capacity
    }

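    /// Returns the initialized bytes as a shared slice.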
    #[inline(always)]
    pub const fn get_initialized(&self) -> &[u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        // SAFETY: Constructors guarantee that `size` bytes starting at `ptr` are initialized
        unsafe { slice::from_raw_parts(ptr, size as usize) }
    }

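    /// Returns the initialized bytes as an exclusive slice.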
    #[inline(always)]
    pub fn get_initialized_mut(&mut self) -> &mut [u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        // SAFETY: Constructors guarantee that `size` bytes starting at `ptr` are initialized
        unsafe { slice::from_raw_parts_mut(ptr, size as usize) }
    }

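    /// Appends `bytes` after the currently initialized contents and updates the size.
    ///
    /// Returns `false` without modifying anything if the appended bytes would exceed the
    /// capacity.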
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn append(&mut self, bytes: &[u8]) -> bool {
        let size = self.size();
        if bytes.len() + size as usize > self.capacity as usize {
            return false;
        }

        let Ok(offset) = isize::try_from(size) else {
            return false;
        };

        // SAFETY: Offset was just checked to be within the allocated buffer
        let mut start = unsafe { self.bytes.offset(offset) };
        // SAFETY: Copy was checked above to fit into capacity
        unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), start.as_mut(), bytes.len()) }

        // SAFETY: `size` pointer is valid for writes as guaranteed by constructors, new size was
        // checked above to fit into capacity
        unsafe {
            self.size.write(size + bytes.len() as u32);
        }

        true
    }

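    /// Truncates the initialized contents to `new_size`.
    ///
    /// Returns `false` if `new_size` exceeds the current size.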
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn truncate(&mut self, new_size: u32) -> bool {
        if new_size > self.size() {
            return false;
        }

        // SAFETY: `size` pointer is valid for writes as guaranteed by constructors, new size was
        // just checked to not exceed the old size
        unsafe {
            self.size.write(new_size);
        }

        true
    }

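    /// Copies the contents of another `IoType` into this instance and updates the size.
    ///
    /// Returns `false` without modifying anything if the source contents exceed the capacity.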
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn copy_from<T>(&mut self, src: &T) -> bool
    where
        T: IoType,
    {
        let src_size = src.size();
        if src_size > self.capacity {
            return false;
        }

        // SAFETY: Source size was checked above to fit into the destination capacity
        unsafe {
            self.bytes
                .copy_from_nonoverlapping(src.as_ptr().cast::<u8>(), src_size as usize);
            self.size.write(src_size);
        }

        true
    }

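    /// Returns an exclusive reference to the underlying byte pointer.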
    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> &mut NonNull<u8> {
        &mut self.bytes
    }

373 #[inline(always)]
376 pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
377 &self,
378 ) -> &VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
379 unsafe {
382 NonNull::from_ref(self)
383 .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
384 .as_ref()
385 }
386 }
387
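    /// Reinterprets this exclusive instance with a different `RECOMMENDED_ALLOCATION`.
    ///
    /// The layout does not depend on `RECOMMENDED_ALLOCATION`, so this is a free cast.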
    #[inline(always)]
    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &mut self,
    ) -> &mut VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `VariableBytes` is `#[repr(C)]` and its layout is identical for all values of
        // `RECOMMENDED_ALLOCATION`
        unsafe {
            NonNull::from_mut(self)
                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_mut()
        }
    }

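    /// Reads a `TrivialType` from the beginning of the initialized bytes.
    ///
    /// Returns `None` if fewer than `T::SIZE` bytes are initialized. Unaligned contents are
    /// handled with an unaligned read.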
    #[inline(always)]
    pub fn read_trivial_type<T>(&self) -> Option<T>
    where
        T: TrivialType,
    {
        if self.size() < T::SIZE {
            return None;
        }

        let ptr = self.bytes.cast::<T>();

        // SAFETY: `T` is a `TrivialType` and at least `T::SIZE` initialized bytes are available,
        // as checked above
        let value = unsafe {
            if ptr.is_aligned() {
                ptr.read()
            } else {
                ptr.read_unaligned()
            }
        };

        Some(value)
    }

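    /// Assumes that the first `size` bytes of the buffer are initialized and returns them as an
    /// exclusive slice.
    ///
    /// Returns `None` if `size` exceeds the capacity.
    ///
    /// # Safety
    /// The caller must ensure that the first `size` bytes of the buffer were actually
    /// initialized.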
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [u8]> {
        if size > self.capacity {
            return None;
        }

        // SAFETY: `size` pointer is valid for writes as guaranteed by constructors, new size was
        // just checked to fit into capacity
        unsafe {
            self.size.write(size);
        }

        Some(self.get_initialized_mut())
    }
}