// ab_io_type/variable_bytes.rs

use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
use crate::trivial_type::TrivialType;
use crate::{DerefWrapper, IoType, IoTypeOptional};
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{ptr, slice};

/// Container for variable-length bytes.
///
/// Points to externally managed memory: `bytes` is the data pointer, `size` points to the
/// number of initialized bytes, and `capacity` is the total number of bytes the allocation
/// can hold. `RECOMMENDED_ALLOCATION` is a hint for how many bytes to allocate up front.
#[derive(Debug)]
#[repr(C)]
pub struct VariableBytes<const RECOMMENDED_ALLOCATION: u32 = 0> {
    bytes: NonNull<u8>,
    size: NonNull<u32>,
    capacity: u32,
}

unsafe impl<const RECOMMENDED_ALLOCATION: u32> IoType for VariableBytes<RECOMMENDED_ALLOCATION> {
    const METADATA: &[u8] = {
        const fn metadata(recommended_allocation: u32) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            // Commonly used allocations have dedicated variants that encode the value in the
            // discriminator byte itself
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes0 as u8]]);
            } else if recommended_allocation == 512 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes512 as u8]]);
            } else if recommended_allocation == 1024 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes1024 as u8]]);
            } else if recommended_allocation == 2028 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes2028 as u8]]);
            } else if recommended_allocation == 4096 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes4096 as u8]]);
            } else if recommended_allocation == 8192 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes8192 as u8]]);
            } else if recommended_allocation == 16384 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes16384 as u8]]);
            } else if recommended_allocation == 32768 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes32768 as u8]]);
            } else if recommended_allocation == 65536 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes65536 as u8]]);
            } else if recommended_allocation == 131_072 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes131072 as u8],
                ]);
            } else if recommended_allocation == 262_144 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes262144 as u8],
                ]);
            } else if recommended_allocation == 524_288 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes524288 as u8],
                ]);
            } else if recommended_allocation == 1_048_576 {
                return concat_metadata_sources(&[&[
                    IoTypeMetadataKind::VariableBytes1048576 as u8
                ]]);
            }

            // All other allocations are encoded as a discriminator byte followed by the
            // smallest little-endian representation that fits the value
            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableBytes8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableBytes16b, 2)
            } else {
                (IoTypeMetadataKind::VariableBytes32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
            ])
        }

        // Strange-looking syntax that lets Rust extend the lifetime of the metadata scratch
        // buffer automatically in this const context
        metadata(RECOMMENDED_ALLOCATION)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION).1)
            .0
    };
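    // For example (illustrative values): `RECOMMENDED_ALLOCATION = 100` has no dedicated
    // variant, so the fallback encodes it as `[VariableBytes8b as u8, 100]`, while
    // `RECOMMENDED_ALLOCATION = 70_000` is encoded as `[VariableBytes32b as u8]` followed by
    // the four little-endian bytes of `70_000`.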

    type PointerType = u8;

    #[inline(always)]
    fn size(&self) -> u32 {
        self.size()
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );

        // SAFETY: Guaranteed by the constructor to be aligned, writable and initialized
        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_mut(size),
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.bytes
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.bytes
    }
}
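
// A minimal sketch of how a caller owning a raw (pointer, size, capacity) triple might use
// `from_mut_ptr` above, e.g. glue code at a host boundary. The buffer and values below are
// hypothetical and purely for illustration (`IoType` must be in scope for the call):
//
//     let mut buffer = [0u8; 8];
//     let mut size = 0u32;
//     let mut ptr = NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed");
//     let mut bytes = unsafe { VariableBytes::<8>::from_mut_ptr(&mut ptr, &mut size, 8) };
//     assert!(bytes.append(b"data"));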

impl<const RECOMMENDED_ALLOCATION: u32> IoTypeOptional for VariableBytes<RECOMMENDED_ALLOCATION> {}

impl<const RECOMMENDED_ALLOCATION: u32> VariableBytes<RECOMMENDED_ALLOCATION> {
    /// Create a new shared instance from the provided memory buffer.
    ///
    /// # Panics
    /// In debug builds, panics if `buffer.len()` does not match `size`.
    #[inline(always)]
    #[track_caller]
    pub const fn from_buffer<'a>(
        buffer: &'a [<Self as IoType>::PointerType],
        size: &'a u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(buffer.len() == *size as usize, "Invalid size");

        DerefWrapper(Self {
            bytes: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
            size: NonNull::from_ref(size),
            capacity: *size,
        })
    }

    /// Create a new exclusive instance from the provided memory buffer.
    ///
    /// # Panics
    /// In debug builds, panics if `buffer.len()` does not match `size`.
    #[inline(always)]
    #[track_caller]
    pub fn from_buffer_mut<'a>(
        buffer: &'a mut [<Self as IoType>::PointerType],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");

        DerefWrapper(Self {
            bytes: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity: *size,
        })
    }

    /// Create a new exclusive instance from the provided uninitialized memory buffer.
    ///
    /// `size` indicates how many bytes at the beginning of `uninit` are already initialized.
    ///
    /// # Panics
    /// In debug builds, panics if `size` exceeds the buffer length.
    #[inline(always)]
    #[track_caller]
    pub fn from_uninit<'a>(
        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        let capacity = uninit.len();
        debug_assert!(
            *size as usize <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );
        let capacity = capacity as u32;

        DerefWrapper(Self {
            bytes: NonNull::new(uninit.as_mut_ptr().cast::<u8>()).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity,
        })
    }

    /// The number of initialized bytes
    #[inline(always)]
    pub const fn size(&self) -> u32 {
        // SAFETY: Guaranteed by the constructor to be aligned and initialized
        unsafe { self.size.read() }
    }

    /// The total capacity of the underlying buffer in bytes
    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.capacity
    }

    /// Shared access to the initialized bytes
    #[inline(always)]
    pub const fn get_initialized(&self) -> &[u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        // SAFETY: The first `size` bytes are guaranteed to be initialized
        unsafe { slice::from_raw_parts(ptr, size as usize) }
    }

    /// Exclusive access to the initialized bytes
    #[inline(always)]
    pub fn get_initialized_mut(&mut self) -> &mut [u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        // SAFETY: The first `size` bytes are guaranteed to be initialized
        unsafe { slice::from_raw_parts_mut(ptr, size as usize) }
    }

    /// Append bytes to the end of the initialized contents.
    ///
    /// Returns `false` if the appended bytes do not fit into the remaining capacity.
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn append(&mut self, bytes: &[u8]) -> bool {
        let size = self.size();
        if bytes.len() + size as usize > self.capacity as usize {
            return false;
        }

        // `size` is guaranteed to be within the capacity here
        let Ok(offset) = isize::try_from(size) else {
            return false;
        };

        // SAFETY: Offset is within the bounds of the allocation, source and destination
        // buffers do not overlap
        let mut start = unsafe { self.bytes.offset(offset) };
        unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), start.as_mut(), bytes.len()) }

        // Grow the size to account for the appended bytes
        // SAFETY: The new size was checked against the capacity above
        unsafe {
            self.size.write(size + bytes.len() as u32);
        }

        true
    }

    /// Truncate the initialized bytes to the specified size.
    ///
    /// Returns `false` if `new_size` exceeds the current size.
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn truncate(&mut self, new_size: u32) -> bool {
        if new_size > self.size() {
            return false;
        }

        // SAFETY: Guaranteed by the constructor to be aligned, writable and initialized
        unsafe {
            self.size.write(new_size);
        }

        true
    }

    /// Copy the contents of another [`IoType`] into this instance.
    ///
    /// Returns `false` if the source size exceeds this instance's capacity.
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn copy_from<T>(&mut self, src: &T) -> bool
    where
        T: IoType,
    {
        let src_size = src.size();
        if src_size > self.capacity {
            return false;
        }

        // SAFETY: Size is checked against the capacity above, source and destination buffers
        // do not overlap
        unsafe {
            self.bytes
                .copy_from_nonoverlapping(src.as_ptr().cast::<u8>(), src_size as usize);
            self.size.write(src_size);
        }

        true
    }

    /// Get exclusive access to the underlying pointer with no checks.
    ///
    /// Intended for initialization, with [`Self::assume_init()`] called afterwards to
    /// confirm how many bytes were initialized.
    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> &mut NonNull<u8> {
        &mut self.bytes
    }

    /// Cast a shared reference to this instance into a reference to an instance with a
    /// different recommended allocation
    #[inline(always)]
    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &self,
    ) -> &VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `VariableBytes` has the same `#[repr(C)]` layout regardless of the
        // recommended allocation
        unsafe {
            NonNull::from_ref(self)
                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_ref()
        }
    }

    /// Cast an exclusive reference to this instance into a reference to an instance with a
    /// different recommended allocation
    #[inline(always)]
    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &mut self,
    ) -> &mut VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: `VariableBytes` has the same `#[repr(C)]` layout regardless of the
        // recommended allocation
        unsafe {
            NonNull::from_mut(self)
                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_mut()
        }
    }

    /// Read a [`TrivialType`] from the beginning of the initialized bytes.
    ///
    /// Returns `None` if the initialized size is too small to contain `T`.
    #[inline(always)]
    pub fn read_trivial_type<T>(&self) -> Option<T>
    where
        T: TrivialType,
    {
        if self.size() < T::SIZE {
            return None;
        }

        let ptr = self.bytes.cast::<T>();

        // SAFETY: Size was checked above; an unaligned read is used when the pointer is not
        // sufficiently aligned for `T`
        let value = unsafe {
            if ptr.is_aligned() {
                ptr.read()
            } else {
                ptr.read_unaligned()
            }
        };

        Some(value)
    }

    /// Assume that the first `size` bytes of the buffer are initialized and get exclusive
    /// access to them.
    ///
    /// Returns `None` if `size` exceeds the capacity.
    ///
    /// # Safety
    /// The caller must ensure that the first `size` bytes are actually initialized.
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [u8]> {
        if size > self.capacity {
            return None;
        }

        // SAFETY: Guaranteed by the constructor to be aligned, writable and initialized
        unsafe {
            self.size.write(size);
        }
        Some(self.get_initialized_mut())
    }
}
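
#[cfg(test)]
mod tests {
    // Illustrative usage sketch of the API above; the buffer length and contents here are
    // hypothetical and exist only to exercise `from_uninit`, `append`, `truncate` and
    // `get_initialized`.
    use super::VariableBytes;
    use core::mem::MaybeUninit;

    #[test]
    fn append_and_truncate() {
        let mut uninit = [MaybeUninit::<u8>::uninit(); 8];
        let mut size = 0u32;
        let mut bytes = VariableBytes::<0>::from_uninit(&mut uninit, &mut size);

        // Within capacity, so `append` succeeds and grows the size
        assert!(bytes.append(b"hello"));
        assert_eq!(bytes.get_initialized(), b"hello");

        // Appending 5 more bytes would exceed the 8-byte capacity
        assert!(!bytes.append(b"world"));

        // Truncation can only shrink the initialized part
        assert!(bytes.truncate(2));
        assert_eq!(bytes.get_initialized(), b"he");
        assert!(!bytes.truncate(3));
    }
}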