ab_contracts_io_type/variable_bytes.rs

use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
use crate::trivial_type::TrivialType;
use crate::{DerefWrapper, IoType, IoTypeOptional};
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{ptr, slice};
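
// Container for variable-length bytes: `bytes` points at the data buffer, `size`
// at the number of initialized bytes, and `capacity` is the total buffer size in
// bytes. `RECOMMENDED_ALLOCATION` is only an allocation hint encoded into
// `METADATA` below; it does not affect the layout (see `cast_ref`/`cast_mut`).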
#[derive(Debug)]
#[repr(C)]
pub struct VariableBytes<const RECOMMENDED_ALLOCATION: u32 = 0> {
    bytes: NonNull<u8>,
    size: NonNull<u32>,
    capacity: u32,
}

unsafe impl<const RECOMMENDED_ALLOCATION: u32> IoType for VariableBytes<RECOMMENDED_ALLOCATION> {
    const METADATA: &[u8] = {
        const fn metadata(recommended_allocation: u32) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes0 as u8]]);
            } else if recommended_allocation == 512 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes512 as u8]]);
            } else if recommended_allocation == 1024 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes1024 as u8]]);
            } else if recommended_allocation == 2048 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes2048 as u8]]);
            } else if recommended_allocation == 4096 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes4096 as u8]]);
            } else if recommended_allocation == 8192 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes8192 as u8]]);
            } else if recommended_allocation == 16384 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes16384 as u8]]);
            } else if recommended_allocation == 32768 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes32768 as u8]]);
            } else if recommended_allocation == 65536 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes65536 as u8]]);
            } else if recommended_allocation == 131_072 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes131072 as u8],
                ]);
            } else if recommended_allocation == 262_144 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes262144 as u8],
                ]);
            } else if recommended_allocation == 524_288 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes524288 as u8],
                ]);
            } else if recommended_allocation == 1_048_576 {
                return concat_metadata_sources(&[&[
                    IoTypeMetadataKind::VariableBytes1048576 as u8
                ]]);
            }

            // Fallback: encode the hint explicitly, prefixed with a tag that says
            // how many little-endian bytes follow
            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableBytes8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableBytes16b, 2)
            } else {
                (IoTypeMetadataKind::VariableBytes32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
            ])
        }

        // `metadata()` is deliberately called twice: the temporaries live long
        // enough in this const context, while storing the result in a `let`
        // binding would not
        metadata(RECOMMENDED_ALLOCATION)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION).1)
            .0
    };
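
    // Worked example of the fallback encoding (editorial note): for
    // `VariableBytes<100>`, `METADATA` is two bytes,
    // `[IoTypeMetadataKind::VariableBytes8b as u8, 100]`, i.e. the tag followed
    // by the first little-endian byte of the hint, since 100 < 2^8. The
    // power-of-two sizes special-cased above get a single dedicated tag byte.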

    type PointerType = u8;

    #[inline(always)]
    fn size(&self) -> u32 {
        // Resolves to the inherent `size()` defined below, not to this trait method
        self.size()
    }

    #[inline(always)]
    unsafe fn size_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(self.size)
    }

    #[inline(always)]
    unsafe fn size_mut_ptr(&mut self) -> impl DerefMut<Target = *mut u32> {
        DerefWrapper(self.size.as_ptr())
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    unsafe fn capacity_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(NonNull::from_ref(&self.capacity))
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );

        // SAFETY: `self.size` is a valid, writable pointer by construction
        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut *mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(!size.is_null(), "`null` pointer for non-`TrivialType` size");
        // SAFETY: the caller guarantees `size` points to a valid `u32`
        let size = unsafe { NonNull::new_unchecked(*size) };
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        {
            let size = unsafe { size.read() };
            debug_assert!(
                size <= capacity,
                "Size {size} must not exceed capacity {capacity}"
            );
        }

        DerefWrapper(Self {
            bytes: *ptr,
            size,
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.bytes
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.bytes
    }
}

impl<const RECOMMENDED_ALLOCATION: u32> IoTypeOptional for VariableBytes<RECOMMENDED_ALLOCATION> {}

impl<const RECOMMENDED_ALLOCATION: u32> VariableBytes<RECOMMENDED_ALLOCATION> {
    #[inline(always)]
    #[track_caller]
    pub const fn from_buffer<'a>(
        buffer: &'a [<Self as IoType>::PointerType],
        size: &'a u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(buffer.len() == *size as usize, "Invalid size");
        DerefWrapper(Self {
            bytes: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
            size: NonNull::from_ref(size),
            capacity: *size,
        })
    }

    #[inline(always)]
    #[track_caller]
    pub fn from_buffer_mut<'a>(
        buffer: &'a mut [<Self as IoType>::PointerType],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");

        DerefWrapper(Self {
            bytes: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity: *size,
        })
    }

    #[inline(always)]
    #[track_caller]
    pub fn from_uninit<'a>(
        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        let capacity = uninit.len();
        debug_assert!(
            *size as usize <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );
        let capacity = capacity as u32;

        DerefWrapper(Self {
            bytes: NonNull::new(MaybeUninit::slice_as_mut_ptr(uninit)).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity,
        })
    }
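
    // Usage sketch (editorial): wrap an uninitialized stack buffer, then fill it
    // through `append`, which keeps the external `size` in sync.
    //
    //     let mut buffer = [MaybeUninit::<u8>::uninit(); 1024];
    //     let mut size = 0u32;
    //     let mut bytes = VariableBytes::<1024>::from_uninit(&mut buffer, &mut size);
    //     assert!(bytes.append(b"hello"));
    //     assert_eq!(bytes.get_initialized(), b"hello");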

    #[inline(always)]
    pub const fn size(&self) -> u32 {
        // SAFETY: `self.size` is guaranteed valid and initialized by construction
        unsafe { self.size.read() }
    }

    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    pub const fn get_initialized(&self) -> &[u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        // SAFETY: the first `size` bytes are initialized per this type's invariants
        unsafe { slice::from_raw_parts(ptr, size as usize) }
    }

    #[inline(always)]
    pub fn get_initialized_mut(&mut self) -> &mut [u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        // SAFETY: the first `size` bytes are initialized per this type's invariants
        unsafe { slice::from_raw_parts_mut(ptr, size as usize) }
    }

    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn append(&mut self, bytes: &[u8]) -> bool {
        let size = self.size();
        if bytes.len() + size as usize > self.capacity as usize {
            return false;
        }

        // The write offset must fit into `isize`
        let Ok(offset) = isize::try_from(size) else {
            return false;
        };

        // SAFETY: the checks above keep the write within the allocation, and the
        // source slice cannot overlap the destination
        let mut start = unsafe { self.bytes.offset(offset) };
        unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), start.as_mut(), bytes.len()) }

        // Record the new size, otherwise the appended bytes would be invisible to
        // `get_initialized()` and overwritten by the next `append()`
        // SAFETY: the new size was checked against capacity above
        unsafe {
            self.size.write(size + bytes.len() as u32);
        }

        true
    }

    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn truncate(&mut self, new_size: u32) -> bool {
        if new_size > self.size() {
            return false;
        }

        // SAFETY: `new_size` does not exceed the current (initialized) size
        unsafe {
            self.size.write(new_size);
        }

        true
    }

    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn copy_from<T>(&mut self, src: &T) -> bool
    where
        T: IoType,
    {
        let src_size = src.size();
        if src_size > self.capacity {
            return false;
        }

        // SAFETY: `src_size` bytes were checked to fit, and distinct references
        // guarantee the buffers do not overlap
        unsafe {
            self.bytes
                .copy_from_nonoverlapping(src.as_ptr().cast::<u8>(), src_size as usize);
            self.size.write(src_size);
        }

        true
    }
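
    // Usage sketch (editorial): `copy_from` replaces both the contents and the
    // size with those of any other `IoType` that fits; `dst` here stands for any
    // writable `VariableBytes` with capacity of at least 3 (hypothetical variable).
    //
    //     let data = [1u8, 2, 3];
    //     let src_size = 3u32;
    //     let src = VariableBytes::<0>::from_buffer(&data, &src_size);
    //     assert!(dst.copy_from(&*src));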

    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> &mut NonNull<u8> {
        &mut self.bytes
    }

    #[inline(always)]
    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &self,
    ) -> &VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: the layout does not depend on `RECOMMENDED_ALLOCATION`, which is
        // only an allocation hint
        unsafe {
            NonNull::from_ref(self)
                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_ref()
        }
    }

    #[inline(always)]
    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &mut self,
    ) -> &mut VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
        // SAFETY: the layout does not depend on `RECOMMENDED_ALLOCATION`, which is
        // only an allocation hint
        unsafe {
            NonNull::from_mut(self)
                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_mut()
        }
    }

    #[inline(always)]
    pub fn read_trivial_type<T>(&self) -> Option<T>
    where
        T: TrivialType,
    {
        if self.size() < T::SIZE {
            return None;
        }

        let ptr = self.bytes.cast::<T>();

        // SAFETY: at least `T::SIZE` initialized bytes are present (checked
        // above); fall back to an unaligned read if the pointer is misaligned
        let value = unsafe {
            if ptr.is_aligned() {
                ptr.read()
            } else {
                ptr.read_unaligned()
            }
        };

        Some(value)
    }
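
    // Decoding sketch (editorial): reading a fixed-size value back out of the
    // buffer, assuming `u32: TrivialType` in this crate and a little-endian
    // target (the read itself is native-endian).
    //
    //     let data = 42u32.to_le_bytes();
    //     let size = 4u32;
    //     let bytes = VariableBytes::<0>::from_buffer(&data, &size);
    //     assert_eq!(bytes.read_trivial_type::<u32>(), Some(42));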

    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [u8]> {
        if size > self.capacity {
            return None;
        }

        // SAFETY: `size` was checked against capacity; the caller guarantees that
        // this many bytes were actually initialized
        unsafe {
            self.size.write(size);
        }
        Some(self.get_initialized_mut())
    }
}
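
// Editorial usage sketch, not part of the original file: exercises the
// fill-then-shrink flow end to end. Assumes `DerefWrapper` derefs to the wrapped
// `VariableBytes` and that host tests link `std` as usual.
#[cfg(test)]
mod usage_sketch {
    use super::*;

    #[test]
    fn fill_then_shrink() {
        let mut buffer = [MaybeUninit::<u8>::uninit(); 8];
        let mut size = 0u32;
        let mut bytes = VariableBytes::<8>::from_uninit(&mut buffer, &mut size);

        // `append` advances the stored size while staying within capacity
        assert!(bytes.append(b"hello"));
        assert_eq!(bytes.size(), 5);
        assert_eq!(bytes.get_initialized(), b"hello");

        // Appending past capacity fails and leaves the buffer untouched
        assert!(!bytes.append(b"world"));

        // Truncation can only shrink, never grow
        assert!(bytes.truncate(2));
        assert_eq!(bytes.get_initialized(), b"he");
        assert!(!bytes.truncate(3));
    }
}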