// ab_contracts_io_type/variable_bytes.rs
use crate::metadata::{IoTypeMetadataKind, MAX_METADATA_CAPACITY, concat_metadata_sources};
use crate::trivial_type::TrivialType;
use crate::{DerefWrapper, IoType, IoTypeOptional};
use core::mem::MaybeUninit;
use core::ops::{Deref, DerefMut};
use core::ptr::NonNull;
use core::{ptr, slice};

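/// Container for a variable number of bytes.
///
/// Stores a pointer to the underlying bytes, a pointer to the current size and the capacity of
/// the allocation. `RECOMMENDED_ALLOCATION` is only encoded into `IoType::METADATA`; it does not
/// constrain the capacity at runtime.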
#[derive(Debug)]
#[repr(C)]
pub struct VariableBytes<const RECOMMENDED_ALLOCATION: u32 = 0> {
    bytes: NonNull<u8>,
    size: NonNull<u32>,
    capacity: u32,
}

unsafe impl<const RECOMMENDED_ALLOCATION: u32> IoType for VariableBytes<RECOMMENDED_ALLOCATION> {
    const METADATA: &[u8] = {
        const fn metadata(recommended_allocation: u32) -> ([u8; MAX_METADATA_CAPACITY], usize) {
            if recommended_allocation == 0 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes0 as u8]]);
            } else if recommended_allocation == 512 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes512 as u8]]);
            } else if recommended_allocation == 1024 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes1024 as u8]]);
            } else if recommended_allocation == 2028 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes2028 as u8]]);
            } else if recommended_allocation == 4096 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes4096 as u8]]);
            } else if recommended_allocation == 8192 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes8192 as u8]]);
            } else if recommended_allocation == 16384 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes16384 as u8]]);
            } else if recommended_allocation == 32768 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes32768 as u8]]);
            } else if recommended_allocation == 65536 {
                return concat_metadata_sources(&[&[IoTypeMetadataKind::VariableBytes65536 as u8]]);
            } else if recommended_allocation == 131_072 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes131072 as u8],
                ]);
            } else if recommended_allocation == 262_144 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes262144 as u8],
                ]);
            } else if recommended_allocation == 524_288 {
                return concat_metadata_sources(&[
                    &[IoTypeMetadataKind::VariableBytes524288 as u8],
                ]);
            } else if recommended_allocation == 1_048_576 {
                return concat_metadata_sources(&[&[
                    IoTypeMetadataKind::VariableBytes1048576 as u8
                ]]);
            }

            // Fall back to a generic variant that stores the recommendation explicitly, using
            // the smallest little-endian integer width that can represent it
            let (io_type, size_bytes) = if recommended_allocation < 2u32.pow(8) {
                (IoTypeMetadataKind::VariableBytes8b, 1)
            } else if recommended_allocation < 2u32.pow(16) {
                (IoTypeMetadataKind::VariableBytes16b, 2)
            } else {
                (IoTypeMetadataKind::VariableBytes32b, 4)
            };

            concat_metadata_sources(&[
                &[io_type as u8],
                recommended_allocation.to_le_bytes().split_at(size_bytes).0,
            ])
        }

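        // Evaluate the metadata at compile time and truncate the scratch array to the length
        // actually used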
        metadata(RECOMMENDED_ALLOCATION)
            .0
            .split_at(metadata(RECOMMENDED_ALLOCATION).1)
            .0
    };

    type PointerType = u8;

    #[inline(always)]
    fn size(&self) -> u32 {
        self.size()
    }

    #[inline(always)]
    unsafe fn size_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(self.size)
    }

    #[inline(always)]
    unsafe fn size_mut_ptr(&mut self) -> impl DerefMut<Target = *mut u32> {
        DerefWrapper(self.size.as_ptr())
    }

    #[inline(always)]
    fn capacity(&self) -> u32 {
        self.capacity
    }

    #[inline(always)]
    unsafe fn capacity_ptr(&self) -> impl Deref<Target = NonNull<u32>> {
        DerefWrapper(NonNull::from_ref(&self.capacity))
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn set_size(&mut self, size: u32) {
        debug_assert!(
            size <= self.capacity,
            "`set_size` called with invalid input {size} for capacity {}",
            self.capacity
        );

        unsafe {
            self.size.write(size);
        }
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_ptr<'a>(
        ptr: &'a NonNull<Self::PointerType>,
        size: &'a u32,
        capacity: u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        debug_assert!(
            *size <= capacity,
            "Size {size} must not exceed capacity {capacity}"
        );

        DerefWrapper(Self {
            bytes: *ptr,
            size: NonNull::from_ref(size),
            capacity,
        })
    }

    #[inline(always)]
    #[track_caller]
    unsafe fn from_mut_ptr<'a>(
        ptr: &'a mut NonNull<Self::PointerType>,
        size: &'a mut *mut u32,
        capacity: u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(!size.is_null(), "`null` pointer for non-`TrivialType` size");
        let size = unsafe { NonNull::new_unchecked(*size) };
        debug_assert!(ptr.is_aligned(), "Misaligned pointer");
        {
            let size = unsafe { size.read() };
            debug_assert!(
                size <= capacity,
                "Size {size} must not exceed capacity {capacity}"
            );
        }

        DerefWrapper(Self {
            bytes: *ptr,
            size,
            capacity,
        })
    }

    #[inline(always)]
    unsafe fn as_ptr(&self) -> impl Deref<Target = NonNull<Self::PointerType>> {
        &self.bytes
    }

    #[inline(always)]
    unsafe fn as_mut_ptr(&mut self) -> impl DerefMut<Target = NonNull<Self::PointerType>> {
        &mut self.bytes
    }
}

impl<const RECOMMENDED_ALLOCATION: u32> IoTypeOptional for VariableBytes<RECOMMENDED_ALLOCATION> {}

impl<const RECOMMENDED_ALLOCATION: u32> VariableBytes<RECOMMENDED_ALLOCATION> {
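    /// Create a new shared instance from a buffer of bytes and a reference to its size.
    ///
    /// Capacity is set to the size of the buffer.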
    #[track_caller]
    pub const fn from_buffer<'a>(
        buffer: &'a [<Self as IoType>::PointerType],
        size: &'a u32,
    ) -> impl Deref<Target = Self> + 'a {
        debug_assert!(buffer.len() == *size as usize, "Invalid size");
        DerefWrapper(Self {
            bytes: NonNull::new(buffer.as_ptr().cast_mut()).expect("Not null; qed"),
            size: NonNull::from_ref(size),
            capacity: *size,
        })
    }

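    /// Create a new exclusive instance from a buffer of bytes and a mutable reference to its
    /// size.
    ///
    /// Capacity is set to the size of the buffer.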
    #[track_caller]
    pub fn from_buffer_mut<'a>(
        buffer: &'a mut [<Self as IoType>::PointerType],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert_eq!(buffer.len(), *size as usize, "Invalid size");

        DerefWrapper(Self {
            bytes: NonNull::new(buffer.as_mut_ptr()).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity: *size,
        })
    }

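    /// Create a new exclusive instance from an uninitialized buffer with statically known
    /// capacity.
    ///
    /// `size` indicates how many bytes at the beginning of the buffer are already initialized
    /// and must not exceed `CAPACITY`.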
    #[track_caller]
    pub fn from_uninit<'a, const CAPACITY: usize>(
        uninit: &'a mut [MaybeUninit<<Self as IoType>::PointerType>; CAPACITY],
        size: &'a mut u32,
    ) -> impl DerefMut<Target = Self> + 'a {
        debug_assert!(
            *size as usize <= CAPACITY,
            "Size {size} must not exceed capacity {CAPACITY}"
        );
        let capacity = CAPACITY as u32;

        DerefWrapper(Self {
            bytes: NonNull::new(MaybeUninit::slice_as_mut_ptr(uninit)).expect("Not null; qed"),
            size: NonNull::from_mut(size),
            capacity,
        })
    }

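    /// Number of initialized bytes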
    #[inline(always)]
    pub const fn size(&self) -> u32 {
        unsafe { self.size.read() }
    }

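    /// Capacity of the underlying buffer in bytes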
    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.capacity
    }

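    /// Get access to the initialized bytes as a shared slice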
    #[inline(always)]
    pub const fn get_initialized(&self) -> &[u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        unsafe { slice::from_raw_parts(ptr, size as usize) }
    }

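    /// Get access to the initialized bytes as an exclusive slice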
    #[inline(always)]
    pub fn get_initialized_mut(&mut self) -> &mut [u8] {
        let size = self.size();
        let ptr = self.bytes.as_ptr();
        unsafe { slice::from_raw_parts_mut(ptr, size as usize) }
    }

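    /// Append bytes after the already initialized part of the buffer and increase the size
    /// accordingly.
    ///
    /// Returns `false` if the appended bytes do not fit into the remaining capacity.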
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn append(&mut self, bytes: &[u8]) -> bool {
        let size = self.size();
        if bytes.len() + size as usize > self.capacity as usize {
            return false;
        }

        let Ok(offset) = isize::try_from(size) else {
            return false;
        };

        let mut start = unsafe { self.bytes.offset(offset) };
        unsafe { ptr::copy_nonoverlapping(bytes.as_ptr(), start.as_mut(), bytes.len()) }
        // Increase the stored size to cover the newly appended bytes
        unsafe {
            self.size.write(size + bytes.len() as u32);
        }

        true
    }

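    /// Truncate the size of the contents to `new_size`.
    ///
    /// Returns `false` if `new_size` is larger than the current size.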
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn truncate(&mut self, new_size: u32) -> bool {
        if new_size > self.size() {
            return false;
        }

        unsafe {
            self.size.write(new_size);
        }

        true
    }

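    /// Copy the contents of another `IoType` into this instance and update the size.
    ///
    /// Returns `false` if the source contents do not fit into this instance's capacity.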
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub fn copy_from<T>(&mut self, src: &T) -> bool
    where
        T: IoType,
    {
        let src_size = src.size();
        if src_size > self.capacity {
            return false;
        }

        unsafe {
            self.bytes
                .copy_from_nonoverlapping(src.as_ptr().cast::<u8>(), src_size as usize);
            self.size.write(src_size);
        }

        true
    }

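    /// Get exclusive access to the underlying pointer with no extra checks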
    #[inline(always)]
    pub fn as_mut_ptr(&mut self) -> &mut NonNull<u8> {
        &mut self.bytes
    }

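    /// Cast a shared reference to this instance into a reference to an instance with a
    /// different recommended allocation.
    ///
    /// The layout is identical for all values of `RECOMMENDED_ALLOCATION` (`#[repr(C)]` with the
    /// same fields), which makes the cast sound.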
    #[inline(always)]
    pub fn cast_ref<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &self,
    ) -> &VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
        unsafe {
            NonNull::from_ref(self)
                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_ref()
        }
    }

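    /// Cast an exclusive reference to this instance into a reference to an instance with a
    /// different recommended allocation.
    ///
    /// The layout is identical for all values of `RECOMMENDED_ALLOCATION`, which makes the cast
    /// sound.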
    #[inline(always)]
    pub fn cast_mut<const DIFFERENT_RECOMMENDED_ALLOCATION: u32>(
        &mut self,
    ) -> &mut VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION> {
        unsafe {
            NonNull::from_mut(self)
                .cast::<VariableBytes<DIFFERENT_RECOMMENDED_ALLOCATION>>()
                .as_mut()
        }
    }

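    /// Read a `TrivialType` from the beginning of the contents.
    ///
    /// Returns `None` if fewer than `T::SIZE` initialized bytes are available. Unaligned
    /// pointers are handled transparently with an unaligned read.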
    #[inline(always)]
    pub fn read_trivial_type<T>(&self) -> Option<T>
    where
        T: TrivialType,
    {
        if self.size() < T::SIZE {
            return None;
        }

        let ptr = self.bytes.cast::<T>();

        let value = unsafe {
            if ptr.is_aligned() {
                ptr.read()
            } else {
                ptr.read_unaligned()
            }
        };

        Some(value)
    }

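    /// Assume that the first `size` bytes of the buffer are initialized and get access to them.
    ///
    /// Returns `None` if `size` exceeds capacity.
    ///
    /// # Safety
    /// The caller must ensure that the first `size` bytes of the buffer were actually
    /// initialized.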
    #[inline(always)]
    #[must_use = "Operation may fail"]
    pub unsafe fn assume_init(&mut self, size: u32) -> Option<&mut [u8]> {
        if size > self.capacity {
            return None;
        }

        unsafe {
            self.size.write(size);
        }
        Some(self.get_initialized_mut())
    }
}
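
// A minimal usage sketch rather than part of the original API surface: it exercises
// `from_buffer_mut`, `append`, `truncate`, `get_initialized` and `read_trivial_type` from above
// against a stack buffer. It assumes `u32` implements `TrivialType` with its native in-memory
// layout, which is defined elsewhere in the crate and not shown in this file.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn append_truncate_read() {
        let mut buffer = [0u8; 8];
        // The whole buffer starts out as initialized contents
        let mut size = 8u32;
        let mut bytes = VariableBytes::<8>::from_buffer_mut(&mut buffer, &mut size);

        // Buffer is already full, so appending must fail
        assert!(!bytes.append(&[1]));

        // Truncating to zero frees up the whole capacity again
        assert!(bytes.truncate(0));
        assert!(bytes.append(&[1, 2, 3, 4]));
        assert_eq!(bytes.get_initialized(), &[1, 2, 3, 4]);

        // `read_trivial_type` does a plain (possibly unaligned) pointer read, hence the
        // native-endian conversion here
        assert_eq!(
            bytes.read_trivial_type::<u32>(),
            Some(u32::from_ne_bytes([1, 2, 3, 4]))
        );
    }
}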