1#[cfg(feature = "alloc")]
4mod archival_history_segment;
5
6use crate::block::BlockNumber;
7use crate::hashes::Blake3Hash;
8use crate::pieces::{PieceIndex, Record};
9#[cfg(feature = "alloc")]
10pub use crate::segments::archival_history_segment::ArchivedHistorySegment;
11use ab_blake3::single_chunk_hash;
12use ab_io_type::trivial_type::TrivialType;
13use ab_io_type::unaligned::Unaligned;
14#[cfg(feature = "alloc")]
15use alloc::boxed::Box;
16use blake3::CHUNK_LEN;
17use core::iter::Step;
18use core::num::{NonZeroU32, NonZeroU64};
19use core::{fmt, mem};
20use derive_more::{
21 Add, AddAssign, Deref, DerefMut, Display, Div, DivAssign, From, Into, Mul, MulAssign, Sub,
22 SubAssign,
23};
24#[cfg(feature = "scale-codec")]
25use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
26#[cfg(feature = "serde")]
27use serde::{Deserialize, Deserializer, Serialize, Serializer};
28#[cfg(feature = "serde")]
29use serde_big_array::BigArray;
30
/// 32-byte root committing to a super segment.
///
/// NOTE(review): presumably a Merkle-like root computed over multiple segments — confirm against
/// the code that produces it (not visible in this file).
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SuperSegmentRoot([u8; SuperSegmentRoot::SIZE]);
36
37impl fmt::Debug for SuperSegmentRoot {
38 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
39 for byte in self.0 {
40 write!(f, "{byte:02x}")?;
41 }
42 Ok(())
43 }
44}
45
/// Raw-bytes serde representation of [`SuperSegmentRoot`], used for non-human-readable formats.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootBinary(#[serde(with = "BigArray")] [u8; SuperSegmentRoot::SIZE]);
50
/// Hex-string serde representation of [`SuperSegmentRoot`], used for human-readable formats.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SuperSegmentRootHex(#[serde(with = "hex")] [u8; SuperSegmentRoot::SIZE]);
55
#[cfg(feature = "serde")]
impl Serialize for SuperSegmentRoot {
    /// Serializes as a hex string for human-readable formats, raw bytes otherwise.
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match serializer.is_human_readable() {
            true => SuperSegmentRootHex(self.0).serialize(serializer),
            false => SuperSegmentRootBinary(self.0).serialize(serializer),
        }
    }
}
70
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SuperSegmentRoot {
    /// Deserializes from a hex string (human-readable formats) or raw bytes (binary formats).
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = match deserializer.is_human_readable() {
            true => SuperSegmentRootHex::deserialize(deserializer)?.0,
            false => SuperSegmentRootBinary::deserialize(deserializer)?.0,
        };
        Ok(Self(bytes))
    }
}
85
impl Default for SuperSegmentRoot {
    /// All-zero root.
    #[inline]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}
92
impl AsRef<[u8]> for SuperSegmentRoot {
    /// Borrows the root as a byte slice.
    #[inline]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}
99
impl AsMut<[u8]> for SuperSegmentRoot {
    /// Borrows the root as a mutable byte slice.
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}
106
impl SuperSegmentRoot {
    /// Size of the root in bytes.
    pub const SIZE: usize = 32;
}
111
/// Segment index local to some scope, as opposed to the global [`SegmentIndex`].
///
/// NOTE(review): the exact scope this is "local" to (e.g. a shard) is not visible in this file —
/// confirm against the wider crate.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct LocalSegmentIndex(u64);
138
// `Step` (nightly `step_trait` feature) is implemented by delegating to the inner `u64`, which
// makes ranges like `start..=end` over `LocalSegmentIndex` work.
impl Step for LocalSegmentIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}
155
impl From<LocalSegmentIndex> for SegmentIndex {
    // NOTE(review): converts by reusing the raw value with no offset translation — confirm this
    // local→global conversion is intentional wherever it is used.
    fn from(value: LocalSegmentIndex) -> Self {
        Self(value.0)
    }
}
163
impl From<SegmentIndex> for LocalSegmentIndex {
    // NOTE(review): converts by reusing the raw value with no offset translation — confirm this
    // global→local conversion is intentional wherever it is used.
    fn from(value: SegmentIndex) -> Self {
        Self(value.0)
    }
}
171
// `const` trait impl (nightly) so the conversion can be used in const contexts.
impl const From<u64> for LocalSegmentIndex {
    #[inline(always)]
    fn from(value: u64) -> Self {
        Self(value)
    }
}
178
// `const` trait impl (nightly) so the conversion can be used in const contexts.
impl const From<LocalSegmentIndex> for u64 {
    #[inline(always)]
    fn from(value: LocalSegmentIndex) -> Self {
        value.0
    }
}
185
186impl LocalSegmentIndex {
187 pub const ZERO: Self = Self(0);
189 pub const ONE: Self = Self(1);
191
192 #[inline]
194 pub fn checked_sub(self, rhs: Self) -> Option<Self> {
195 self.0.checked_sub(rhs.0).map(Self)
196 }
197
198 #[inline]
201 pub const fn saturating_sub(self, rhs: Self) -> Self {
202 Self(self.0.saturating_sub(rhs.0))
203 }
204}
205
/// Global index of a segment of recorded history.
#[derive(
    Debug,
    Display,
    Default,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    Add,
    AddAssign,
    Sub,
    SubAssign,
    Mul,
    MulAssign,
    Div,
    DivAssign,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct SegmentIndex(u64);
232
// `Step` (nightly `step_trait` feature) is implemented by delegating to the inner `u64`, which
// makes ranges like `start..=end` over `SegmentIndex` work (used by `segment_piece_indexes`).
impl Step for SegmentIndex {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.0, &end.0)
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.0, count).map(Self)
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.0, count).map(Self)
    }
}
249
// `const` trait impl (nightly) so the conversion can be used in const contexts.
impl const From<u64> for SegmentIndex {
    #[inline(always)]
    fn from(value: u64) -> Self {
        Self(value)
    }
}
256
// `const` trait impl (nightly) so the conversion can be used in const contexts.
impl const From<SegmentIndex> for u64 {
    #[inline(always)]
    fn from(value: SegmentIndex) -> Self {
        value.0
    }
}
263
264impl SegmentIndex {
265 pub const ZERO: Self = Self(0);
267 pub const ONE: Self = Self(1);
269
270 #[inline]
272 pub const fn first_piece_index(&self) -> PieceIndex {
273 PieceIndex::from(self.0 * RecordedHistorySegment::NUM_PIECES as u64)
274 }
275
276 #[inline]
278 pub const fn last_piece_index(&self) -> PieceIndex {
279 PieceIndex::from((self.0 + 1) * RecordedHistorySegment::NUM_PIECES as u64 - 1)
280 }
281
282 #[inline]
284 pub fn segment_piece_indexes(&self) -> [PieceIndex; RecordedHistorySegment::NUM_PIECES] {
285 let mut piece_indices = [PieceIndex::ZERO; RecordedHistorySegment::NUM_PIECES];
286 (self.first_piece_index()..=self.last_piece_index())
287 .zip(&mut piece_indices)
288 .for_each(|(input, output)| {
289 *output = input;
290 });
291
292 piece_indices
293 }
294
295 #[inline]
297 pub fn checked_sub(self, rhs: Self) -> Option<Self> {
298 self.0.checked_sub(rhs.0).map(Self)
299 }
300
301 #[inline]
304 pub const fn saturating_sub(self, rhs: Self) -> Self {
305 Self(self.0.saturating_sub(rhs.0))
306 }
307}
308
/// 32-byte root committing to a single segment of recorded history.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Deref, DerefMut, From, Into, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[repr(C)]
pub struct SegmentRoot([u8; SegmentRoot::SIZE]);
314
315impl fmt::Debug for SegmentRoot {
316 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
317 for byte in self.0 {
318 write!(f, "{byte:02x}")?;
319 }
320 Ok(())
321 }
322}
323
/// Raw-bytes serde representation of [`SegmentRoot`], used for non-human-readable formats.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootBinary(#[serde(with = "BigArray")] [u8; SegmentRoot::SIZE]);
328
/// Hex-string serde representation of [`SegmentRoot`], used for human-readable formats.
#[cfg(feature = "serde")]
#[derive(Serialize, Deserialize)]
#[serde(transparent)]
struct SegmentRootHex(#[serde(with = "hex")] [u8; SegmentRoot::SIZE]);
333
#[cfg(feature = "serde")]
impl Serialize for SegmentRoot {
    /// Serializes as a hex string for human-readable formats, raw bytes otherwise.
    #[inline]
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match serializer.is_human_readable() {
            true => SegmentRootHex(self.0).serialize(serializer),
            false => SegmentRootBinary(self.0).serialize(serializer),
        }
    }
}
348
#[cfg(feature = "serde")]
impl<'de> Deserialize<'de> for SegmentRoot {
    /// Deserializes from a hex string (human-readable formats) or raw bytes (binary formats).
    #[inline]
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let bytes = match deserializer.is_human_readable() {
            true => SegmentRootHex::deserialize(deserializer)?.0,
            false => SegmentRootBinary::deserialize(deserializer)?.0,
        };
        Ok(Self(bytes))
    }
}
363
impl Default for SegmentRoot {
    /// All-zero root.
    #[inline(always)]
    fn default() -> Self {
        Self([0; Self::SIZE])
    }
}
370
impl AsRef<[u8]> for SegmentRoot {
    /// Borrows the root as a byte slice.
    #[inline(always)]
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}
377
impl AsMut<[u8]> for SegmentRoot {
    /// Borrows the root as a mutable byte slice.
    #[inline(always)]
    fn as_mut(&mut self) -> &mut [u8] {
        &mut self.0
    }
}
384
impl SegmentRoot {
    /// Size of the root in bytes.
    pub const SIZE: usize = 32;

    /// Reinterprets a slice of raw byte arrays as a slice of [`SegmentRoot`]s (zero-cost).
    #[inline(always)]
    pub const fn slice_from_repr(value: &[[u8; Self::SIZE]]) -> &[Self] {
        // SAFETY: `SegmentRoot` is `#[repr(C)]` with a single `[u8; Self::SIZE]` field, so it has
        // the same layout as its inner array and the slice layouts match exactly
        unsafe { mem::transmute(value) }
    }

    /// Reinterprets a slice of [`SegmentRoot`]s as a slice of raw byte arrays (zero-cost).
    #[inline(always)]
    pub const fn repr_from_slice(value: &[Self]) -> &[[u8; Self::SIZE]] {
        // SAFETY: `SegmentRoot` is `#[repr(C)]` with a single `[u8; Self::SIZE]` field, so it has
        // the same layout as its inner array and the slice layouts match exactly
        unsafe { mem::transmute(value) }
    }
}
403
/// Size of the blockchain history in segments.
///
/// Internally stored as the index of the last segment, i.e. `size - 1`: a size of one segment is
/// represented by `SegmentIndex::ZERO` (see [`HistorySize::new`] and
/// [`HistorySize::as_non_zero_u64`]).
#[derive(
    Debug,
    Display,
    Copy,
    Clone,
    Ord,
    PartialOrd,
    Eq,
    PartialEq,
    Hash,
    From,
    Into,
    Deref,
    DerefMut,
    TrivialType,
)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode, MaxEncodedLen))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[repr(C)]
pub struct HistorySize(SegmentIndex);
426
impl HistorySize {
    /// History size of a single segment.
    ///
    /// Stored as `SegmentIndex::ZERO` because the inner value is `size - 1` (see [`Self::new`]).
    pub const ONE: Self = Self(SegmentIndex::ZERO);

    /// Creates a history size from a non-zero segment count.
    ///
    /// The count is stored internally as `value - 1`, so the full `NonZeroU64` range is
    /// representable without overflow.
    #[inline(always)]
    pub const fn new(value: NonZeroU64) -> Self {
        Self(SegmentIndex::from(value.get() - 1))
    }

    /// Index of the last segment covered by this history size (`size - 1`).
    pub const fn as_segment_index(&self) -> SegmentIndex {
        self.0
    }

    /// History size as a non-zero segment count (`stored index + 1`).
    pub const fn as_non_zero_u64(&self) -> NonZeroU64 {
        // Stored value is `size - 1`, so adding 1 back can never produce zero (saturation only
        // caps at `u64::MAX`, which is still non-zero)
        NonZeroU64::new(u64::from(self.0).saturating_add(1)).expect("Not zero; qed")
    }

    /// History size expressed in pieces (segment count times pieces per segment).
    #[inline(always)]
    pub const fn in_pieces(&self) -> NonZeroU64 {
        // Both factors are non-zero, so the (saturating) product is non-zero as well
        NonZeroU64::new(
            u64::from(self.0)
                .saturating_add(1)
                .saturating_mul(RecordedHistorySegment::NUM_PIECES as u64),
        )
        .expect("Not zero; qed")
    }

    /// Index of the last segment covered by this history size.
    ///
    /// NOTE(review): duplicates [`Self::as_segment_index`] (non-const) — confirm whether both are
    /// still needed.
    #[inline(always)]
    pub fn segment_index(&self) -> SegmentIndex {
        self.0
    }

    /// History size at which a sector created at this history size must be checked for
    /// expiration, i.e. `self + min_sector_lifetime` (in segments); `None` on `u64` overflow.
    ///
    /// NOTE(review): the exact consensus semantics of this check live in the caller — confirm.
    #[inline(always)]
    pub fn sector_expiration_check(&self, min_sector_lifetime: Self) -> Option<Self> {
        self.as_non_zero_u64()
            .checked_add(min_sector_lifetime.as_non_zero_u64().get())
            .map(Self::new)
    }
}
474
/// Progress of archiving a block.
///
/// Encoded as a single `u32`: `0` means the block was archived completely, any non-zero value is
/// the number of bytes archived so far (see [`ArchivedBlockProgress::new_complete`] /
/// [`ArchivedBlockProgress::new_partial`]).
#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct ArchivedBlockProgress {
    // `0` is the sentinel for "complete"; non-zero is the partial byte count
    bytes: u32,
}
485
impl Default for ArchivedBlockProgress {
    /// Defaults to a completely archived block.
    #[inline(always)]
    fn default() -> Self {
        Self::new_complete()
    }
}
494
495impl ArchivedBlockProgress {
496 #[inline(always)]
498 pub const fn new_complete() -> Self {
499 Self { bytes: 0 }
500 }
501
502 #[inline(always)]
504 pub const fn new_partial(new_partial: NonZeroU32) -> Self {
505 Self {
506 bytes: new_partial.get(),
507 }
508 }
509
510 #[inline(always)]
512 pub const fn partial(&self) -> Option<NonZeroU32> {
513 NonZeroU32::new(self.bytes)
514 }
515}
516
517#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash, TrivialType)]
519#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
520#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
521#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
522#[repr(C)]
523pub struct LastArchivedBlock {
524 pub number: Unaligned<BlockNumber>,
526 pub archived_progress: ArchivedBlockProgress,
528}
529
530impl LastArchivedBlock {
531 #[inline(always)]
533 pub fn partial_archived(&self) -> Option<NonZeroU32> {
534 self.archived_progress.partial()
535 }
536
537 #[inline(always)]
539 pub fn set_partial_archived(&mut self, new_partial: NonZeroU32) {
540 self.archived_progress = ArchivedBlockProgress::new_partial(new_partial);
541 }
542
543 #[inline(always)]
545 pub fn set_complete(&mut self) {
546 self.archived_progress = ArchivedBlockProgress::new_complete();
547 }
548
549 pub const fn number(&self) -> BlockNumber {
551 self.number.as_inner()
552 }
553}
554
/// Header describing one archived segment of history.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, TrivialType)]
#[cfg_attr(feature = "scale-codec", derive(Encode, Decode))]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))]
#[repr(C)]
pub struct SegmentHeader {
    // Index of this segment; stored via `Unaligned`, presumably to avoid padding in the
    // `repr(C)` layout — TODO confirm
    pub segment_index: Unaligned<LocalSegmentIndex>,
    // Root committing to this segment's contents
    pub segment_root: SegmentRoot,
    // Hash of the previous segment's header, chaining headers together
    pub prev_segment_header_hash: Blake3Hash,
    // Last block archived into this segment and its progress
    pub last_archived_block: LastArchivedBlock,
}
571
impl SegmentHeader {
    /// BLAKE3 hash of the header's raw bytes.
    ///
    /// A compile-time assertion guarantees the whole struct fits into a single BLAKE3 chunk, so
    /// the single-chunk fast path (`single_chunk_hash`) is always applicable.
    #[inline(always)]
    pub fn hash(&self) -> Blake3Hash {
        const {
            assert!(size_of::<Self>() <= CHUNK_LEN);
        }
        Blake3Hash::new(
            single_chunk_hash(self.as_bytes())
                .expect("Less than a single chunk worth of bytes; qed"),
        )
    }

    /// Local segment index stored in this header.
    #[inline(always)]
    pub const fn local_segment_index(&self) -> LocalSegmentIndex {
        self.segment_index.as_inner()
    }
}
591
/// One segment of recorded history: a fixed array of raw (source, pre-erasure-coding) records.
///
/// This type is large (`Record::SIZE * NUM_RAW_RECORDS` bytes); prefer
/// [`RecordedHistorySegment::new_boxed`] over stack construction.
#[derive(Copy, Clone, Eq, PartialEq, Deref, DerefMut)]
#[repr(C)]
pub struct RecordedHistorySegment([Record; Self::NUM_RAW_RECORDS]);
598
impl fmt::Debug for RecordedHistorySegment {
    /// Prints only the type name with contents elided — the record data is far too large to dump.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RecordedHistorySegment")
            .finish_non_exhaustive()
    }
}
605
impl Default for RecordedHistorySegment {
    /// Segment filled with default (zeroed) records.
    ///
    /// NOTE(review): this constructs the large value on the stack; prefer [`Self::new_boxed`]
    /// when heap allocation is available.
    #[inline]
    fn default() -> Self {
        Self([Record::default(); Self::NUM_RAW_RECORDS])
    }
}
612
impl AsRef<[u8]> for RecordedHistorySegment {
    /// Views the whole segment as one contiguous byte slice (records flattened in order).
    #[inline]
    fn as_ref(&self) -> &[u8] {
        Record::slice_to_repr(&self.0).as_flattened().as_flattened()
    }
}
619
impl AsMut<[u8]> for RecordedHistorySegment {
    /// Views the whole segment as one contiguous mutable byte slice (records flattened in order).
    #[inline]
    fn as_mut(&mut self) -> &mut [u8] {
        Record::slice_mut_to_repr(&mut self.0)
            .as_flattened_mut()
            .as_flattened_mut()
    }
}
628
impl RecordedHistorySegment {
    /// Number of raw (source) records in one recorded history segment.
    pub const NUM_RAW_RECORDS: usize = 128;
    /// Erasure coding rate as `(source, total)` shares per record.
    pub const ERASURE_CODING_RATE: (usize, usize) = (1, 2);
    /// Total number of pieces per segment after erasure coding
    /// (`NUM_RAW_RECORDS * total / source` = 256).
    pub const NUM_PIECES: usize =
        Self::NUM_RAW_RECORDS * Self::ERASURE_CODING_RATE.1 / Self::ERASURE_CODING_RATE.0;
    /// Size of the segment in bytes (raw records only, before erasure coding).
    pub const SIZE: usize = Record::SIZE * Self::NUM_RAW_RECORDS;

    /// Allocates a zero-initialized segment directly on the heap.
    ///
    /// Avoids materializing the large value on the stack first, which `Box::new(Self::default())`
    /// would do.
    #[inline]
    #[cfg(feature = "alloc")]
    pub fn new_boxed() -> Box<Self> {
        // SAFETY: an all-zero bit pattern is a valid `RecordedHistorySegment` — it is a `repr(C)`
        // array of `Record`s, which the `AsRef`/`AsMut` impls above show are byte-array-backed.
        // NOTE(review): relies on `Record` remaining a plain-bytes type — confirm
        unsafe { Box::<Self>::new_zeroed().assume_init() }
    }
}