// Source: ab_core_primitives/block/body.rs
1//! Block body primitives
2
3#[cfg(feature = "alloc")]
4pub mod owned;
5
6#[cfg(feature = "alloc")]
7use crate::block::body::owned::{
8    GenericOwnedBlockBody, OwnedBeaconChainBody, OwnedBlockBody, OwnedIntermediateShardBody,
9    OwnedLeafShardBody,
10};
11use crate::block::header::{IntermediateShardHeader, LeafShardHeader};
12use crate::block::{BlockNumber, align_to_and_ensure_zero_padding};
13use crate::hashes::Blake3Hash;
14use crate::pot::PotCheckpoints;
15use crate::segments::{LocalSegmentIndex, SegmentRoot};
16use crate::shard::{RealShardKind, ShardIndex};
17use crate::transaction::Transaction;
18use ab_blake3::{BLOCK_LEN, OUT_LEN, single_block_hash};
19use ab_io_type::trivial_type::TrivialType;
20use ab_io_type::unaligned::Unaligned;
21use ab_merkle_tree::balanced::BalancedMerkleTree;
22use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
23#[cfg(feature = "alloc")]
24use core::iter;
25use core::iter::TrustedLen;
26use core::{array, cmp, fmt, slice};
27use derive_more::From;
28use yoke::Yokeable;
29
/// Generic block body
///
/// Implemented by the borrowed block body type of each shard kind. Ties the body to its
/// [`RealShardKind`] and, with the `alloc` feature, to an owned counterpart that can be converted
/// back and forth via [`Self::to_owned()`] / [`GenericOwnedBlockBody::Body`].
pub trait GenericBlockBody<'a>
where
    Self: Copy + fmt::Debug + Into<BlockBody<'a>> + Send + Sync,
{
    /// Shard kind
    const SHARD_KIND: RealShardKind;

    /// Owned block body
    #[cfg(feature = "alloc")]
    type Owned: GenericOwnedBlockBody<Body<'a> = Self>
    where
        Self: 'a;

    /// Turn into an owned version
    #[cfg(feature = "alloc")]
    fn to_owned(self) -> Self::Owned;

    /// Compute block body root
    fn root(&self) -> Blake3Hash;
}
51
52/// Calculates a Merkle Tree root for a provided list of segment roots
53#[inline]
54pub fn compute_segments_root<'a, const MAX_SEGMENTS: u64, Iter>(segment_roots: Iter) -> Blake3Hash
55where
56    [(); MAX_SEGMENTS.next_power_of_two().ilog2() as usize + 1]:,
57    Iter: IntoIterator<Item = &'a SegmentRoot>,
58{
59    // TODO: Keyed hash
60    let root = UnbalancedMerkleTree::compute_root_only::<MAX_SEGMENTS, _, _>(
61        segment_roots.into_iter().map(|segment_root| {
62            // Hash the root again so we can prove it, otherwise root of segments is
63            // indistinguishable from individual segment roots and can be used to confuse verifier
64            single_block_hash(segment_root.as_ref())
65                .expect("Less than a single block worth of bytes; qed")
66        }),
67    );
68
69    Blake3Hash::new(root.unwrap_or_default())
70}
71
/// Own segments produced by a shard
#[derive(Debug, Copy, Clone)]
pub struct OwnSegments<'a> {
    /// Local segment index of the first own segment root
    ///
    /// NOTE(review): the entries in [`Self::segment_roots`] appear to correspond to consecutive
    /// local segment indices starting at this one — confirm against the producer
    pub first_local_segment_index: LocalSegmentIndex,
    /// Segment roots produced by a shard
    pub segment_roots: &'a [SegmentRoot],
}
80
81impl OwnSegments<'_> {
82    /// Compute the root of own segments
83    #[inline]
84    pub fn root(&self) -> Blake3Hash {
85        // TODO: Keyed hash
86        let root = BalancedMerkleTree::compute_root_only(&[
87            single_block_hash(self.first_local_segment_index.as_bytes())
88                .expect("Less than a single block worth of bytes; qed"),
89            *compute_segments_root::<{ u8::MAX as u64 }, _>(self.segment_roots),
90        ]);
91
92        Blake3Hash::new(root)
93    }
94
95    /// Compute the root of own segments while mixing in shard index.
96    ///
97    /// This method is useful for deriving roots on intermediate shard that can be verified by the
98    /// beacon chain later (which will have just shard index available rather than the whole leaf
99    /// shard block header).
100    #[inline]
101    pub fn root_with_shard_index(&self, shard_index: ShardIndex) -> Blake3Hash {
102        // TODO: Keyed hash
103        let root = BalancedMerkleTree::compute_root_only(&[
104            {
105                const {
106                    assert!((ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize <= BLOCK_LEN);
107                }
108                let mut pair = [0u8; (ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize];
109                pair[..ShardIndex::SIZE as usize].copy_from_slice(shard_index.as_bytes());
110                pair[ShardIndex::SIZE as usize..]
111                    .copy_from_slice(self.first_local_segment_index.as_bytes());
112
113                single_block_hash(&pair).expect("Less than a single block worth of bytes; qed")
114            },
115            *compute_segments_root::<{ u8::MAX as u64 }, _>(self.segment_roots),
116        ]);
117
118        Blake3Hash::new(root)
119    }
120}
121
/// Information about intermediate shard block
#[derive(Debug, Clone)]
pub struct IntermediateShardBlockInfo<'a> {
    /// A block header that corresponds to an intermediate shard
    pub header: IntermediateShardHeader<'a>,
    /// Segments proof if either own or child segments are present
    pub segments_proof: Option<&'a [u8; OUT_LEN]>,
    /// Segments in the corresponding block
    pub own_segments: Option<OwnSegments<'a>>,
    // Number of leaf shard blocks that carry segments; length (in entries) of the counts prefix of
    // `leaf_shards_segments_bytes`
    num_leaf_shard_blocks_with_segments: u8,
    // Backing bytes for `leaf_shards_segments()`: one count `u8` per leaf shard block, followed by
    // each entry's shard index, first local segment index and concatenated segment roots; lengths
    // are validated by `IntermediateShardBlocksInfo::try_from_bytes()`
    leaf_shards_segments_bytes: &'a [u8],
}
134
impl<'a> IntermediateShardBlockInfo<'a> {
    /// Segments of leaf shards in the corresponding intermediate shard block
    ///
    /// Yields `(leaf_shard_index, own_segments)` pairs decoded from the backing bytes: a prefix of
    /// one count byte per leaf shard block, followed by each entry's shard index, first local
    /// segment index and concatenated segment roots. All lengths were validated when the
    /// containing collection was constructed, which is what makes the unchecked splits below
    /// sound.
    #[inline]
    pub fn leaf_shards_segments(
        &self,
    ) -> impl ExactSizeIterator<Item = (ShardIndex, OwnSegments<'a>)> + TrustedLen + use<'a> {
        // Split off the counts prefix (one `u8` per leaf shard block); iterating over it drives
        // the decoding of each entry from `remainder`
        // SAFETY: Checked in constructor
        let (counts, mut remainder) = unsafe {
            self.leaf_shards_segments_bytes.split_at_unchecked(
                usize::from(self.num_leaf_shard_blocks_with_segments) * size_of::<u8>(),
            )
        };
        counts.iter().map(move |&num_own_segment_roots| {
            let num_own_segment_roots = usize::from(num_own_segment_roots);

            let shard_index;
            // SAFETY: Checked in constructor
            (shard_index, remainder) =
                unsafe { remainder.split_at_unchecked(ShardIndex::SIZE as usize) };
            // SAFETY: Correct size and no alignment requirements
            let shard_index =
                unsafe { Unaligned::<ShardIndex>::from_bytes_unchecked(shard_index) }.as_inner();

            let first_local_segment_index;
            // SAFETY: Checked in constructor
            (first_local_segment_index, remainder) =
                unsafe { remainder.split_at_unchecked(LocalSegmentIndex::SIZE as usize) };
            // SAFETY: Correct size and no alignment requirements
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots;
            // SAFETY: Checked in constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // Reinterpret the flat byte run as `[u8; SegmentRoot::SIZE]` chunks
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            (
                shard_index,
                OwnSegments {
                    first_local_segment_index,
                    segment_roots: own_segment_roots,
                },
            )
        })
    }
}
191
/// Information about a collection of intermediate shard blocks
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlocksInfo<'a> {
    // Number of blocks encoded in `bytes` (parsed from the leading `u16` by the constructor)
    num_blocks: usize,
    // Raw bytes covering the counts prefix, padding and all block entries; the layout is validated
    // by `try_from_bytes()`, which is what allows `iter()` to use unchecked splits
    bytes: &'a [u8],
}
198
199impl<'a> IntermediateShardBlocksInfo<'a> {
200    /// Create an instance from provided bytes.
201    ///
202    /// `bytes` do not need to be aligned.
203    ///
204    /// Returns an instance and remaining bytes on success.
205    #[inline]
206    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
207        // The layout here is as follows:
208        // * number of blocks: u16 as unaligned little-endian bytes
209        // * for each block:
210        //   * number of own segment roots: u8
211        //   * number of leaf shard blocks with segments: u8
212        // * padding to 8-bytes boundary with zeroes
213        // * for each block:
214        //   * block header: IntermediateShardHeader
215        //   * segment roots proof (if either own or child segments are present)
216        //   * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
217        //   * concatenated own segment roots
218        //   * for each leaf shard block with segments:
219        //     * number of own segment roots: u8
220        //   * for each leaf shard block with segments:
221        //     * shard index: u32 as unaligned little-endian bytes
222        //     * local segment index of the first segment root: unaligned `LocalSegmentIndex`
223        //     * concatenated own segment roots
224        //   * padding to 8-bytes boundary with zeroes
225
226        let num_blocks = bytes.split_off(..size_of::<u16>())?;
227        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));
228
229        let bytes_start = bytes;
230
231        let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() * 2))?;
232
233        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;
234
235        for _ in 0..num_blocks {
236            let num_own_segment_roots = usize::from(counts[0]);
237            let num_leaf_shard_blocks_with_segments = usize::from(counts[1]);
238            counts = &counts[2..];
239
240            (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;
241
242            if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
243                let _segments_proof = remainder.split_off(..SegmentRoot::SIZE)?;
244            }
245
246            if num_own_segment_roots > 0 {
247                let _first_local_segment_index =
248                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
249                let _own_segment_roots =
250                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
251            }
252
253            let leaf_shard_segment_roots_counts =
254                remainder.split_off(..num_leaf_shard_blocks_with_segments * size_of::<u8>())?;
255
256            for &num_own_segment_roots in leaf_shard_segment_roots_counts {
257                let _shard_index = remainder.split_off(..ShardIndex::SIZE as usize)?;
258                let _first_local_segment_index =
259                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
260                let _own_segment_roots = remainder
261                    .split_off(..usize::from(num_own_segment_roots) * SegmentRoot::SIZE)?;
262            }
263
264            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
265        }
266
267        let info = Self {
268            num_blocks,
269            bytes: &bytes_start[..bytes_start.len() - remainder.len()],
270        };
271
272        if !info.is_internally_consistent() {
273            return None;
274        }
275
276        Some((info, remainder))
277    }
278
    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
    /// checks
    ///
    /// All structural length checks are still performed (so accessors remain sound), only the
    /// cross-block ordering/Merkle validation of [`Self::is_internally_consistent()`] is skipped.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of blocks: u16 as unaligned little-endian bytes
        // * for each block:
        //   * number of own segment roots: u8
        //   * number of leaf shard blocks with segments: u8
        // * padding to 8-bytes boundary with zeroes
        // * for each block:
        //   * block header: IntermediateShardHeader
        //   * segment roots proof (if either own or child segments are present)
        //   * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        //   * concatenated own segment roots
        //   * for each leaf shard block with segments:
        //     * number of own segment roots: u8
        //   * for each leaf shard block with segments:
        //     * shard index: u32 as unaligned little-endian bytes
        //     * local segment index of the first segment root: unaligned `LocalSegmentIndex`
        //     * concatenated own segment roots
        //   * padding to 8-bytes boundary with zeroes

        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        // Remember the start so `Self::bytes` can cover the counts prefix and all block entries
        let bytes_start = bytes;

        let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() * 2))?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        // Validate all lengths so that accessors like `iter()` can use unchecked splits later
        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            let num_leaf_shard_blocks_with_segments = usize::from(counts[1]);
            counts = &counts[2..];

            (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;

            if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
                let _segments_proof = remainder.split_off(..OUT_LEN)?;
            }

            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }

            let leaf_shard_segment_roots_counts =
                remainder.split_off(..num_leaf_shard_blocks_with_segments * size_of::<u8>())?;

            for &num_own_segment_roots in leaf_shard_segment_roots_counts {
                let _shard_index = remainder.split_off(..ShardIndex::SIZE as usize)?;
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots = remainder
                    .split_off(..usize::from(num_own_segment_roots) * SegmentRoot::SIZE)?;
            }

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
        }

        Some((
            Self {
                num_blocks,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }
351
    /// Check intermediate shard info's internal consistency.
    ///
    /// This is usually not necessary to be called explicitly since internal consistency is checked
    /// by [`Self::try_from_bytes()`] internally.
    ///
    /// Verifies that blocks are sorted by shard index with strictly increasing block numbers and
    /// first local segment indices within a shard, that every leaf shard entry belongs to its
    /// intermediate shard, and that each block's segments proof links the recomputed segments root
    /// to the header's body root.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        // (shard index, block number, first local segment index of own segments if seen so far)
        let mut last_intermediate_shard_info =
            None::<(ShardIndex, BlockNumber, Option<LocalSegmentIndex>)>;

        self.iter().all(|intermediate_shard_block| {
            let shard_index = intermediate_shard_block.header.prefix.shard_index;

            // Ensure increasing order of shard indices, block numbers and local segment indices
            if let Some((
                last_intermediate_shard_index,
                last_intermediate_shard_block_number,
                last_intermediate_shard_first_local_segment_index,
            )) = last_intermediate_shard_info
            {
                match last_intermediate_shard_index.cmp(&shard_index) {
                    cmp::Ordering::Less => {
                        // New shard: reset the tracked info
                        last_intermediate_shard_info.replace((
                            shard_index,
                            intermediate_shard_block.header.prefix.number,
                            intermediate_shard_block
                                .own_segments
                                .as_ref()
                                .map(|own_segments| own_segments.first_local_segment_index),
                        ));
                    }
                    cmp::Ordering::Equal => {
                        // Same shard: block numbers must be strictly increasing
                        if last_intermediate_shard_block_number
                            >= intermediate_shard_block.header.prefix.number
                        {
                            return false;
                        }
                        if let Some(own_segments) = &intermediate_shard_block.own_segments {
                            // First local segment indices must be strictly increasing too
                            if let Some(last_intermediate_shard_first_local_segment_index) =
                                last_intermediate_shard_first_local_segment_index
                                && last_intermediate_shard_first_local_segment_index
                                    >= own_segments.first_local_segment_index
                            {
                                return false;
                            }

                            last_intermediate_shard_info.replace((
                                shard_index,
                                intermediate_shard_block.header.prefix.number,
                                Some(own_segments.first_local_segment_index),
                            ));
                        } else {
                            // No own segments here: keep the previously seen segment index
                            last_intermediate_shard_info.replace((
                                shard_index,
                                intermediate_shard_block.header.prefix.number,
                                last_intermediate_shard_first_local_segment_index,
                            ));
                        }
                    }
                    cmp::Ordering::Greater => {
                        return false;
                    }
                }
            } else {
                last_intermediate_shard_info.replace((
                    shard_index,
                    intermediate_shard_block.header.prefix.number,
                    intermediate_shard_block
                        .own_segments
                        .as_ref()
                        .map(|own_segments| own_segments.first_local_segment_index),
                ));
            }

            // Tracked per intermediate shard block, so it resets for each one
            let mut last_leaf_shard_info = None::<(ShardIndex, LocalSegmentIndex)>;

            // Ensure increasing order of shard indices and local segment indices
            if !intermediate_shard_block.leaf_shards_segments().all(
                |(leaf_shard_index, own_segments)| {
                    if !leaf_shard_index.is_child_of(shard_index) {
                        return false;
                    }

                    if let Some((
                        last_leaf_shard_index,
                        last_leaf_shard_first_local_segment_index,
                    )) = last_leaf_shard_info
                    {
                        match last_leaf_shard_index.cmp(&leaf_shard_index) {
                            cmp::Ordering::Less => {
                                // Expected
                            }
                            cmp::Ordering::Equal => {
                                if last_leaf_shard_first_local_segment_index
                                    >= own_segments.first_local_segment_index
                                {
                                    return false;
                                }
                            }
                            cmp::Ordering::Greater => {
                                return false;
                            }
                        }
                    }
                    last_leaf_shard_info
                        .replace((leaf_shard_index, own_segments.first_local_segment_index));

                    true
                },
            ) {
                return false;
            }

            // No segments at all: the proof must be absent and there is nothing to verify
            if intermediate_shard_block.own_segments.is_none()
                && intermediate_shard_block
                    .leaf_shards_segments()
                    .size_hint()
                    .0
                    == 0
            {
                return intermediate_shard_block.segments_proof.is_none();
            }

            // A missing proof at this point means verification below fails (all-zero placeholder)
            let segments_proof = intermediate_shard_block.segments_proof.unwrap_or(&[0; _]);

            // TODO: This is a workaround for https://github.com/rust-lang/rust/issues/139866 that
            //  allows the code to compile. Constant 65535 is hardcoded here and below for
            //  compilation to succeed.
            #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
            #[expect(clippy::eq_op, reason = "Intentional documentation")]
            const {
                assert!(u16::MAX == 65535);
            }
            let leaf_shards_root = UnbalancedMerkleTree::compute_root_only::<65535, _, _>(
                intermediate_shard_block.leaf_shards_segments().map(
                    |(shard_index, own_segments)| own_segments.root_with_shard_index(shard_index),
                ),
            )
            .unwrap_or_default();

            // The segments subtree (own segments root + leaf shards root) must be provable against
            // the header's body root at leaf index 0 using the provided proof
            BalancedMerkleTree::<2>::verify(
                &intermediate_shard_block.header.result.body_root,
                array::from_ref(segments_proof),
                0,
                BalancedMerkleTree::compute_root_only(&[
                    *intermediate_shard_block
                        .own_segments
                        .as_ref()
                        .map(OwnSegments::root)
                        .unwrap_or_default(),
                    leaf_shards_root,
                ]),
            )
        })
    }
506
507    /// Iterator over intermediate shard blocks in a collection
508    #[inline]
509    pub fn iter(
510        &self,
511    ) -> impl ExactSizeIterator<Item = IntermediateShardBlockInfo<'a>> + TrustedLen + use<'a> {
512        // SAFETY: Checked in constructor
513        let (mut counts, mut remainder) = unsafe {
514            self.bytes
515                .split_at_unchecked(self.num_blocks * (size_of::<u8>() + size_of::<u16>()))
516        };
517
518        (0..self.num_blocks).map(move |_| {
519            let num_own_segment_roots = usize::from(counts[0]);
520            let num_leaf_shard_blocks_with_segments = counts[1];
521            counts = &counts[2..];
522
523            // TODO: Unchecked method would have been helpful here
524            let header;
525            (header, remainder) = IntermediateShardHeader::try_from_bytes(remainder)
526                .expect("Already checked in constructor; qed");
527
528            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
529                .expect("Already checked in constructor; qed");
530
531            let segments_proof =
532                if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
533                    let segments_proof;
534                    // SAFETY: Checked in constructor
535                    (segments_proof, remainder) = unsafe { remainder.split_at_unchecked(OUT_LEN) };
536                    // SAFETY: Valid pointer and size, no alignment requirements
537                    Some(unsafe {
538                        segments_proof
539                            .as_ptr()
540                            .cast::<[u8; OUT_LEN]>()
541                            .as_ref_unchecked()
542                    })
543                } else {
544                    None
545                };
546
547            let own_segments = if num_own_segment_roots > 0 {
548                let first_local_segment_index;
549                // SAFETY: Checked in constructor
550                (first_local_segment_index, remainder) =
551                    unsafe { remainder.split_at_unchecked(size_of::<LocalSegmentIndex>()) };
552                // SAFETY: Correct alignment and size
553                let first_local_segment_index =
554                    *unsafe { LocalSegmentIndex::from_bytes_unchecked(first_local_segment_index) };
555
556                let own_segment_roots;
557                // SAFETY: Checked in constructor
558                (own_segment_roots, remainder) = unsafe {
559                    remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE)
560                };
561                // SAFETY: Valid pointer and size, no alignment requirements
562                let own_segment_roots = unsafe {
563                    slice::from_raw_parts(
564                        own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
565                        num_own_segment_roots,
566                    )
567                };
568                let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
569
570                Some(OwnSegments {
571                    first_local_segment_index,
572                    segment_roots: own_segment_roots,
573                })
574            } else {
575                None
576            };
577
578            // SAFETY: Checked in constructor
579            let leaf_shard_segment_roots_counts = unsafe {
580                remainder.get_unchecked(
581                    ..usize::from(num_leaf_shard_blocks_with_segments) * size_of::<u8>(),
582                )
583            };
584
585            let leaf_shards_segments_bytes;
586            // SAFETY: Checked in constructor
587            (leaf_shards_segments_bytes, remainder) = unsafe {
588                remainder.split_at_unchecked(
589                    usize::from(num_leaf_shard_blocks_with_segments) * size_of::<u8>()
590                        + usize::from(num_leaf_shard_blocks_with_segments)
591                            * (ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize
592                        + leaf_shard_segment_roots_counts.iter().fold(
593                            0_usize,
594                            |acc, &num_own_segment_roots| {
595                                acc + usize::from(num_own_segment_roots) * SegmentRoot::SIZE
596                            },
597                        ),
598                )
599            };
600
601            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
602                .expect("Already checked in constructor; qed");
603
604            IntermediateShardBlockInfo {
605                header,
606                segments_proof,
607                own_segments,
608                num_leaf_shard_blocks_with_segments,
609                leaf_shards_segments_bytes,
610            }
611        })
612    }
613
614    /// Number of intermediate shard blocks
615    #[inline(always)]
616    pub const fn len(&self) -> usize {
617        self.num_blocks
618    }
619
620    /// Returns `true` if there are no intermediate shard blocks
621    #[inline(always)]
622    pub const fn is_empty(&self) -> bool {
623        self.num_blocks == 0
624    }
625
    /// Compute the root of segments of the intermediate shard blocks info.
    ///
    /// Returns the default value for an empty collection of segment roots.
    ///
    /// Each block contributes one leaf to the outer tree: the root of an inner tree over the
    /// block's own segments root (if any) followed by per-leaf-shard roots mixed with their shard
    /// indices.
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        // Outer tree is capped at `u16::MAX` leaves since the number of blocks is encoded as `u16`
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                // TODO: This is a workaround for https://github.com/rust-lang/rust/issues/139866
                //  that allows the code to compile. Constant 4294967295 is hardcoded here and below
                //  for compilation to succeed.
                #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
                #[expect(clippy::eq_op, reason = "Intentional documentation")]
                const {
                    assert!(u32::MAX == 4294967295);
                }
                UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
                    shard_block_info
                        .own_segments
                        .as_ref()
                        .map(OwnSegments::root)
                        .into_iter()
                        .chain(shard_block_info.leaf_shards_segments().map(
                            |(shard_index, own_segments)| {
                                own_segments.root_with_shard_index(shard_index)
                            },
                        )),
                )
                .unwrap_or_default()
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
660
661    /// Compute the root of headers of the intermediate shard blocks info.
662    ///
663    /// Returns the default value for an empty collection of shard blocks.
664    #[inline]
665    pub fn headers_root(&self) -> Blake3Hash {
666        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
667            // TODO: Keyed hash
668            self.iter().map(|shard_block_info| {
669                // Hash the root again so we can prove it, otherwise the root of headers is
670                // indistinguishable from individual block roots and can be used to confuse
671                // verifier
672                single_block_hash(shard_block_info.header.root().as_ref())
673                    .expect("Less than a single block worth of bytes; qed")
674            }),
675        )
676        .unwrap_or_default();
677
678        Blake3Hash::new(root)
679    }
680}
681
/// Block body that corresponds to the beacon chain
#[derive(Debug, Copy, Clone, Yokeable)]
// Prevent creation of potentially broken invariants externally
#[non_exhaustive]
pub struct BeaconChainBody<'a> {
    /// Segments produced by this shard
    own_segments: Option<OwnSegments<'a>>,
    /// Intermediate shard blocks
    ///
    /// Internal consistency of the collection is established when the body is parsed
    intermediate_shard_blocks: IntermediateShardBlocksInfo<'a>,
    /// Proof of time checkpoints from after future proof of time of the parent block to the
    /// current block's future proof of time (inclusive)
    pot_checkpoints: &'a [PotCheckpoints],
}
695
impl<'a> GenericBlockBody<'a> for BeaconChainBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::BeaconChain;

    /// Owned counterpart returned by [`GenericBlockBody::to_owned()`]
    #[cfg(feature = "alloc")]
    type Owned = OwnedBeaconChainBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Not infinite recursion: inherent methods take precedence over trait methods during
        // method resolution, so this resolves to the inherent `BeaconChainBody::to_owned()`
        // (presumably defined elsewhere in this file — confirm)
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Resolves to the inherent `BeaconChainBody::root()` for the same reason as above
        self.root()
    }
}
713
impl<'a> BeaconChainBody<'a> {
    /// Create an instance from provided correctly aligned bytes.
    ///
    /// `bytes` should be 4-bytes aligned.
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of PoT checkpoints: u32 as aligned little-endian bytes
        // * number of own segment roots: u8
        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        // * concatenated own segment roots
        // * intermediate shard blocks: IntermediateShardBlocksInfo
        // * concatenated PoT checkpoints

        let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
        // SAFETY: All bit patterns are valid
        let num_pot_checkpoints =
            *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        // Own segments are only encoded when there is at least one segment root
        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Unaligned and correct size
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (intermediate_shard_blocks, mut remainder) =
            IntermediateShardBlocksInfo::try_from_bytes(bytes)?;

        // PoT checkpoints are stored last, their count was read up-front
        let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
        // SAFETY: Valid pointer and size, no alignment requirements
        let pot_checkpoints = unsafe {
            slice::from_raw_parts(
                pot_checkpoints
                    .as_ptr()
                    .cast::<[u8; PotCheckpoints::SIZE]>(),
                num_pot_checkpoints,
            )
        };
        let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);

        let body = Self {
            own_segments,
            intermediate_shard_blocks,
            pot_checkpoints,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// This is usually not necessary to be called explicitly since internal consistency is checked
    /// by [`Self::try_from_bytes()`] internally.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        // Nothing to check here
        true
    }

    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
    /// checks
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of PoT checkpoints: u32 as aligned little-endian bytes
        // * number of own segment roots: u8
        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        // * concatenated own segment roots
        // * intermediate shard blocks: IntermediateShardBlocksInfo
        // * concatenated PoT checkpoints

        let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
        // SAFETY: All bit patterns are valid
        let num_pot_checkpoints =
            *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        // Own segments are only encoded when there is at least one segment root
        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Unaligned and correct size
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        // Note: `_unchecked` variant of the nested parser is used here, unlike
        // `Self::try_from_bytes()`
        let (intermediate_shard_blocks, mut remainder) =
            IntermediateShardBlocksInfo::try_from_bytes_unchecked(bytes)?;

        let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
        // SAFETY: Valid pointer and size, no alignment requirements
        let pot_checkpoints = unsafe {
            slice::from_raw_parts(
                pot_checkpoints
                    .as_ptr()
                    .cast::<[u8; PotCheckpoints::SIZE]>(),
                num_pot_checkpoints,
            )
        };
        let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);

        Some((
            Self {
                own_segments,
                intermediate_shard_blocks,
                pot_checkpoints,
            },
            remainder,
        ))
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedBeaconChainBody {
        if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            OwnedBeaconChainBody::new(
                // Reconstruct `(local segment index, segment root)` pairs from the first index,
                // segment roots are stored with consecutive indices
                own_segments.segment_roots.iter().copied().enumerate().map(
                    |(index, segment_root)| {
                        (
                            first_local_segment_index + LocalSegmentIndex::from(index as u64),
                            segment_root,
                        )
                    },
                ),
                self.intermediate_shard_blocks.iter(),
                self.pot_checkpoints,
            )
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedBeaconChainBody::new(
                iter::empty(),
                self.intermediate_shard_blocks.iter(),
                self.pot_checkpoints,
            )
            .expect("`self` is always a valid invariant; qed")
        }
    }

    /// Segment roots produced by this shard
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }

    /// Intermediate shard blocks
    #[inline(always)]
    pub fn intermediate_shard_blocks(&self) -> &IntermediateShardBlocksInfo<'a> {
        &self.intermediate_shard_blocks
    }

    /// Proof of time checkpoints from after future proof of time of the parent block to the current
    /// block's future proof of time (inclusive)
    #[inline(always)]
    pub fn pot_checkpoints(&self) -> &'a [PotCheckpoints] {
        self.pot_checkpoints
    }

    /// Compute block body root
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        // TODO: Keyed hash
        // Balanced tree over 4 leaves: own segments root, intermediate shard segments root,
        // intermediate shard headers root and a hash of all PoT checkpoint bytes
        let root = BalancedMerkleTree::compute_root_only(&[
            *self
                .own_segments
                .as_ref()
                .map(OwnSegments::root)
                .unwrap_or_default(),
            *self.intermediate_shard_blocks.segments_root(),
            *self.intermediate_shard_blocks.headers_root(),
            blake3::hash(PotCheckpoints::bytes_from_slice(self.pot_checkpoints).as_flattened())
                .into(),
        ]);

        Blake3Hash::new(root)
    }
}
941
/// Information about leaf shard segments
#[derive(Debug, Copy, Clone)]
pub struct LeafShardOwnSegments<'a> {
    /// Segment roots proof
    ///
    /// A single sibling hash that, together with the root of [`Self::own_segments`], proves
    /// inclusion of the segments under the leaf shard block's body root (see
    /// [`LeafShardBlocksInfo::is_internally_consistent()`])
    pub segment_roots_proof: &'a [u8; OUT_LEN],
    /// Segments produced by this shard
    pub own_segments: OwnSegments<'a>,
}
950
/// Information about the leaf shard block container inside the intermediate shard block body
#[derive(Debug, Clone)]
pub struct LeafShardBlockInfo<'a> {
    /// A block header that corresponds to a leaf shard
    pub header: LeafShardHeader<'a>,
    /// Segments in the corresponding block
    ///
    /// `None` when the block produced no segment roots
    pub segments: Option<LeafShardOwnSegments<'a>>,
}
959
/// Information about a collection of leaf shard blocks
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlocksInfo<'a> {
    // Number of leaf shard blocks encoded in `bytes`
    num_blocks: usize,
    // Encoded collection (per-block counts, padding, headers and segment information), the
    // structure is validated by the constructor
    bytes: &'a [u8],
}
966
967impl<'a> LeafShardBlocksInfo<'a> {
968    /// Create an instance from provided bytes.
969    ///
970    /// `bytes` do not need to be aligned.
971    ///
972    /// Returns an instance and remaining bytes on success.
973    #[inline]
974    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
975        // The layout here is as follows:
976        // * number of blocks: u16 as unaligned little-endian bytes
977        // * for each block:
978        //   * number of own segment roots: u8
979        // * padding to 8-bytes boundary with zeroes
980        // * for each block:
981        //   * block header: LeafShardHeader
982        //   * padding to 8-bytes boundary with zeroes
983        //   * local segment index of the first segment root (if any): `LocalSegmentIndex`
984        //   * segment roots proof (if there is at least one segment root)
985        //   * concatenated own segment roots
986
987        let num_blocks = bytes.split_off(..size_of::<u16>())?;
988        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));
989
990        let bytes_start = bytes;
991
992        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;
993
994        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;
995
996        for _ in 0..num_blocks {
997            let num_own_segment_roots = usize::from(counts[0]);
998            counts = &counts[1..];
999
1000            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;
1001
1002            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
1003
1004            if num_own_segment_roots > 0 {
1005                let _first_local_segment_index =
1006                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
1007                let _segment_roots_proof = remainder.split_off(..OUT_LEN)?;
1008                let _own_segment_roots =
1009                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
1010            }
1011        }
1012
1013        let info = Self {
1014            num_blocks,
1015            bytes: &bytes_start[..bytes_start.len() - remainder.len()],
1016        };
1017
1018        if !info.is_internally_consistent() {
1019            return None;
1020        }
1021
1022        Some((info, remainder))
1023    }
1024
1025    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
1026    /// checks
1027    #[inline]
1028    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1029        // The layout here is as follows:
1030        // * number of blocks: u16 as unaligned little-endian bytes
1031        // * for each block:
1032        //   * number of own segment roots: u8
1033        // * padding to 8-bytes boundary with zeroes
1034        // * for each block:
1035        //   * block header: LeafShardHeader
1036        //   * padding to 8-bytes boundary with zeroes
1037        //   * local segment index of the first segment root (if any): `LocalSegmentIndex`
1038        //   * segment roots proof (if there is at least one segment root)
1039        //   * concatenated own segment roots
1040
1041        let num_blocks = bytes.split_off(..size_of::<u16>())?;
1042        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));
1043
1044        let bytes_start = bytes;
1045
1046        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;
1047
1048        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;
1049
1050        for _ in 0..num_blocks {
1051            let num_own_segment_roots = usize::from(counts[0]);
1052            counts = &counts[1..];
1053
1054            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;
1055
1056            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
1057
1058            if num_own_segment_roots > 0 {
1059                let _first_local_segment_index =
1060                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
1061                let _segment_roots_proof = remainder.split_off(..OUT_LEN)?;
1062                let _own_segment_roots =
1063                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
1064            }
1065        }
1066
1067        Some((
1068            Self {
1069                num_blocks,
1070                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
1071            },
1072            remainder,
1073        ))
1074    }
1075
1076    /// Check leaf shard info's internal consistency.
1077    ///
1078    /// This is usually not necessary to be called explicitly since internal consistency is checked
1079    /// by [`Self::try_from_bytes()`] internally.
1080    #[inline]
1081    pub fn is_internally_consistent(&self) -> bool {
1082        let mut last_leaf_shard_info = None::<(ShardIndex, BlockNumber, Option<LocalSegmentIndex>)>;
1083
1084        self.iter().all(|leaf_shard_block| {
1085            let shard_index = leaf_shard_block.header.prefix.shard_index;
1086
1087            // Ensure increasing order of shard indices, block numbers and local segment indices
1088            if let Some((
1089                last_leaf_shard_index,
1090                last_leaf_shard_block_number,
1091                last_leaf_shard_first_local_segment_index,
1092            )) = last_leaf_shard_info
1093            {
1094                match last_leaf_shard_index.cmp(&shard_index) {
1095                    cmp::Ordering::Less => {
1096                        last_leaf_shard_info.replace((
1097                            shard_index,
1098                            leaf_shard_block.header.prefix.number,
1099                            leaf_shard_block
1100                                .segments
1101                                .as_ref()
1102                                .map(|segments| segments.own_segments.first_local_segment_index),
1103                        ));
1104                    }
1105                    cmp::Ordering::Equal => {
1106                        if last_leaf_shard_block_number >= leaf_shard_block.header.prefix.number {
1107                            return false;
1108                        }
1109                        if let Some(leaf_shard_segments) = &leaf_shard_block.segments {
1110                            if let Some(last_leaf_shard_first_local_segment_index) =
1111                                last_leaf_shard_first_local_segment_index
1112                                && last_leaf_shard_first_local_segment_index
1113                                    >= leaf_shard_segments.own_segments.first_local_segment_index
1114                            {
1115                                return false;
1116                            }
1117
1118                            last_leaf_shard_info.replace((
1119                                shard_index,
1120                                leaf_shard_block.header.prefix.number,
1121                                Some(leaf_shard_segments.own_segments.first_local_segment_index),
1122                            ));
1123                        } else {
1124                            last_leaf_shard_info.replace((
1125                                shard_index,
1126                                leaf_shard_block.header.prefix.number,
1127                                last_leaf_shard_first_local_segment_index,
1128                            ));
1129                        }
1130                    }
1131                    cmp::Ordering::Greater => {
1132                        return false;
1133                    }
1134                }
1135            } else {
1136                last_leaf_shard_info.replace((
1137                    shard_index,
1138                    leaf_shard_block.header.prefix.number,
1139                    leaf_shard_block
1140                        .segments
1141                        .as_ref()
1142                        .map(|segments| segments.own_segments.first_local_segment_index),
1143                ));
1144            }
1145
1146            let Some(segments) = leaf_shard_block.segments else {
1147                return true;
1148            };
1149
1150            BalancedMerkleTree::<2>::verify(
1151                &leaf_shard_block.header.result.body_root,
1152                array::from_ref(segments.segment_roots_proof),
1153                0,
1154                *segments.own_segments.root(),
1155            )
1156        })
1157    }
1158
1159    /// Iterator over leaf shard blocks in a collection
1160    #[inline]
1161    pub fn iter(
1162        &self,
1163    ) -> impl ExactSizeIterator<Item = LeafShardBlockInfo<'a>> + TrustedLen + use<'a> {
1164        // SAFETY: Checked in constructor
1165        let (mut counts, mut remainder) = unsafe {
1166            self.bytes
1167                .split_at_unchecked(self.num_blocks * size_of::<u8>())
1168        };
1169
1170        (0..self.num_blocks).map(move |_| {
1171            let num_own_segment_roots = usize::from(counts[0]);
1172            counts = &counts[1..];
1173
1174            // TODO: Unchecked method would have been helpful here
1175            let header;
1176            (header, remainder) = LeafShardHeader::try_from_bytes(remainder)
1177                .expect("Already checked in constructor; qed");
1178
1179            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
1180                .expect("Already checked in constructor; qed");
1181
1182            let segments = if num_own_segment_roots > 0 {
1183                let first_local_segment_index;
1184                // SAFETY: Checked in constructor
1185                (first_local_segment_index, remainder) =
1186                    unsafe { remainder.split_at_unchecked(LocalSegmentIndex::SIZE as usize) };
1187                // SAFETY: Correct alignment and size
1188                let first_local_segment_index =
1189                    *unsafe { LocalSegmentIndex::from_bytes_unchecked(first_local_segment_index) };
1190
1191                let segment_roots_proof;
1192                // SAFETY: Checked in constructor
1193                (segment_roots_proof, remainder) = unsafe { remainder.split_at_unchecked(OUT_LEN) };
1194                // SAFETY: Valid pointer and size, no alignment requirements
1195                let segment_roots_proof = unsafe {
1196                    segment_roots_proof
1197                        .as_ptr()
1198                        .cast::<[u8; OUT_LEN]>()
1199                        .as_ref_unchecked()
1200                };
1201
1202                let own_segment_roots;
1203                // SAFETY: Checked in constructor
1204                (own_segment_roots, remainder) = unsafe {
1205                    remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE)
1206                };
1207                // SAFETY: Valid pointer and size, no alignment requirements
1208                let own_segment_roots = unsafe {
1209                    slice::from_raw_parts(
1210                        own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
1211                        num_own_segment_roots,
1212                    )
1213                };
1214                let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
1215
1216                Some(LeafShardOwnSegments {
1217                    segment_roots_proof,
1218                    own_segments: OwnSegments {
1219                        first_local_segment_index,
1220                        segment_roots: own_segment_roots,
1221                    },
1222                })
1223            } else {
1224                None
1225            };
1226
1227            LeafShardBlockInfo { header, segments }
1228        })
1229    }
1230
1231    /// Number of leaf shard blocks
1232    #[inline(always)]
1233    pub const fn len(&self) -> usize {
1234        self.num_blocks
1235    }
1236
1237    /// Returns `true` if there are no leaf shard blocks
1238    #[inline(always)]
1239    pub const fn is_empty(&self) -> bool {
1240        self.num_blocks == 0
1241    }
1242
1243    /// Compute the root of segments of the leaf shard blocks info.
1244    ///
1245    /// Returns the default value for an empty collection of segment roots.
1246    #[inline]
1247    pub fn segments_root(&self) -> Blake3Hash {
1248        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
1249            self.iter().map(|shard_block_info| {
1250                shard_block_info
1251                    .segments
1252                    .map(|own_segments| {
1253                        own_segments
1254                            .own_segments
1255                            .root_with_shard_index(shard_block_info.header.prefix.shard_index)
1256                    })
1257                    .unwrap_or_default()
1258            }),
1259        )
1260        .unwrap_or_default();
1261
1262        Blake3Hash::new(root)
1263    }
1264
1265    /// Compute the root of headers of the leaf shard blocks info.
1266    ///
1267    /// Returns the default value for an empty collection of shard blocks.
1268    #[inline]
1269    pub fn headers_root(&self) -> Blake3Hash {
1270        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
1271            self.iter().map(|shard_block_info| {
1272                // Hash the root again so we can prove it, otherwise root of headers is
1273                // indistinguishable from individual block roots and can be used to confuse
1274                // verifier
1275                single_block_hash(shard_block_info.header.root().as_ref())
1276                    .expect("Less than a single block worth of bytes; qed")
1277            }),
1278        )
1279        .unwrap_or_default();
1280
1281        Blake3Hash::new(root)
1282    }
1283}
1284
/// Block body that corresponds to an intermediate shard
///
/// This is a borrowed, zero-copy view over encoded block body bytes; construct it with
/// [`Self::try_from_bytes()`] (or [`Self::try_from_bytes_unchecked()`] for trusted input).
#[derive(Debug, Copy, Clone, Yokeable)]
// Prevent creation of potentially broken invariants externally
#[non_exhaustive]
pub struct IntermediateShardBody<'a> {
    /// Segments produced by this shard
    ///
    /// `None` when the block produced no segment roots
    own_segments: Option<OwnSegments<'a>>,
    /// Leaf shard blocks
    leaf_shard_blocks: LeafShardBlocksInfo<'a>,
}
1295
/// [`GenericBlockBody`] implementation that delegates to the inherent methods of
/// [`IntermediateShardBody`]
impl<'a> GenericBlockBody<'a> for IntermediateShardBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::IntermediateShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedIntermediateShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Resolves to the inherent `IntermediateShardBody::to_owned()` (inherent methods take
        // precedence over trait methods), so this is not infinite recursion
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Resolves to the inherent `IntermediateShardBody::root()`
        self.root()
    }
}
1313
impl<'a> IntermediateShardBody<'a> {
    /// Create an instance from provided bytes.
    ///
    /// `bytes` do not need to be aligned.
    ///
    /// Returns an instance and remaining bytes on success.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of own segment roots: u8
        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        // * concatenated own segment roots
        // * leaf shard blocks: LeafShardBlocksInfo

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        // Own segments are only encoded when there is at least one segment root
        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Unaligned and correct size
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        let body = Self {
            own_segments,
            leaf_shard_blocks,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// This is usually not necessary to be called explicitly since internal consistency is checked
    /// by [`Self::try_from_bytes()`] internally.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        // Nothing to check here
        true
    }

    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
    /// checks
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        // The layout here is as follows:
        // * number of own segment roots: u8
        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
        // * concatenated own segment roots
        // * leaf shard blocks: LeafShardBlocksInfo

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        // Own segments are only encoded when there is at least one segment root
        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Unaligned and correct size
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size, no alignment requirements
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        // Note: `_unchecked` variant of the nested parser is used here, unlike
        // `Self::try_from_bytes()`
        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes_unchecked(bytes)?;

        Some((
            Self {
                own_segments,
                leaf_shard_blocks,
            },
            remainder,
        ))
    }

    /// Segment roots produced by this shard
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }

    /// Leaf shard blocks
    #[inline(always)]
    pub fn leaf_shard_blocks(&self) -> &LeafShardBlocksInfo<'a> {
        &self.leaf_shard_blocks
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedIntermediateShardBody {
        if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            OwnedIntermediateShardBody::new(
                // Reconstruct `(local segment index, segment root)` pairs from the first index,
                // segment roots are stored with consecutive indices
                own_segments.segment_roots.iter().copied().enumerate().map(
                    |(index, segment_root)| {
                        (
                            first_local_segment_index + LocalSegmentIndex::from(index as u64),
                            segment_root,
                        )
                    },
                ),
                self.leaf_shard_blocks.iter(),
            )
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedIntermediateShardBody::new(iter::empty(), self.leaf_shard_blocks.iter())
                .expect("`self` is always a valid invariant; qed")
        }
    }

    /// Compute block body root
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        // Explicit nested trees to emphasize that the proof size for segments is just one hash
        let root = BalancedMerkleTree::compute_root_only(&[
            BalancedMerkleTree::compute_root_only(&[
                *self
                    .own_segments
                    .as_ref()
                    .map(OwnSegments::root)
                    .unwrap_or_default(),
                *self.leaf_shard_blocks.segments_root(),
            ]),
            *self.leaf_shard_blocks.headers_root(),
        ]);

        Blake3Hash::new(root)
    }
}
1487
/// Collection of transactions
#[derive(Debug, Copy, Clone)]
pub struct Transactions<'a> {
    // Number of transactions encoded in `bytes`
    num_transactions: usize,
    // Encoded transactions (each followed by zero padding to a 16-bytes boundary), the structure
    // is validated by the constructor
    bytes: &'a [u8],
}
1494
1495impl<'a> Transactions<'a> {
1496    /// Create an instance from provided bytes.
1497    ///
1498    /// `bytes` do not need to be aligned.
1499    ///
1500    /// Returns an instance and remaining bytes on success.
1501    #[inline]
1502    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1503        // The layout here is as follows:
1504        // * number of transactions: u32 as unaligned little-endian bytes
1505        // * padding to 16-bytes boundary with zeroes
1506        // * for each transaction
1507        //   * transaction: Transaction
1508        //   * padding to 16-bytes boundary with zeroes
1509
1510        let num_transactions = bytes.split_off(..size_of::<u32>())?;
1511        let num_transactions = u32::from_le_bytes([
1512            num_transactions[0],
1513            num_transactions[1],
1514            num_transactions[2],
1515            num_transactions[3],
1516        ]) as usize;
1517
1518        let mut remainder = align_to_and_ensure_zero_padding::<u128>(bytes)?;
1519        let bytes_start = remainder;
1520
1521        for _ in 0..num_transactions {
1522            (_, remainder) = Transaction::try_from_bytes(bytes)?;
1523            remainder = align_to_and_ensure_zero_padding::<u128>(remainder)?;
1524        }
1525
1526        Some((
1527            Self {
1528                num_transactions,
1529                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
1530            },
1531            remainder,
1532        ))
1533    }
1534
1535    /// Iterator over transactions in a collection
1536    #[inline]
1537    pub fn iter(&self) -> impl ExactSizeIterator<Item = Transaction<'a>> + TrustedLen + use<'a> {
1538        let mut remainder = self.bytes;
1539
1540        (0..self.num_transactions).map(move |_| {
1541            // SAFETY: Checked in constructor
1542            let transaction = unsafe { Transaction::from_bytes_unchecked(remainder) };
1543
1544            remainder = &remainder[transaction.encoded_size()..];
1545            remainder = align_to_and_ensure_zero_padding::<u128>(remainder)
1546                .expect("Already checked in constructor; qed");
1547
1548            transaction
1549        })
1550    }
1551
1552    /// Number of transactions
1553    #[inline(always)]
1554    pub const fn len(&self) -> usize {
1555        self.num_transactions
1556    }
1557
1558    /// Returns `true` if there are no transactions
1559    #[inline(always)]
1560    pub const fn is_empty(&self) -> bool {
1561        self.num_transactions == 0
1562    }
1563
1564    /// Compute the root of transactions.
1565    ///
1566    /// Returns the default value for an empty collection of transactions.
1567    #[inline]
1568    pub fn root(&self) -> Blake3Hash {
1569        let root = UnbalancedMerkleTree::compute_root_only::<{ u32::MAX as u64 }, _, _>(
1570            self.iter().map(|transaction| {
1571                // Hash the hash again so we can prove it, otherwise root of transactions is
1572                // indistinguishable from individual transaction roots and can be used to
1573                // confuse verifier
1574                single_block_hash(transaction.hash().as_ref())
1575                    .expect("Less than a single block worth of bytes; qed")
1576            }),
1577        )
1578        .unwrap_or_default();
1579
1580        Blake3Hash::new(root)
1581    }
1582}
1583
/// Block body that corresponds to a leaf shard
#[derive(Debug, Copy, Clone, Yokeable)]
// Prevent creation of potentially broken invariants externally
#[non_exhaustive]
pub struct LeafShardBody<'a> {
    /// Segments produced by this shard
    ///
    /// `None` when the encoded body contains zero own segment roots
    own_segments: Option<OwnSegments<'a>>,
    /// User transactions
    transactions: Transactions<'a>,
}
1594
impl<'a> GenericBlockBody<'a> for LeafShardBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::LeafShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedLeafShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        // Resolves to the inherent `LeafShardBody::to_owned()` (inherent methods take
        // precedence over trait methods during method resolution), not to recursion
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        // Resolves to the inherent `LeafShardBody::root()`, not to recursion
        self.root()
    }
}
1612
1613impl<'a> LeafShardBody<'a> {
1614    /// Create an instance from provided bytes.
1615    ///
1616    /// `bytes` do not need to be aligned.
1617    ///
1618    /// Returns an instance and remaining bytes on success.
1619    #[inline]
1620    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1621        // The layout here is as follows:
1622        // * number of own segment roots: u8
1623        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
1624        // * concatenated own segment roots
1625        // * transactions: Transactions
1626
1627        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
1628        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
1629
1630        let own_segments = if num_own_segment_roots > 0 {
1631            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
1632            // SAFETY: Unaligned and correct size
1633            let first_local_segment_index = unsafe {
1634                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
1635            }
1636            .as_inner();
1637
1638            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
1639            // SAFETY: Valid pointer and size, no alignment requirements
1640            let own_segment_roots = unsafe {
1641                slice::from_raw_parts(
1642                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
1643                    num_own_segment_roots,
1644                )
1645            };
1646            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
1647
1648            Some(OwnSegments {
1649                first_local_segment_index,
1650                segment_roots: own_segment_roots,
1651            })
1652        } else {
1653            None
1654        };
1655
1656        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;
1657
1658        let body = Self {
1659            own_segments,
1660            transactions,
1661        };
1662
1663        if !body.is_internally_consistent() {
1664            return None;
1665        }
1666
1667        Some((body, remainder))
1668    }
1669
1670    /// Check block body's internal consistency.
1671    ///
1672    /// This is usually not necessary to be called explicitly since internal consistency is checked
1673    /// by [`Self::try_from_bytes()`] internally.
1674    #[inline]
1675    pub fn is_internally_consistent(&self) -> bool {
1676        // Nothing to check here
1677        true
1678    }
1679
1680    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
1681    /// checks
1682    #[inline]
1683    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
1684        // The layout here is as follows:
1685        // * number of own segment roots: u8
1686        // * local segment index of the first segment root (if any): unaligned `LocalSegmentIndex`
1687        // * concatenated own segment roots
1688        // * transactions: Transactions
1689
1690        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
1691        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);
1692
1693        let own_segments = if num_own_segment_roots > 0 {
1694            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
1695            // SAFETY: Unaligned and correct size
1696            let first_local_segment_index = unsafe {
1697                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
1698            }
1699            .as_inner();
1700
1701            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
1702            // SAFETY: Valid pointer and size, no alignment requirements
1703            let own_segment_roots = unsafe {
1704                slice::from_raw_parts(
1705                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
1706                    num_own_segment_roots,
1707                )
1708            };
1709            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);
1710
1711            Some(OwnSegments {
1712                first_local_segment_index,
1713                segment_roots: own_segment_roots,
1714            })
1715        } else {
1716            None
1717        };
1718
1719        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;
1720
1721        Some((
1722            Self {
1723                own_segments,
1724                transactions,
1725            },
1726            remainder,
1727        ))
1728    }
1729
1730    /// Segment roots produced by this shard
1731    #[inline(always)]
1732    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
1733        self.own_segments
1734    }
1735
1736    /// User transactions
1737    #[inline(always)]
1738    pub fn transactions(&self) -> &Transactions<'a> {
1739        &self.transactions
1740    }
1741
1742    /// Create an owned version of this body
1743    #[cfg(feature = "alloc")]
1744    #[inline(always)]
1745    pub fn to_owned(self) -> OwnedLeafShardBody {
1746        let mut builder = if let Some(own_segments) = self.own_segments {
1747            let first_local_segment_index = own_segments.first_local_segment_index;
1748
1749            OwnedLeafShardBody::init(own_segments.segment_roots.iter().copied().enumerate().map(
1750                |(index, segment_root)| {
1751                    (
1752                        first_local_segment_index + LocalSegmentIndex::from(index as u64),
1753                        segment_root,
1754                    )
1755                },
1756            ))
1757            .expect("`self` is always a valid invariant; qed")
1758        } else {
1759            OwnedLeafShardBody::init(iter::empty())
1760                .expect("`self` is always a valid invariant; qed")
1761        };
1762        for transaction in self.transactions.iter() {
1763            builder
1764                .add_transaction(transaction)
1765                .expect("`self` is always a valid invariant; qed");
1766        }
1767
1768        builder.finish()
1769    }
1770
1771    /// Compute block body root
1772    #[inline]
1773    pub fn root(&self) -> Blake3Hash {
1774        let root = BalancedMerkleTree::compute_root_only(&[
1775            *self
1776                .own_segments
1777                .as_ref()
1778                .map(OwnSegments::root)
1779                .unwrap_or_default(),
1780            *self.transactions.root(),
1781        ]);
1782
1783        Blake3Hash::new(root)
1784    }
1785}
1786
/// Block body that together with [`BlockHeader`] form a [`Block`]
///
/// The concrete variant corresponds to the [`RealShardKind`] of the shard that produced the
/// block (see [`Self::try_from_bytes()`]).
///
/// [`BlockHeader`]: crate::block::header::BlockHeader
/// [`Block`]: crate::block::Block
#[derive(Debug, Copy, Clone, From)]
pub enum BlockBody<'a> {
    /// Block body corresponds to the beacon chain
    BeaconChain(BeaconChainBody<'a>),
    /// Block body corresponds to an intermediate shard
    IntermediateShard(IntermediateShardBody<'a>),
    /// Block body corresponds to a leaf shard
    LeafShard(LeafShardBody<'a>),
}
1800
1801impl<'a> BlockBody<'a> {
1802    /// Try to create a new instance from provided bytes for the provided shard index.
1803    ///
1804    /// `bytes` do not need to be aligned.
1805    ///
1806    /// Returns an instance and remaining bytes on success, `None` if too few bytes were given,
1807    /// bytes are not properly aligned or input is otherwise invalid.
1808    #[inline]
1809    pub fn try_from_bytes(bytes: &'a [u8], shard_kind: RealShardKind) -> Option<(Self, &'a [u8])> {
1810        match shard_kind {
1811            RealShardKind::BeaconChain => {
1812                let (body, remainder) = BeaconChainBody::try_from_bytes(bytes)?;
1813                Some((Self::BeaconChain(body), remainder))
1814            }
1815            RealShardKind::IntermediateShard => {
1816                let (body, remainder) = IntermediateShardBody::try_from_bytes(bytes)?;
1817                Some((Self::IntermediateShard(body), remainder))
1818            }
1819            RealShardKind::LeafShard => {
1820                let (body, remainder) = LeafShardBody::try_from_bytes(bytes)?;
1821                Some((Self::LeafShard(body), remainder))
1822            }
1823        }
1824    }
1825
1826    /// Check block body's internal consistency.
1827    ///
1828    /// This is usually not necessary to be called explicitly since internal consistency is checked
1829    /// by [`Self::try_from_bytes()`] internally.
1830    #[inline]
1831    pub fn is_internally_consistent(&self) -> bool {
1832        match self {
1833            Self::BeaconChain(body) => body.is_internally_consistent(),
1834            Self::IntermediateShard(body) => body.is_internally_consistent(),
1835            Self::LeafShard(body) => body.is_internally_consistent(),
1836        }
1837    }
1838
1839    /// The same as [`Self::try_from_bytes()`], but for trusted input that skips some consistency
1840    /// checks
1841    #[inline]
1842    pub fn try_from_bytes_unchecked(
1843        bytes: &'a [u8],
1844        shard_kind: RealShardKind,
1845    ) -> Option<(Self, &'a [u8])> {
1846        match shard_kind {
1847            RealShardKind::BeaconChain => {
1848                let (body, remainder) = BeaconChainBody::try_from_bytes_unchecked(bytes)?;
1849                Some((Self::BeaconChain(body), remainder))
1850            }
1851            RealShardKind::IntermediateShard => {
1852                let (body, remainder) = IntermediateShardBody::try_from_bytes_unchecked(bytes)?;
1853                Some((Self::IntermediateShard(body), remainder))
1854            }
1855            RealShardKind::LeafShard => {
1856                let (body, remainder) = LeafShardBody::try_from_bytes_unchecked(bytes)?;
1857                Some((Self::LeafShard(body), remainder))
1858            }
1859        }
1860    }
1861
1862    /// Create an owned version of this body
1863    #[cfg(feature = "alloc")]
1864    #[inline(always)]
1865    pub fn to_owned(self) -> OwnedBlockBody {
1866        match self {
1867            Self::BeaconChain(body) => body.to_owned().into(),
1868            Self::IntermediateShard(body) => body.to_owned().into(),
1869            Self::LeafShard(body) => body.to_owned().into(),
1870        }
1871    }
1872
1873    /// Compute block body root.
1874    ///
1875    /// Block body hash is actually a Merkle Tree Root. The leaves are derived from individual
1876    /// fields this enum in the declaration order.
1877    #[inline]
1878    pub fn root(&self) -> Blake3Hash {
1879        match self {
1880            Self::BeaconChain(body) => body.root(),
1881            Self::IntermediateShard(body) => body.root(),
1882            Self::LeafShard(body) => body.root(),
1883        }
1884    }
1885}