#[cfg(feature = "alloc")]
pub mod owned;

#[cfg(feature = "alloc")]
use crate::block::body::owned::{
    GenericOwnedBlockBody, OwnedBeaconChainBody, OwnedBlockBody, OwnedIntermediateShardBody,
    OwnedLeafShardBody,
};
use crate::block::header::{IntermediateShardHeader, LeafShardHeader};
use crate::block::{BlockNumber, align_to_and_ensure_zero_padding};
use crate::hashes::Blake3Hash;
use crate::pot::PotCheckpoints;
use crate::segments::{LocalSegmentIndex, SegmentRoot};
use crate::shard::{RealShardKind, ShardIndex};
use crate::transaction::Transaction;
use ab_blake3::{BLOCK_LEN, OUT_LEN, single_block_hash};
use ab_io_type::trivial_type::TrivialType;
use ab_io_type::unaligned::Unaligned;
use ab_merkle_tree::balanced::BalancedMerkleTree;
use ab_merkle_tree::unbalanced::UnbalancedMerkleTree;
#[cfg(feature = "alloc")]
use core::iter;
use core::iter::TrustedLen;
use core::{array, cmp, fmt, slice};
use derive_more::From;
use yoke::Yokeable;

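/// Generic block body
///
/// An abstraction over the block bodies of the different shard kinds, with access to the owned
/// version and the body root.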
pub trait GenericBlockBody<'a>
where
    Self: Copy + fmt::Debug + Into<BlockBody<'a>> + Send + Sync,
{
    /// Shard kind this block body corresponds to
    const SHARD_KIND: RealShardKind;

    /// Owned version of this block body
    #[cfg(feature = "alloc")]
    type Owned: GenericOwnedBlockBody<Body<'a> = Self>
    where
        Self: 'a;

    /// Turn this block body into its owned version
    #[cfg(feature = "alloc")]
    fn to_owned(self) -> Self::Owned;

    /// Compute the root of this block body
    fn root(&self) -> Blake3Hash;
}

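/// Compute the root of a collection of segment roots.
///
/// Each segment root is hashed with BLAKE3 first, then an unbalanced Merkle tree is built over
/// the hashes; an empty collection yields the default (all-zero) root.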
#[inline]
pub fn compute_segments_root<'a, Iter>(segment_roots: Iter) -> Blake3Hash
where
    Iter: IntoIterator<Item = &'a SegmentRoot>,
{
    #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
    #[expect(clippy::eq_op, reason = "Intentional documentation")]
    const {
        assert!(u32::MAX == 4294967295);
    }
    let root = UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
        // Hash each segment root so that leaves are uniform single-block hashes
        segment_roots.into_iter().map(|segment_root| {
            single_block_hash(segment_root.as_ref())
                .expect("Less than a single block worth of bytes; qed")
        }),
    );

    Blake3Hash::new(root.unwrap_or_default())
}

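/// Segment roots owned by a shard, starting at the first local segment index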
#[derive(Debug, Copy, Clone)]
pub struct OwnSegments<'a> {
    /// Local segment index of the first own segment root
    pub first_local_segment_index: LocalSegmentIndex,
    /// Own segment roots
    pub segment_roots: &'a [SegmentRoot],
}

impl OwnSegments<'_> {
    /// Compute the root of own segments
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            single_block_hash(self.first_local_segment_index.as_bytes())
                .expect("Less than a single block worth of bytes; qed"),
            *compute_segments_root(self.segment_roots),
        ]);

        Blake3Hash::new(root)
    }

    /// Compute the root of own segments, mixing the shard index in alongside the first local
    /// segment index
    #[inline]
    pub fn root_with_shard_index(&self, shard_index: ShardIndex) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            {
                const {
                    assert!((ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize <= BLOCK_LEN);
                }
                let mut pair = [0u8; (ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize];
                pair[..ShardIndex::SIZE as usize].copy_from_slice(shard_index.as_bytes());
                pair[ShardIndex::SIZE as usize..]
                    .copy_from_slice(self.first_local_segment_index.as_bytes());

                single_block_hash(&pair).expect("Less than a single block worth of bytes; qed")
            },
            *compute_segments_root(self.segment_roots),
        ]);

        Blake3Hash::new(root)
    }
}

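/// Information about one intermediate shard block, as stored in the beacon chain block body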
#[derive(Debug, Clone)]
pub struct IntermediateShardBlockInfo<'a> {
    /// Intermediate shard block header
    pub header: IntermediateShardHeader<'a>,
    /// Proof for segment roots, present if the block has own or leaf shard segment roots
    pub segments_proof: Option<&'a [u8; OUT_LEN]>,
    /// Own segments of the intermediate shard, if any
    pub own_segments: Option<OwnSegments<'a>>,
    num_leaf_shard_blocks_with_segments: u8,
    leaf_shards_segments_bytes: &'a [u8],
}

impl<'a> IntermediateShardBlockInfo<'a> {
    /// Iterator over leaf shard segments of this intermediate shard block
    #[inline]
    pub fn leaf_shards_segments(
        &self,
    ) -> impl ExactSizeIterator<Item = (ShardIndex, OwnSegments<'a>)> + TrustedLen + 'a {
        // SAFETY: Sizes of these data structures were checked in the constructor
        let (counts, mut remainder) = unsafe {
            self.leaf_shards_segments_bytes.split_at_unchecked(
                usize::from(self.num_leaf_shard_blocks_with_segments) * size_of::<u8>(),
            )
        };
        counts.iter().map(move |&num_own_segment_roots| {
            let num_own_segment_roots = usize::from(num_own_segment_roots);

            let shard_index;
            // SAFETY: Sizes of these data structures were checked in the constructor
            (shard_index, remainder) =
                unsafe { remainder.split_at_unchecked(ShardIndex::SIZE as usize) };
            // SAFETY: Correct size, alignment is not required by `Unaligned`
            let shard_index =
                unsafe { Unaligned::<ShardIndex>::from_bytes_unchecked(shard_index) }.as_inner();

            let first_local_segment_index;
            // SAFETY: Sizes of these data structures were checked in the constructor
            (first_local_segment_index, remainder) =
                unsafe { remainder.split_at_unchecked(LocalSegmentIndex::SIZE as usize) };
            // SAFETY: Correct size, alignment is not required by `Unaligned`
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots;
            // SAFETY: Sizes of these data structures were checked in the constructor
            (own_segment_roots, remainder) =
                unsafe { remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE) };
            // SAFETY: Valid pointer and size
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            (
                shard_index,
                OwnSegments {
                    first_local_segment_index,
                    segment_roots: own_segment_roots,
                },
            )
        })
    }
}

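/// Information about intermediate shard blocks, as stored in the beacon chain block body.
///
/// Encoded layout, reconstructed from the parsing code below (informal sketch): a little-endian
/// `u16` block count, a pair of `u8` counts per block (own segment roots and leaf shard blocks
/// with segments), padding to `u64` alignment, then for each block: header, padding, optional
/// segments proof, optional own segments, leaf shard segments, and padding to `u64` alignment.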
#[derive(Debug, Copy, Clone)]
pub struct IntermediateShardBlocksInfo<'a> {
    num_blocks: usize,
    bytes: &'a [u8],
}

impl<'a> IntermediateShardBlocksInfo<'a> {
    /// Create an instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the bytes are malformed
    /// or internally inconsistent.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() * 2))?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            let num_leaf_shard_blocks_with_segments = usize::from(counts[1]);
            counts = &counts[2..];

            (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
                let _segments_proof = remainder.split_off(..OUT_LEN)?;
            }

            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }

            let leaf_shard_segment_roots_counts =
                remainder.split_off(..num_leaf_shard_blocks_with_segments * size_of::<u8>())?;

            for &num_own_segment_roots in leaf_shard_segment_roots_counts {
                let _shard_index = remainder.split_off(..ShardIndex::SIZE as usize)?;
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots = remainder
                    .split_off(..usize::from(num_own_segment_roots) * SegmentRoot::SIZE)?;
            }

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
        }

        let info = Self {
            num_blocks,
            bytes: &bytes_start[..bytes_start.len() - remainder.len()],
        };

        if !info.is_internally_consistent() {
            return None;
        }

        Some((info, remainder))
    }

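    /// Create an instance from provided bytes without checking internal consistency.
    ///
    /// Same as [`Self::try_from_bytes()`], except [`Self::is_internally_consistent()`] is not
    /// called.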
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        let mut counts = bytes.split_off(..num_blocks * (size_of::<u8>() * 2))?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            let num_leaf_shard_blocks_with_segments = usize::from(counts[1]);
            counts = &counts[2..];

            (_, remainder) = IntermediateShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
                let _segments_proof = remainder.split_off(..OUT_LEN)?;
            }

            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }

            let leaf_shard_segment_roots_counts =
                remainder.split_off(..num_leaf_shard_blocks_with_segments * size_of::<u8>())?;

            for &num_own_segment_roots in leaf_shard_segment_roots_counts {
                let _shard_index = remainder.split_off(..ShardIndex::SIZE as usize)?;
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _own_segment_roots = remainder
                    .split_off(..usize::from(num_own_segment_roots) * SegmentRoot::SIZE)?;
            }

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;
        }

        Some((
            Self {
                num_blocks,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }

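    /// Check internal consistency of the stored blocks.
    ///
    /// Verifies that blocks are sorted by shard index, that block numbers and first local
    /// segment indices strictly increase within a shard, that leaf shard segments belong to
    /// child shards, and that segment information is proven against the body root of each
    /// header.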
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        let mut last_intermediate_shard_info =
            None::<(ShardIndex, BlockNumber, Option<LocalSegmentIndex>)>;

        self.iter().all(|intermediate_shard_block| {
            let shard_index = intermediate_shard_block.header.prefix.shard_index;

            if let Some((
                last_intermediate_shard_index,
                last_intermediate_shard_block_number,
                last_intermediate_shard_first_local_segment_index,
            )) = last_intermediate_shard_info
            {
                match last_intermediate_shard_index.cmp(&shard_index) {
                    cmp::Ordering::Less => {
                        last_intermediate_shard_info.replace((
                            shard_index,
                            intermediate_shard_block.header.prefix.number,
                            intermediate_shard_block
                                .own_segments
                                .as_ref()
                                .map(|own_segments| own_segments.first_local_segment_index),
                        ));
                    }
                    cmp::Ordering::Equal => {
                        // Within the same shard, block numbers must strictly increase
                        if last_intermediate_shard_block_number
                            >= intermediate_shard_block.header.prefix.number
                        {
                            return false;
                        }
                        if let Some(own_segments) = &intermediate_shard_block.own_segments {
                            // First local segment indices must strictly increase too
                            if let Some(last_intermediate_shard_first_local_segment_index) =
                                last_intermediate_shard_first_local_segment_index
                                && last_intermediate_shard_first_local_segment_index
                                    >= own_segments.first_local_segment_index
                            {
                                return false;
                            }

                            last_intermediate_shard_info.replace((
                                shard_index,
                                intermediate_shard_block.header.prefix.number,
                                Some(own_segments.first_local_segment_index),
                            ));
                        } else {
                            last_intermediate_shard_info.replace((
                                shard_index,
                                intermediate_shard_block.header.prefix.number,
                                last_intermediate_shard_first_local_segment_index,
                            ));
                        }
                    }
                    cmp::Ordering::Greater => {
                        // Blocks must be sorted by shard index
                        return false;
                    }
                }
            } else {
                last_intermediate_shard_info.replace((
                    shard_index,
                    intermediate_shard_block.header.prefix.number,
                    intermediate_shard_block
                        .own_segments
                        .as_ref()
                        .map(|own_segments| own_segments.first_local_segment_index),
                ));
            }

            let mut last_leaf_shard_info = None::<(ShardIndex, LocalSegmentIndex)>;

            if !intermediate_shard_block.leaf_shards_segments().all(
                |(leaf_shard_index, own_segments)| {
                    if !leaf_shard_index.is_child_of(shard_index) {
                        return false;
                    }

                    if let Some((
                        last_leaf_shard_index,
                        last_leaf_shard_first_local_segment_index,
                    )) = last_leaf_shard_info
                    {
                        match last_leaf_shard_index.cmp(&leaf_shard_index) {
                            cmp::Ordering::Less => {
                                // Leaf shard indices are in ascending order
                            }
                            cmp::Ordering::Equal => {
                                if last_leaf_shard_first_local_segment_index
                                    >= own_segments.first_local_segment_index
                                {
                                    return false;
                                }
                            }
                            cmp::Ordering::Greater => {
                                return false;
                            }
                        }
                    }
                    last_leaf_shard_info
                        .replace((leaf_shard_index, own_segments.first_local_segment_index));

                    true
                },
            ) {
                return false;
            }

            // Without any segments there must be no proof either
            if intermediate_shard_block.own_segments.is_none()
                && intermediate_shard_block
                    .leaf_shards_segments()
                    .size_hint()
                    .0
                    == 0
            {
                return intermediate_shard_block.segments_proof.is_none();
            }

            let segments_proof = intermediate_shard_block.segments_proof.unwrap_or(&[0; _]);

            #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
            #[expect(clippy::eq_op, reason = "Intentional documentation")]
            const {
                assert!(u16::MAX == 65535);
            }
            let leaf_shards_root = UnbalancedMerkleTree::compute_root_only::<65535, _, _>(
                intermediate_shard_block.leaf_shards_segments().map(
                    |(shard_index, own_segments)| own_segments.root_with_shard_index(shard_index),
                ),
            )
            .unwrap_or_default();

            // Verify the segments proof against the body root committed to in the header
            BalancedMerkleTree::<2>::verify(
                &intermediate_shard_block.header.result.body_root,
                array::from_ref(segments_proof),
                0,
                BalancedMerkleTree::compute_root_only(&[
                    *intermediate_shard_block
                        .own_segments
                        .as_ref()
                        .map(OwnSegments::root)
                        .unwrap_or_default(),
                    leaf_shards_root,
                ]),
            )
        })
    }

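    /// Iterator over intermediate shard blocks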
    #[inline]
    pub fn iter(
        &self,
    ) -> impl ExactSizeIterator<Item = IntermediateShardBlockInfo<'a>> + TrustedLen + 'a {
        // SAFETY: Sizes of these data structures were checked in the constructor
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * (size_of::<u8>() * 2))
        };

        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            let num_leaf_shard_blocks_with_segments = counts[1];
            counts = &counts[2..];

            let header;
            (header, remainder) = IntermediateShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            let segments_proof =
                if num_own_segment_roots > 0 || num_leaf_shard_blocks_with_segments > 0 {
                    let segments_proof;
                    // SAFETY: Sizes of these data structures were checked in the constructor
                    (segments_proof, remainder) = unsafe { remainder.split_at_unchecked(OUT_LEN) };
                    // SAFETY: Correct size guaranteed by the split above
                    Some(unsafe {
                        segments_proof
                            .as_ptr()
                            .cast::<[u8; OUT_LEN]>()
                            .as_ref_unchecked()
                    })
                } else {
                    None
                };

            let own_segments = if num_own_segment_roots > 0 {
                let first_local_segment_index;
                // SAFETY: Sizes of these data structures were checked in the constructor
                (first_local_segment_index, remainder) =
                    unsafe { remainder.split_at_unchecked(size_of::<LocalSegmentIndex>()) };
                // SAFETY: Correct size and alignment checked in the constructor
                let first_local_segment_index =
                    *unsafe { LocalSegmentIndex::from_bytes_unchecked(first_local_segment_index) };

                let own_segment_roots;
                // SAFETY: Sizes of these data structures were checked in the constructor
                (own_segment_roots, remainder) = unsafe {
                    remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE)
                };
                // SAFETY: Valid pointer and size
                let own_segment_roots = unsafe {
                    slice::from_raw_parts(
                        own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                        num_own_segment_roots,
                    )
                };
                let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

                Some(OwnSegments {
                    first_local_segment_index,
                    segment_roots: own_segment_roots,
                })
            } else {
                None
            };

            // SAFETY: Sizes of these data structures were checked in the constructor
            let leaf_shard_segment_roots_counts = unsafe {
                remainder.get_unchecked(
                    ..usize::from(num_leaf_shard_blocks_with_segments) * size_of::<u8>(),
                )
            };

            let leaf_shards_segments_bytes;
            // SAFETY: Sizes of these data structures were checked in the constructor
            (leaf_shards_segments_bytes, remainder) = unsafe {
                remainder.split_at_unchecked(
                    usize::from(num_leaf_shard_blocks_with_segments) * size_of::<u8>()
                        + usize::from(num_leaf_shard_blocks_with_segments)
                            * (ShardIndex::SIZE + LocalSegmentIndex::SIZE) as usize
                        + leaf_shard_segment_roots_counts.iter().fold(
                            0_usize,
                            |acc, &num_own_segment_roots| {
                                acc + usize::from(num_own_segment_roots) * SegmentRoot::SIZE
                            },
                        ),
                )
            };

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            IntermediateShardBlockInfo {
                header,
                segments_proof,
                own_segments,
                num_leaf_shard_blocks_with_segments,
                leaf_shards_segments_bytes,
            }
        })
    }

    /// Number of intermediate shard blocks
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if there are no intermediate shard blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }

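    /// Compute the combined root of segments of the stored intermediate shard blocks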
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                #[expect(clippy::assertions_on_constants, reason = "Intentional documentation")]
                #[expect(clippy::eq_op, reason = "Intentional documentation")]
                const {
                    assert!(u32::MAX == 4294967295);
                }
                UnbalancedMerkleTree::compute_root_only::<4294967295, _, _>(
                    shard_block_info
                        .own_segments
                        .as_ref()
                        .map(OwnSegments::root)
                        .into_iter()
                        .chain(shard_block_info.leaf_shards_segments().map(
                            |(shard_index, own_segments)| {
                                own_segments.root_with_shard_index(shard_index)
                            },
                        )),
                )
                .unwrap_or_default()
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }

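    /// Compute the combined root of headers of the stored intermediate shard blocks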
    #[inline]
    pub fn headers_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                single_block_hash(shard_block_info.header.root().as_ref())
                    .expect("Less than a single block worth of bytes; qed")
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
}

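/// Block body of the beacon chain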
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct BeaconChainBody<'a> {
    /// Own segments of the beacon chain
    own_segments: Option<OwnSegments<'a>>,
    /// Information about intermediate shard blocks
    intermediate_shard_blocks: IntermediateShardBlocksInfo<'a>,
    /// Proof of time checkpoints
    pot_checkpoints: &'a [PotCheckpoints],
}

impl<'a> GenericBlockBody<'a> for BeaconChainBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::BeaconChain;

    #[cfg(feature = "alloc")]
    type Owned = OwnedBeaconChainBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        self.root()
    }
}

impl<'a> BeaconChainBody<'a> {
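    /// Create an instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the bytes are malformed
    /// or internally inconsistent.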
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
        // SAFETY: `u32` is a trivial type for which any bytes are valid
        let num_pot_checkpoints =
            *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Correct size, alignment is not required by `Unaligned`
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (intermediate_shard_blocks, mut remainder) =
            IntermediateShardBlocksInfo::try_from_bytes(bytes)?;

        let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
        // SAFETY: Valid pointer and size
        let pot_checkpoints = unsafe {
            slice::from_raw_parts(
                pot_checkpoints
                    .as_ptr()
                    .cast::<[u8; PotCheckpoints::SIZE]>(),
                num_pot_checkpoints,
            )
        };
        let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);

        let body = Self {
            own_segments,
            intermediate_shard_blocks,
            pot_checkpoints,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

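    /// Check block body's internal consistency.
    ///
    /// All the heavy lifting happens when parsing the nested data structures, so there is
    /// nothing left to check here.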
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }

    /// Create an instance from provided bytes without checking internal consistency.
    ///
    /// Same as [`Self::try_from_bytes()`], except [`Self::is_internally_consistent()`] is not
    /// called.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_pot_checkpoints = bytes.split_off(..size_of::<u32>())?;
        // SAFETY: `u32` is a trivial type for which any bytes are valid
        let num_pot_checkpoints =
            *unsafe { <u32 as TrivialType>::from_bytes(num_pot_checkpoints) }? as usize;

        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Correct size, alignment is not required by `Unaligned`
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (intermediate_shard_blocks, mut remainder) =
            IntermediateShardBlocksInfo::try_from_bytes_unchecked(bytes)?;

        let pot_checkpoints = remainder.split_off(..num_pot_checkpoints * PotCheckpoints::SIZE)?;
        // SAFETY: Valid pointer and size
        let pot_checkpoints = unsafe {
            slice::from_raw_parts(
                pot_checkpoints
                    .as_ptr()
                    .cast::<[u8; PotCheckpoints::SIZE]>(),
                num_pot_checkpoints,
            )
        };
        let pot_checkpoints = PotCheckpoints::slice_from_bytes(pot_checkpoints);

        Some((
            Self {
                own_segments,
                intermediate_shard_blocks,
                pot_checkpoints,
            },
            remainder,
        ))
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedBeaconChainBody {
        if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            OwnedBeaconChainBody::new(
                own_segments.segment_roots.iter().copied().enumerate().map(
                    |(index, segment_root)| {
                        (
                            first_local_segment_index + LocalSegmentIndex::from(index as u64),
                            segment_root,
                        )
                    },
                ),
                self.intermediate_shard_blocks.iter(),
                self.pot_checkpoints,
            )
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedBeaconChainBody::new(
                iter::empty(),
                self.intermediate_shard_blocks.iter(),
                self.pot_checkpoints,
            )
            .expect("`self` is always a valid invariant; qed")
        }
    }

    /// Own segments of the beacon chain
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }

    /// Information about intermediate shard blocks
    #[inline(always)]
    pub fn intermediate_shard_blocks(&self) -> &IntermediateShardBlocksInfo<'a> {
        &self.intermediate_shard_blocks
    }

    /// Proof of time checkpoints
    #[inline(always)]
    pub fn pot_checkpoints(&self) -> &'a [PotCheckpoints] {
        self.pot_checkpoints
    }

    /// Compute the root of this block body
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            *self
                .own_segments
                .as_ref()
                .map(OwnSegments::root)
                .unwrap_or_default(),
            *self.intermediate_shard_blocks.segments_root(),
            *self.intermediate_shard_blocks.headers_root(),
            blake3::hash(PotCheckpoints::bytes_from_slice(self.pot_checkpoints).as_flattened())
                .into(),
        ]);

        Blake3Hash::new(root)
    }
}

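/// Own segments of a leaf shard block together with a proof against its body root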
#[derive(Debug, Copy, Clone)]
pub struct LeafShardOwnSegments<'a> {
    /// Proof that own segment roots are included in the leaf shard block body
    pub segment_roots_proof: &'a [u8; OUT_LEN],
    /// Own segments themselves
    pub own_segments: OwnSegments<'a>,
}

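/// Information about one leaf shard block, as stored in the intermediate shard block body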
#[derive(Debug, Clone)]
pub struct LeafShardBlockInfo<'a> {
    /// Leaf shard block header
    pub header: LeafShardHeader<'a>,
    /// Own segments of the leaf shard block, if any
    pub segments: Option<LeafShardOwnSegments<'a>>,
}

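/// Information about leaf shard blocks, as stored in the intermediate shard block body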
#[derive(Debug, Copy, Clone)]
pub struct LeafShardBlocksInfo<'a> {
    num_blocks: usize,
    bytes: &'a [u8],
}

impl<'a> LeafShardBlocksInfo<'a> {
    /// Create an instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the bytes are malformed
    /// or internally inconsistent.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _segment_roots_proof = remainder.split_off(..OUT_LEN)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }
        }

        let info = Self {
            num_blocks,
            bytes: &bytes_start[..bytes_start.len() - remainder.len()],
        };

        if !info.is_internally_consistent() {
            return None;
        }

        Some((info, remainder))
    }

    /// Create an instance from provided bytes without checking internal consistency.
    ///
    /// Same as [`Self::try_from_bytes()`], except [`Self::is_internally_consistent()`] is not
    /// called.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_blocks = bytes.split_off(..size_of::<u16>())?;
        let num_blocks = usize::from(u16::from_le_bytes([num_blocks[0], num_blocks[1]]));

        let bytes_start = bytes;

        let mut counts = bytes.split_off(..num_blocks * size_of::<u8>())?;

        let mut remainder = align_to_and_ensure_zero_padding::<u64>(bytes)?;

        for _ in 0..num_blocks {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            (_, remainder) = LeafShardHeader::try_from_bytes(remainder)?;

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)?;

            if num_own_segment_roots > 0 {
                let _first_local_segment_index =
                    remainder.split_off(..LocalSegmentIndex::SIZE as usize)?;
                let _segment_roots_proof = remainder.split_off(..OUT_LEN)?;
                let _own_segment_roots =
                    remainder.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            }
        }

        Some((
            Self {
                num_blocks,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }

    /// Check internal consistency of the stored blocks.
    ///
    /// Verifies that blocks are sorted by shard index, that block numbers and first local
    /// segment indices strictly increase within a shard, and that own segments are proven
    /// against the body root of each header.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        let mut last_leaf_shard_info = None::<(ShardIndex, BlockNumber, Option<LocalSegmentIndex>)>;

        self.iter().all(|leaf_shard_block| {
            let shard_index = leaf_shard_block.header.prefix.shard_index;

            if let Some((
                last_leaf_shard_index,
                last_leaf_shard_block_number,
                last_leaf_shard_first_local_segment_index,
            )) = last_leaf_shard_info
            {
                match last_leaf_shard_index.cmp(&shard_index) {
                    cmp::Ordering::Less => {
                        last_leaf_shard_info.replace((
                            shard_index,
                            leaf_shard_block.header.prefix.number,
                            leaf_shard_block
                                .segments
                                .as_ref()
                                .map(|segments| segments.own_segments.first_local_segment_index),
                        ));
                    }
                    cmp::Ordering::Equal => {
                        // Within the same shard, block numbers must strictly increase
                        if last_leaf_shard_block_number >= leaf_shard_block.header.prefix.number {
                            return false;
                        }
                        if let Some(leaf_shard_segments) = &leaf_shard_block.segments {
                            // First local segment indices must strictly increase too
                            if let Some(last_leaf_shard_first_local_segment_index) =
                                last_leaf_shard_first_local_segment_index
                                && last_leaf_shard_first_local_segment_index
                                    >= leaf_shard_segments.own_segments.first_local_segment_index
                            {
                                return false;
                            }

                            last_leaf_shard_info.replace((
                                shard_index,
                                leaf_shard_block.header.prefix.number,
                                Some(leaf_shard_segments.own_segments.first_local_segment_index),
                            ));
                        } else {
                            last_leaf_shard_info.replace((
                                shard_index,
                                leaf_shard_block.header.prefix.number,
                                last_leaf_shard_first_local_segment_index,
                            ));
                        }
                    }
                    cmp::Ordering::Greater => {
                        // Blocks must be sorted by shard index
                        return false;
                    }
                }
            } else {
                last_leaf_shard_info.replace((
                    shard_index,
                    leaf_shard_block.header.prefix.number,
                    leaf_shard_block
                        .segments
                        .as_ref()
                        .map(|segments| segments.own_segments.first_local_segment_index),
                ));
            }

            let Some(segments) = leaf_shard_block.segments else {
                return true;
            };

            // Verify the segments proof against the body root committed to in the header
            BalancedMerkleTree::<2>::verify(
                &leaf_shard_block.header.result.body_root,
                array::from_ref(segments.segment_roots_proof),
                0,
                *segments.own_segments.root(),
            )
        })
    }

    /// Iterator over leaf shard blocks
    #[inline]
    pub fn iter(&self) -> impl ExactSizeIterator<Item = LeafShardBlockInfo<'a>> + TrustedLen + 'a {
        // SAFETY: Sizes of these data structures were checked in the constructor
        let (mut counts, mut remainder) = unsafe {
            self.bytes
                .split_at_unchecked(self.num_blocks * size_of::<u8>())
        };

        (0..self.num_blocks).map(move |_| {
            let num_own_segment_roots = usize::from(counts[0]);
            counts = &counts[1..];

            let header;
            (header, remainder) = LeafShardHeader::try_from_bytes(remainder)
                .expect("Already checked in constructor; qed");

            remainder = align_to_and_ensure_zero_padding::<u64>(remainder)
                .expect("Already checked in constructor; qed");

            let segments = if num_own_segment_roots > 0 {
                let first_local_segment_index;
                // SAFETY: Sizes of these data structures were checked in the constructor
                (first_local_segment_index, remainder) =
                    unsafe { remainder.split_at_unchecked(LocalSegmentIndex::SIZE as usize) };
                // SAFETY: Correct size and alignment checked in the constructor
                let first_local_segment_index =
                    *unsafe { LocalSegmentIndex::from_bytes_unchecked(first_local_segment_index) };

                let segment_roots_proof;
                // SAFETY: Sizes of these data structures were checked in the constructor
                (segment_roots_proof, remainder) = unsafe { remainder.split_at_unchecked(OUT_LEN) };
                // SAFETY: Correct size guaranteed by the split above
                let segment_roots_proof = unsafe {
                    segment_roots_proof
                        .as_ptr()
                        .cast::<[u8; OUT_LEN]>()
                        .as_ref_unchecked()
                };

                let own_segment_roots;
                // SAFETY: Sizes of these data structures were checked in the constructor
                (own_segment_roots, remainder) = unsafe {
                    remainder.split_at_unchecked(num_own_segment_roots * SegmentRoot::SIZE)
                };
                // SAFETY: Valid pointer and size
                let own_segment_roots = unsafe {
                    slice::from_raw_parts(
                        own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                        num_own_segment_roots,
                    )
                };
                let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

                Some(LeafShardOwnSegments {
                    segment_roots_proof,
                    own_segments: OwnSegments {
                        first_local_segment_index,
                        segment_roots: own_segment_roots,
                    },
                })
            } else {
                None
            };

            LeafShardBlockInfo { header, segments }
        })
    }

    /// Number of leaf shard blocks
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_blocks
    }

    /// Returns `true` if there are no leaf shard blocks
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_blocks == 0
    }

    /// Compute the combined root of segments of the stored leaf shard blocks
    #[inline]
    pub fn segments_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                shard_block_info
                    .segments
                    .map(|own_segments| {
                        own_segments
                            .own_segments
                            .root_with_shard_index(shard_block_info.header.prefix.shard_index)
                    })
                    .unwrap_or_default()
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }

    /// Compute the combined root of headers of the stored leaf shard blocks
    #[inline]
    pub fn headers_root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u16::MAX as u64 }, _, _>(
            self.iter().map(|shard_block_info| {
                single_block_hash(shard_block_info.header.root().as_ref())
                    .expect("Less than a single block worth of bytes; qed")
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
}

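/// Block body of an intermediate shard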
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct IntermediateShardBody<'a> {
    /// Own segments of the intermediate shard
    own_segments: Option<OwnSegments<'a>>,
    /// Information about leaf shard blocks
    leaf_shard_blocks: LeafShardBlocksInfo<'a>,
}

impl<'a> GenericBlockBody<'a> for IntermediateShardBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::IntermediateShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedIntermediateShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        self.root()
    }
}

impl<'a> IntermediateShardBody<'a> {
    /// Create an instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the bytes are malformed
    /// or internally inconsistent.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Correct size, alignment is not required by `Unaligned`
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes(bytes)?;

        let body = Self {
            own_segments,
            leaf_shard_blocks,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// All the heavy lifting happens when parsing the nested data structures, so there is
    /// nothing left to check here.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }

    /// Create an instance from provided bytes without checking internal consistency.
    ///
    /// Same as [`Self::try_from_bytes()`], except [`Self::is_internally_consistent()`] is not
    /// called.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Correct size, alignment is not required by `Unaligned`
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (leaf_shard_blocks, remainder) = LeafShardBlocksInfo::try_from_bytes_unchecked(bytes)?;

        Some((
            Self {
                own_segments,
                leaf_shard_blocks,
            },
            remainder,
        ))
    }

    /// Own segments of the intermediate shard
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }

    /// Information about leaf shard blocks
    #[inline(always)]
    pub fn leaf_shard_blocks(&self) -> &LeafShardBlocksInfo<'a> {
        &self.leaf_shard_blocks
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedIntermediateShardBody {
        if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            OwnedIntermediateShardBody::new(
                own_segments.segment_roots.iter().copied().enumerate().map(
                    |(index, segment_root)| {
                        (
                            first_local_segment_index + LocalSegmentIndex::from(index as u64),
                            segment_root,
                        )
                    },
                ),
                self.leaf_shard_blocks.iter(),
            )
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedIntermediateShardBody::new(iter::empty(), self.leaf_shard_blocks.iter())
                .expect("`self` is always a valid invariant; qed")
        }
    }

    /// Compute the root of this block body
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            BalancedMerkleTree::compute_root_only(&[
                *self
                    .own_segments
                    .as_ref()
                    .map(OwnSegments::root)
                    .unwrap_or_default(),
                *self.leaf_shard_blocks.segments_root(),
            ]),
            *self.leaf_shard_blocks.headers_root(),
        ]);

        Blake3Hash::new(root)
    }
}

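/// Transactions stored in a block body.
///
/// Encoded layout, reconstructed from the parsing code below (informal sketch): a little-endian
/// `u32` transaction count, then each transaction followed by zero padding to `u128` alignment.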
#[derive(Debug, Copy, Clone)]
pub struct Transactions<'a> {
    num_transactions: usize,
    bytes: &'a [u8],
}

impl<'a> Transactions<'a> {
    /// Create an instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the bytes are malformed.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_transactions = bytes.split_off(..size_of::<u32>())?;
        let num_transactions = u32::from_le_bytes([
            num_transactions[0],
            num_transactions[1],
            num_transactions[2],
            num_transactions[3],
        ]) as usize;

        let mut remainder = align_to_and_ensure_zero_padding::<u128>(bytes)?;
        let bytes_start = remainder;

        for _ in 0..num_transactions {
            (_, remainder) = Transaction::try_from_bytes(remainder)?;
            remainder = align_to_and_ensure_zero_padding::<u128>(remainder)?;
        }

        Some((
            Self {
                num_transactions,
                bytes: &bytes_start[..bytes_start.len() - remainder.len()],
            },
            remainder,
        ))
    }

    /// Iterator over transactions
    #[inline]
    pub fn iter(&self) -> impl ExactSizeIterator<Item = Transaction<'a>> + TrustedLen + 'a {
        let mut remainder = self.bytes;

        (0..self.num_transactions).map(move |_| {
            // SAFETY: Transactions were checked in the constructor
            let transaction = unsafe { Transaction::from_bytes_unchecked(remainder) };

            remainder = &remainder[transaction.encoded_size()..];
            remainder = align_to_and_ensure_zero_padding::<u128>(remainder)
                .expect("Already checked in constructor; qed");

            transaction
        })
    }

    /// Number of transactions
    #[inline(always)]
    pub const fn len(&self) -> usize {
        self.num_transactions
    }

    /// Returns `true` if there are no transactions
    #[inline(always)]
    pub const fn is_empty(&self) -> bool {
        self.num_transactions == 0
    }

    /// Compute the combined root of the stored transactions
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = UnbalancedMerkleTree::compute_root_only::<{ u32::MAX as u64 }, _, _>(
            self.iter().map(|transaction| {
                single_block_hash(transaction.hash().as_ref())
                    .expect("Less than a single block worth of bytes; qed")
            }),
        )
        .unwrap_or_default();

        Blake3Hash::new(root)
    }
}

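/// Block body of a leaf shard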
#[derive(Debug, Copy, Clone, Yokeable)]
#[non_exhaustive]
pub struct LeafShardBody<'a> {
    /// Own segments of the leaf shard
    own_segments: Option<OwnSegments<'a>>,
    /// Transactions included in this block body
    transactions: Transactions<'a>,
}

impl<'a> GenericBlockBody<'a> for LeafShardBody<'a> {
    const SHARD_KIND: RealShardKind = RealShardKind::LeafShard;

    #[cfg(feature = "alloc")]
    type Owned = OwnedLeafShardBody;

    #[cfg(feature = "alloc")]
    #[inline(always)]
    fn to_owned(self) -> Self::Owned {
        self.to_owned()
    }

    #[inline(always)]
    fn root(&self) -> Blake3Hash {
        self.root()
    }
}

impl<'a> LeafShardBody<'a> {
    /// Create an instance from provided bytes.
    ///
    /// Returns an instance and remaining bytes on success, or `None` if the bytes are malformed
    /// or internally inconsistent.
    #[inline]
    pub fn try_from_bytes(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Correct size, alignment is not required by `Unaligned`
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        let body = Self {
            own_segments,
            transactions,
        };

        if !body.is_internally_consistent() {
            return None;
        }

        Some((body, remainder))
    }

    /// Check block body's internal consistency.
    ///
    /// All the heavy lifting happens when parsing the nested data structures, so there is
    /// nothing left to check here.
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        true
    }

    /// Create an instance from provided bytes without checking internal consistency.
    ///
    /// Same as [`Self::try_from_bytes()`], except [`Self::is_internally_consistent()`] is not
    /// called.
    #[inline]
    pub fn try_from_bytes_unchecked(mut bytes: &'a [u8]) -> Option<(Self, &'a [u8])> {
        let num_own_segment_roots = bytes.split_off(..size_of::<u8>())?;
        let num_own_segment_roots = usize::from(num_own_segment_roots[0]);

        let own_segments = if num_own_segment_roots > 0 {
            let first_local_segment_index = bytes.split_off(..LocalSegmentIndex::SIZE as usize)?;
            // SAFETY: Correct size, alignment is not required by `Unaligned`
            let first_local_segment_index = unsafe {
                Unaligned::<LocalSegmentIndex>::from_bytes_unchecked(first_local_segment_index)
            }
            .as_inner();

            let own_segment_roots = bytes.split_off(..num_own_segment_roots * SegmentRoot::SIZE)?;
            // SAFETY: Valid pointer and size
            let own_segment_roots = unsafe {
                slice::from_raw_parts(
                    own_segment_roots.as_ptr().cast::<[u8; SegmentRoot::SIZE]>(),
                    num_own_segment_roots,
                )
            };
            let own_segment_roots = SegmentRoot::slice_from_repr(own_segment_roots);

            Some(OwnSegments {
                first_local_segment_index,
                segment_roots: own_segment_roots,
            })
        } else {
            None
        };

        let (transactions, remainder) = Transactions::try_from_bytes(bytes)?;

        Some((
            Self {
                own_segments,
                transactions,
            },
            remainder,
        ))
    }

    /// Own segments of the leaf shard
    #[inline(always)]
    pub fn own_segments(&self) -> Option<OwnSegments<'a>> {
        self.own_segments
    }

    /// Transactions stored in this block body
    #[inline(always)]
    pub fn transactions(&self) -> &Transactions<'a> {
        &self.transactions
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedLeafShardBody {
        let mut builder = if let Some(own_segments) = self.own_segments {
            let first_local_segment_index = own_segments.first_local_segment_index;

            OwnedLeafShardBody::init(own_segments.segment_roots.iter().copied().enumerate().map(
                |(index, segment_root)| {
                    (
                        first_local_segment_index + LocalSegmentIndex::from(index as u64),
                        segment_root,
                    )
                },
            ))
            .expect("`self` is always a valid invariant; qed")
        } else {
            OwnedLeafShardBody::init(iter::empty())
                .expect("`self` is always a valid invariant; qed")
        };
        for transaction in self.transactions.iter() {
            builder
                .add_transaction(transaction)
                .expect("`self` is always a valid invariant; qed");
        }

        builder.finish()
    }

    /// Compute the root of this block body
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        let root = BalancedMerkleTree::compute_root_only(&[
            *self
                .own_segments
                .as_ref()
                .map(OwnSegments::root)
                .unwrap_or_default(),
            *self.transactions.root(),
        ]);

        Blake3Hash::new(root)
    }
}

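/// Block body that corresponds to one of the shard kinds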
#[derive(Debug, Copy, Clone, From)]
pub enum BlockBody<'a> {
    /// Block body of the beacon chain
    BeaconChain(BeaconChainBody<'a>),
    /// Block body of an intermediate shard
    IntermediateShard(IntermediateShardBody<'a>),
    /// Block body of a leaf shard
    LeafShard(LeafShardBody<'a>),
}

impl<'a> BlockBody<'a> {
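    /// Create a block body from provided bytes for the given shard kind.
    ///
    /// Returns a body and remaining bytes on success, or `None` if the bytes are malformed or
    /// internally inconsistent.
    ///
    /// Illustrative sketch only (the `encoded` buffer is assumed to come from an owned body
    /// builder elsewhere in the crate):
    ///
    /// ```ignore
    /// let (body, remainder) = BlockBody::try_from_bytes(encoded, RealShardKind::LeafShard)
    ///     .expect("produced by a valid builder; qed");
    /// assert!(remainder.is_empty());
    /// let _body_root = body.root();
    /// ```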
    #[inline]
    pub fn try_from_bytes(bytes: &'a [u8], shard_kind: RealShardKind) -> Option<(Self, &'a [u8])> {
        match shard_kind {
            RealShardKind::BeaconChain => {
                let (body, remainder) = BeaconChainBody::try_from_bytes(bytes)?;
                Some((Self::BeaconChain(body), remainder))
            }
            RealShardKind::IntermediateShard => {
                let (body, remainder) = IntermediateShardBody::try_from_bytes(bytes)?;
                Some((Self::IntermediateShard(body), remainder))
            }
            RealShardKind::LeafShard => {
                let (body, remainder) = LeafShardBody::try_from_bytes(bytes)?;
                Some((Self::LeafShard(body), remainder))
            }
        }
    }

    /// Check block body's internal consistency
    #[inline]
    pub fn is_internally_consistent(&self) -> bool {
        match self {
            Self::BeaconChain(body) => body.is_internally_consistent(),
            Self::IntermediateShard(body) => body.is_internally_consistent(),
            Self::LeafShard(body) => body.is_internally_consistent(),
        }
    }

    /// Create a block body from provided bytes without checking internal consistency.
    ///
    /// Same as [`Self::try_from_bytes()`], except [`Self::is_internally_consistent()`] is not
    /// called.
    #[inline]
    pub fn try_from_bytes_unchecked(
        bytes: &'a [u8],
        shard_kind: RealShardKind,
    ) -> Option<(Self, &'a [u8])> {
        match shard_kind {
            RealShardKind::BeaconChain => {
                let (body, remainder) = BeaconChainBody::try_from_bytes_unchecked(bytes)?;
                Some((Self::BeaconChain(body), remainder))
            }
            RealShardKind::IntermediateShard => {
                let (body, remainder) = IntermediateShardBody::try_from_bytes_unchecked(bytes)?;
                Some((Self::IntermediateShard(body), remainder))
            }
            RealShardKind::LeafShard => {
                let (body, remainder) = LeafShardBody::try_from_bytes_unchecked(bytes)?;
                Some((Self::LeafShard(body), remainder))
            }
        }
    }

    /// Create an owned version of this body
    #[cfg(feature = "alloc")]
    #[inline(always)]
    pub fn to_owned(self) -> OwnedBlockBody {
        match self {
            Self::BeaconChain(body) => body.to_owned().into(),
            Self::IntermediateShard(body) => body.to_owned().into(),
            Self::LeafShard(body) => body.to_owned().into(),
        }
    }

    /// Compute the root of this block body
    #[inline]
    pub fn root(&self) -> Blake3Hash {
        match self {
            Self::BeaconChain(body) => body.root(),
            Self::IntermediateShard(body) => body.root(),
            Self::LeafShard(body) => body.root(),
        }
    }
}