ab_core_primitives/pieces/flat_pieces.rs

use crate::pieces::cow_bytes::CowBytes;
use crate::pieces::{Piece, PieceArray};
use crate::segments::RecordedHistorySegment;
use alloc::boxed::Box;
use bytes::{Bytes, BytesMut};
use core::ops::{Deref, DerefMut};
use core::{fmt, slice};
#[cfg(feature = "parallel")]
use rayon::prelude::*;

/// Multiple pieces concatenated in a single flat byte buffer for more efficient processing
#[derive(Clone, PartialEq, Eq)]
pub struct FlatPieces(CowBytes);

impl fmt::Debug for FlatPieces {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("FlatPieces").finish_non_exhaustive()
    }
}

impl Deref for FlatPieces {
    type Target = [PieceArray];

    #[inline]
    fn deref(&self) -> &Self::Target {
        let bytes = self.0.as_ref();
        // SAFETY: `[u8; Piece::SIZE]` has the same alignment as `u8`, and the reconstructed
        // slice covers only complete chunks within the original buffer
        // (`bytes.len() / Piece::SIZE` rounds down)
        let pieces = unsafe {
            slice::from_raw_parts(
                bytes.as_ptr() as *const [u8; Piece::SIZE],
                bytes.len() / Piece::SIZE,
            )
        };
        PieceArray::slice_from_repr(pieces)
    }
}

impl DerefMut for FlatPieces {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        let bytes = self.0.as_mut();
        // SAFETY: Same as in `Deref`: `[u8; Piece::SIZE]` has the same alignment as `u8` and
        // the slice covers only complete chunks within the buffer; the exclusive borrow of
        // `self` guarantees no aliasing for the lifetime of the returned slice
        let pieces = unsafe {
            slice::from_raw_parts_mut(
                bytes.as_mut_ptr() as *mut [u8; Piece::SIZE],
                bytes.len() / Piece::SIZE,
            )
        };
        PieceArray::slice_mut_from_repr(pieces)
    }
}

impl FlatPieces {
    /// Allocate `FlatPieces` that will hold `piece_count` pieces initialized to zero
    #[inline]
    pub fn new(piece_count: usize) -> Self {
        Self(CowBytes::Owned(BytesMut::zeroed(piece_count * Piece::SIZE)))
    }

    /// Iterator over all pieces.
    ///
    /// NOTE: Unless [`Self::to_shared`] was called first, each yielded [`Piece`] is backed by a
    /// fresh copy of its bytes, which is rarely the desired behavior.
    #[inline]
    pub fn pieces(&self) -> Box<dyn ExactSizeIterator<Item = Piece> + '_> {
        match &self.0 {
            CowBytes::Shared(bytes) => Box::new(
                bytes
                    .chunks_exact(Piece::SIZE)
                    .map(|slice| Piece(CowBytes::Shared(bytes.slice_ref(slice)))),
            ),
            CowBytes::Owned(bytes) => Box::new(
                bytes
                    .chunks_exact(Piece::SIZE)
                    .map(|slice| Piece(CowBytes::Shared(Bytes::copy_from_slice(slice)))),
            ),
        }
    }

    /// Iterator over source pieces (the first [`RecordedHistorySegment::NUM_RAW_RECORDS`] pieces)
    #[inline]
    pub fn source_pieces(&self) -> impl ExactSizeIterator<Item = Piece> + '_ {
        self.pieces().take(RecordedHistorySegment::NUM_RAW_RECORDS)
    }

    /// Iterator over references to source pieces
    #[inline]
    pub fn source(&self) -> impl ExactSizeIterator<Item = &'_ PieceArray> + '_ {
        self.iter().take(RecordedHistorySegment::NUM_RAW_RECORDS)
    }

    /// Iterator over mutable references to source pieces
    #[inline]
    pub fn source_mut(&mut self) -> impl ExactSizeIterator<Item = &'_ mut PieceArray> + '_ {
        self.iter_mut()
            .take(RecordedHistorySegment::NUM_RAW_RECORDS)
    }

    /// Iterator over parity pieces (everything after the source pieces)
    #[inline]
    pub fn parity_pieces(&self) -> impl ExactSizeIterator<Item = Piece> + '_ {
        self.pieces().skip(RecordedHistorySegment::NUM_RAW_RECORDS)
    }

    /// Iterator over references to parity pieces
    #[inline]
    pub fn parity(&self) -> impl ExactSizeIterator<Item = &'_ PieceArray> + '_ {
        self.iter().skip(RecordedHistorySegment::NUM_RAW_RECORDS)
    }

    /// Iterator over mutable references to parity pieces
    #[inline]
    pub fn parity_mut(&mut self) -> impl ExactSizeIterator<Item = &'_ mut PieceArray> + '_ {
        self.iter_mut()
            .skip(RecordedHistorySegment::NUM_RAW_RECORDS)
    }

    /// Convert into the shared (frozen) representation of the underlying buffer.
    ///
    /// After this call, [`Self::pieces`], [`Self::source_pieces`] and [`Self::parity_pieces`]
    /// can create [`Piece`] instances cheaply, sharing the underlying buffer instead of copying
    /// it piece by piece.
    pub fn to_shared(self) -> Self {
        Self(match self.0 {
            CowBytes::Shared(bytes) => CowBytes::Shared(bytes),
            CowBytes::Owned(bytes) => CowBytes::Shared(bytes.freeze()),
        })
    }
}

#[cfg(feature = "parallel")]
impl FlatPieces {
    /// Parallel iterator over references to source pieces
    #[inline]
    pub fn par_source(&self) -> impl IndexedParallelIterator<Item = &'_ PieceArray> + '_ {
        self.par_iter()
            .take(RecordedHistorySegment::NUM_RAW_RECORDS)
    }

    /// Parallel iterator over mutable references to source pieces
    #[inline]
    pub fn par_source_mut(
        &mut self,
    ) -> impl IndexedParallelIterator<Item = &'_ mut PieceArray> + '_ {
        self.par_iter_mut()
            .take(RecordedHistorySegment::NUM_RAW_RECORDS)
    }

    /// Parallel iterator over references to parity pieces
    #[inline]
    pub fn par_parity(&self) -> impl IndexedParallelIterator<Item = &'_ PieceArray> + '_ {
        self.par_iter()
            .skip(RecordedHistorySegment::NUM_RAW_RECORDS)
    }

    /// Parallel iterator over mutable references to parity pieces
    #[inline]
    pub fn par_parity_mut(
        &mut self,
    ) -> impl IndexedParallelIterator<Item = &'_ mut PieceArray> + '_ {
        self.par_iter_mut()
            .skip(RecordedHistorySegment::NUM_RAW_RECORDS)
    }
}
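
// A minimal usage sketch: it assumes a segment lays out `RecordedHistorySegment::NUM_RAW_RECORDS`
// source pieces followed by an equal number of parity pieces (hence the `* 2` below), and it only
// exercises the API defined in this file.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn source_and_parity_split() {
        // Assumed piece count: one source piece plus one parity piece per raw record
        let piece_count = RecordedHistorySegment::NUM_RAW_RECORDS * 2;
        // Zero-initialized owned buffer holding `piece_count` pieces
        let flat_pieces = FlatPieces::new(piece_count);

        // `Deref<Target = [PieceArray]>` exposes the buffer as a contiguous slice of pieces
        assert_eq!(flat_pieces.len(), piece_count);

        // The first `NUM_RAW_RECORDS` pieces are source, the remainder are parity
        assert_eq!(
            flat_pieces.source().count(),
            RecordedHistorySegment::NUM_RAW_RECORDS
        );
        assert_eq!(
            flat_pieces.parity().count(),
            piece_count - RecordedHistorySegment::NUM_RAW_RECORDS
        );

        // `to_shared()` freezes the buffer so `pieces()` yields cheaply shared `Piece`s
        let shared = flat_pieces.to_shared();
        assert_eq!(shared.pieces().count(), piece_count);
    }

    #[cfg(feature = "parallel")]
    #[test]
    fn parallel_iterators_match_sequential() {
        let flat_pieces = FlatPieces::new(RecordedHistorySegment::NUM_RAW_RECORDS * 2);

        // Parallel and sequential views split the buffer at the same boundary
        assert_eq!(
            flat_pieces.par_source().count(),
            flat_pieces.source().count()
        );
        assert_eq!(
            flat_pieces.par_parity().count(),
            flat_pieces.parity().count()
        );
    }
}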