1extern crate alloc;
2
3use ab_blake3::{CHUNK_LEN, OUT_LEN};
4use ab_contract_file::ContractInstruction;
5use ab_core_primitives::ed25519::{Ed25519PublicKey, Ed25519Signature};
6use ab_io_type::IoType;
7use ab_io_type::bool::Bool;
8use ab_riscv_interpreter::{
9 BasicInt, ExecutableInstruction, ExecutionError, FetchInstructionResult, InstructionFetcher,
10 InterpreterState, ProgramCounter, ProgramCounterError, SystemInstructionHandler, VirtualMemory,
11 VirtualMemoryError,
12};
13use ab_riscv_primitives::instructions::Instruction;
14use ab_riscv_primitives::instructions::rv64::Rv64Instruction;
15use ab_riscv_primitives::registers::general_purpose::{Register, Registers};
16use alloc::vec::Vec;
17use core::marker::PhantomData;
18use core::mem::offset_of;
19use core::ops::ControlFlow;
20
/// Compiled RISC-V contract binary used by tests.
///
/// When compiling for `target_env = "abundance"` the slice is empty; on all other targets the
/// bytes are embedded at compile time from the file named by the `CONTRACT_PATH` environment
/// variable (set by the build system -- TODO confirm where it is defined).
pub const RISCV_CONTRACT_BYTES: &[u8] = cfg_select! {
    target_env = "abundance" => {
        &[]
    }
    _ => {
        include_bytes!(env!("CONTRACT_PATH"))
    }
};
30
/// `#[repr(C)]` argument block for a BLAKE3 chunk-hash invocation.
///
/// The pointer fields are guest addresses into this very structure: [`Self::new()`] fills them
/// in from the struct's own base address plus field offsets, so the `chunk` input and `result`
/// output buffers live inline. Field order is part of the layout contract -- do not reorder.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct Blake3HashChunkInternalArgs {
    // Guest address of the inline `chunk` buffer below
    chunk_ptr: u64,
    // Number of used bytes in `chunk` (always `CHUNK_LEN` as set by `new()`)
    chunk_size: u32,
    // Total capacity of `chunk`
    chunk_capacity: u32,
    // Guest address of the inline `result` buffer below
    result_ptr: u64,
    // Input data to hash
    chunk: [u8; CHUNK_LEN],
    // Output hash; zeroed by `new()`, presumably written by the callee -- verify with caller
    result: [u8; OUT_LEN],
}
46
47impl Blake3HashChunkInternalArgs {
48 pub fn new(internal_args_addr: u64, chunk: [u8; CHUNK_LEN]) -> Self {
50 Self {
51 chunk_ptr: internal_args_addr + offset_of!(Self, chunk) as u64,
52 chunk_size: CHUNK_LEN as u32,
53 chunk_capacity: CHUNK_LEN as u32,
54 result_ptr: internal_args_addr + offset_of!(Self, result) as u64,
55 chunk,
56 result: [0; _],
57 }
58 }
59
60 pub fn result(&self) -> [u8; OUT_LEN] {
62 self.result
63 }
64}
65
/// `#[repr(C)]` argument block for an Ed25519 signature-verification invocation.
///
/// The pointer fields are guest addresses into this very structure: [`Self::new()`] fills them
/// in from the struct's own base address plus field offsets, so the public key, signature,
/// message, and result all live inline. Field order is part of the layout contract -- do not
/// reorder.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct Ed25519VerifyInternalArgs {
    // Guest address of the inline `public_key` below
    pub public_key_ptr: u64,
    pub public_key_size: u32,
    pub public_key_capacity: u32,
    // Guest address of the inline `signature` below
    pub signature_ptr: u64,
    pub signature_size: u32,
    pub signature_capacity: u32,
    // Guest address of the inline `message` below
    pub message_ptr: u64,
    pub message_size: u32,
    pub message_capacity: u32,
    // Guest address of the inline `result` below
    pub result_ptr: u64,
    pub public_key: Ed25519PublicKey,
    pub signature: Ed25519Signature,
    // Message is a fixed `OUT_LEN`-byte buffer (a hash, presumably -- verify with caller)
    pub message: [u8; OUT_LEN],
    // Verification outcome; initialized to `false` by `new()`
    pub result: Bool,
}
89
90impl Ed25519VerifyInternalArgs {
91 pub fn new(
93 internal_args_addr: u64,
94 public_key: Ed25519PublicKey,
95 signature: Ed25519Signature,
96 message: [u8; OUT_LEN],
97 ) -> Self {
98 Self {
99 public_key_ptr: internal_args_addr + offset_of!(Self, public_key) as u64,
100 public_key_size: Ed25519PublicKey::SIZE as u32,
101 public_key_capacity: Ed25519PublicKey::SIZE as u32,
102 signature_ptr: internal_args_addr + offset_of!(Self, signature) as u64,
103 signature_size: Ed25519Signature::SIZE as u32,
104 signature_capacity: Ed25519Signature::SIZE as u32,
105 message_ptr: internal_args_addr + offset_of!(Self, message) as u64,
106 message_size: OUT_LEN as u32,
107 message_capacity: OUT_LEN as u32,
108 result_ptr: internal_args_addr + offset_of!(Self, result) as u64,
109 public_key,
110 signature,
111 message,
112 result: Bool::new(false),
113 }
114 }
115
116 pub fn result(&self) -> Bool {
118 self.result
119 }
120}
121
/// Fixed-size in-memory [`VirtualMemory`] implementation for tests.
///
/// Exposes a `SIZE`-byte region starting at guest address `BASE_ADDR`; guest address
/// `BASE_ADDR + i` maps to `data[i]`.
#[derive(Debug, Copy, Clone)]
#[repr(align(16))]
pub struct TestMemory<const BASE_ADDR: u64, const SIZE: usize> {
    // Backing storage for the whole region
    data: [u8; SIZE],
}
128
// All bounds arithmetic below is done in `u64` *before* any `usize` cast so that it can
// neither overflow nor truncate on 32-bit targets.
impl<const BASE_ADDR: u64, const SIZE: usize> VirtualMemory for TestMemory<BASE_ADDR, SIZE> {
    // Reads a `T` at `address` (unaligned access allowed), or errors if the range
    // `[address, address + size_of::<T>())` is not fully inside the region
    fn read<T>(&self, address: u64) -> Result<T, VirtualMemoryError>
    where
        T: BasicInt,
    {
        // `None` means `address < BASE_ADDR`
        let offset = address
            .checked_sub(BASE_ADDR)
            .ok_or(VirtualMemoryError::OutOfBoundsRead { address })?;

        if offset.saturating_add(size_of::<T>() as u64) > self.data.len() as u64 {
            return Err(VirtualMemoryError::OutOfBoundsRead { address });
        }

        // SAFETY: the checks above guarantee `offset + size_of::<T>() <= SIZE`;
        // `read_unaligned` tolerates any alignment
        unsafe {
            Ok(self
                .data
                .as_ptr()
                .cast::<T>()
                .byte_add(offset as usize)
                .read_unaligned())
        }
    }

    // Unchecked variant of `read()`
    unsafe fn read_unchecked<T>(&self, address: u64) -> T
    where
        T: BasicInt,
    {
        // SAFETY: per the trait method's contract the caller must guarantee the read is in
        // bounds (`BASE_ADDR <= address` and the `T` fits) -- TODO confirm the exact
        // `VirtualMemory::read_unchecked` contract
        unsafe {
            let offset = address.unchecked_sub(BASE_ADDR) as usize;
            self.data
                .as_ptr()
                .cast::<T>()
                .byte_add(offset)
                .read_unaligned()
        }
    }

    // Returns exactly `len` bytes starting at `address`, or errors if out of bounds
    fn read_slice(&self, address: u64, len: u32) -> Result<&[u8], VirtualMemoryError> {
        let offset = address
            .checked_sub(BASE_ADDR)
            .ok_or(VirtualMemoryError::OutOfBoundsRead { address })?;

        // Reject before the `usize` cast below can truncate on 32-bit targets
        if offset > self.data.len() as u64 {
            return Err(VirtualMemoryError::OutOfBoundsRead { address });
        }

        self.data
            .get(offset as usize..)
            .and_then(|data| data.get(..len as usize))
            .ok_or(VirtualMemoryError::OutOfBoundsRead { address })
    }

    // Returns up to `len` bytes starting at `address`, truncating (possibly to empty) instead
    // of failing when the range runs past the end of the region
    fn read_slice_up_to(&self, address: u64, len: u32) -> &[u8] {
        let Some(offset) = address.checked_sub(BASE_ADDR) else {
            return &[];
        };

        // Also guards the `usize` cast below against truncation on 32-bit targets
        if offset > self.data.len() as u64 {
            return &[];
        }

        let remaining = self.data.get(offset as usize..).unwrap_or_default();
        // Whichever is shorter: the requested length or the rest of the region
        remaining.get(..len as usize).unwrap_or(remaining)
    }

    // Writes a `T` at `address` (unaligned access allowed), or errors if the range
    // `[address, address + size_of::<T>())` is not fully inside the region
    fn write<T>(&mut self, address: u64, value: T) -> Result<(), VirtualMemoryError>
    where
        T: BasicInt,
    {
        let offset = address
            .checked_sub(BASE_ADDR)
            .ok_or(VirtualMemoryError::OutOfBoundsWrite { address })?;

        if offset.saturating_add(size_of::<T>() as u64) > self.data.len() as u64 {
            return Err(VirtualMemoryError::OutOfBoundsWrite { address });
        }

        // SAFETY: the checks above guarantee `offset + size_of::<T>() <= SIZE`;
        // `write_unaligned` tolerates any alignment
        unsafe {
            self.data
                .as_mut_ptr()
                .cast::<T>()
                .byte_add(offset as usize)
                .write_unaligned(value);
        }

        Ok(())
    }

    // Copies all of `data` into the region starting at `address`, or errors if it doesn't fit
    fn write_slice(&mut self, address: u64, data: &[u8]) -> Result<(), VirtualMemoryError> {
        let offset = address
            .checked_sub(BASE_ADDR)
            .ok_or(VirtualMemoryError::OutOfBoundsWrite { address })?;

        // Reject before the `usize` cast below can truncate on 32-bit targets
        if offset > self.data.len() as u64 {
            return Err(VirtualMemoryError::OutOfBoundsWrite { address });
        }

        let len = data.len();
        self.data
            .get_mut(offset as usize..)
            .and_then(|data| data.get_mut(..len))
            .ok_or(VirtualMemoryError::OutOfBoundsWrite { address })?
            .copy_from_slice(data);

        Ok(())
    }
}
239
240impl<const BASE_ADDR: u64, const SIZE: usize> Default for TestMemory<BASE_ADDR, SIZE> {
241 fn default() -> Self {
242 Self { data: [0; SIZE] }
243 }
244}
245
246impl<const BASE_ADDR: u64, const SIZE: usize> TestMemory<BASE_ADDR, SIZE> {
247 pub fn get_mut_bytes(
249 &mut self,
250 address: u64,
251 size: usize,
252 ) -> Result<&mut [u8], VirtualMemoryError> {
253 let offset = address
254 .checked_sub(BASE_ADDR)
255 .ok_or(VirtualMemoryError::OutOfBoundsRead { address })? as usize;
256
257 if offset + size > self.data.len() {
258 return Err(VirtualMemoryError::OutOfBoundsRead { address });
259 }
260
261 Ok(&mut self.data[offset..][..size])
262 }
263}
264
/// Instruction fetcher that decodes instructions on the fly from [`VirtualMemory`] at the
/// current program counter.
#[derive(Debug, Copy, Clone)]
pub struct LazyInstructionFetcher {
    // Jumping to this address terminates execution (see `set_pc()`)
    return_trap_address: u64,
    // Guest address of the next instruction to fetch
    pc: u64,
}
271
272impl<Memory> ProgramCounter<u64, Memory> for LazyInstructionFetcher
273where
274 Memory: VirtualMemory,
275{
276 #[inline(always)]
277 fn get_pc(&self) -> u64 {
278 self.pc
279 }
280
281 #[inline]
282 fn set_pc(
283 &mut self,
284 memory: &Memory,
285 pc: u64,
286 ) -> Result<ControlFlow<()>, ProgramCounterError<u64>> {
287 if pc == self.return_trap_address {
288 return Ok(ControlFlow::Break(()));
289 }
290
291 if !pc.is_multiple_of(u64::from(ContractInstruction::alignment())) {
292 return Err(ProgramCounterError::UnalignedInstruction { address: pc });
293 }
294
295 memory.read::<u32>(pc)?;
296
297 self.pc = pc;
298
299 Ok(ControlFlow::Continue(()))
300 }
301}
302
impl<Memory> InstructionFetcher<ContractInstruction, Memory> for LazyInstructionFetcher
where
    Memory: VirtualMemory,
{
    // Reads and decodes the instruction at the current `pc`, then advances `pc` past it
    #[inline]
    fn fetch_instruction(
        &mut self,
        memory: &Memory,
    ) -> Result<FetchInstructionResult<ContractInstruction>, ExecutionError<u64>> {
        // SAFETY: `set_pc()` probes every new `pc` with a checked `read::<u32>()` before
        // accepting it; the initial `pc` is covered by the `unsafe fn new()` caller's
        // contract -- so this unchecked read stays in bounds
        let instruction = unsafe { memory.read_unchecked(self.pc) };
        // SAFETY: relies on the `new()` caller guaranteeing that the code at `pc` consists
        // of valid, decodable instructions -- TODO confirm this is upheld upstream
        let instruction =
            unsafe { ContractInstruction::try_decode(instruction).unwrap_unchecked() };

        self.pc += u64::from(instruction.size());

        Ok(FetchInstructionResult::Instruction(instruction))
    }
}
325
impl LazyInstructionFetcher {
    /// Creates a fetcher that starts executing at `pc` and treats a jump to
    /// `return_trap_address` as the end of execution.
    ///
    /// # Safety
    ///
    /// `pc` must point at readable memory containing a valid, decodable instruction:
    /// `fetch_instruction()` reads and decodes it without any checks. Subsequent program
    /// counter values are validated by `set_pc()`.
    #[inline(always)]
    pub unsafe fn new(return_trap_address: u64, pc: u64) -> Self {
        Self {
            return_trap_address,
            pc,
        }
    }
}
343
/// Instruction fetcher that serves instructions from a pre-decoded list instead of reading
/// them from memory.
#[derive(Debug, Default, Clone)]
pub struct EagerTestInstructionFetcher {
    // Pre-decoded instructions, one per 4-byte slot starting at `base_addr`
    instructions: Vec<ContractInstruction>,
    // Jumping to this address terminates execution (see `set_pc()`)
    return_trap_address: u64,
    // Guest address corresponding to `instructions[0]`
    base_addr: u64,
    // Index into `instructions` of the next instruction to fetch
    instruction_offset: usize,
}
352
353impl<Memory> ProgramCounter<u64, Memory> for EagerTestInstructionFetcher
354where
355 Memory: VirtualMemory,
356{
357 #[inline(always)]
358 fn get_pc(&self) -> u64 {
359 self.base_addr + self.instruction_offset as u64 * size_of::<u32>() as u64
360 }
361
362 #[inline]
363 fn set_pc(
364 &mut self,
365 _memory: &Memory,
366 pc: u64,
367 ) -> Result<ControlFlow<()>, ProgramCounterError<u64>> {
368 let address = pc;
369
370 if address == self.return_trap_address {
371 return Ok(ControlFlow::Break(()));
372 }
373
374 if !address.is_multiple_of(size_of::<u32>() as u64) {
375 return Err(ProgramCounterError::UnalignedInstruction { address });
376 }
377
378 let offset = address
379 .checked_sub(self.base_addr)
380 .ok_or(VirtualMemoryError::OutOfBoundsRead { address })? as usize;
381 let instruction_offset = offset / size_of::<u32>();
382
383 if instruction_offset >= self.instructions.len() {
384 return Err(VirtualMemoryError::OutOfBoundsRead { address }.into());
385 }
386
387 self.instruction_offset = instruction_offset;
388
389 Ok(ControlFlow::Continue(()))
390 }
391}
392
impl<Memory> InstructionFetcher<ContractInstruction, Memory> for EagerTestInstructionFetcher
where
    Memory: VirtualMemory,
{
    // Returns the pre-decoded instruction at the current offset and advances past it
    #[inline(always)]
    fn fetch_instruction(
        &mut self,
        _memory: &Memory,
    ) -> Result<FetchInstructionResult<ContractInstruction>, ExecutionError<u64>> {
        // SAFETY: `set_pc()` only accepts offsets below `instructions.len()`, and the initial
        // offset is covered by the `unsafe fn new()` caller's contract. Falling straight
        // through past the end without an intervening jump would be UB -- presumably the
        // contract code always ends in a control transfer; TODO confirm
        let instruction = *unsafe { self.instructions.get_unchecked(self.instruction_offset) };
        self.instruction_offset += 1;

        Ok(FetchInstructionResult::Instruction(instruction))
    }
}
411
impl EagerTestInstructionFetcher {
    /// Creates a fetcher over pre-decoded `instructions` located at `base_addr`, starting
    /// execution at `pc`, with a jump to `return_trap_address` ending execution.
    ///
    /// # Safety
    ///
    /// `pc` must satisfy `pc >= base_addr` (the subtraction below would otherwise overflow)
    /// and `(pc - base_addr) / 4` must be a valid index into `instructions`:
    /// `fetch_instruction()` indexes the list without bounds checks. Execution must also
    /// never run straight past the end of `instructions` without an intervening jump.
    #[inline(always)]
    pub unsafe fn new(
        instructions: Vec<ContractInstruction>,
        return_trap_address: u64,
        base_addr: u64,
        pc: u64,
    ) -> Self {
        Self {
            instructions,
            return_trap_address,
            base_addr,
            // Convert the starting address to an index into `instructions`
            instruction_offset: (pc - base_addr) as usize / size_of::<u32>(),
        }
    }
}
439
/// Marker type for a system-instruction handler whose `ecall` does nothing and lets
/// execution continue.
///
/// The generic parameter selects the instruction type the handler is used with. It is named
/// `I` (rather than `Instruction`) so it does not shadow the `Instruction` trait imported in
/// this module.
#[derive(Debug, Clone, Copy)]
pub struct NoopRv64SystemInstructionHandler<I> {
    // Zero-sized marker tying the handler to its instruction type
    _phantom: PhantomData<I>,
}
445
446impl<Reg> Default for NoopRv64SystemInstructionHandler<Reg> {
447 #[inline(always)]
448 fn default() -> Self {
449 Self {
450 _phantom: PhantomData,
451 }
452 }
453}
454
// No-op handler: `ecall` is ignored entirely and execution simply continues
impl<Reg, Memory, PC, CustomError> SystemInstructionHandler<Reg, Memory, PC, CustomError>
    for NoopRv64SystemInstructionHandler<Rv64Instruction<Reg>>
where
    Reg: Register<Type = u64>,
    [(); Reg::N]:,
{
    // Does nothing: registers, memory, and the program counter are left untouched
    #[inline(always)]
    fn handle_ecall(
        &mut self,
        _regs: &mut Registers<Reg>,
        _memory: &mut Memory,
        _program_counter: &mut PC,
    ) -> Result<ControlFlow<()>, ExecutionError<u64, CustomError>> {
        Ok(ControlFlow::Continue(()))
    }
}
474
475#[expect(clippy::type_complexity)]
477pub fn execute<Memory, IF>(
478 state: &mut InterpreterState<
479 <ContractInstruction as Instruction>::Reg,
480 (),
481 Memory,
482 IF,
483 NoopRv64SystemInstructionHandler<
484 Rv64Instruction<<ContractInstruction as Instruction>::Reg>,
485 >,
486 >,
487) -> Result<(), ExecutionError<u64>>
488where
489 Memory: VirtualMemory,
490 IF: InstructionFetcher<ContractInstruction, Memory>,
491{
492 loop {
493 let instruction = match state.instruction_fetcher.fetch_instruction(&state.memory)? {
494 FetchInstructionResult::Instruction(instruction) => instruction,
495 FetchInstructionResult::ControlFlow(ControlFlow::Continue(())) => {
496 continue;
497 }
498 FetchInstructionResult::ControlFlow(ControlFlow::Break(())) => {
499 break;
500 }
501 };
502
503 match instruction.execute(state)? {
504 ControlFlow::Continue(()) => {
505 continue;
506 }
507 ControlFlow::Break(()) => {
508 break;
509 }
510 }
511 }
512
513 Ok(())
514}