Move Block handling into a separate module and add BlockIterator

Convert Xlat::split_region_to_blocks into a BlockIterator so that
splitting a region into granule sized blocks no longer allocates a
Vec.
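
For reference, a rough sketch of how the iterator is meant to be
consumed inside the crate (the addresses, length and granule below are
made up for illustration):

    // Split a 2MiB region into granule sized blocks lazily, without
    // collecting them into a Vec first.
    let blocks = BlockIterator::new(
        PhysicalAddress(0x4000_0000),
        VirtualAddress(0x4000_0000),
        0x20_0000,
        TranslationGranule::<36>::Granule4k,
    )
    .unwrap();
    for _block in blocks {
        // Each block is aligned to its own size, so it can be mapped by
        // a single translation table entry.
    }
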
Signed-off-by: Imre Kis <imre.kis@arm.com>
Change-Id: Ie1b040a07c424c59e46eeadb1245b7c4a2033add
diff --git a/src/block.rs b/src/block.rs
new file mode 100644
index 0000000..e80617b
--- /dev/null
+++ b/src/block.rs
@@ -0,0 +1,180 @@
+// SPDX-FileCopyrightText: Copyright 2024 Arm Limited and/or its affiliates <open-source-office@arm.com>
+// SPDX-License-Identifier: MIT OR Apache-2.0
+
+use core::fmt;
+
+use alloc::format;
+use alloc::string::ToString;
+
+use super::{
+    address::{PhysicalAddress, VirtualAddress},
+    TranslationGranule, XlatError,
+};
+
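+/// Memory block that can be represented by a single translation table entry.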
+#[derive(PartialEq)]
+pub struct Block {
+    pub pa: PhysicalAddress,
+    pub va: VirtualAddress,
+    pub size: usize,
+}
+
+impl Block {
+    pub fn new(pa: PhysicalAddress, va: VirtualAddress, size: usize) -> Self {
+        Self { pa, va, size }
+    }
+}
+
+impl fmt::Debug for Block {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.debug_struct("Block")
+            .field("pa", &format_args!("{:#010x}", self.pa.0))
+            .field("va", &format_args!("{:#010x}", self.va.0))
+            .field("size", &format_args!("{:#010x}", self.size))
+            .finish()
+    }
+}
+
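+/// Iterator that splits a memory region into granule sized blocks without
+/// allocating an intermediate collection. Each step yields the largest block
+/// that the current address alignment and the remaining length allow.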
+pub struct BlockIterator<const VA_BITS: usize> {
+    pa: PhysicalAddress,
+    va: VirtualAddress,
+    length: usize,
+    granule: TranslationGranule<VA_BITS>,
+}
+
+impl<const VA_BITS: usize> BlockIterator<VA_BITS> {
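+    /// Creates a block iterator that splits the memory region into blocks that
+    /// match the granule size of the translation table.
+    /// # Arguments
+    /// * pa: Physical address
+    /// * va: Virtual address
+    /// * length: Region size in bytes
+    /// * granule: Translation granule
+    /// # Return value
+    /// * Iterator of granule sized blocks or error for an empty or unaligned region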
+    pub fn new(
+        pa: PhysicalAddress,
+        va: VirtualAddress,
+        length: usize,
+        granule: TranslationGranule<VA_BITS>,
+    ) -> Result<Self, XlatError> {
+        let min_granule_mask = granule.block_size_at_level(3) - 1;
+
+        if length == 0 {
+            return Err(XlatError::InvalidParameterError(
+                "Length cannot be 0".to_string(),
+            ));
+        }
+
+        if (pa.0 | va.0 | length) & min_granule_mask != 0 {
+            return Err(XlatError::InvalidParameterError(format!(
+                "Addresses and length must be aligned {:#08x} {:#08x} {:#x} {:#x}",
+                pa.0, va.0, length, min_granule_mask
+            )));
+        }
+
+        Ok(Self {
+            pa,
+            va,
+            length,
+            granule,
+        })
+    }
+}
+
+impl<const VA_BITS: usize> Iterator for BlockIterator<VA_BITS> {
+    type Item = Block;
+
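+    /// Returns the next block of the region, i.e. the largest granule block that
+    /// fits the remaining length and matches the alignment of the current
+    /// physical and virtual addresses.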
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.length > 0 {
+            let initial_lookup_level = self.granule.initial_lookup_level();
+
+            for block_size in
+                (initial_lookup_level..=3).map(|level| self.granule.block_size_at_level(level))
+            {
+                if (self.pa.0 | self.va.0) & (block_size - 1) == 0 && self.length >= block_size {
+                    let block = Block::new(self.pa, self.va, block_size);
+
+                    self.pa = self.pa.add_offset(block_size).unwrap();
+                    self.va = self.va.add_offset(block_size).unwrap();
+                    self.length -= block_size;
+
+                    return Some(block);
+                }
+            }
+        }
+
+        None
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    macro_rules! test_block {
+        ( $pa:expr, $va:expr, $size:literal, $blocks:expr ) => {
+            assert_eq!(
+                Block::new(PhysicalAddress($pa), VirtualAddress($va), $size),
+                $blocks
+            );
+        };
+    }
+
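+    /// Splits the region into the largest possible blocks: 4KiB pages up to the
+    /// 1GiB boundary, a 1GiB block, a 2MiB block and a trailing 4KiB page.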
+    #[test]
+    fn test_block_iterator() {
+        let mut blocks = BlockIterator::new(
+            PhysicalAddress(0x3fff_c000),
+            VirtualAddress(0x3fff_c000),
+            0x4020_5000,
+            TranslationGranule::<36>::Granule4k,
+        )
+        .unwrap();
+        test_block!(0x3fff_c000, 0x3fff_c000, 0x1000, blocks.next().unwrap());
+        test_block!(0x3fff_d000, 0x3fff_d000, 0x1000, blocks.next().unwrap());
+        test_block!(0x3fff_e000, 0x3fff_e000, 0x1000, blocks.next().unwrap());
+        test_block!(0x3fff_f000, 0x3fff_f000, 0x1000, blocks.next().unwrap());
+        test_block!(
+            0x4000_0000,
+            0x4000_0000,
+            0x4000_0000,
+            blocks.next().unwrap()
+        );
+        test_block!(
+            0x8000_0000,
+            0x8000_0000,
+            0x0020_0000,
+            blocks.next().unwrap()
+        );
+        test_block!(0x8020_0000, 0x8020_0000, 0x1000, blocks.next().unwrap());
+    }
+
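+    /// The physical address is not 2MiB aligned while the virtual address is, so
+    /// the region can only be split into 4KiB blocks.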
+    #[test]
+    fn test_block_iterator_unaligned() {
+        let blocks = BlockIterator::new(
+            PhysicalAddress(0x3fff_c000),
+            VirtualAddress(0x3f20_0000),
+            0x200000,
+            TranslationGranule::<36>::Granule4k,
+        )
+        .unwrap();
+        for (i, block) in blocks.enumerate().take(512) {
+            test_block!(
+                0x3fff_c000 + (i << 12),
+                0x3f20_0000 + (i << 12),
+                0x1000,
+                block
+            );
+        }
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
index fec87cc..e8bd6eb 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -8,12 +8,12 @@
extern crate alloc;
use core::iter::zip;
-use core::{fmt, panic};
+use core::panic;
use address::{PhysicalAddress, VirtualAddress, VirtualAddressRange};
use alloc::format;
-use alloc::string::{String, ToString};
-use alloc::vec::Vec;
+use alloc::string::String;
+use block::{Block, BlockIterator};
use log::debug;
use bitflags::bitflags;
@@ -28,6 +28,7 @@
use self::region_pool::{Region, RegionPool, RegionPoolError};
pub mod address;
+mod block;
mod descriptor;
mod granule;
pub mod kernel_space;
@@ -110,29 +111,6 @@
}
}
-#[derive(PartialEq)]
-struct Block {
-    pa: PhysicalAddress,
-    va: VirtualAddress,
-    size: usize,
-}
-
-impl Block {
-    fn new(pa: PhysicalAddress, va: VirtualAddress, size: usize) -> Self {
-        Self { pa, va, size }
-    }
-}
-
-impl fmt::Debug for Block {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("Block")
-            .field("pa", &format_args!("{:#010x}", self.pa.0))
-            .field("va", &format_args!("{:#010x}", self.va.0))
-            .field("size", &format_args!("{:#010x}", self.size))
-            .finish()
-    }
-}
-
pub enum RegimeVaRange {
Lower,
Upper,
@@ -572,7 +550,7 @@
region: VirtualRegion,
attributes: Attributes,
) -> Result<VirtualAddress, XlatError> {
-        let blocks = Self::split_region_to_blocks(
+        let blocks = BlockIterator::new(
region.get_pa(),
region.base(),
region.length(),
@@ -590,7 +568,7 @@
/// # Arguments
/// * region: Memory region object
fn unmap_region(&mut self, region: &VirtualRegion) -> Result<(), XlatError> {
-        let blocks = Self::split_region_to_blocks(
+        let blocks = BlockIterator::new(
region.get_pa(),
region.base(),
region.length(),
@@ -612,57 +590,6 @@
self.regions.find_containing_region(va, length).ok()
}
-    /// Splits memory region to blocks that matches the granule size of the translation table.
-    /// # Arguments
-    /// * pa: Physical address
-    /// * va: Virtual address
-    /// * length: Region size in bytes
-    /// * granule: Translation granule
-    /// # Return value
-    /// * Vector of granule sized blocks
-    fn split_region_to_blocks(
-        mut pa: PhysicalAddress,
-        mut va: VirtualAddress,
-        mut length: usize,
-        granule: TranslationGranule<VA_BITS>,
-    ) -> Result<Vec<Block>, XlatError> {
-        let min_granule_mask = granule.block_size_at_level(3) - 1;
-
-        if length == 0 {
-            return Err(XlatError::InvalidParameterError(
-                "Length cannot be 0".to_string(),
-            ));
-        }
-
-        if (pa.0 | va.0 | length) & min_granule_mask != 0 {
-            return Err(XlatError::InvalidParameterError(format!(
-                "Addresses and length must be aligned {:#08x} {:#08x} {:#x} {:#x}",
-                pa.0, va.0, length, min_granule_mask
-            )));
-        }
-
-        let mut pages = Vec::new();
-
-        while length > 0 {
-            let initial_lookup_level = granule.initial_lookup_level();
-
-            for block_size in
-                (initial_lookup_level..=3).map(|level| granule.block_size_at_level(level))
-            {
-                if (pa.0 | va.0) & (block_size - 1) == 0 && length >= block_size {
-                    pages.push(Block::new(pa, va, block_size));
-                    pa = pa.add_offset(block_size).ok_or(XlatError::Overflow)?;
-                    va = va.add_offset(block_size).ok_or(XlatError::Overflow)?;
-
-                    length -= block_size;
-                    break;
-                }
-            }
-        }
-
-        Ok(pages)
-    }
-
/// Add block to memory mapping
/// # Arguments
/// * block: Memory block that can be represented by a single translation table entry
@@ -991,49 +918,3 @@
}
}
}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    type TestXlat = Xlat<36>;
-
-    fn make_block(pa: usize, va: usize, size: usize) -> Block {
-        Block::new(PhysicalAddress(pa), VirtualAddress(va), size)
-    }
-
-    #[test]
-    fn test_split_to_pages() {
-        let pages = TestXlat::split_region_to_blocks(
-            PhysicalAddress(0x3fff_c000),
-            VirtualAddress(0x3fff_c000),
-            0x4020_5000,
-            TranslationGranule::Granule4k,
-        )
-        .unwrap();
-        assert_eq!(make_block(0x3fff_c000, 0x3fff_c000, 0x1000), pages[0]);
-        assert_eq!(make_block(0x3fff_d000, 0x3fff_d000, 0x1000), pages[1]);
-        assert_eq!(make_block(0x3fff_e000, 0x3fff_e000, 0x1000), pages[2]);
-        assert_eq!(make_block(0x3fff_f000, 0x3fff_f000, 0x1000), pages[3]);
-        assert_eq!(make_block(0x4000_0000, 0x4000_0000, 0x4000_0000), pages[4]);
-        assert_eq!(make_block(0x8000_0000, 0x8000_0000, 0x0020_0000), pages[5]);
-        assert_eq!(make_block(0x8020_0000, 0x8020_0000, 0x1000), pages[6]);
-    }
-
-    #[test]
-    fn test_split_to_pages_unaligned() {
-        let pages = TestXlat::split_region_to_blocks(
-            PhysicalAddress(0x3fff_c000),
-            VirtualAddress(0x3f20_0000),
-            0x200000,
-            TranslationGranule::Granule4k,
-        )
-        .unwrap();
-        for (i, block) in pages.iter().enumerate().take(512) {
-            assert_eq!(
-                make_block(0x3fff_c000 + (i << 12), 0x3f20_0000 + (i << 12), 0x1000),
-                *block
-            );
-        }
-    }
-}