Initial commit
equation314 committed Jul 10, 2024 · commit 3c13499 (0 parents)
Showing 6 changed files with 610 additions and 0 deletions.
55 changes: 55 additions & 0 deletions .github/workflows/ci.yml
@@ -0,0 +1,55 @@
name: CI

on: [push, pull_request]

jobs:
ci:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
rust-toolchain: [nightly]
targets: [x86_64-unknown-linux-gnu, x86_64-unknown-none, riscv64gc-unknown-none-elf, aarch64-unknown-none-softfloat]
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@nightly
with:
toolchain: ${{ matrix.rust-toolchain }}
components: rust-src, clippy, rustfmt
targets: ${{ matrix.targets }}
- name: Check rust version
run: rustc --version --verbose
- name: Check code format
run: cargo fmt --all -- --check
- name: Clippy
run: cargo clippy --target ${{ matrix.targets }} --all-features -- -A clippy::new_without_default
- name: Build
run: cargo build --target ${{ matrix.targets }} --all-features
- name: Unit test
if: ${{ matrix.targets == 'x86_64-unknown-linux-gnu' }}
run: cargo test --target ${{ matrix.targets }} -- --nocapture

doc:
runs-on: ubuntu-latest
strategy:
fail-fast: false
permissions:
contents: write
env:
default-branch: ${{ format('refs/heads/{0}', github.event.repository.default_branch) }}
RUSTDOCFLAGS: -D rustdoc::broken_intra_doc_links -D missing-docs
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@nightly
- name: Build docs
continue-on-error: ${{ github.ref != env.default-branch && github.event_name != 'pull_request' }}
run: |
cargo doc --no-deps --all-features
printf '<meta http-equiv="refresh" content="0;url=%s/index.html">' $(cargo tree | head -1 | cut -d' ' -f1) > target/doc/index.html
- name: Deploy to Github Pages
if: ${{ github.ref == env.default-branch }}
uses: JamesIves/github-pages-deploy-action@v4
with:
single-commit: true
branch: gh-pages
folder: target/doc
4 changes: 4 additions & 0 deletions .gitignore
@@ -0,0 +1,4 @@
/target
/.vscode
.DS_Store
Cargo.lock
12 changes: 12 additions & 0 deletions Cargo.toml
@@ -0,0 +1,12 @@
[package]
name = "slab_allocator"
version = "0.3.1"
edition = "2021"
authors = ["Robert Węcławski <[email protected]>", "Yiren Zhang <[email protected]>"]
license = "MIT"

description = "Slab allocator for `no_std` systems. Uses multiple slabs with blocks of different sizes and a buddy system allocator for blocks larger than 4096 bytes"
keywords = ["slab", "allocator", "no_std", "heap", "kernel"]

[dependencies]
buddy_system_allocator = { version = "0.10", default-features = false }
256 changes: 256 additions & 0 deletions src/lib.rs
@@ -0,0 +1,256 @@
//! Slab allocator for `no_std` systems. It uses multiple slabs with blocks of
//! different sizes and a [buddy_system_allocator] for blocks larger than 4096
//! bytes.
//!
//! It's based on <https://github.com/weclaw1/slab_allocator>.
//!
//! [buddy_system_allocator]: https://docs.rs/buddy_system_allocator/latest/buddy_system_allocator/
#![feature(allocator_api)]
#![no_std]

extern crate alloc;
extern crate buddy_system_allocator;

use alloc::alloc::{AllocError, Layout};
use core::ptr::NonNull;

#[cfg(test)]
mod tests;

mod slab;
use slab::Slab;

const SET_SIZE: usize = 64;
const MIN_HEAP_SIZE: usize = 0x8000;

enum HeapAllocator {
Slab64Bytes,
Slab128Bytes,
Slab256Bytes,
Slab512Bytes,
Slab1024Bytes,
Slab2048Bytes,
Slab4096Bytes,
BuddyAllocator,
}

/// A fixed size heap backed by multiple slabs with blocks of different sizes.
/// Allocations larger than 4096 bytes are served by the buddy system allocator.
pub struct Heap {
slab_64_bytes: Slab<64>,
slab_128_bytes: Slab<128>,
slab_256_bytes: Slab<256>,
slab_512_bytes: Slab<512>,
slab_1024_bytes: Slab<1024>,
slab_2048_bytes: Slab<2048>,
slab_4096_bytes: Slab<4096>,
buddy_allocator: buddy_system_allocator::Heap<32>,
}

impl Heap {
/// Creates a new heap with the given `heap_start_addr` and `heap_size`. The start address must be valid
/// and the memory in the `[heap_start_addr, heap_start_addr + heap_size)` range must not be used for
/// anything else.
///
/// # Safety
/// This function is unsafe because it can cause undefined behavior if the
/// given address is invalid.
pub unsafe fn new(heap_start_addr: usize, heap_size: usize) -> Heap {
assert!(
heap_start_addr % 4096 == 0,
"Start address should be page aligned"
);
assert!(
heap_size >= MIN_HEAP_SIZE,
"Heap size should be greater or equal to minimum heap size"
);
assert!(
heap_size % MIN_HEAP_SIZE == 0,
"Heap size should be a multiple of minimum heap size"
);
Heap {
slab_64_bytes: Slab::<64>::new(0, 0),
slab_128_bytes: Slab::<128>::new(0, 0),
slab_256_bytes: Slab::<256>::new(0, 0),
slab_512_bytes: Slab::<512>::new(0, 0),
slab_1024_bytes: Slab::<1024>::new(0, 0),
slab_2048_bytes: Slab::<2048>::new(0, 0),
slab_4096_bytes: Slab::<4096>::new(0, 0),
buddy_allocator: {
let mut buddy = buddy_system_allocator::Heap::<32>::new();
buddy.init(heap_start_addr, heap_size);
buddy
},
}
}

/// Adds memory to the heap. The start address must be valid and the memory in the
/// `[heap_start_addr, heap_start_addr + heap_size)` range must not be used for
/// anything else.
///
/// # Safety
/// This function is unsafe because it can cause undefined behavior if the
/// given address is invalid.
pub unsafe fn add_memory(&mut self, heap_start_addr: usize, heap_size: usize) {
assert!(
heap_start_addr % 4096 == 0,
"Start address should be page aligned"
);
assert!(
heap_size % 4096 == 0,
"Add Heap size should be a multiple of page size"
);
self.buddy_allocator
.add_to_heap(heap_start_addr, heap_start_addr + heap_size);
}

/// Adds memory to the allocator specified by `slab`. The start address must be valid
/// and the memory in the `[mem_start_addr, mem_start_addr + mem_size)` range must not
/// be used for anything else.
/// In the case of the buddy system allocator, the memory can only be extended.
///
/// # Safety
/// This function is unsafe because it can cause undefined behavior if the
/// given address is invalid.
unsafe fn _grow(&mut self, mem_start_addr: usize, mem_size: usize, slab: HeapAllocator) {
match slab {
HeapAllocator::Slab64Bytes => self.slab_64_bytes.grow(mem_start_addr, mem_size),
HeapAllocator::Slab128Bytes => self.slab_128_bytes.grow(mem_start_addr, mem_size),
HeapAllocator::Slab256Bytes => self.slab_256_bytes.grow(mem_start_addr, mem_size),
HeapAllocator::Slab512Bytes => self.slab_512_bytes.grow(mem_start_addr, mem_size),
HeapAllocator::Slab1024Bytes => self.slab_1024_bytes.grow(mem_start_addr, mem_size),
HeapAllocator::Slab2048Bytes => self.slab_2048_bytes.grow(mem_start_addr, mem_size),
HeapAllocator::Slab4096Bytes => self.slab_4096_bytes.grow(mem_start_addr, mem_size),
HeapAllocator::BuddyAllocator => self
.buddy_allocator
.add_to_heap(mem_start_addr, mem_start_addr + mem_size),
}
}

/// Allocates a chunk of the given size with the given alignment. Returns a pointer to
/// the beginning of that chunk on success, and `Err` otherwise.
/// This function finds the smallest slab that can still accommodate the given chunk.
/// The runtime is `O(1)` for chunks of size <= 4096 bytes, and `O(n)` for larger chunks.
pub fn allocate(&mut self, layout: Layout) -> Result<usize, AllocError> {
match Heap::layout_to_allocator(&layout) {
HeapAllocator::Slab64Bytes => self
.slab_64_bytes
.allocate(layout, &mut self.buddy_allocator),
HeapAllocator::Slab128Bytes => self
.slab_128_bytes
.allocate(layout, &mut self.buddy_allocator),
HeapAllocator::Slab256Bytes => self
.slab_256_bytes
.allocate(layout, &mut self.buddy_allocator),
HeapAllocator::Slab512Bytes => self
.slab_512_bytes
.allocate(layout, &mut self.buddy_allocator),
HeapAllocator::Slab1024Bytes => self
.slab_1024_bytes
.allocate(layout, &mut self.buddy_allocator),
HeapAllocator::Slab2048Bytes => self
.slab_2048_bytes
.allocate(layout, &mut self.buddy_allocator),
HeapAllocator::Slab4096Bytes => self
.slab_4096_bytes
.allocate(layout, &mut self.buddy_allocator),
HeapAllocator::BuddyAllocator => self
.buddy_allocator
.alloc(layout)
.map(|ptr| ptr.as_ptr() as usize)
.map_err(|_| AllocError),
}
}

/// Frees the given allocation. `ptr` must be a pointer returned by a call to the
/// `allocate` function with an identical size and alignment. Undefined behavior may
/// occur for invalid arguments, thus this function is unsafe.
///
/// This function finds the slab that contains the address of `ptr` and adds the block
/// beginning at `ptr` to its list of free blocks.
/// This operation is `O(1)` for blocks <= 4096 bytes and `O(n)` for blocks > 4096 bytes.
///
/// # Safety
/// This function is unsafe because it can cause undefined behavior if the
/// given address is invalid.
pub unsafe fn deallocate(&mut self, ptr: usize, layout: Layout) {
match Heap::layout_to_allocator(&layout) {
HeapAllocator::Slab64Bytes => self.slab_64_bytes.deallocate(ptr),
HeapAllocator::Slab128Bytes => self.slab_128_bytes.deallocate(ptr),
HeapAllocator::Slab256Bytes => self.slab_256_bytes.deallocate(ptr),
HeapAllocator::Slab512Bytes => self.slab_512_bytes.deallocate(ptr),
HeapAllocator::Slab1024Bytes => self.slab_1024_bytes.deallocate(ptr),
HeapAllocator::Slab2048Bytes => self.slab_2048_bytes.deallocate(ptr),
HeapAllocator::Slab4096Bytes => self.slab_4096_bytes.deallocate(ptr),
HeapAllocator::BuddyAllocator => self
.buddy_allocator
.dealloc(NonNull::new(ptr as *mut u8).unwrap(), layout),
}
}

/// Returns bounds on the guaranteed usable size of a successful
/// allocation created with the specified `layout`.
pub fn usable_size(&self, layout: Layout) -> (usize, usize) {
match Heap::layout_to_allocator(&layout) {
HeapAllocator::Slab64Bytes => (layout.size(), 64),
HeapAllocator::Slab128Bytes => (layout.size(), 128),
HeapAllocator::Slab256Bytes => (layout.size(), 256),
HeapAllocator::Slab512Bytes => (layout.size(), 512),
HeapAllocator::Slab1024Bytes => (layout.size(), 1024),
HeapAllocator::Slab2048Bytes => (layout.size(), 2048),
HeapAllocator::Slab4096Bytes => (layout.size(), 4096),
HeapAllocator::BuddyAllocator => (layout.size(), layout.size()),
}
}

/// Finds the allocator to use based on the layout size and alignment.
fn layout_to_allocator(layout: &Layout) -> HeapAllocator {
if layout.size() > 4096 {
HeapAllocator::BuddyAllocator
} else if layout.size() <= 64 && layout.align() <= 64 {
HeapAllocator::Slab64Bytes
} else if layout.size() <= 128 && layout.align() <= 128 {
HeapAllocator::Slab128Bytes
} else if layout.size() <= 256 && layout.align() <= 256 {
HeapAllocator::Slab256Bytes
} else if layout.size() <= 512 && layout.align() <= 512 {
HeapAllocator::Slab512Bytes
} else if layout.size() <= 1024 && layout.align() <= 1024 {
HeapAllocator::Slab1024Bytes
} else if layout.size() <= 2048 && layout.align() <= 2048 {
HeapAllocator::Slab2048Bytes
} else {
HeapAllocator::Slab4096Bytes
}
}

/// Returns total memory size in bytes of the heap.
pub fn total_bytes(&self) -> usize {
self.slab_64_bytes.total_blocks() * 64
+ self.slab_128_bytes.total_blocks() * 128
+ self.slab_256_bytes.total_blocks() * 256
+ self.slab_512_bytes.total_blocks() * 512
+ self.slab_1024_bytes.total_blocks() * 1024
+ self.slab_2048_bytes.total_blocks() * 2048
+ self.slab_4096_bytes.total_blocks() * 4096
+ self.buddy_allocator.stats_total_bytes()
}

/// Returns allocated memory size in bytes.
pub fn used_bytes(&self) -> usize {
self.slab_64_bytes.used_blocks() * 64
+ self.slab_128_bytes.used_blocks() * 128
+ self.slab_256_bytes.used_blocks() * 256
+ self.slab_512_bytes.used_blocks() * 512
+ self.slab_1024_bytes.used_blocks() * 1024
+ self.slab_2048_bytes.used_blocks() * 2048
+ self.slab_4096_bytes.used_blocks() * 4096
+ self.buddy_allocator.stats_alloc_actual()
}

/// Returns available memory size in bytes.
pub fn available_bytes(&self) -> usize {
self.total_bytes() - self.used_bytes()
}
}
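
For context, here is a minimal usage sketch of the `Heap` API defined above (not part of the commit). It assumes `slab_allocator` is declared as a dependency and built on nightly (the crate uses `#![feature(allocator_api)]`); the static buffer, its size, and the `demo` function are illustrative only.

```rust
use core::alloc::Layout;
use slab_allocator::Heap;

// Page-aligned backing storage of MIN_HEAP_SIZE (0x8000) bytes.
// The name and size are illustrative, not part of the crate.
#[repr(align(4096))]
struct HeapSpace([u8; 0x8000]);
static mut HEAP_SPACE: HeapSpace = HeapSpace([0; 0x8000]);

fn demo() {
    // SAFETY: the buffer is page aligned, at least MIN_HEAP_SIZE bytes,
    // a multiple of MIN_HEAP_SIZE, and used for nothing else.
    let mut heap = unsafe { Heap::new(core::ptr::addr_of!(HEAP_SPACE) as usize, 0x8000) };

    // A 100-byte allocation with 8-byte alignment is served by the 128-byte slab,
    // so the guaranteed usable size reported for this layout is (100, 128).
    let layout = Layout::from_size_align(100, 8).unwrap();
    let ptr = heap.allocate(layout).expect("out of memory");
    assert_eq!(heap.usable_size(layout), (100, 128));
    assert!(heap.used_bytes() >= 128 && heap.used_bytes() <= heap.total_bytes());

    // SAFETY: `ptr` was returned by `allocate` with the same layout.
    unsafe { heap.deallocate(ptr, layout) };
}
```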