xref: /relibc/src/platform/allocator/dlmalloc.rs (revision fd22fbbabfdcb99dacbd9e8317399ad91050d6cd)
1 use crate::ALLOCATOR;
2 use core::{
3     alloc::{GlobalAlloc, Layout},
4     sync::atomic::{AtomicUsize, Ordering},
5 };
6 
7 use super::types::*;
8 
// FFI bindings to the C dlmalloc "mspace" API. An mspace is an independent
// malloc state; it is passed around here as a `usize` (the opaque mstate
// pointer cast to an integer).
extern "C" {
    /// Create a new mspace. `capacity` 0 presumably lets dlmalloc pick a
    /// default granularity; `locked` non-zero enables internal locking —
    /// TODO confirm against the dlmalloc sources.
    fn create_mspace(capacity: size_t, locked: c_int) -> usize;
    /// Create a new mspace carved out of caller-provided memory at `base`.
    fn create_mspace_with_base(base: *mut c_void, capacity: size_t, locked: c_int) -> usize;
    /// malloc(bytes) within the given mspace.
    fn mspace_malloc(msp: usize, bytes: size_t) -> *mut c_void;
    /// memalign(alignment, bytes) within the given mspace.
    fn mspace_memalign(msp: usize, alignment: size_t, bytes: size_t) -> *mut c_void;
    /// realloc(oldmem, bytes) within the given mspace.
    fn mspace_realloc(msp: usize, oldmem: *mut c_void, bytes: size_t) -> *mut c_void;
    /// free(mem) within the given mspace.
    fn mspace_free(msp: usize, mem: *mut c_void);
    //fn dlmalloc(bytes: size_t) -> *mut c_void;
    //fn dlmemalign(alignment: size_t, bytes: size_t) -> *mut c_void;
    //fn dlrealloc(oldmem: *mut c_void, bytes: size_t) -> *mut c_void;
    //fn dlfree(mem: *mut c_void);
}
21 
/// Global allocator backed by a dlmalloc mspace.
///
/// The mspace handle is stored as an atomic so it can be installed after
/// static initialization (see `set_book_keeper`) without interior
/// mutability issues in a `static` allocator.
pub struct Allocator {
    // Opaque dlmalloc mstate pointer, stored as usize; 0 means "not yet set".
    mstate: AtomicUsize,
}
25 
/// Const initializer for the global allocator: starts with no mspace (0).
/// A real mspace must be installed via `set_book_keeper` before any
/// allocation is attempted.
pub const NEWALLOCATOR: Allocator = Allocator {
    mstate: AtomicUsize::new(0),
};
29 
impl Allocator {
    /// Install the mspace handle (opaque mstate pointer as usize) that all
    /// subsequent allocations will use.
    pub fn set_book_keeper(&self, mstate: usize) {
        // Relaxed: the handle is set once during startup; no other data is
        // published through this atomic — NOTE(review): assumes no racing
        // allocation before initialization completes.
        self.mstate.store(mstate, Ordering::Relaxed);
    }

    /// Fetch the current mspace handle (0 if none has been installed).
    pub fn get_book_keeper(&self) -> usize {
        self.mstate.load(Ordering::Relaxed)
    }
}
39 
40 unsafe impl<'a> GlobalAlloc for Allocator {
alloc(&self, layout: Layout) -> *mut u841     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
42         alloc_align(layout.size(), layout.align()) as *mut u8
43     }
44 
dealloc(&self, ptr: *mut u8, _layout: Layout)45     unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) {
46         free(ptr as *mut c_void)
47     }
48 }
49 
alloc(size: usize) -> *mut c_void50 pub unsafe fn alloc(size: usize) -> *mut c_void {
51     mspace_malloc(ALLOCATOR.get_book_keeper(), size)
52 }
53 
alloc_align(size: usize, alignment: usize) -> *mut c_void54 pub unsafe fn alloc_align(size: usize, alignment: usize) -> *mut c_void {
55     mspace_memalign(ALLOCATOR.get_book_keeper(), alignment, size)
56 }
57 
realloc(ptr: *mut c_void, size: size_t) -> *mut c_void58 pub unsafe fn realloc(ptr: *mut c_void, size: size_t) -> *mut c_void {
59     mspace_realloc(ALLOCATOR.get_book_keeper(), ptr, size)
60 }
61 
free(ptr: *mut c_void)62 pub unsafe fn free(ptr: *mut c_void) {
63     mspace_free(ALLOCATOR.get_book_keeper(), ptr)
64 }
65 
66 #[cfg(not(target_os = "dragonos"))]
new_mspace() -> usize67 pub fn new_mspace() -> usize {
68     unsafe { create_mspace(0, 0) }
69 }
70 
#[cfg(target_os = "dragonos")]
/// Create a fresh dlmalloc mspace carved out of static storage and return
/// its opaque handle. DragonOS builds have no dynamic backing here, so at
/// most two mspaces (2 KiB each) can ever be created.
pub fn new_mspace() -> usize {
    use core::sync::atomic::AtomicU8;

    // Backing storage: exactly two 128*16-byte arenas, so only indices 0
    // and 1 are valid.
    static mut SPACE: [[u8; 128 * 16]; 2] = [[0; 128 * 16]; 2];
    static COUNT: AtomicU8 = AtomicU8::new(0);

    let idx = COUNT.fetch_add(1, Ordering::Relaxed);
    // BUGFIX: the original guard was `idx > 2`, which let a third call
    // (idx == 2) through to an out-of-bounds index on SPACE; `>=` rejects
    // it with the intended diagnostic instead.
    if idx >= 2 {
        panic!("new_mspace: too many mspace");
    }

    // SAFETY: the atomic counter hands each call a distinct slot of SPACE,
    // so no two mspaces alias the same static arena.
    let handle = unsafe {
        create_mspace_with_base(SPACE[idx as usize].as_mut_ptr() as *mut c_void, 128 * 16, 0)
    };
    println!("new_mspace: {:#018x}", handle);
    handle
}
91