// proc_macro/bridge/arena.rs

//! A minimal arena allocator inspired by `rustc_arena::DroplessArena`.
//!
//! This is unfortunately a minimal re-implementation rather than a dependency
//! as it is difficult to depend on crates from within `proc_macro`, due to it
//! being built at the same time as `std`.
6
use std::cell::{Cell, RefCell};
use std::mem::MaybeUninit;
use std::ops::Range;
use std::{cmp, ptr, slice, str};
11
// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;
19
/// A minimal arena allocator inspired by `rustc_arena::DroplessArena`.
///
/// This is unfortunately a complete re-implementation rather than a dependency
/// as it is difficult to depend on crates from within `proc_macro`, due to it
/// being built at the same time as `std`.
///
/// This arena doesn't have support for allocating anything other than byte
/// slices, as that is all that is necessary.
pub(crate) struct Arena {
    // Lowest address of the remaining free space in the current chunk.
    // Null until the first chunk is allocated by `grow`.
    start: Cell<*mut MaybeUninit<u8>>,
    // One past the highest address of the remaining free space; allocation
    // bumps this pointer *downwards* towards `start`. Null until first `grow`.
    end: Cell<*mut MaybeUninit<u8>>,
    // Owns every chunk ever allocated, keeping the backing memory alive for
    // the lifetime of the arena; handed-out slices point into these boxes.
    chunks: RefCell<Vec<Box<[MaybeUninit<u8>]>>>,
}
33
34impl Arena {
35 pub(crate) fn new() -> Self {
36 Arena {
37 start: Cell::new(ptr::null_mut()),
38 end: Cell::new(ptr::null_mut()),
39 chunks: RefCell::new(Vec::new()),
40 }
41 }
42
43 /// Add a new chunk with at least `additional` free bytes.
44 #[inline(never)]
45 #[cold]
46 fn grow(&self, additional: usize) {
47 let mut chunks = self.chunks.borrow_mut();
48 let mut new_cap;
49 if let Some(last_chunk) = chunks.last_mut() {
50 // If the previous chunk's len is less than HUGE_PAGE
51 // bytes, then this chunk will be least double the previous
52 // chunk's size.
53 new_cap = last_chunk.len().min(HUGE_PAGE / 2);
54 new_cap *= 2;
55 } else {
56 new_cap = PAGE;
57 }
58 // Also ensure that this chunk can fit `additional`.
59 new_cap = cmp::max(additional, new_cap);
60
61 let mut chunk = Box::new_uninit_slice(new_cap);
62 let Range { start, end } = chunk.as_mut_ptr_range();
63 self.start.set(start);
64 self.end.set(end);
65 chunks.push(chunk);
66 }
67
68 /// Allocates a byte slice with specified size from the current memory
69 /// chunk. Returns `None` if there is no free space left to satisfy the
70 /// request.
71 #[allow(clippy::mut_from_ref)]
72 fn alloc_raw_without_grow(&self, bytes: usize) -> Option<&mut [MaybeUninit<u8>]> {
73 let start = self.start.get().addr();
74 let old_end = self.end.get();
75 let end = old_end.addr();
76
77 let new_end = end.checked_sub(bytes)?;
78 if start <= new_end {
79 let new_end = old_end.with_addr(new_end);
80 self.end.set(new_end);
81 // SAFETY: `bytes` bytes starting at `new_end` were just reserved.
82 Some(unsafe { slice::from_raw_parts_mut(new_end, bytes) })
83 } else {
84 None
85 }
86 }
87
88 fn alloc_raw(&self, bytes: usize) -> &mut [MaybeUninit<u8>] {
89 if bytes == 0 {
90 return &mut [];
91 }
92
93 loop {
94 if let Some(a) = self.alloc_raw_without_grow(bytes) {
95 break a;
96 }
97 // No free space left. Allocate a new chunk to satisfy the request.
98 // On failure the grow will panic or abort.
99 self.grow(bytes);
100 }
101 }
102
103 #[allow(clippy::mut_from_ref)] // arena allocator
104 pub(crate) fn alloc_str<'a>(&'a self, string: &str) -> &'a mut str {
105 let alloc = self.alloc_raw(string.len());
106 let bytes = alloc.write_copy_of_slice(string.as_bytes());
107
108 // SAFETY: we convert from `&str` to `&[u8]`, clone it into the arena,
109 // and immediately convert the clone back to `&str`.
110 unsafe { str::from_utf8_unchecked_mut(bytes) }
111 }
112}