// rune/core/gc/context.rs

1use super::GcState;
2use super::Trace;
3use crate::core::object::GcString;
4use crate::core::object::LispHashTable;
5use crate::core::object::{Gc, IntoObject, Object, UninternedSymbolMap, WithLifetime};
6use bumpalo::collections::Vec as GcVec;
7use std::cell::{Cell, RefCell};
8use std::fmt::Debug;
9use std::ops::Deref;
10use std::sync::atomic::AtomicBool;
11
/// A global store of all gc roots. This struct should be passed to the [Context]
/// when it is created.
#[derive(Default, Debug)]
pub(crate) struct RootSet {
    // Raw pointers to every live rooted value. A root pushes itself here when
    // created and must remove itself before it is dropped (see the SAFETY
    // comment where these pointers are dereferenced during collection).
    pub(super) roots: RefCell<Vec<*const dyn Trace>>,
}
18
#[expect(dead_code)]
// These types are only stored here so they can be dropped
pub(in crate::core) enum DropStackElem {
    // Owned string buffer kept alive until the object is copied into the GC heap.
    String(String),
    // Owned byte-string buffer kept alive until copied into the GC heap.
    ByteString(Vec<u8>),
    // Owned vector of objects kept alive until copied into the GC heap.
    Vec(Vec<Object<'static>>),
}
26
/// A block of allocations. This type should be owned by [Context] and not used
/// directly.
#[derive(Default)]
pub(crate) struct Block<const CONST: bool> {
    // Bump arena holding every object allocated through this block.
    pub(in crate::core) objects: bumpalo::Bump,
    // Allocations that will be dropped when the objects are moved. At that time
    // the allocation will get copied into the GC heap. This lets us avoid an
    // extra copy of memory when a vector is first made an object. The
    // allocation can continue to live outside of the GC heap until we copy the
    // object.
    pub(in crate::core) drop_stack: RefCell<Vec<DropStackElem>>,
    // We don't yet have a hashmap that supports allocators, so we need to keep
    // track of the memory and free it only after the table is garbage
    // collected. Kind of a hack.
    pub(in crate::core) lisp_hashtables: RefCell<Vec<*const LispHashTable>>,
    // Interning map for symbols that are not in the global obarray.
    pub(in crate::core) uninterned_symbol_map: UninternedSymbolMap,
}
44
// SAFETY(review): Block contains `RefCell` and `bumpalo::Bump`, which are not
// `Sync` but can be *moved* between threads. This impl presumably relies on a
// Block being transferred whole (never shared) and on no raw pointers into
// `objects` outliving the transfer — confirm that invariant at the send sites.
unsafe impl<const C: bool> Send for Block<C> {}
46
/// Owns all allocations and creates objects. All objects have
/// a lifetime tied to the borrow of their `Context`. When the
/// `Context` goes out of scope, no objects should be accessible.
pub(crate) struct Context<'rt> {
    pub(crate) block: Block<false>,
    // Shared registry of roots that are traced during garbage collection.
    root_set: &'rt RootSet,
    // Allocated-bytes threshold that triggers the next automatic collection.
    next_limit: usize,
}
55
56impl Drop for Context<'_> {
57    fn drop(&mut self) {
58        self.garbage_collect(true);
59        if self.block.objects.allocated_bytes() == 0 {
60            return;
61        }
62        if std::thread::panicking() {
63            eprintln!("Error: Context was dropped while still holding data");
64        } else {
65            panic!("Error: Context was dropped while still holding data");
66        }
67    }
68}
69
thread_local! {
    /// Ensure there is only one context per thread.
    // Set to true while a (checked) Block/Context is alive on this thread.
    static SINGLETON_CHECK: Cell<bool> = const { Cell::new(false) };
}

/// Ensure there is only one global context.
// Flipped to true exactly once by `Block::<true>::new_global`.
static GLOBAL_CHECK: AtomicBool = AtomicBool::new(false);
77
impl Block<true> {
    /// Create the global block. May be called at most once per process; a
    /// second call fails the `compare_exchange` and trips the assertion.
    pub(crate) fn new_global() -> Self {
        use std::sync::atomic::Ordering::SeqCst as Ord;
        // Only the first caller sees GLOBAL_CHECK == false and wins the exchange.
        assert!(GLOBAL_CHECK.compare_exchange(false, true, Ord, Ord).is_ok());
        Self::default()
    }
}
85
86impl Block<false> {
87    pub(crate) fn new_local() -> Self {
88        Self::assert_unique();
89        Self::default()
90    }
91
92    pub(crate) fn new_local_unchecked() -> Self {
93        Self::default()
94    }
95
96    pub(crate) fn assert_unique() {
97        SINGLETON_CHECK.with(|x| {
98            assert!(!x.get(), "There was already and active context when this context was created");
99            x.set(true);
100        });
101    }
102}
103
impl<const CONST: bool> Block<CONST> {
    /// Allocate `obj` in this block and return it as a general [`Object`].
    pub(crate) fn add<'ob, T, Tx>(&'ob self, obj: T) -> Object<'ob>
    where
        T: IntoObject<Out<'ob> = Tx>,
        Gc<Tx>: Into<Object<'ob>>,
    {
        obj.into_obj(self).into()
    }

    /// Allocate `obj` in this block and return it as a specific tagged
    /// pointer type `Gc<V>` instead of the general `Object`.
    pub(crate) fn add_as<'ob, T, Tx, V>(&'ob self, obj: T) -> Gc<V>
    where
        T: IntoObject<Out<'ob> = Tx>,
        Gc<Tx>: Into<Gc<V>>,
    {
        obj.into_obj(self).into()
    }

    /// Create a new String whose backing storage is already part of the GC
    /// heap. Does not require dropping when moved during garbage collection
    /// (unlike std::string).
    pub(crate) fn string_with_capacity(&self, cap: usize) -> GcString<'_> {
        GcString::with_capacity_in(cap, &self.objects)
    }

    /// Create a new Vec whose backing storage is already part of the GC
    /// heap. Does not require dropping when moved during garbage collection
    /// (unlike std::vec).
    pub(crate) fn vec_new(&self) -> GcVec<'_, Object<'_>> {
        GcVec::new_in(&self.objects)
    }

    /// Like [`Self::vec_new`], but preallocates room for `cap` elements.
    pub(crate) fn vec_with_capacity(&self, cap: usize) -> GcVec<'_, Object<'_>> {
        GcVec::with_capacity_in(cap, &self.objects)
    }
}
139
140impl<'ob, 'rt> Context<'rt> {
141    const MIN_GC_BYTES: usize = 2000;
142    const GC_GROWTH_FACTOR: usize = 12; // divide by 10
143    pub(crate) fn new(roots: &'rt RootSet) -> Self {
144        Self { block: Block::new_local(), root_set: roots, next_limit: Self::MIN_GC_BYTES }
145    }
146
147    pub(crate) fn from_block(block: Block<false>, roots: &'rt RootSet) -> Self {
148        Block::assert_unique();
149        Context { block, root_set: roots, next_limit: Self::MIN_GC_BYTES }
150    }
151
152    pub(crate) fn bind<T>(&'ob self, obj: T) -> <T as WithLifetime<'ob>>::Out
153    where
154        T: WithLifetime<'ob>,
155    {
156        unsafe { obj.with_lifetime() }
157    }
158
159    pub(crate) fn get_root_set(&'ob self) -> &'rt RootSet {
160        self.root_set
161    }
162
163    pub(crate) fn garbage_collect(&mut self, force: bool) {
164        let bytes = self.block.objects.allocated_bytes();
165        if cfg!(not(test)) && !force && bytes < self.next_limit {
166            return;
167        }
168
169        let mut state = GcState::new();
170        for x in self.root_set.roots.borrow().iter() {
171            // SAFETY: The contract of root structs will ensure that it removes
172            // itself from this list before it drops.
173            unsafe {
174                (**x).trace(&mut state);
175            }
176        }
177
178        state.trace_stack();
179
180        self.next_limit = (state.to_space.allocated_bytes() * Self::GC_GROWTH_FACTOR) / 10;
181        self.block.drop_stack.borrow_mut().clear();
182        // Find all hashtables that have not been moved (i.e. They are no longer
183        // accessible) and drop them. Otherwise, update the object pointer.
184        self.block.lisp_hashtables.borrow_mut().retain_mut(|ptr| {
185            let table = unsafe { &**ptr };
186            if let Some(fwd) = table.forwarding_ptr() {
187                *ptr = fwd.as_ptr().cast::<LispHashTable>();
188                true
189            } else {
190                unsafe { std::ptr::drop_in_place(*ptr as *mut LispHashTable) };
191                false
192            }
193        });
194
195        self.block.objects = state.to_space;
196    }
197}
198
// Let a `Context` be used wherever a `Block<false>` is expected, so allocation
// helpers like `add` can be called directly on the context.
impl Deref for Context<'_> {
    type Target = Block<false>;

    fn deref(&self) -> &Self::Target {
        &self.block
    }
}
206
// Explicit `AsRef` conversion for APIs that take `impl AsRef<Block<false>>`.
impl AsRef<Block<false>> for Context<'_> {
    fn as_ref(&self) -> &Block<false> {
        &self.block
    }
}
212
impl<const CONST: bool> Drop for Block<CONST> {
    // Only one block can exist in a thread at a time. This part of that
    // contract.
    fn drop(&mut self) {
        // Release the thread-local singleton flag so a new context may be made.
        // NOTE(review): blocks from `new_local_unchecked` and `new_global`
        // never *set* this flag, so dropping one either trips this assertion
        // or clears another context's flag — confirm such blocks are only
        // dropped where a checked context's flag is active.
        SINGLETON_CHECK.with(|s| {
            assert!(s.get(), "Context singleton check was overwritten");
            s.set(false);
        });
    }
}
223
224#[cfg(test)]
225mod test {
226    use rune_core::macros::{list, rebind, root};
227
228    use crate::core::{
229        cons::Cons,
230        object::{HashTable, ObjectType, Symbol},
231    };
232
233    use super::*;
    // Helper that hands back an object tied to a *mutable* borrow of the
    // context, used to exercise `rebind!` in `test_reborrow`.
    fn bind_to_mut<'ob>(cx: &'ob mut Context) -> Object<'ob> {
        cx.add("invariant")
    }
237
    #[test]
    fn test_reborrow() {
        let roots = &RootSet::default();
        let cx = &mut Context::new(roots);
        // `rebind!` extends the object's lifetime past the mutable borrow so
        // the context can be used again below.
        let obj = rebind!(bind_to_mut(cx));
        _ = "foo".into_obj(cx);
        assert_eq!(obj, "invariant");
    }
246
    #[test]
    fn test_garbage_collect() {
        let roots = &RootSet::default();
        let cx = &mut Context::new(roots);
        root!(vec, new(Vec), cx);
        // Collect with an empty root vec, then again after rooting a list,
        // verifying rooted data survives a forced collection.
        cx.garbage_collect(true);
        let cons = list!["foo", 1, false, "end"; cx];
        vec.push(cons);
        cx.garbage_collect(true);
    }
257
    #[test]
    fn test_move_values() {
        let roots = &RootSet::default();
        let cx = &mut Context::new(roots);
        // Build one of each major heap object: int, float, cons, string,
        // uninterned symbol, and a hashtable keyed by the symbol.
        let int = cx.add(1);
        let float = cx.add(1.5);
        let cons: Object = Cons::new(int, float, cx).into();
        let string = cx.add("string");
        let symbol = cx.add(Symbol::new_uninterned("sym", cx));
        println!("sym: {:?}", symbol.into_raw());
        let mut table = HashTable::default();
        table.insert(symbol, string);
        let _ = table.get(&symbol).unwrap();
        root!(symbol, cx);
        let table = cx.add(table);
        let vec = vec![cons, table];
        let vec = cx.add(vec);
        root!(vec, cx);
        // Force a collection: every rooted object must be moved to the new
        // heap with its contents (and hashtable keys) still intact.
        cx.garbage_collect(true);
        let vec = vec.bind(cx);
        let ObjectType::Vec(vec) = vec.untag() else { unreachable!() };
        let ObjectType::Cons(cons) = vec[0].get().untag() else { unreachable!() };
        let ObjectType::HashTable(table) = vec[1].get().untag() else { unreachable!() };
        let key = symbol.bind(cx);
        println!("key: {:?}", key.into_raw());
        // Lookup must still succeed after the key's symbol was moved.
        let val = table.get(symbol.bind(cx)).unwrap();
        let ObjectType::String(string) = val.untag() else { unreachable!() };
        let ObjectType::Int(int) = cons.car().untag() else { unreachable!() };
        let ObjectType::Float(float) = cons.cdr().untag() else { unreachable!() };
        assert_eq!(string, "string");
        assert_eq!(**float, 1.5);
        assert_eq!(int, 1);
    }
291}