diff --git a/main/crates.js b/main/crates.js index 889cf7b..7f164b8 100644 --- a/main/crates.js +++ b/main/crates.js @@ -1 +1 @@ -window.ALL_CRATES = ["musegc"]; \ No newline at end of file +window.ALL_CRATES = ["refuse"]; \ No newline at end of file diff --git a/main/help.html b/main/help.html index 5b939e0..085d8c1 100644 --- a/main/help.html +++ b/main/help.html @@ -1,2 +1,2 @@ -Help -

Rustdoc help

Back
\ No newline at end of file +Help +

Rustdoc help

Back
\ No newline at end of file diff --git a/main/musegc/sidebar-items.js b/main/musegc/sidebar-items.js deleted file mode 100644 index aff6237..0000000 --- a/main/musegc/sidebar-items.js +++ /dev/null @@ -1 +0,0 @@ -window.SIDEBAR_ITEMS = {"fn":["collect","collected"],"struct":["AnyRef","CollectionGuard","CollectionStarting","Ref","Root","Tracer"],"trait":["Collectable","ContainsNoRefs","MapAs","NoMapping","SimpleType","Trace"]}; \ No newline at end of file diff --git a/main/musegc/trait.NoMapping.html b/main/musegc/trait.NoMapping.html deleted file mode 100644 index e5c916a..0000000 --- a/main/musegc/trait.NoMapping.html +++ /dev/null @@ -1,4 +0,0 @@ -NoMapping in musegc - Rust -

Trait musegc::NoMapping

source ·
pub trait NoMapping { }
Expand description

A type that implements MapAs with an empty implementation.

-

Implementations on Foreign Types§

source§

impl<T> NoMapping for Vec<T>

source§

impl<T, const N: usize> NoMapping for [T; N]

Implementors§

source§

impl<T> NoMapping for T
where - T: SimpleType,

\ No newline at end of file diff --git a/main/musegc/trait.SimpleType.html b/main/musegc/trait.SimpleType.html deleted file mode 100644 index aca830d..0000000 --- a/main/musegc/trait.SimpleType.html +++ /dev/null @@ -1,6 +0,0 @@ -SimpleType in musegc - Rust -

Trait musegc::SimpleType

source ·
pub trait SimpleType { }
Expand description

A type that can contain no Ref<T>s and has an empty MapAs -implementation.

-

Implementing this trait for a type automatically implements NoMapping -and ContainsNoRefs, which makes the type Collectable.

-

Implementations on Foreign Types§

source§

impl SimpleType for i8

source§

impl SimpleType for i16

source§

impl SimpleType for i32

source§

impl SimpleType for i64

source§

impl SimpleType for i128

source§

impl SimpleType for isize

source§

impl SimpleType for u8

source§

impl SimpleType for u16

source§

impl SimpleType for u32

source§

impl SimpleType for u64

source§

impl SimpleType for u128

source§

impl SimpleType for usize

Implementors§

\ No newline at end of file diff --git a/main/musegc/trait.Trace.html b/main/musegc/trait.Trace.html deleted file mode 100644 index 66772f1..0000000 --- a/main/musegc/trait.Trace.html +++ /dev/null @@ -1,20 +0,0 @@ -Trace in musegc - Rust -

Trait musegc::Trace

source ·
pub trait Trace {
-    const MAY_CONTAIN_REFERENCES: bool;
-
-    // Required method
-    fn trace(&self, tracer: &mut Tracer<'_>);
-}
Expand description

A type that can find and mark any references it has.

-

Required Associated Constants§

source

const MAY_CONTAIN_REFERENCES: bool

If true, this type may contain references and should have its trace() -function invoked during the collector’s “mark” phase.

-

Required Methods§

source

fn trace(&self, tracer: &mut Tracer<'_>)

Traces all references that this value references.

-

This function should invoke Tracer::mark() for each Ref<T> it -contains. Failing to do so will allow the garbage collector to free the -data, preventing the ability to load() the data in the -future.

-

Object Safety§

This trait is not object safe.

Implementations on Foreign Types§

source§

impl<T> Trace for Vec<T>
where - T: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<T, const N: usize> Trace for [T; N]
where - T: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

Implementors§

source§

impl<T> Trace for Ref<T>
where - T: Collectable,

source§

impl<T> Trace for Root<T>
where - T: Collectable,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

source§

impl<T> Trace for T
where - T: ContainsNoRefs,

\ No newline at end of file diff --git a/main/musegc/all.html b/main/refuse/all.html similarity index 94% rename from main/musegc/all.html rename to main/refuse/all.html index 27ccb3c..ad1a661 100644 --- a/main/musegc/all.html +++ b/main/refuse/all.html @@ -1,2 +1,2 @@ -List of all items in this crate -

List of all items

Structs

Traits

Functions

\ No newline at end of file +List of all items in this crate +

List of all items

Structs

Traits

Functions

\ No newline at end of file diff --git a/main/refuse/architecture/index.html b/main/refuse/architecture/index.html new file mode 100644 index 0000000..e0933b1 --- /dev/null +++ b/main/refuse/architecture/index.html @@ -0,0 +1,100 @@ +refuse::architecture - Rust +

Module refuse::architecture

source ·
Expand description

Architecture overview of the underlying design of Refuse.

+

§Overview

+

Refuse is an incremental, tracing garbage collector. Incremental garbage +collectors can only run when they know no threads are currently accessing +collectable memory. This fits the access pattern of an RwLock: the +collector can acquire a “write” lock to ensure that all other threads can’t +read while it is running.

+

Originally, Refuse used an RwLock and a shared allocation arena. This did +not perform well in multi-threaded benchmarks. So the global RwLock was +replaced with an atomic that tracks the number of currently acquired +CollectionGuards and whether the collector is currently trying to start +collection.

+

Each thread allocates its own independent allocation arena and stores a copy +of it in thread-local storage. It also registers a copy with the global +collector. Refuse’s public API ensures that no access is provided to the +local thread’s data without first having acquired a CollectionGuard. +This ensures that the collector can guarantee exclusive access to the +underlying data.

+

§Allocation Arenas

+

Each thread is given its own allocation arena, which is a data structure +designed for concurrently reading portions of its data while still being +able to perform new allocations from the owning thread.

+

At the root of each arena is a map of types to type-erased Bin<T>s. A +Bin<T> is the root of a linked list of Slabs<T>. Each Slabs<T> +contains a list of Slab<T>s and an optional next Slabs<T>. Each +Slab<T> holds 256 Slot<T>s. Each slot is a combination of the slot’s +state and the slot’s data.

+
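A rough sketch of this layout follows, using the names from the documentation. The exact field types, the use of Box and Vec, and the interior-mutability details are assumptions for illustration rather than Refuse’s actual definitions:

use std::mem::MaybeUninit;
use std::sync::atomic::AtomicU64;

// Root of one type's allocations within a thread's arena.
struct Bin<T> {
    slabs: Slabs<T>,
}

// One node in the linked list of slab groups.
struct Slabs<T> {
    slabs: Vec<Box<Slab<T>>>,
    // Appended while other threads continue reading earlier nodes.
    next: Option<Box<Slabs<T>>>,
}

// A fixed-size block of 256 slots.
struct Slab<T> {
    slots: [Slot<T>; 256],
}

// A slot pairs an atomic state word with possibly-uninitialized data.
struct Slot<T> {
    state: AtomicU64,
    value: MaybeUninit<T>,
}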

The slot’s state is stored in an atomic and keeps track of the following (a bit-layout sketch appears after this list):

+
    +
  • A 32-bit generation. When loading a Ref, its generation is validated +to ensure it is the same allocation.
  • +
  • Whether the slot is allocated or not
  • +
  • Garbage collector marking bits
  • +
+
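One plausible packing of that state into a single 64-bit atomic word is sketched below. The 32-bit generation is documented above; the exact positions and widths of the allocated flag and the marking bits are assumptions:

const GENERATION_MASK: u64 = 0xFFFF_FFFF; // bits 0..32: allocation generation
const ALLOCATED: u64 = 1 << 32;           // bit 32: slot is currently allocated
const MARK_SHIFT: u32 = 33;               // bits 33..: collector marking bits

fn generation(state: u64) -> u32 {
    (state & GENERATION_MASK) as u32
}

fn is_allocated(state: u64) -> bool {
    state & ALLOCATED != 0
}

fn mark_bits(state: u64) -> u64 {
    state >> MARK_SHIFT
}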

The owning thread and the collector are the only ones that can modify +non-atomic data in a Bin<T>. Other threads may need to load a reference to +a Ref<T>’s underlying data while the owning thread is allocating. This is +made safe by:

+
    +
  • Because allocations for a new slab can’t be referred to until after the +allocating function returns, we can update Slabs::next safely while +other threads read the data structure.
  • +
  • Each slot’s state is controlled with atomic operations. This ensures +consistent access for both the reading thread and the allocating thread.
  • +
  • The slot’s state is generational, minimizing the chance of an invalid +reference being promoted. Even if a “stale” ref contains a reused +generation, the load will still point to valid data because of the order +of initialization.
  • +
+

§Collection

+

Refuse is a naive, mark-and-sweep collector. Each collection is +divided into three phases:

+
    +
  • Exclusive Access Acquisition
  • +
  • Tracing and marking
  • +
  • Sweeping
  • +
+

Refuse keeps track of two metrics:

+
    +
  • average_collection_locking: The average duration to acquire exclusive +access.
  • +
  • average_collection: The average duration of a total collection process, +including exclusive access acquisition.
  • +
+

§Exclusive Access Acquisition

+

Refuse’s goal is to be usable in nearly any application, including +games. Games typically do not want to dip below 60 frames per second (FPS), +which means that if a garbage collection pause is longer than 16ms, it will +cause FPS drops.

+

Refuse tries to minimize pauses by waiting for exclusive access only for a +multiple of average_collection_locking. If access isn’t acquired by the +deadline, collection is rescheduled again in the near future with an +increased multiple. If this process fails several times consecutively, +garbage collection will be forced by waiting indefinitely.

+

Access is controlled by a single AtomicUsize. A single bit keeps track +of whether the collector is trying to collect or not. The remaining bits +keep track of how many CollectionGuards are acquired and not yielding.

+

CollectionGuard::acquire() checks if the collection bit is set. If it +is, it waits until the current collection finishes and checks again. If the +bit is not set, the count is atomically incremented.

+

When the final CollectionGuard drops or yields, it notifies the +collector thread so that it can begin collecting.

+
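A sketch of this protocol is shown below. The constants, memory orderings, and wake-up mechanism are illustrative assumptions, not Refuse’s actual source:

use std::sync::atomic::{AtomicUsize, Ordering};

static STATE: AtomicUsize = AtomicUsize::new(0);
const COLLECTING: usize = 1; // low bit: the collector wants exclusive access
const ONE_GUARD: usize = 2;  // remaining bits: count of acquired guards

fn acquire_guard() {
    loop {
        let state = STATE.load(Ordering::Acquire);
        if state & COLLECTING != 0 {
            // A collection is starting or running; wait and re-check.
            std::thread::yield_now();
            continue;
        }
        // No collection pending: atomically increment the guard count.
        if STATE
            .compare_exchange(state, state + ONE_GUARD, Ordering::AcqRel, Ordering::Acquire)
            .is_ok()
        {
            return;
        }
    }
}

fn release_guard() {
    let previous = STATE.fetch_sub(ONE_GUARD, Ordering::AcqRel);
    if previous == COLLECTING | ONE_GUARD {
        // The final guard released while the collector was waiting:
        // notify the collector thread so it can begin collecting.
    }
}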

§Tracing and marking

+

The goal of this phase is to identify all allocations that can currently be +reached by any Root<T>. When a slot is initially allocated, the marking +bits are 0. Each time the collector runs, a new non-zero marking-bits value is +selected by incrementing the previous value and skipping 0 on wrap.

+

All Bin<T>s of all threads are scanned for any Slot<T> that is allocated +and has a non-zero root count. Each allocated slot is then marked. If the +slot didn’t already contain the current marking bits, it is Traced, +which allows any references found to be marked.

+

This process continues until all found references are marked and traced.

+
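Selecting the next marking-bits value can be expressed as a one-line rotation; the u8 width here is an assumption about the mark field’s size:

fn next_mark_bits(previous: u8) -> u8 {
    // Increment, skipping 0 on wrap so freshly allocated slots
    // (which start at 0) are never mistaken for marked slots.
    previous.wrapping_add(1).max(1)
}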

§Sweeping

+

The goal of this phase is to free allocations that are no longer reachable. +This is done by scanning all Bin<T>s of all threads looking for any +allocated Slot<T>s that do not contain the current marking bits. When +found, the slot is deallocated and contained data has its Drop +implementation invoked.

+
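In terms of the slot-state helpers sketched earlier, a sweep over a single slab might look like this (illustrative only; the actual deallocation details are omitted):

fn sweep_slab<T>(slab: &mut Slab<T>, current_mark: u64) {
    for slot in &mut slab.slots {
        let state = *slot.state.get_mut();
        if is_allocated(state) && mark_bits(state) != current_mark {
            // Unreachable: run the value's Drop implementation, clear
            // the allocated bit, and bump the generation for reuse.
        }
    }
}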
\ No newline at end of file diff --git a/main/refuse/architecture/sidebar-items.js b/main/refuse/architecture/sidebar-items.js new file mode 100644 index 0000000..5244ce0 --- /dev/null +++ b/main/refuse/architecture/sidebar-items.js @@ -0,0 +1 @@ +window.SIDEBAR_ITEMS = {}; \ No newline at end of file diff --git a/main/musegc/fn.collect.html b/main/refuse/fn.collect.html similarity index 84% rename from main/musegc/fn.collect.html rename to main/refuse/fn.collect.html index 9b0bc73..9379a93 100644 --- a/main/musegc/fn.collect.html +++ b/main/refuse/fn.collect.html @@ -1,6 +1,6 @@ -collect in musegc - Rust -

Function musegc::collect

source ·
pub fn collect()
Expand description

Invokes the garbage collector.

-

This function will deadlock if any CollectionGuards are held by the +collect in refuse - Rust

+

Function refuse::collect

source ·
pub fn collect()
Expand description

Invokes the garbage collector.

+

This function will deadlock if any CollectionGuards are held by the current thread when invoked. If a guard is held, consider calling -CollectionGuard::collect() instead.

+CollectionGuard::collect() instead.

\ No newline at end of file diff --git a/main/musegc/fn.collected.html b/main/refuse/fn.collected.html similarity index 86% rename from main/musegc/fn.collected.html rename to main/refuse/fn.collected.html index a4028f4..ce4b1be 100644 --- a/main/musegc/fn.collected.html +++ b/main/refuse/fn.collected.html @@ -1,8 +1,8 @@ -collected in musegc - Rust -

Function musegc::collected

source ·
pub fn collected<R>(wrapped: impl FnOnce() -> R) -> R
Expand description

Executes wrapped with garbage collection available.

+collected in refuse - Rust +

Function refuse::collected

source ·
pub fn collected<R>(wrapped: impl FnOnce() -> R) -> R
Expand description

Executes wrapped with garbage collection available.

This function installs a garbage collector for this thread, if needed. Repeated and nested calls are allowed.

-

Invoking CollectionGuard::acquire() within wrapped will return a +

Invoking CollectionGuard::acquire() within wrapped will return a result, while invoking it outside of a collected context will panic.

This function utilizes Rust’s thread-local storage.

\ No newline at end of file diff --git a/main/musegc/index.html b/main/refuse/index.html similarity index 79% rename from main/musegc/index.html rename to main/refuse/index.html index db4f00e..bba5250 100644 --- a/main/musegc/index.html +++ b/main/refuse/index.html @@ -1,16 +1,12 @@ -musegc - Rust -

Crate musegc

source ·
Expand description

§musegc

-
-

This name is a placeholder. The design of this crate has nothing to do with -Muse, so naming it muse-gc seems misleading.

-
+refuse - Rust +

Crate refuse

source ·
Expand description

§Refuse

An easy-to-use, incremental, multi-threaded garbage collector for Rust.

-
use musegc::{CollectionGuard, Root, Ref};
+
use refuse::{CollectionGuard, Root, Ref};
 
 // Execute a closure with access to a garbage collector.
-musegc::collected(|| {
+refuse::collected(|| {
     let guard = CollectionGuard::acquire();
     // Allocate a vec![Ref(1), Ref(2), Ref(3)].
     let values: Vec<Ref<u32>> = (1..=3).map(|value| Ref::new(value, &guard)).collect();
@@ -19,7 +15,7 @@
 
     // Manually execute the garbage collector. Our data will not be freed,
     // since `values` is a "root" reference.
-    musegc::collect();
+    refuse::collect();
 
     // Root references allow direct access to their data, even when a
     // `CollectionGuard` isn't held.
@@ -36,6 +32,8 @@
     guard.collect();
     assert_eq!(one.load(&guard), None);
 });
+

As the version number indicates, this crate is in early development. No semver +compatibility will be provided until 0.1.0.

§Motivation

While working on Muse, @Ecton recognized the need for garbage collection to prevent untrusted scripts from uncontrollably leaking @@ -67,15 +65,25 @@

§Safety

MIRIFLAGS="-Zmiri-permissive-provenance -Zmiri-ignore-leaks" cargo +nightly miri test
 
    -
  • -Zmiri-permissive-provenance: parking_lot internally casts a usize to a +
  • +

-Zmiri-permissive-provenance: parking_lot internally casts a usize to a pointer, which breaks pointer provenance rules. Pointer provenance is currently only an experimental model, and nothing this collector is using from parking_lot couldn’t be implemented in a fashion that honors pointer provenance. Thus, this library’s author considers this an implementation -detail that can be ignored.

  • -
  • -Zmiri-ignore-leaks: This crate uses thread local storage which is -documented to not always run destructors for local keys on the main thread, as -some platforms abort rather than performing cleanup code.
  • +detail that can be ignored.

    + +
  • +

    -Zmiri-ignore-leaks: This crate spawns a global collector thread that never +shuts down. Miri detects that the main thread does not wait for spawned +threads to shut down and warns about this potential memory leak. When a thread +is shut down and all of its data is no longer reachable, the thread storage +will be cleaned up. However, the collector never shuts down and assumes that +new threads could still be spawned at any given time.

    +

Additionally, on some platforms the main thread’s thread-local storage may not +be cleaned up when the main thread exits, according to LocalKey’s +documentation.

    +

This crate exposes a safe API that guarantees no undefined behavior can be triggered by incorrectly using the API or implementing the Collectable trait @@ -146,6 +154,6 @@

§1 thread

-

Structs§

  • A type-erased garbage collected reference.
  • A guard that prevents garbage collection while held.
  • A lock has been established by the collector on data needed to resolve a -reference.
  • A reference to data stored in a garbage collector.
  • A root reference to a T that has been allocated in the garbage collector.
  • A tracer for the garbage collector.

Traits§

Functions§

  • Invokes the garbage collector.
  • Executes wrapped with garbage collection available.
\ No newline at end of file +

Modules§

  • Architecture overview of the underlying design of Refuse.

Structs§

  • A type-erased garbage collected reference.
  • A guard that prevents garbage collection while held.
  • A lock has been established by the collector on data needed to resolve a +reference.
  • A reference to data stored in a garbage collector.
  • A root reference to a T that has been allocated in the garbage collector.
  • A tracer for the garbage collector.

Traits§

Functions§

  • Invokes the garbage collector.
  • Executes wrapped with garbage collection available.
\ No newline at end of file diff --git a/main/refuse/sidebar-items.js b/main/refuse/sidebar-items.js new file mode 100644 index 0000000..8dd73b7 --- /dev/null +++ b/main/refuse/sidebar-items.js @@ -0,0 +1 @@ +window.SIDEBAR_ITEMS = {"fn":["collect","collected"],"mod":["architecture"],"struct":["AnyRef","CollectionGuard","CollectionStarting","Ref","Root","Tracer"],"trait":["Collectable","ContainsNoRefs","MapAs","NoMapping","SimpleType","Trace"]}; \ No newline at end of file diff --git a/main/musegc/struct.AnyRef.html b/main/refuse/struct.AnyRef.html similarity index 91% rename from main/musegc/struct.AnyRef.html rename to main/refuse/struct.AnyRef.html index b94b800..b802814 100644 --- a/main/musegc/struct.AnyRef.html +++ b/main/refuse/struct.AnyRef.html @@ -1,17 +1,17 @@ -AnyRef in musegc - Rust -

Struct musegc::AnyRef

source ·
pub struct AnyRef { /* private fields */ }
Expand description

A type-erased garbage collected reference.

-

Implementations§

source§

impl AnyRef

source

pub fn downcast_ref<T>(&self) -> Option<Ref<T>>
where - T: Collectable,

Returns a Ref<T> if the underlying reference points to a T.

-
source

pub fn downcast_root<T>(&self, guard: &CollectionGuard) -> Option<Root<T>>
where - T: Collectable,

Returns a [Strong<T>] if the underlying reference points to a T that +AnyRef in refuse - Rust

+

Struct refuse::AnyRef

source ·
pub struct AnyRef { /* private fields */ }
Expand description

A type-erased garbage collected reference.

+

Implementations§

source§

impl AnyRef

source

pub fn downcast_ref<T>(&self) -> Option<Ref<T>>
where + T: Collectable,

Returns a Ref<T> if the underlying reference points to a T.

+
source

pub fn downcast_root<T>(&self, guard: &CollectionGuard) -> Option<Root<T>>
where + T: Collectable,

Returns a Root<T> if the underlying reference points to a T that has not been collected.

-
source

pub fn load_mapped<'guard, T>( +

source

pub fn load_mapped<'guard, T>( &self, - guard: &'guard CollectionGuard + guard: &'guard CollectionGuard ) -> Option<&'guard T>
where - T: ?Sized + 'static,

Returns a reference to the result of MapAs::map_as(), if the value -has not been collected and MapAs::Target is T.

-

Auto Trait Implementations§

§

impl Freeze for AnyRef

§

impl RefUnwindSafe for AnyRef

§

impl Send for AnyRef

§

impl Sync for AnyRef

§

impl Unpin for AnyRef

§

impl UnwindSafe for AnyRef

Blanket Implementations§

source§

impl<T> Any for T
where + T: ?Sized + 'static,

Returns a reference to the result of MapAs::map_as(), if the value +has not been collected and MapAs::Target is T.

+

Auto Trait Implementations§

§

impl Freeze for AnyRef

§

impl RefUnwindSafe for AnyRef

§

impl Send for AnyRef

§

impl Sync for AnyRef

§

impl Unpin for AnyRef

§

impl UnwindSafe for AnyRef

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
§

impl<A> Cast for A

§

fn cast<To>(self) -> To
where diff --git a/main/musegc/struct.CollectionGuard.html b/main/refuse/struct.CollectionGuard.html similarity index 91% rename from main/musegc/struct.CollectionGuard.html rename to main/refuse/struct.CollectionGuard.html index 5bf7d06..d96ab5e 100644 --- a/main/musegc/struct.CollectionGuard.html +++ b/main/refuse/struct.CollectionGuard.html @@ -1,40 +1,40 @@ -CollectionGuard in musegc - Rust -

Struct musegc::CollectionGuard

source ·
pub struct CollectionGuard { /* private fields */ }
Expand description

A guard that prevents garbage collection while held.

+CollectionGuard in refuse - Rust +

Struct refuse::CollectionGuard

source ·
pub struct CollectionGuard { /* private fields */ }
Expand description

A guard that prevents garbage collection while held.

To perform garbage collection, all threads must be paused to be traced. A -CollectionGuard allows reading garbage-collectable data by +CollectionGuard allows reading garbage-collectable data by ensuring the garbage collector can’t run while it exists.

To ensure the garbage collector can run without long pauses, either hold CollectionGuards for only short periods of time, or call yield_to_collector() regularly on long-held guards.

This type should not be held across potentially blocking operations such as IO, reading from a channel, or any other operation that may pause the -current thread. CollectionGuard::while_unlocked() can be used to +current thread. CollectionGuard::while_unlocked() can be used to temporarily release a guard during a long operation.

-
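For example, a long blocking operation might be wrapped like this (a sketch; the blocking work shown is purely illustrative):

refuse::collected(|| {
    let mut guard = CollectionGuard::acquire();
    // ... read garbage-collected data ...
    let _line = guard.while_unlocked(|| {
        // The guard is released here, so the collector can run while
        // this thread blocks on IO.
        std::io::stdin().lines().next()
    });
    // The guard has been reacquired by the time while_unlocked returns.
});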

Implementations§

source§

impl CollectionGuard

source

pub fn acquire() -> Self

Acquires a lock that prevents the garbage collector from running.

+

Implementations§

source§

impl CollectionGuard

source

pub fn acquire() -> Self

Acquires a lock that prevents the garbage collector from running.

This guard is used to provide read-only access to garbage collected allocations.

§Panics

A panic will occur if this function is called outside of code executed -by collected().

-
source

pub fn collect(&mut self)

Manually invokes the garbage collector.

+by collected().

+
source

pub fn collect(&mut self)

Manually invokes the garbage collector.

This method temporarily releases this guard’s lock and waits for a garbage collection to run. If a garbage collection is already in progress, this function will return when the in-progress collection completes. Otherwise, the collector is started and this function waits until the collection finishes.

Finally, the guard is reacquired before returning.

-
source

pub fn yield_to_collector(&mut self)

Yield to the garbage collector, if needed.

+
source

pub fn yield_to_collector(&mut self)

Yield to the garbage collector, if needed.

This function will not yield unless the garbage collector is trying to acquire this thread’s lock. Because of this, it is a fairly efficient function to invoke. To minimize collection pauses, long-held guards should call this function regularly.

-
source

pub fn while_unlocked<R>(&mut self, unlocked: impl FnOnce() -> R) -> R

Executes unlocked while this guard is temporarily released.

-

Trait Implementations§

source§

impl AsMut<CollectionGuard> for CollectionGuard

source§

fn as_mut(&mut self) -> &mut CollectionGuard

Converts this type into a mutable reference of the (usually inferred) input type.
source§

impl AsRef<CollectionGuard> for CollectionGuard

source§

fn as_ref(&self) -> &CollectionGuard

Converts this type into a shared reference of the (usually inferred) input type.

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where +

source

pub fn while_unlocked<R>(&mut self, unlocked: impl FnOnce() -> R) -> R

Executes unlocked while this guard is temporarily released.

+

Trait Implementations§

source§

impl AsMut<CollectionGuard> for CollectionGuard

source§

fn as_mut(&mut self) -> &mut CollectionGuard

Converts this type into a mutable reference of the (usually inferred) input type.
source§

impl AsRef<CollectionGuard> for CollectionGuard

source§

fn as_ref(&self) -> &CollectionGuard

Converts this type into a shared reference of the (usually inferred) input type.

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
§

impl<A> Cast for A

§

fn cast<To>(self) -> To
where diff --git a/main/musegc/struct.CollectionStarting.html b/main/refuse/struct.CollectionStarting.html similarity index 92% rename from main/musegc/struct.CollectionStarting.html rename to main/refuse/struct.CollectionStarting.html index faa7315..52316d4 100644 --- a/main/musegc/struct.CollectionStarting.html +++ b/main/refuse/struct.CollectionStarting.html @@ -1,9 +1,9 @@ -CollectionStarting in musegc - Rust -
pub struct CollectionStarting;
Expand description

A lock has been established by the collector on data needed to resolve a +CollectionStarting in refuse - Rust

+
pub struct CollectionStarting;
Expand description

A lock has been established by the collector on data needed to resolve a reference.

-

Trait Implementations§

source§

impl Clone for CollectionStarting

source§

fn clone(&self) -> CollectionStarting

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl Debug for CollectionStarting

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl PartialEq for CollectionStarting

source§

fn eq(&self, other: &CollectionStarting) -> bool

This method tests for self and other values to be equal, and is used +

Trait Implementations§

source§

impl Clone for CollectionStarting

source§

fn clone(&self) -> CollectionStarting

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl Debug for CollectionStarting

source§

fn fmt(&self, f: &mut Formatter<'_>) -> Result

Formats the value using the given formatter. Read more
source§

impl PartialEq for CollectionStarting

source§

fn eq(&self, other: &CollectionStarting) -> bool

This method tests for self and other values to be equal, and is used by ==.
1.0.0 · source§

fn ne(&self, other: &Rhs) -> bool

This method tests for !=. The default implementation is almost always -sufficient, and should not be overridden without very good reason.
source§

impl Copy for CollectionStarting

source§

impl Eq for CollectionStarting

source§

impl StructuralPartialEq for CollectionStarting

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where +sufficient, and should not be overridden without very good reason.

source§

impl Copy for CollectionStarting

source§

impl Eq for CollectionStarting

source§

impl StructuralPartialEq for CollectionStarting

Auto Trait Implementations§

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
§

impl<A> Cast for A

§

fn cast<To>(self) -> To
where diff --git a/main/musegc/struct.Ref.html b/main/refuse/struct.Ref.html similarity index 80% rename from main/musegc/struct.Ref.html rename to main/refuse/struct.Ref.html index 43eb6eb..159634a 100644 --- a/main/musegc/struct.Ref.html +++ b/main/refuse/struct.Ref.html @@ -1,31 +1,65 @@ -Ref in musegc - Rust -

Struct musegc::Ref

source ·
pub struct Ref<T> { /* private fields */ }
Expand description

A reference to data stored in a garbage collector.

-

Unlike a Root<T>, this type is not guaranteed to have access to its -underlying data. If no Collectable reachable via all active Roots +Ref in refuse - Rust

+

Struct refuse::Ref

source ·
pub struct Ref<T> { /* private fields */ }
Expand description

A reference to data stored in a garbage collector.

+

Unlike a Root<T>, this type is not guaranteed to have access to its +underlying data. If no Collectable reachable via any active Root marks this allocation, it will be collected.

Because of this, direct access to the data is not provided. To obtain a -reference, call Ref::load().

-

Implementations§

source§

impl<T> Ref<T>
where - T: Collectable,

source

pub fn new(value: T, guard: impl AsRef<CollectionGuard>) -> Self

Stores value in the garbage collector, returning a “weak” reference to +reference, call Ref::load().

+

§Loading a reference

+

Ref::load() is used to provide a reference to data stored in the garbage +collector.

+ +
use refuse::{CollectionGuard, Ref};
+
+refuse::collected(|| {
+    let guard = CollectionGuard::acquire();
+    let data = Ref::new(42, &guard);
+
+    assert_eq!(data.load(&guard), Some(&42));
+});
+

References returned from Ref::load() are tied to the lifetime of the +guard. This ensures that a reference to data can only be held between +moments when the garbage collector can be run. For example, these usages are +prevented by the compiler:

+ +
let guard = CollectionGuard::acquire();
+let data = Ref::new(42, &guard);
+let reference = data.load(&guard).unwrap();
+
+drop(guard);
+
+// error[E0505]: cannot move out of `guard` because it is borrowed
+assert_eq!(reference, &42);
+ +
let mut guard = CollectionGuard::acquire();
+let data = Ref::new(42, &guard);
+let reference = data.load(&guard).unwrap();
+
+guard.yield_to_collector();
+
+// error[E0502]: cannot borrow `guard` as mutable because it is also borrowed as immutable
+assert_eq!(reference, &42);
+

Implementations§

source§

impl<T> Ref<T>
where + T: Collectable,

source

pub fn new(value: T, guard: impl AsRef<CollectionGuard>) -> Self

Stores value in the garbage collector, returning a “weak” reference to it.

-
source

pub fn as_any(self) -> AnyRef

Returns this reference as an untyped reference.

-
source

pub fn load<'guard>(&self, guard: &'guard CollectionGuard) -> Option<&'guard T>

Loads a reference to the underlying data. Returns None if the data has +

source

pub fn as_any(self) -> AnyRef

Returns this reference as an untyped reference.

+
source

pub fn load<'guard>(&self, guard: &'guard CollectionGuard) -> Option<&'guard T>

Loads a reference to the underlying data. Returns None if the data has been collected and is no longer available.

§Errors

Returns CollectionStarting if self was created in another thread and that thread is currently locked by the garbage collector.

-
source

pub fn as_root(&self, guard: &CollectionGuard) -> Option<Root<T>>

Loads a root reference to the underlying data. Returns None if the +

source

pub fn as_root(&self, guard: &CollectionGuard) -> Option<Root<T>>

Loads a root reference to the underlying data. Returns None if the data has been collected and is no longer available.

§Errors

Returns CollectionStarting if self was created in another thread and that thread is currently locked by the garbage collector.

-
source

pub fn ptr_eq(this: &Self, other: &Self) -> bool

Returns true if these two references point to the same underlying +

source

pub fn ptr_eq(this: &Self, other: &Self) -> bool

Returns true if these two references point to the same underlying allocation.

-

Trait Implementations§

source§

impl<T> Clone for Ref<T>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T> Trace for Ref<T>
where - T: Collectable,

source§

const MAY_CONTAIN_REFERENCES: bool = true

If true, this type may contain references and should have its trace() -function invoked during the collector’s “mark” phase.
source§

fn trace(&self, tracer: &mut Tracer<'_>)

Traces all references that this value references. Read more
source§

impl<T> Copy for Ref<T>

source§

impl<T> Send for Ref<T>
where - T: Collectable,

source§

impl<T> Sync for Ref<T>
where - T: Collectable,

Auto Trait Implementations§

§

impl<T> Freeze for Ref<T>

§

impl<T> RefUnwindSafe for Ref<T>

§

impl<T> Unpin for Ref<T>

§

impl<T> UnwindSafe for Ref<T>

Blanket Implementations§

source§

impl<T> Any for T
where +

Trait Implementations§

source§

impl<T> Clone for Ref<T>

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T> Trace for Ref<T>
where + T: Collectable,

source§

const MAY_CONTAIN_REFERENCES: bool = true

If true, this type may contain references and should have its trace() +function invoked during the collector’s “mark” phase.
source§

fn trace(&self, tracer: &mut Tracer<'_>)

Traces all references that this value references. Read more
source§

impl<T> Copy for Ref<T>

source§

impl<T> Send for Ref<T>
where + T: Collectable,

source§

impl<T> Sync for Ref<T>
where + T: Collectable,

Auto Trait Implementations§

§

impl<T> Freeze for Ref<T>

§

impl<T> RefUnwindSafe for Ref<T>

§

impl<T> Unpin for Ref<T>

§

impl<T> UnwindSafe for Ref<T>

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
§

impl<A> Cast for A

§

fn cast<To>(self) -> To
where diff --git a/main/musegc/struct.Root.html b/main/refuse/struct.Root.html similarity index 65% rename from main/musegc/struct.Root.html rename to main/refuse/struct.Root.html index 9a3d7e3..6bfd760 100644 --- a/main/musegc/struct.Root.html +++ b/main/refuse/struct.Root.html @@ -1,24 +1,24 @@ -Root in musegc - Rust -

Struct musegc::Root

source ·
pub struct Root<T>
where - T: Collectable,
{ /* private fields */ }
Expand description

A root reference to a T that has been allocated in the garbage collector.

-

This type behaves very similarly to Arc<T>. It is cheap-to-clone, -utilizing atomic reference counting to track the number of root references -currently exist to the underlying value.

+Root in refuse - Rust +

Struct refuse::Root

source ·
pub struct Root<T>
where + T: Collectable,
{ /* private fields */ }
Expand description

A root reference to a T that has been allocated in the garbage collector.

+

This type behaves very similarly to Arc<T>. It implements Deref<Target = T>, and it is also cheap to clone, utilizing atomic reference counting to +track the number of root references that currently exist to the underlying value.

While any root references exist for a given allocation, the garbage collector will not collect the allocation.

-

Implementations§

source§

impl<T> Root<T>
where - T: Collectable,

source

pub fn new(value: T, guard: impl AsRef<CollectionGuard>) -> Self

Stores value in the garbage collector, returning a root reference to +

Implementations§

source§

impl<T> Root<T>
where + T: Collectable,

source

pub fn new(value: T, guard: impl AsRef<CollectionGuard>) -> Self

Stores value in the garbage collector, returning a root reference to the data.

-
source

pub const fn downgrade(&self) -> Ref<T>

Returns a “weak” reference to this root.

-
source

pub fn downgrade_any(&self) -> AnyRef

Returns an untyped “weak” reference erased to this root.

-

Trait Implementations§

source§

impl<T> Deref for Root<T>
where - T: Collectable,

§

type Target = T

The resulting type after dereferencing.
source§

fn deref(&self) -> &Self::Target

Dereferences the value.
source§

impl<T> Drop for Root<T>
where - T: Collectable,

source§

fn drop(&mut self)

Executes the destructor for this type. Read more
source§

impl<T> Trace for Root<T>
where - T: Collectable,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

If true, this type may contain references and should have its trace() -function invoked during the collector’s “mark” phase.
source§

fn trace(&self, _tracer: &mut Tracer<'_>)

Traces all references that this value references. Read more
source§

impl<T> Send for Root<T>
where - T: Collectable,

source§

impl<T> Sync for Root<T>
where - T: Collectable,

Auto Trait Implementations§

§

impl<T> Freeze for Root<T>

§

impl<T> RefUnwindSafe for Root<T>
where - T: RefUnwindSafe,

§

impl<T> Unpin for Root<T>

§

impl<T> UnwindSafe for Root<T>
where +

source

pub const fn downgrade(&self) -> Ref<T>

Returns a “weak” reference to this root.

+
source

pub fn downgrade_any(&self) -> AnyRef

Returns an untyped “weak” reference erased to this root.

+

Trait Implementations§

source§

impl<T> Clone for Root<T>
where + T: Collectable,

source§

fn clone(&self) -> Self

Returns a copy of the value. Read more
1.0.0 · source§

fn clone_from(&mut self, source: &Self)

Performs copy-assignment from source. Read more
source§

impl<T> Deref for Root<T>
where + T: Collectable,

§

type Target = T

The resulting type after dereferencing.
source§

fn deref(&self) -> &Self::Target

Dereferences the value.
source§

impl<T> Drop for Root<T>
where + T: Collectable,

source§

fn drop(&mut self)

Executes the destructor for this type. Read more
source§

impl<T> Trace for Root<T>
where + T: Collectable,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

If true, this type may contain references and should have its trace() +function invoked during the collector’s “mark” phase.
source§

fn trace(&self, _tracer: &mut Tracer<'_>)

Traces all references that this value references. Read more
source§

impl<T> Send for Root<T>
where + T: Collectable,

source§

impl<T> Sync for Root<T>
where + T: Collectable,

Auto Trait Implementations§

§

impl<T> Freeze for Root<T>

§

impl<T> RefUnwindSafe for Root<T>
where + T: RefUnwindSafe,

§

impl<T> Unpin for Root<T>

§

impl<T> UnwindSafe for Root<T>
where T: RefUnwindSafe,

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where @@ -29,6 +29,7 @@ U: From<T>,

source§

fn into(self) -> U

Calls U::from(self).

That is, this conversion is whatever the implementation of From<T> for U chooses to do.

-
source§

impl<T, U> TryFrom<U> for T
where +

source§

impl<T> ToOwned for T
where + T: Clone,

§

type Owned = T

The resulting type after obtaining ownership.
source§

fn to_owned(&self) -> T

Creates owned data from borrowed data, usually by cloning. Read more
source§

fn clone_into(&self, target: &mut T)

Uses borrowed data to replace owned data, usually by cloning. Read more
source§

impl<T, U> TryFrom<U> for T
where U: Into<T>,

§

type Error = Infallible

The type returned in the event of a conversion error.
source§

fn try_from(value: U) -> Result<T, <T as TryFrom<U>>::Error>

Performs the conversion.
source§

impl<T, U> TryInto<U> for T
where U: TryFrom<T>,

§

type Error = <U as TryFrom<T>>::Error

The type returned in the event of a conversion error.
source§

fn try_into(self) -> Result<U, <U as TryFrom<T>>::Error>

Performs the conversion.
\ No newline at end of file diff --git a/main/musegc/struct.Tracer.html b/main/refuse/struct.Tracer.html similarity index 90% rename from main/musegc/struct.Tracer.html rename to main/refuse/struct.Tracer.html index d6d5625..f488347 100644 --- a/main/musegc/struct.Tracer.html +++ b/main/refuse/struct.Tracer.html @@ -1,11 +1,11 @@ -Tracer in musegc - Rust -

Struct musegc::Tracer

source ·
pub struct Tracer<'a> { /* private fields */ }
Expand description

A tracer for the garbage collector.

-

This type allows Collectable values to mark() any -Ref<T>s they contain.

-

Implementations§

source§

impl<'a> Tracer<'a>

source

pub fn mark<T>(&mut self, collectable: Ref<T>)
where - T: Collectable,

Marks collectable as being referenced, ensuring it is not garbage +Tracer in refuse - Rust

+

Struct refuse::Tracer

source ·
pub struct Tracer<'a> { /* private fields */ }
Expand description

A tracer for the garbage collector.

+

This type allows Collectable values to mark() any +Ref<T>s they contain.

+

Implementations§

source§

impl<'a> Tracer<'a>

source

pub fn mark<T>(&mut self, collectable: Ref<T>)
where + T: Collectable,

Marks collectable as being referenced, ensuring it is not garbage collected.

-

Auto Trait Implementations§

§

impl<'a> Freeze for Tracer<'a>

§

impl<'a> RefUnwindSafe for Tracer<'a>

§

impl<'a> Send for Tracer<'a>

§

impl<'a> Sync for Tracer<'a>

§

impl<'a> Unpin for Tracer<'a>

§

impl<'a> UnwindSafe for Tracer<'a>

Blanket Implementations§

source§

impl<T> Any for T
where +

Auto Trait Implementations§

§

impl<'a> Freeze for Tracer<'a>

§

impl<'a> RefUnwindSafe for Tracer<'a>

§

impl<'a> Send for Tracer<'a>

§

impl<'a> Sync for Tracer<'a>

§

impl<'a> Unpin for Tracer<'a>

§

impl<'a> UnwindSafe for Tracer<'a>

Blanket Implementations§

source§

impl<T> Any for T
where T: 'static + ?Sized,

source§

fn type_id(&self) -> TypeId

Gets the TypeId of self. Read more
source§

impl<T> Borrow<T> for T
where T: ?Sized,

source§

fn borrow(&self) -> &T

Immutably borrows from an owned value. Read more
source§

impl<T> BorrowMut<T> for T
where T: ?Sized,

source§

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value. Read more
§

impl<A> Cast for A

§

fn cast<To>(self) -> To
where diff --git a/main/musegc/trait.Collectable.html b/main/refuse/trait.Collectable.html similarity index 73% rename from main/musegc/trait.Collectable.html rename to main/refuse/trait.Collectable.html index 49551f4..aa1f44e 100644 --- a/main/musegc/trait.Collectable.html +++ b/main/refuse/trait.Collectable.html @@ -1,12 +1,12 @@ -Collectable in musegc - Rust -

Trait musegc::Collectable

source ·
pub trait Collectable: Trace + MapAs + Send + Sync + 'static { }
Expand description

A type that can be garbage collected.

-

A type needs to implement both Trace and MapAs to be collectable.

-

If a type can’t contain any Ref<T>s and no mapping functionality is -desired, the SimpleType trait can be implemented instead of Trace -and MapAs to enable collection.

-

If a type can’t contain any Ref<T>s, ContainsNoRefs can be -implemented instead of Trace.

-

If no mapping functionality is desired, NoMapping can be implemented -instead of MapAs.

-

Object Safety§

This trait is not object safe.

Implementors§

source§

impl<T> Collectable for T
where - T: Trace + MapAs + Send + Sync + 'static,

\ No newline at end of file +Collectable in refuse - Rust +

Trait refuse::Collectable

source ·
pub trait Collectable: Trace + MapAs + Send + Sync + 'static { }
Expand description

A type that can be garbage collected.

+

A type needs to implement both Trace and MapAs to be collectable.

+

If a type can’t contain any Ref<T>s and no mapping functionality is +desired, the SimpleType trait can be implemented instead of Trace +and MapAs to enable collection.

+

If a type can’t contain any Ref<T>s, ContainsNoRefs can be +implemented instead of Trace.

+

If no mapping functionality is desired, NoMapping can be implemented +instead of MapAs.

+

Object Safety§

This trait is not object safe.

Implementors§

source§

impl<T> Collectable for T
where + T: Trace + MapAs + Send + Sync + 'static,

\ No newline at end of file diff --git a/main/musegc/trait.ContainsNoRefs.html b/main/refuse/trait.ContainsNoRefs.html similarity index 81% rename from main/musegc/trait.ContainsNoRefs.html rename to main/refuse/trait.ContainsNoRefs.html index 386c7b0..848e197 100644 --- a/main/musegc/trait.ContainsNoRefs.html +++ b/main/refuse/trait.ContainsNoRefs.html @@ -1,7 +1,7 @@ -ContainsNoRefs in musegc - Rust -
pub trait ContainsNoRefs { }
Expand description

A type that can be garbage collected that cannot contain any Ref<T>s.

-

Types that implement this trait automatically implement Collectable. -This trait reduces the boilerplate for implementing Collectable for +ContainsNoRefs in refuse - Rust

+
pub trait ContainsNoRefs { }
Expand description

A type that can be garbage collected and cannot contain any Ref<T>s.

+

Types that implement this trait automatically implement Collectable. +This trait reduces the boilerplate for implementing Collectable for self-contained types.

-

Implementors§

source§

impl<T> ContainsNoRefs for T
where - T: SimpleType,

\ No newline at end of file +

Implementors§

source§

impl<T> ContainsNoRefs for T
where + T: SimpleType,

\ No newline at end of file diff --git a/main/musegc/trait.MapAs.html b/main/refuse/trait.MapAs.html similarity index 83% rename from main/musegc/trait.MapAs.html rename to main/refuse/trait.MapAs.html index 7cbd8de..61b3942 100644 --- a/main/musegc/trait.MapAs.html +++ b/main/refuse/trait.MapAs.html @@ -1,14 +1,14 @@ -MapAs in musegc - Rust -

Trait musegc::MapAs

source ·
pub trait MapAs {
+MapAs in refuse - Rust
+    

Trait refuse::MapAs

source ·
pub trait MapAs {
     type Target: ?Sized + 'static;
 
     // Required method
-    fn map_as(&self) -> &Self::Target;
+    fn map_as(&self) -> &Self::Target;
 }
Expand description

A mapping from one type to another.

-

This trait is used by AnyRef::load_mapped() to enable type-erased +

This trait is used by AnyRef::load_mapped() to enable type-erased loading of a secondary type.

-

If no mapping is desired, implement NoMapping instead.

-

Required Associated Types§

source

type Target: ?Sized + 'static

The target type of the mapping.

-

Required Methods§

source

fn map_as(&self) -> &Self::Target

Maps self to target type.

-

Implementors§

source§

impl<T> MapAs for T
where - T: NoMapping,

§

type Target = ()

\ No newline at end of file +

If no mapping is desired, implement NoMapping instead.

+
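A minimal sketch of an implementation, assuming a hypothetical Name type that wants to expose its contents as a str for type-erased loading:

use refuse::MapAs;

struct Name(String);

impl MapAs for Name {
    type Target = str;

    fn map_as(&self) -> &str {
        &self.0
    }
}

Combined with a Trace implementation, Name becomes Collectable, and AnyRef::load_mapped::<str>() can then return the string without knowing the concrete type.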

Required Associated Types§

source

type Target: ?Sized + 'static

The target type of the mapping.

+

Required Methods§

source

fn map_as(&self) -> &Self::Target

Maps self to target type.

+

Implementors§

source§

impl<T> MapAs for T
where + T: NoMapping,

§

type Target = ()

\ No newline at end of file diff --git a/main/refuse/trait.NoMapping.html b/main/refuse/trait.NoMapping.html new file mode 100644 index 0000000..7381051 --- /dev/null +++ b/main/refuse/trait.NoMapping.html @@ -0,0 +1,6 @@ +NoMapping in refuse - Rust +

Trait refuse::NoMapping

source ·
pub trait NoMapping { }
Expand description

A type that implements MapAs with an empty implementation.

+

Implementations on Foreign Types§

source§

impl<K> NoMapping for BTreeSet<K>

source§

impl<K> NoMapping for Set<K>
where + K: Sort,

source§

impl<K, S> NoMapping for HashSet<K, S>

source§

impl<K, V> NoMapping for BTreeMap<K, V>

source§

impl<K, V> NoMapping for Map<K, V>
where + K: Sort,

source§

impl<K, V, S> NoMapping for HashMap<K, V, S>

source§

impl<T> NoMapping for BinaryHeap<T>

source§

impl<T> NoMapping for LinkedList<T>

source§

impl<T> NoMapping for VecDeque<T>

source§

impl<T> NoMapping for Vec<T>

source§

impl<T, const N: usize> NoMapping for [T; N]

Implementors§

source§

impl<T> NoMapping for T
where + T: SimpleType,

\ No newline at end of file diff --git a/main/refuse/trait.SimpleType.html b/main/refuse/trait.SimpleType.html new file mode 100644 index 0000000..17337d8 --- /dev/null +++ b/main/refuse/trait.SimpleType.html @@ -0,0 +1,6 @@ +SimpleType in refuse - Rust +

Trait refuse::SimpleType

source ·
pub trait SimpleType { }
Expand description

A type that can contain no Ref<T>s and has an empty MapAs +implementation.

+

Implementing this trait for a type automatically implements NoMapping +and ContainsNoRefs, which makes the type Collectable.

+
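For example, a plain data type can opt in with an empty implementation (Meters is a hypothetical type):

use refuse::SimpleType;

struct Meters(u32);

impl SimpleType for Meters {}

// Meters is now Collectable and can be stored via Root::new or Ref::new.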

Implementations on Foreign Types§

source§

impl SimpleType for i8

source§

impl SimpleType for i16

source§

impl SimpleType for i32

source§

impl SimpleType for i64

source§

impl SimpleType for i128

source§

impl SimpleType for isize

source§

impl SimpleType for u8

source§

impl SimpleType for u16

source§

impl SimpleType for u32

source§

impl SimpleType for u64

source§

impl SimpleType for u128

source§

impl SimpleType for usize

source§

impl SimpleType for AtomicI8

source§

impl SimpleType for AtomicI16

source§

impl SimpleType for AtomicI32

source§

impl SimpleType for AtomicI64

source§

impl SimpleType for AtomicIsize

source§

impl SimpleType for AtomicU8

source§

impl SimpleType for AtomicU16

source§

impl SimpleType for AtomicU32

source§

impl SimpleType for AtomicU64

source§

impl SimpleType for AtomicUsize

source§

impl SimpleType for NonZeroI8

source§

impl SimpleType for NonZeroI16

source§

impl SimpleType for NonZeroI32

source§

impl SimpleType for NonZeroI64

source§

impl SimpleType for NonZeroI128

source§

impl SimpleType for NonZeroIsize

source§

impl SimpleType for NonZeroU8

source§

impl SimpleType for NonZeroU16

source§

impl SimpleType for NonZeroU32

source§

impl SimpleType for NonZeroU64

source§

impl SimpleType for NonZeroU128

source§

impl SimpleType for NonZeroUsize

Implementors§

\ No newline at end of file diff --git a/main/refuse/trait.Trace.html b/main/refuse/trait.Trace.html new file mode 100644 index 0000000..4f9081a --- /dev/null +++ b/main/refuse/trait.Trace.html @@ -0,0 +1,32 @@ +Trace in refuse - Rust +

Trait refuse::Trace

source ·
pub trait Trace {
+    const MAY_CONTAIN_REFERENCES: bool;
+
+    // Required method
+    fn trace(&self, tracer: &mut Tracer<'_>);
+}
Expand description

A type that can find and mark any references it has.

+

Required Associated Constants§

source

const MAY_CONTAIN_REFERENCES: bool

If true, this type may contain references and should have its trace() +function invoked during the collector’s “mark” phase.

+

Required Methods§

source

fn trace(&self, tracer: &mut Tracer<'_>)

Traces all references that this value references.

+

This function should invoke Tracer::mark() for each Ref<T> it +contains. Failing to do so will allow the garbage collector to free the +data, preventing the ability to load() the data in the +future.

+
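A hand-written implementation for a type that holds references might look like the following sketch (Entry is hypothetical; the empty NoMapping implementation is what makes it Collectable):

use refuse::{NoMapping, Ref, Trace, Tracer};

struct Entry {
    value: u64,
    children: Vec<Ref<Entry>>,
}

impl Trace for Entry {
    const MAY_CONTAIN_REFERENCES: bool = true;

    fn trace(&self, tracer: &mut Tracer<'_>) {
        // Mark every Ref this value holds so it survives the sweep.
        for child in &self.children {
            tracer.mark(*child);
        }
    }
}

impl NoMapping for Entry {}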

Object Safety§

This trait is not object safe.

Implementations on Foreign Types§

source§

impl<K> Trace for BTreeSet<K>
where + K: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = K::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<K> Trace for Set<K>
where + K: Trace + Sort,

source§

const MAY_CONTAIN_REFERENCES: bool = K::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<K, S> Trace for HashSet<K, S>
where + K: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = K::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<K, V> Trace for BTreeMap<K, V>
where + K: Trace, + V: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = _

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<K, V> Trace for Map<K, V>
where + K: Trace + Sort, + V: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = _

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<K, V, S> Trace for HashMap<K, V, S>
where + K: Trace, + V: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = _

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<T> Trace for BinaryHeap<T>
where + T: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<T> Trace for LinkedList<T>
where + T: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<T> Trace for VecDeque<T>
where + T: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<T> Trace for Vec<T>
where + T: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

source§

impl<T, const N: usize> Trace for [T; N]
where + T: Trace,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

source§

fn trace(&self, tracer: &mut Tracer<'_>)

Implementors§

source§

impl<T> Trace for Ref<T>
where + T: Collectable,

source§

impl<T> Trace for Root<T>
where + T: Collectable,

source§

const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES

source§

impl<T> Trace for T
where + T: ContainsNoRefs,

\ No newline at end of file diff --git a/main/search-index.js b/main/search-index.js index ac396df..962a1d4 100644 --- a/main/search-index.js +++ b/main/search-index.js @@ -1,5 +1,5 @@ var searchIndex = new Map(JSON.parse('[\ -["musegc",{"doc":"musegc","t":"FKFFKTKKFFKRKFNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNHNHNNNNNNNNNNNNNNNNNNNNNNNNNNNNMNNNNNNMNNNNNNNNNNNNNNNNNNNNNN","n":["AnyRef","Collectable","CollectionGuard","CollectionStarting","ContainsNoRefs","MAY_CONTAIN_REFERENCES","MapAs","NoMapping","Ref","Root","SimpleType","Target","Trace","Tracer","acquire","as_any","as_mut","as_ref","as_root","borrow","borrow","borrow","borrow","borrow","borrow","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","cast","cast","cast","cast","cast","cast","cast_into","cast_into","cast_into","cast_into","cast_into","cast_into","clone","clone","clone_into","clone_into","collect","collect","collected","deref","downcast_ref","downcast_root","downgrade","downgrade_any","drop","eq","fmt","from","from","from","from","from","from","from_cast","from_cast","from_cast","from_cast","from_cast","from_cast","into","into","into","into","into","into","load","load_mapped","map_as","mark","new","new","ptr_eq","to_owned","to_owned","trace","trace","trace","try_from","try_from","try_from","try_from","try_from","try_from","try_into","try_into","try_into","try_into","try_into","try_into","type_id","type_id","type_id","type_id","type_id","type_id","while_unlocked","yield_to_collector"],"q":[[0,"musegc"],[108,"core::option"],[109,"intentional::cast"],[110,"core::ops::function"],[111,"core::fmt"],[112,"core::fmt"],[113,"core::convert"],[114,"core::result"],[115,"core::any"]],"d":["A type-erased garbage collected reference.","A type that can be garbage collected.","A guard that prevents garbage collection while held.","A lock has been established by the collector on data …","A type that can be garbage collected that cannot contain …","If true, this type may contain references and should have …","A mapping from one type to another.","A type that implements MapAs with an empty implementation.","A reference to data stored in a garbage collector.","A root reference to a T that has been allocated in the …","A type that can contain no Ref<T>s and has an empty MapAs …","The target type of the mapping.","A type that can find and mark any references it has.","A tracer for the garbage collector.","Acquires a lock that prevents the garbage collector from …","Returns this reference as an untyped reference.","","","Loads a root reference to the underlying data. Returns None…","","","","","","","","","","","","","","","","","","","","","","","","","","","","","Invokes the garbage collector.","Manually invokes the garbage collector.","Executes wrapped with garbage collection available.","","Returns a Ref<T> if the underlying reference points to a T.","Returns a [Strong<T>] if the underlying reference points …","Returns a “weak” reference to this root.","Returns an untyped “weak” reference erased to this …","","","","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","","","","","","","Calls U::from(self).","Calls U::from(self).","Calls U::from(self).","Calls U::from(self).","Calls U::from(self).","Calls U::from(self).","Loads a reference to the underlying data. 
Returns None if …","Returns a reference to the result of MapAs::map_as(), if …","Maps self to target type.","Marks collectable as being referenced, ensuring it is not …","Stores value in the garbage collector, returning a root …","Stores value in the garbage collector, returning a “weak…","Returns true if these two references point to the same …","","","Traces all refrences that this value references.","","","","","","","","","","","","","","","","","","","","","Executes unlocked while this guard is temporarily released.","Yield to the garbage collector, if needed."],"i":[0,0,0,0,0,20,0,0,0,0,0,17,0,0,1,2,1,1,2,1,18,5,2,3,8,1,18,5,2,3,8,1,18,5,2,3,8,1,18,5,2,3,8,2,8,2,8,0,1,0,5,3,3,5,5,5,8,8,1,18,5,2,3,8,1,18,5,2,3,8,1,18,5,2,3,8,2,3,17,18,5,2,2,2,8,20,5,2,1,18,5,2,3,8,1,18,5,2,3,8,1,18,5,2,3,8,1,1],"f":"``````````````{{}b}{{{d{c}}}fh}{bb}0{{{d{c}}b}{{l{{j{c}}}}}h}{ce{}{}}00000000000{cg{}{}{{n{e}}}}00000111111{{{d{c}}}{{d{c}}}{}}{A`A`}{{ce}Ab{}{}}0{{}Ab}{bAb}{ec{}{{Af{}{{Ad{c}}}}}}{{{j{c}}}eh{}}{f{{l{{d{c}}}}}h}{{fb}{{l{{j{c}}}}}h}{{{j{c}}}{{d{c}}}h}{{{j{c}}}fh}{{{j{c}}}Abh}{{A`A`}Ah}{{A`Aj}Al}{cc{}}00000000000{ce{}{}}00000{{{d{c}}b}{{l{c}}}h}{{fb}{{l{c}}}An}{{{Bb{}{{B`{c}}}}}cAn}{{Bd{d{c}}}Abh}{{ce}{{j{c}}}h{{Bf{b}}}}{{ce}{{d{c}}}h{{Bf{b}}}}{{{d{c}}{d{c}}}Ahh}77{{BhBd}Ab}{{{j{c}}Bd}Abh}{{{d{c}}Bd}Abh}{c{{Bj{e}}}{}{}}00000000000{cBl{}}00000{{be}c{}{{Af{}{{Ad{c}}}}}}{bAb}","c":[],"p":[[5,"CollectionGuard",0],[5,"Ref",0],[5,"AnyRef",0],[10,"Collectable",0],[5,"Root",0],[6,"Option",108],[10,"CastFrom",109],[5,"CollectionStarting",0],[1,"unit"],[17,"Output"],[10,"FnOnce",110],[1,"bool"],[5,"Formatter",111],[8,"Result",111],[10,"Sized",112],[17,"Target"],[10,"MapAs",0],[5,"Tracer",0],[10,"AsRef",113],[10,"Trace",0],[6,"Result",114],[5,"TypeId",115]],"b":[]}]\ +["refuse",{"doc":"Refuse","t":"FKFFKTKKFFKRKFNCNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNNHNHNNNNNNNNNNNNNNNNNNNNNNNNNNNNMNNNNNNNMNNNNNNNNNNNNNNNNNNNNNN","n":["AnyRef","Collectable","CollectionGuard","CollectionStarting","ContainsNoRefs","MAY_CONTAIN_REFERENCES","MapAs","NoMapping","Ref","Root","SimpleType","Target","Trace","Tracer","acquire","architecture","as_any","as_mut","as_ref","as_root","borrow","borrow","borrow","borrow","borrow","borrow","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","borrow_mut","cast","cast","cast","cast","cast","cast","cast_into","cast_into","cast_into","cast_into","cast_into","cast_into","clone","clone","clone","clone_into","clone_into","clone_into","collect","collect","collected","deref","downcast_ref","downcast_root","downgrade","downgrade_any","drop","eq","fmt","from","from","from","from","from","from","from_cast","from_cast","from_cast","from_cast","from_cast","from_cast","into","into","into","into","into","into","load","load_mapped","map_as","mark","new","new","ptr_eq","to_owned","to_owned","to_owned","trace","trace","trace","try_from","try_from","try_from","try_from","try_from","try_from","try_into","try_into","try_into","try_into","try_into","try_into","type_id","type_id","type_id","type_id","type_id","type_id","while_unlocked","yield_to_collector"],"q":[[0,"refuse"],[112,"core::option"],[113,"intentional::cast"],[114,"core::ops::function"],[115,"core::fmt"],[116,"core::fmt"],[117,"core::convert"],[118,"core::result"],[119,"core::any"]],"d":["A type-erased garbage collected reference.","A type that can be garbage collected.","A guard that prevents garbage collection while held.","A lock has been established by the collector on data …","A type that can be garbage collected that cannot contain 
…","If true, this type may contain references and should have …","A mapping from one type to another.","A type that implements MapAs with an empty implementation.","A reference to data stored in a garbage collector.","A root reference to a T that has been allocated in the …","A type that can contain no Ref<T>s and has an empty MapAs …","The target type of the mapping.","A type that can find and mark any references it has.","A tracer for the garbage collector.","Acquires a lock that prevents the garbage collector from …","Architecture overview of the underlying design of Refuse.","Returns this reference as an untyped reference.","","","Loads a root reference to the underlying data. Returns None…","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","","Invokes the garbage collector.","Manually invokes the garbage collector.","Executes wrapped with garbage collection available.","","Returns a Ref<T> if the underlying reference points to a T.","Returns a Root<T> if the underlying reference points to a T…","Returns a “weak” reference to this root.","Returns an untyped “weak” reference erased to this …","","","","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","Returns the argument unchanged.","","","","","","","Calls U::from(self).","Calls U::from(self).","Calls U::from(self).","Calls U::from(self).","Calls U::from(self).","Calls U::from(self).","Loads a reference to the underlying data. Returns None if …","Returns a reference to the result of MapAs::map_as(), if …","Maps self to target type.","Marks collectable as being referenced, ensuring it is not …","Stores value in the garbage collector, returning a root …","Stores value in the garbage collector, returning a “weak…","Returns true if these two references point to the same …","","","","Traces all refrences that this value references.","","","","","","","","","","","","","","","","","","","","","Executes unlocked while this guard is temporarily released.","Yield to the garbage collector, if needed."],"i":[0,0,0,0,0,20,0,0,0,0,0,17,0,0,1,0,2,1,1,2,1,18,5,2,3,8,1,18,5,2,3,8,1,18,5,2,3,8,1,18,5,2,3,8,5,2,8,5,2,8,0,1,0,5,3,3,5,5,5,8,8,1,18,5,2,3,8,1,18,5,2,3,8,1,18,5,2,3,8,2,3,17,18,5,2,2,5,2,8,20,5,2,1,18,5,2,3,8,1,18,5,2,3,8,1,18,5,2,3,8,1,1],"f":"``````````````{{}b}`{{{d{c}}}fh}{bb}0{{{d{c}}b}{{l{{j{c}}}}}h}{ce{}{}}00000000000{cg{}{}{{n{e}}}}00000111111{{{j{c}}}{{j{c}}}h}{{{d{c}}}{{d{c}}}{}}{A`A`}{{ce}Ab{}{}}00{{}Ab}{bAb}{ec{}{{Af{}{{Ad{c}}}}}}{{{j{c}}}eh{}}{f{{l{{d{c}}}}}h}{{fb}{{l{{j{c}}}}}h}{{{j{c}}}{{d{c}}}h}{{{j{c}}}fh}{{{j{c}}}Abh}{{A`A`}Ah}{{A`Aj}Al}{cc{}}00000000000{ce{}{}}00000{{{d{c}}b}{{l{c}}}h}{{fb}{{l{c}}}An}{{{Bb{}{{B`{c}}}}}cAn}{{Bd{d{c}}}Abh}{{ce}{{j{c}}}h{{Bf{b}}}}{{ce}{{d{c}}}h{{Bf{b}}}}{{{d{c}}{d{c}}}Ahh}777{{BhBd}Ab}{{{j{c}}Bd}Abh}{{{d{c}}Bd}Abh}{c{{Bj{e}}}{}{}}00000000000{cBl{}}00000{{be}c{}{{Af{}{{Ad{c}}}}}}{bAb}","c":[],"p":[[5,"CollectionGuard",0],[5,"Ref",0],[5,"AnyRef",0],[10,"Collectable",0],[5,"Root",0],[6,"Option",112],[10,"CastFrom",113],[5,"CollectionStarting",0],[1,"unit"],[17,"Output"],[10,"FnOnce",114],[1,"bool"],[5,"Formatter",115],[8,"Result",115],[10,"Sized",116],[17,"Target"],[10,"MapAs",0],[5,"Tracer",0],[10,"AsRef",117],[10,"Trace",0],[6,"Result",118],[5,"TypeId",119]],"b":[]}]\ ]')); if (typeof exports !== 'undefined') exports.searchIndex = searchIndex; else if (window.initSearch) window.initSearch(searchIndex); diff --git a/main/settings.html 
b/main/settings.html index b99d127..8fd19ac 100644 --- a/main/settings.html +++ b/main/settings.html @@ -1,2 +1,2 @@ -Settings -

Rustdoc settings

Back
\ No newline at end of file +Settings +

Rustdoc settings

Back
\ No newline at end of file diff --git a/main/src-files.js b/main/src-files.js index c24cdec..8bc2482 100644 --- a/main/src-files.js +++ b/main/src-files.js @@ -1,4 +1,4 @@ var srcIndex = new Map(JSON.parse('[\ -["musegc",["",[],["lib.rs"]]]\ +["refuse",["",[],["lib.rs"]]]\ ]')); createSrcSidebar(); diff --git a/main/src/musegc/lib.rs.html b/main/src/refuse/lib.rs.html similarity index 80% rename from main/src/musegc/lib.rs.html rename to main/src/refuse/lib.rs.html index 6188c01..76298e5 100644 --- a/main/src/musegc/lib.rs.html +++ b/main/src/refuse/lib.rs.html @@ -1,6 +1,6 @@ -lib.rs - source [rustdoc line-number gutter: context 1897–1899, added lines +1900 through +2299]
#![doc = include_str!("../README.md")]
+
 use core::slice;
 use std::alloc::{alloc_zeroed, Layout};
 use std::any::{Any, TypeId};
 use std::cell::{Cell, OnceCell, RefCell, UnsafeCell};
+use std::collections::{BTreeMap, BTreeSet, BinaryHeap, HashMap, HashSet, LinkedList, VecDeque};
 use std::marker::PhantomData;
 use std::mem::ManuallyDrop;
-use std::num::NonZeroUsize;
+use std::num::{
+    NonZeroI128, NonZeroI16, NonZeroI32, NonZeroI64, NonZeroI8, NonZeroIsize, NonZeroU128,
+    NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize,
+};
 use std::ops::Deref;
-use std::sync::atomic::{self, AtomicBool, AtomicU32, AtomicU64, AtomicU8, AtomicUsize, Ordering};
+use std::sync::atomic::{
+    self, AtomicBool, AtomicI16, AtomicI32, AtomicI64, AtomicI8, AtomicIsize, AtomicU16, AtomicU32,
+    AtomicU64, AtomicU8, AtomicUsize, Ordering,
+};
 use std::sync::{Arc, OnceLock, Weak};
 use std::time::{Duration, Instant};
 use std::{array, thread};
 
-use ahash::AHashMap;
 use crossbeam_utils::sync::{Parker, Unparker};
 use flume::{Receiver, RecvError, RecvTimeoutError, Sender};
 use intentional::{Assert, Cast};
-use kempt::Map;
+use kempt::map::Field;
+use kempt::{Map, Set};
 use parking_lot::{Condvar, Mutex, RwLock};
 
-#[derive(Clone, Copy, Eq, PartialEq, Hash)]
-struct CollectorThreadId(u64);
+/// Architecture overview of the underlying design of Refuse.
+///
+/// # Overview
+///
+/// *Refuse* is an incremental, tracing garbage collector. An incremental
+/// garbage collector can only run when it knows no threads are currently
+/// accessing collectable memory. This fits the access pattern of an `RwLock`:
+/// the collector can acquire a "write" lock to ensure that no other thread can
+/// read while it is running.
+///
+/// Originally, Refuse used an `RwLock` and a shared allocation arena. This did
+/// not perform well in multi-threaded benchmarks, so the global `RwLock` was
+/// replaced with an atomic that tracks the number of currently acquired
+/// [`CollectionGuard`]s and whether the collector is currently trying to start
+/// a collection.
+///
+/// Each thread allocates its own independent allocation arena, stores a copy
+/// of it in thread-local storage, and registers a copy with the global
+/// collector. Refuse's public API provides no access to the local thread's
+/// data without a [`CollectionGuard`] first being acquired, which is how the
+/// collector is able to guarantee exclusive access to the underlying data.
+///
+/// # Allocation Arenas
+///
+/// Each thread is given its own allocation arena, which is a data structure
+/// designed for concurrently reading portions of its data while still being
+/// able to perform new allocations from the owning thread.
+///
+/// At the root of each arena is a map of types to type-erased `Bin<T>`s. A
+/// `Bin<T>` is the root of a linked-list of `Slabs<T>`. Each `Slabs<T>`
+/// contains a list of `Slab<T>`s and an optional next `Slabs<T>`. Each
+/// `Slab<T>` holds 256 `Slot<T>`s. Each slot is a combination of the slot's
+/// state and the slot's data.
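+///
+/// As a rough sketch, the hierarchy looks something like this (illustrative
+/// only; the real types add free-list, generation, and tail-pointer
+/// bookkeeping):
+///
+/// ```rust
+/// use std::mem::ManuallyDrop;
+/// use std::sync::atomic::AtomicU64;
+///
+/// union SlotData<T> {
+///     allocated: ManuallyDrop<T>, // the live value, when allocated
+///     free: u32,                  // free-list link, when vacant
+/// }
+/// struct Slot<T> {
+///     state: AtomicU64, // generation + allocated flag + marking bits
+///     data: SlotData<T>,
+/// }
+/// struct Slab<T> {
+///     slots: [Slot<T>; 256],
+/// }
+/// struct Slabs<T> {
+///     slabs: Vec<Box<Slab<T>>>,
+///     next: Option<Box<Slabs<T>>>, // grown while other threads read
+/// }
+/// struct Bin<T> {
+///     slabs: Slabs<T>, // one bin per type, held in a by-type map
+/// }
+/// # fn main() {}
+/// ```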
+///
+/// The slot's state is stored in an atomic and keeps track of:
+///
+/// - A 32-bit generation. When loading a [`Ref`], its generation is validated
+///   to ensure it is the same allocation.
+/// - Whether the slot is allocated or not
+/// - Garbage collector marking bits
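+///
+/// A minimal sketch of such a packed state word (the bit positions shown here
+/// are an assumption for illustration, not the crate's exact layout):
+///
+/// ```rust
+/// const ALLOCATED: u64 = 1 << 32; // hypothetical position of the allocated flag
+///
+/// fn generation(state: u64) -> u32 {
+///     state as u32 // low 32 bits hold the generation
+/// }
+///
+/// fn is_allocated(state: u64) -> bool {
+///     state & ALLOCATED != 0
+/// }
+///
+/// # fn main() {
+/// let state = ALLOCATED | 7;
+/// assert!(is_allocated(state));
+/// assert_eq!(generation(state), 7);
+/// # }
+/// ```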
+///
+/// The owning thread and the collector are the only actors that can modify
+/// non-atomic data in a `Bin<T>`. Other threads may need to load a reference to
+/// a `Ref<T>`'s underlying data while the owning thread is allocating. This is
+/// made safe by:
+///
+/// - Because allocations for a new slab can't be referred to until after the
+///   allocating function returns, we can update `Slabs::next` safely while
+///   other threads read the data structure.
+/// - Each slot's state is controlled with atomic operations. This ensures
+///   consistent access for both the reading thread and the allocating thread.
+/// - The slot's state is generational, minimizing the chance of an invalid
+///   reference being promoted. Even if a "stale" ref contains a reused
+///   generation, the load will still point to valid data because of the order
+///   of initialization.
+///
+/// # Collection
+///
+/// Refuse is a naive, mark-and-sweep collector. Each collection is divided
+/// into three phases:
+///
+/// - Exclusive Access Acquisition
+/// - Tracing and marking
+/// - Sweeping
+///
+/// Refuse keeps track of two metrics:
+///
+/// - `average_collection_locking`: The average duration to acquire exclusive
+///   access.
+/// - `average_collection`: The average duration of a total collection process,
+///   including exclusive access acquisition.
+///
+/// ## Exclusive Access Acquisition
+///
+/// Refuse aims to be usable in nearly any application, including games. Games
+/// typically do not want to dip below 60 frames per second (FPS), which means
+/// that a garbage collection pause longer than 16ms will cause dropped frames.
+///
+/// Refuse tries to minimize pauses by waiting for exclusive access only for a
+/// multiple of `average_collection_locking`. If access isn't acquired by the
+/// deadline, collection is rescheduled in the near future with an increased
+/// multiple. If this process fails several times consecutively, garbage
+/// collection is forced by waiting indefinitely.
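+///
+/// A condensed sketch of the deadline math (mirroring the `acquire_all_locks`
+/// logic later in this file, where `3` is the base multiplier):
+///
+/// ```rust
+/// use std::time::{Duration, Instant};
+///
+/// fn long_lock_deadline(start: Instant, average_locking: Duration, pause_failures: u8) -> Instant {
+///     // Every consecutive failure widens the window before giving up.
+///     start + average_locking * u32::from(pause_failures + 1) * 3
+/// }
+/// # fn main() { let _ = long_lock_deadline(Instant::now(), Duration::from_micros(100), 0); }
+/// ```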
+///
+/// Access is controlled by a single [`AtomicUsize`]. A single bit keeps track
+/// of whether the collector is trying to collect or not. The remaining bits
+/// keep track of how many [`CollectionGuard`]s are acquired and not yielding.
+///
+/// [`CollectionGuard::acquire()`] checks if the collection bit is set. If it
+/// is, it waits until the current collection finishes and checks again. If the
+/// bit is not set, the count is atomically incremented.
+///
+/// When the final [`CollectionGuard`] drops or yields, it notifies the
+/// collector thread so that it can begin collecting.
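+///
+/// From the public API, this count is driven by acquiring, dropping, and
+/// yielding guards:
+///
+/// ```rust
+/// use refuse::CollectionGuard;
+///
+/// refuse::collected(|| {
+///     // Acquiring increments the guard count, preventing collection.
+///     let mut guard = CollectionGuard::acquire();
+///     // ... access collected data here ...
+///     // Yielding lets a pending collection run before continuing.
+///     guard.yield_to_collector();
+/// });
+/// ```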
+///
+/// ## Tracing and marking
+///
+/// The goal of this phase is to identify all allocations that can currently be
+/// reached by any [`Root<T>`]. When a slot is initially allocated, its marking
+/// bits are 0. Each time the collector runs, a new non-zero marking-bits value
+/// is selected by incrementing the previous value and skipping 0 on wrap.
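+///
+/// A sketch of that selection (an illustrative helper, assuming the full `u8`
+/// range is used for marking bits):
+///
+/// ```rust
+/// fn next_mark_bits(previous: u8) -> u8 {
+///     // Skip 0 on wrap so "never marked" stays distinguishable.
+///     match previous.wrapping_add(1) {
+///         0 => 1,
+///         bits => bits,
+///     }
+/// }
+/// # fn main() { assert_eq!(next_mark_bits(0), 1); assert_eq!(next_mark_bits(u8::MAX), 1); }
+/// ```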
+///
+/// All `Bin<T>`s of all threads are scanned for any `Slot<T>` that is allocated
+/// and has a non-zero root count. Each such slot is then marked. If the slot
+/// didn't already contain the current marking bits, it is [`Trace`]d, which
+/// allows any references it contains to be marked as well.
+///
+/// This process continues until all found references are marked and traced.
+///
+/// ## Sweeping
+///
+/// The goal of this phase is to free allocations that are no longer reachable.
+/// This is done by scanning all `Bin<T>`s of all threads, looking for any
+/// allocated `Slot<T>` that does not contain the current marking bits. When
+/// one is found, the slot is deallocated and its contained data has its `Drop`
+/// implementation invoked.
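+///
+/// The sweep decision reduces to a comparison like this (a hypothetical
+/// helper, not an actual API):
+///
+/// ```rust
+/// fn swept(allocated: bool, slot_mark_bits: u8, current_mark_bits: u8) -> bool {
+///     // Allocated but unmarked this cycle means unreachable: free it.
+///     allocated && slot_mark_bits != current_mark_bits
+/// }
+/// # fn main() { assert!(swept(true, 2, 3)); assert!(!swept(true, 3, 3)); }
+/// ```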
+pub mod architecture {}
+
+#[derive(Clone, Copy, Eq, PartialEq, Hash, Ord, PartialOrd)]
+struct CollectorThreadId(u32);
 
 impl CollectorThreadId {
     fn unique() -> Self {
-        static NEXT_ID: AtomicU64 = AtomicU64::new(0);
+        static NEXT_ID: AtomicU32 = AtomicU32::new(0);
         Self(NEXT_ID.fetch_add(1, Ordering::Release))
     }
 }
@@ -1961,7 +2489,7 @@ 
} } -type AllThreadBins = Arc<RwLock<AHashMap<CollectorThreadId, Weak<UnsafeBins>>>>; +type AllThreadBins = Arc<RwLock<Map<CollectorThreadId, Weak<UnsafeBins>>>>; struct ThreadBins { alive: bool, @@ -1975,6 +2503,7 @@
signalled_collector: AtomicBool, reader_state: ReaderState, all_threads: AllThreadBins, + type_indexes: RwLock<Map<TypeId, TypeIndex>>, } impl CollectorInfo { @@ -2003,7 +2532,7 @@
struct MarkRequest { thread: CollectorThreadId, - type_id: TypeId, + type_index: TypeIndex, slot_generation: u32, bin_id: BinId, mark_bits: u8, @@ -2013,7 +2542,7 @@
struct Collector { shared: Arc<CollectorInfo>, receiver: Receiver<CollectorCommand>, - thread_bins: AHashMap<CollectorThreadId, ThreadBins>, + thread_bins: Map<CollectorThreadId, ThreadBins>, active_threads: usize, mark_bits: u8, next_gc: Option<Instant>, @@ -2027,7 +2556,7 @@
Self { shared, receiver, - thread_bins: AHashMap::new(), + thread_bins: Map::new(), active_threads: 0, mark_bits: 0, next_gc: None, @@ -2168,13 +2697,13 @@
} fn acquire_all_locks<'a>( - thread_bins: &'a AHashMap<CollectorThreadId, ThreadBins>, + thread_bins: &'a Map<CollectorThreadId, ThreadBins>, start: Instant, average_collection_locking: Duration, pause_failures: u8, collector: &CollectorInfo, parker: &Parker, - ) -> Option<AHashMap<CollectorThreadId, &'a mut Bins>> { + ) -> Option<Map<CollectorThreadId, &'a mut Bins>> { let force_gc = pause_failures >= 2; let lock_wait = average_collection_locking * u32::from(pause_failures + 1) * 3; let long_lock_deadline = start + lock_wait; @@ -2197,7 +2726,7 @@
Some( thread_bins .iter() - .map(|(thread_id, bins)| (*thread_id, unsafe { bins.bins.assume_mut() })) + .map(|entry| (*entry.key(), unsafe { entry.value.bins.assume_mut() })) .collect(), ) } @@ -2246,7 +2775,7 @@
loop { let MarkRequest { thread, - type_id, + type_index, slot_generation, bin_id, mark_bits, @@ -2270,7 +2799,7 @@
let bins = all_bins[&thread] .by_type .read() - .get(&type_id) + .get(&type_index) .expect("areas are never deallocated") .clone(); if bins.mark_one(mark_bits, slot_generation, bin_id) { @@ -2284,7 +2813,7 @@
atomic::fence(Ordering::Acquire); let mut threads_to_remove = Vec::new(); - for (thread_id, bins) in all_bins { + for (thread_id, bins) in all_bins.into_iter().map(Field::into_parts) { let mut live_objects = 0_usize; for bin in bins.by_type.write().values_mut() { live_objects = live_objects.saturating_add(bin.sweep(self.mark_bits)); @@ -2332,6 +2861,7 @@
signalled_collector: AtomicBool::new(false), reader_state: ReaderState(AtomicUsize::new(0)), all_threads: AllThreadBins::default(), + type_indexes: RwLock::default(), }); thread::Builder::new() .name(String::from("collector")) @@ -2413,13 +2943,19 @@
lock.get_or_init(|| { let all_threads = GlobalCollector::get().info.all_threads.clone(); let bins = Arc::<UnsafeBins>::default(); - let thread_id = CollectorThreadId::unique(); - CollectorCommand::NewThread(thread_id, bins.clone()).send(); - all_threads.write().insert(thread_id, Arc::downgrade(&bins)); - ThreadLocalBins { - bins, - thread_id, - all_threads, + loop { + let thread_id = CollectorThreadId::unique(); + let mut threads = all_threads.write(); + if let kempt::map::Entry::Vacant(entry) = threads.entry(thread_id) { + CollectorCommand::NewThread(thread_id, bins.clone()).send(); + entry.insert(Arc::downgrade(&bins)); + drop(threads); + return ThreadLocalBins { + bins, + thread_id, + all_threads, + }; + } } }); wrapped() @@ -2616,9 +3152,8 @@
result } - fn adopt<T: Collectable>(&self, value: RefCounted<T>) -> (u32, BinId) { - let (gen, bin) = Bins::adopt(value, self); - (gen, bin) + fn adopt<T: Collectable>(&self, value: Rooted<T>) -> (TypeIndex, u32, BinId) { + Bins::adopt(value, self) } } @@ -2720,18 +3255,48 @@
impl<T> NoMapping for T where T: SimpleType {} impl<T> ContainsNoRefs for T where T: SimpleType {} -impl SimpleType for u8 {} -impl SimpleType for u16 {} -impl SimpleType for u32 {} -impl SimpleType for u64 {} -impl SimpleType for u128 {} -impl SimpleType for usize {} -impl SimpleType for i8 {} -impl SimpleType for i16 {} -impl SimpleType for i32 {} -impl SimpleType for i64 {} -impl SimpleType for i128 {} -impl SimpleType for isize {} +macro_rules! impl_simple_type { + ($($ty:ty),+ ,) => { + $(impl SimpleType for $ty {})+ + } +} + +impl_simple_type!( + u8, + u16, + u32, + u64, + u128, + usize, + i8, + i16, + i32, + i64, + i128, + isize, + AtomicU8, + AtomicU16, + AtomicU32, + AtomicU64, + AtomicUsize, + AtomicI8, + AtomicI16, + AtomicI32, + AtomicI64, + AtomicIsize, + NonZeroU8, + NonZeroU16, + NonZeroU32, + NonZeroU64, + NonZeroU128, + NonZeroUsize, + NonZeroI8, + NonZeroI16, + NonZeroI32, + NonZeroI64, + NonZeroI128, + NonZeroIsize, +); impl<T> Trace for Vec<T> where @@ -2748,6 +3313,147 @@
impl<T> NoMapping for Vec<T> {} +impl<T> Trace for VecDeque<T> +where + T: Trace, +{ + const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES; + + fn trace(&self, tracer: &mut Tracer) { + for item in self { + item.trace(tracer); + } + } +} + +impl<T> NoMapping for VecDeque<T> {} + +impl<T> Trace for BinaryHeap<T> +where + T: Trace, +{ + const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES; + + fn trace(&self, tracer: &mut Tracer) { + for item in self { + item.trace(tracer); + } + } +} + +impl<T> NoMapping for BinaryHeap<T> {} + +impl<T> Trace for LinkedList<T> +where + T: Trace, +{ + const MAY_CONTAIN_REFERENCES: bool = T::MAY_CONTAIN_REFERENCES; + + fn trace(&self, tracer: &mut Tracer) { + for item in self { + item.trace(tracer); + } + } +} + +impl<T> NoMapping for LinkedList<T> {} + +impl<K, V, S> Trace for HashMap<K, V, S> +where + K: Trace, + V: Trace, +{ + const MAY_CONTAIN_REFERENCES: bool = K::MAY_CONTAIN_REFERENCES || V::MAY_CONTAIN_REFERENCES; + + fn trace(&self, tracer: &mut Tracer) { + for (k, v) in self { + k.trace(tracer); + v.trace(tracer); + } + } +} + +impl<K, V, S> NoMapping for HashMap<K, V, S> {} + +impl<K, S> Trace for HashSet<K, S> +where + K: Trace, +{ + const MAY_CONTAIN_REFERENCES: bool = K::MAY_CONTAIN_REFERENCES; + + fn trace(&self, tracer: &mut Tracer) { + for k in self { + k.trace(tracer); + } + } +} + +impl<K, S> NoMapping for HashSet<K, S> {} + +impl<K, V> Trace for BTreeMap<K, V> +where + K: Trace, + V: Trace, +{ + const MAY_CONTAIN_REFERENCES: bool = K::MAY_CONTAIN_REFERENCES || V::MAY_CONTAIN_REFERENCES; + + fn trace(&self, tracer: &mut Tracer) { + for (k, v) in self { + k.trace(tracer); + v.trace(tracer); + } + } +} + +impl<K, V> NoMapping for BTreeMap<K, V> {} + +impl<K> Trace for BTreeSet<K> +where + K: Trace, +{ + const MAY_CONTAIN_REFERENCES: bool = K::MAY_CONTAIN_REFERENCES; + + fn trace(&self, tracer: &mut Tracer) { + for k in self { + k.trace(tracer); + } + } +} + +impl<K> NoMapping for BTreeSet<K> {} + +impl<K, V> Trace for Map<K, V> +where + K: Trace + kempt::Sort, + V: Trace, +{ + const MAY_CONTAIN_REFERENCES: bool = K::MAY_CONTAIN_REFERENCES || V::MAY_CONTAIN_REFERENCES; + + fn trace(&self, tracer: &mut Tracer) { + for field in self { + field.key().trace(tracer); + field.value.trace(tracer); + } + } +} + +impl<K, V> NoMapping for Map<K, V> where K: kempt::Sort {} + +impl<K> Trace for Set<K> +where + K: Trace + kempt::Sort, +{ + const MAY_CONTAIN_REFERENCES: bool = K::MAY_CONTAIN_REFERENCES; + + fn trace(&self, tracer: &mut Tracer) { + for k in self { + k.trace(tracer); + } + } +} + +impl<K> NoMapping for Set<K> where K: kempt::Sort {} + impl<T, const N: usize> Trace for [T; N] where T: Trace, @@ -2817,7 +3523,7 @@
self.mark_one_sender .send(MarkRequest { thread: collectable.creating_thread, - type_id: TypeId::of::<T>(), + type_index: collectable.type_index, slot_generation: collectable.slot_generation, bin_id: collectable.bin_id, mark_bits: self.mark_bit, @@ -2831,13 +3537,14 @@
fn size_of_types() { assert_eq!(std::mem::size_of::<Root<u32>>(), 24); assert_eq!(std::mem::size_of::<Ref<u32>>(), 16); + assert_eq!(std::mem::size_of::<AnyRef>(), 16); } /// A root reference to a `T` that has been allocated in the garbage collector. /// -/// This type behaves very similarly to [`Arc<T>`]. It is cheap-to-clone, -/// utilizing atomic reference counting to track the number of root references -/// currently exist to the underlying value. +/// This type behaves very similarly to [`Arc<T>`]. It implements `Deref<Target +/// = T>`, and it is also cheap-to-clone, utilizing atomic reference counting to +/// track the number of root references that currently exist to the underlying value. /// /// While any root references exist for a given allocation, the garbage /// collector will not collect the allocation. @@ -2845,7 +3552,7 @@
where T: Collectable, { - data: *const RefCounted<T>, + data: *const Rooted<T>, reference: Ref<T>, } @@ -2853,14 +3560,20 @@
where T: Collectable, { - fn from_parts(slot_generation: u32, bin_id: BinId, guard: &CollectionGuard) -> Self { + fn from_parts( + type_index: TypeIndex, + slot_generation: u32, + bin_id: BinId, + guard: &CollectionGuard, + ) -> Self { // SAFETY: The guard is always present except during allocation which // never invokes this function. Since `bin_id` was just allocated, we // also can assume that it is allocated. - let data = unsafe { guard.bins().allocated_slot_pointer::<T>(bin_id) }; + let data = unsafe { guard.bins().allocated_slot_pointer::<T>(type_index, bin_id) }; Self { data, reference: Ref { + type_index, creating_thread: guard.thread.thread_id, slot_generation, bin_id, @@ -2873,8 +3586,8 @@
/// the data. pub fn new(value: T, guard: impl AsRef<CollectionGuard>) -> Self { let guard = guard.as_ref(); - let (gen, bin) = guard.adopt(RefCounted::strong(value)); - Self::from_parts(gen, bin, guard) + let (type_index, gen, bin) = guard.adopt(Rooted::root(value)); + Self::from_parts(type_index, gen, bin, guard) } /// Returns a "weak" reference to this root. @@ -2889,15 +3602,29 @@
self.reference.as_any() } - fn ref_counted(&self) -> &RefCounted<T> { + fn as_rooted(&self) -> &Rooted<T> { // SAFETY: The garbage collector will not collect data while we have a - // strong count. The returned lifetime of the data is tied to `self`, - // which ensures the returned lifetime is valid only for as long as this - // `Root<T>` is alive. + // non-zero root count. The returned lifetime of the data is tied to + // `self`, which ensures the returned lifetime is valid only for as long + // as this `Root<T>` is alive. This ensures at least one root will + // remain in existence, preventing the count from reaching 0. unsafe { &(*self.data) } } } +impl<T> Clone for Root<T> +where + T: Collectable, +{ + fn clone(&self) -> Self { + self.as_rooted().roots.fetch_add(1, Ordering::Acquire); + Self { + data: self.data, + reference: self.reference, + } + } +} + // SAFETY: Root<T>'s usage of a pointer prevents auto implementation. // `Collectable` requires `Send`, and `Root<T>` ensures proper Send + Sync // behavior in its memory accesses. @@ -2914,7 +3641,7 @@
type Target = T; fn deref(&self) -> &Self::Target { - &self.ref_counted().value + &self.as_rooted().value } } @@ -2923,7 +3650,7 @@
T: Collectable, { fn drop(&mut self) { - if self.ref_counted().strong.fetch_sub(1, Ordering::Acquire) == 1 { + if self.as_rooted().roots.fetch_sub(1, Ordering::Acquire) == 1 { CollectorCommand::schedule_collect_if_needed(); } } @@ -2937,7 +3664,53 @@
/// /// Because of this, direct access to the data is not provided. To obtain a /// reference, call [`Ref::load()`]. +/// +/// # Loading a reference +/// +/// [`Ref::load()`] is used to provide a reference to data stored in the garbage +/// collector. +/// +/// ```rust +/// use refuse::{CollectionGuard, Ref}; +/// +/// refuse::collected(|| { +/// let guard = CollectionGuard::acquire(); +/// let data = Ref::new(42, &guard); +/// +/// assert_eq!(data.load(&guard), Some(&42)); +/// }); +/// ``` +/// +/// References returned from [`Ref::load()`] are tied to the lifetime of the +/// guard. This ensures that a reference to data can only be held between +/// moments when the garbage collector can be run. For example, these usages are +/// prevented by the compiler: +/// +/// ```rust,compile_fail +/// # use refuse::{CollectionGuard, Ref}; +/// let guard = CollectionGuard::acquire(); +/// let data = Ref::new(42, &guard); +/// let reference = data.load(&guard).unwrap(); +/// +/// drop(guard); +/// +/// // error[E0505]: cannot move out of `guard` because it is borrowed +/// assert_eq!(reference, &42); +/// ``` +/// +/// ```rust,compile_fail +/// # use refuse::{CollectionGuard, Ref}; +/// let mut guard = CollectionGuard::acquire(); +/// let data = Ref::new(42, &guard); +/// let reference = data.load(&guard).unwrap(); +/// +/// guard.yield_to_collector(); +/// +/// // error[E0502]: cannot borrow `guard` as mutable because it is also borrowed as immutable +/// assert_eq!(reference, &42); +/// ``` pub struct Ref<T> { + type_index: TypeIndex, creating_thread: CollectorThreadId, slot_generation: u32, bin_id: BinId, @@ -2952,9 +3725,10 @@
/// it. pub fn new(value: T, guard: impl AsRef<CollectionGuard>) -> Self { let guard = guard.as_ref(); - let (slot_generation, bin_id) = guard.adopt(RefCounted::weak(value)); + let (type_index, slot_generation, bin_id) = guard.adopt(Rooted::reference(value)); Self { + type_index, creating_thread: guard.thread.thread_id, slot_generation, bin_id, @@ -2966,7 +3740,7 @@
#[must_use] pub fn as_any(self) -> AnyRef { AnyRef { - type_id: TypeId::of::<T>(), + type_id: self.type_index, creating_thread: self.creating_thread, slot_generation: self.slot_generation, bin_id: self.bin_id, @@ -2975,19 +3749,17 @@
fn load_slot_from<'guard>( &self, - bins: &Map<TypeId, Arc<dyn AnyBin>>, + bins: &Map<TypeIndex, Arc<dyn AnyBin>>, guard: &'guard CollectionGuard, - ) -> Option<&'guard RefCounted<T>> { - let type_id = TypeId::of::<T>(); - bins.get(&type_id) - .assert("areas are never deallocated") - .as_any() + ) -> Option<&'guard Rooted<T>> { + bins.get(&self.type_index)? + .as_any() .downcast_ref::<Bin<T>>() .assert("type mismatch") .load(self.bin_id, self.slot_generation, guard) } - fn load_slot<'guard>(&self, guard: &'guard CollectionGuard) -> Option<&'guard RefCounted<T>> { + fn load_slot<'guard>(&self, guard: &'guard CollectionGuard) -> Option<&'guard Rooted<T>> { if guard.thread.thread_id == self.creating_thread { self.load_slot_from(&guard.bins().by_type.read(), guard) } else { @@ -3025,7 +3797,7 @@
#[must_use] pub fn as_root(&self, guard: &CollectionGuard) -> Option<Root<T>> { self.load_slot(guard).map(|allocated| { - allocated.strong.fetch_add(1, Ordering::Acquire); + allocated.roots.fetch_add(1, Ordering::Acquire); Root { data: allocated, reference: *self, @@ -3067,7 +3839,7 @@
#[derive(Default)] struct Bins { - by_type: RwLock<Map<TypeId, Arc<dyn AnyBin>>>, + by_type: RwLock<Map<TypeIndex, Arc<dyn AnyBin>>>, } impl Bins { @@ -3075,13 +3847,17 @@
/// /// This function must only be called when `bin_id` is known to be /// allocated. - unsafe fn allocated_slot_pointer<T>(&self, bin_id: BinId) -> *const RefCounted<T> + unsafe fn allocated_slot_pointer<T>( + &self, + type_index: TypeIndex, + bin_id: BinId, + ) -> *const Rooted<T> where T: Collectable, { let by_type = self.by_type.read(); let slot = &by_type - .get(&TypeId::of::<T>()) + .get(&type_index) .expect("areas are never deallocated") .as_any() .downcast_ref::<Bin<T>>() @@ -3094,11 +3870,11 @@
&*(*slot.value.get()).allocated } - fn adopt<T>(value: RefCounted<T>, bins_guard: &CollectionGuard) -> (u32, BinId) + fn adopt<T>(value: Rooted<T>, bins_guard: &CollectionGuard) -> (TypeIndex, u32, BinId) where T: Collectable, { - let type_id = TypeId::of::<T>(); + let type_id = TypeIndex::of::<T>(); let mut by_type = bins_guard.bins().by_type.upgradable_read(); if let Some(bin) = by_type.get(&type_id) { let (gen, bin) = bin @@ -3106,16 +3882,16 @@
.downcast_ref::<Bin<T>>() .expect("type mismatch") .adopt(value); - (gen, bin) + (type_id, gen, bin) } else { by_type.with_upgraded(|by_type| { // We don't need to check for another thread allocating, because the // only thread that can allocate is the local thread. We needed a // write guard, however, because other threads could be trying to // load data this thread allocated. - let bin = Bin::new(value); + let bin = Bin::new(value, type_id); by_type.insert(type_id, Arc::new(bin)); - (0, BinId::first()) + (type_id, 0, BinId::first()) }) } } @@ -3125,6 +3901,7 @@
where T: Collectable, { + type_index: TypeIndex, free_head: AtomicU32, slabs: Slabs<T>, slabs_tail: Cell<Option<*const Slabs<T>>>, @@ -3135,8 +3912,9 @@
where T: Collectable, { - fn new(first_value: RefCounted<T>) -> Self { + fn new(first_value: Rooted<T>, type_index: TypeIndex) -> Self { Self { + type_index, free_head: AtomicU32::new(0), slabs: Slabs::new(first_value, 0), slabs_tail: Cell::new(None), @@ -3144,7 +3922,8 @@
} } - fn adopt(&self, value: RefCounted<T>) -> (u32, BinId) { + fn adopt(&self, value: Rooted<T>) -> (u32, BinId) { + let mut value = Some(value); loop { let bin_id = BinId(self.free_head.load(Ordering::Acquire)); if bin_id.invalid() { @@ -3153,19 +3932,19 @@
let slab = &self.slabs[bin_id.slab() as usize]; let slot_index = bin_id.slot(); let slot = &slab.slots[usize::from(slot_index)]; - if let Some(generation) = slot.state.try_allocate() { - // SAFETY: Unallocated slots are only accessed through the - // current local thread while a guard is held, which must be - // true for this function to be invoked. try_allocate ensures - // this slot wasn't previously allocated, making it safe for us - // to initialize the data with `value`. - let next = unsafe { - let next = (*slot.value.get()).free; - slot.value.get().write(SlotData { - allocated: ManuallyDrop::new(value), - }); - next - }; + + // SAFETY: Unallocated slots are only accessed through the + // current local thread while a guard is held, which must be + // true for this function to be invoked. try_allocate ensures + // this slot wasn't previously allocated, making it safe for us + // to initialize the data with `value`. + if let Some((generation, next)) = slot.state.try_allocate(|| unsafe { + let next = (*slot.value.get()).free; + slot.value.get().write(SlotData { + allocated: ManuallyDrop::new(value.take().expect("only taken once")), + }); + next + }) { self.free_head.store(next, Ordering::Release); let _result = slab.last_allocated.fetch_update( Ordering::Release, @@ -3183,7 +3962,7 @@
} else { &self.slabs }; - let (generation, bin_id, new_tail) = tail.adopt(value); + let (generation, bin_id, new_tail) = tail.adopt(value.take().expect("only taken once")); if new_tail.is_some() { self.slabs_tail.set(new_tail); @@ -3196,7 +3975,7 @@
bin_id: BinId, slot_generation: u32, _guard: &'guard CollectionGuard, - ) -> Option<&'guard RefCounted<T>> { + ) -> Option<&'guard Rooted<T>> { let slab = self.slabs.get(bin_id.slab() as usize)?; let slot = &slab.slots[usize::from(bin_id.slot())]; slot.state @@ -3231,10 +4010,11 @@
}; // SAFETY: `state.generation()` only returns `Some()` when the // slot is allocated. - let strong_count = - unsafe { (*slot.value.get()).allocated.strong.load(Ordering::Relaxed) }; - if strong_count > 0 { + let root_count = + unsafe { (*slot.value.get()).allocated.roots.load(Ordering::Relaxed) }; + if root_count > 0 { tracer.mark::<T>(Ref { + type_index: self.type_index, creating_thread: tracer.tracing_thread, slot_generation, bin_id: BinId::new(slab_index.cast::<u32>(), index.cast::<u8>()), @@ -3386,7 +4166,7 @@
where T: Collectable, { - fn new(initial_value: RefCounted<T>, offset: usize) -> Self { + fn new(initial_value: Rooted<T>, offset: usize) -> Self { let mut initial_value = Some(initial_value); Self { offset, @@ -3414,7 +4194,7 @@
} } - fn adopt(&self, mut value: RefCounted<T>) -> (u32, BinId, Option<*const Slabs<T>>) { + fn adopt(&self, mut value: Rooted<T>) -> (u32, BinId, Option<*const Slabs<T>>) { let first_free = self.first_free_slab.get(); for index in first_free..256 { @@ -3508,7 +4288,7 @@
} impl<T> Slab<T> { - fn new(first_value: RefCounted<T>) -> Box<Self> + fn new(first_value: Rooted<T>) -> Box<Self> where T: Collectable, { @@ -3525,11 +4305,7 @@
this } - fn try_adopt( - &self, - value: RefCounted<T>, - slab_index: usize, - ) -> Result<(u32, BinId), RefCounted<T>> { + fn try_adopt(&self, value: Rooted<T>, slab_index: usize) -> Result<(u32, BinId), Rooted<T>> { if let Ok(last_allocated) = self.last_allocated.fetch_update( Ordering::Release, Ordering::Acquire, @@ -3550,7 +4326,7 @@
} union SlotData<T> { - allocated: ManuallyDrop<RefCounted<T>>, + allocated: ManuallyDrop<Rooted<T>>, free: u32, } @@ -3560,7 +4336,7 @@
} impl<T> Slot<T> { - fn allocate(&self, value: RefCounted<T>) -> u32 { + fn allocate(&self, value: Rooted<T>) -> u32 { let generation = self.state.allocate(); // SAFETY: `state.allocate()` will panic if the slot was previously // allocated. @@ -3595,22 +4371,22 @@
// SAFETY: Bin<T> is Sync as long as T is Sync. unsafe impl<T> Sync for Bin<T> where T: Collectable {} -struct RefCounted<T> { - strong: AtomicU64, +struct Rooted<T> { + roots: AtomicU64, value: T, } -impl<T> RefCounted<T> { - fn weak(value: T) -> Self { +impl<T> Rooted<T> { + fn reference(value: T) -> Self { Self { - strong: AtomicU64::new(0), + roots: AtomicU64::new(0), value, } } - fn strong(value: T) -> Self { + fn root(value: T) -> Self { Self { - strong: AtomicU64::new(1), + roots: AtomicU64::new(1), value, } } @@ -3641,23 +4417,19 @@
state & Self::ALLOCATED != 0 && state.cast::<u32>() == generation } - fn try_allocate(&self) -> Option<u32> { - let mut new_generation = None; - if self - .0 - .fetch_update(Ordering::Release, Ordering::Acquire, |state| { - (state & Self::ALLOCATED == 0).then(|| { - let generation = state.cast::<u32>().wrapping_add(1); - new_generation = Some(generation); - Self::ALLOCATED | u64::from(generation) - }) - }) - .is_ok() - { - new_generation - } else { - None - } + fn try_allocate<R>(&self, allocated: impl FnOnce() -> R) -> Option<(u32, R)> { + let state = self.0.load(Ordering::Acquire); + if state & Self::ALLOCATED != 0 { + return None; + } + + let result = allocated(); + let generation = state.cast::<u32>().wrapping_add(1); + + self.0 + .store(Self::ALLOCATED | u64::from(generation), Ordering::Release); + + Some((generation, result)) } fn allocate(&self) -> u32 { @@ -3724,9 +4496,31 @@
GlobalCollector::get().info.wait_for_collection(now); } +#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd)] +struct TypeIndex(u32); + +impl TypeIndex { + fn of<T: 'static>() -> TypeIndex { + let collector = GlobalCollector::get(); + let types = collector.info.type_indexes.read(); + let type_id = TypeId::of::<T>(); + if let Some(index) = types.get(&type_id) { + *index + } else { + drop(types); + let mut types = collector.info.type_indexes.write(); + let next_id = types.len(); + + *types + .entry(type_id) + .or_insert(TypeIndex(u32::try_from(next_id).expect("too many types"))) + } + } +} + /// A type-erased garbage collected reference. pub struct AnyRef { - type_id: TypeId, + type_id: TypeIndex, creating_thread: CollectorThreadId, slot_generation: u32, bin_id: BinId, @@ -3739,7 +4533,15 @@
where T: Collectable, { - (TypeId::of::<T>() == self.type_id).then_some(Ref { + let correct_type = GlobalCollector::get() + .info + .type_indexes + .read() + .get(&TypeId::of::<T>()) + == Some(&self.type_id); + + correct_type.then_some(Ref { + type_index: self.type_id, creating_thread: self.creating_thread, slot_generation: self.slot_generation, bin_id: self.bin_id, @@ -3747,7 +4549,7 @@
}) } - /// Returns a [`Strong<T>`] if the underlying reference points to a `T` that + /// Returns a [`Root<T>`] if the underlying reference points to a `T` that /// has not been collected. #[must_use] pub fn downcast_root<T>(&self, guard: &CollectionGuard) -> Option<Root<T>> @@ -3780,15 +4582,13 @@
fn load_mapped_slot_from<'guard, T>( &self, - bins: &Map<TypeId, Arc<dyn AnyBin>>, + bins: &Map<TypeIndex, Arc<dyn AnyBin>>, guard: &'guard CollectionGuard, ) -> Option<&'guard T> where T: ?Sized + 'static, { - let bins = bins - .get(&self.type_id) - .assert("areas are never deallocated"); + let bins = bins.get(&self.type_id)?; bins.mapper().downcast_ref::<Mapper<T>>()?.0.load_mapped( self.bin_id, diff --git a/main/trait.impl/core/clone/trait.Clone.js b/main/trait.impl/core/clone/trait.Clone.js index 8497164..e75ab91 100644 --- a/main/trait.impl/core/clone/trait.Clone.js +++ b/main/trait.impl/core/clone/trait.Clone.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl Clone for CollectionStarting"],["impl<T> Clone for Ref<T>"]] +"refuse":[["impl Clone for CollectionStarting"],["impl<T> Clone for Ref<T>"],["impl<T> Clone for Root<T>
where\n T: Collectable,
"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/cmp/trait.Eq.js b/main/trait.impl/core/cmp/trait.Eq.js index f02f3a9..878dd04 100644 --- a/main/trait.impl/core/cmp/trait.Eq.js +++ b/main/trait.impl/core/cmp/trait.Eq.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl Eq for CollectionStarting"]] +"refuse":[["impl Eq for CollectionStarting"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/cmp/trait.PartialEq.js b/main/trait.impl/core/cmp/trait.PartialEq.js index 1f5d5d1..6571de9 100644 --- a/main/trait.impl/core/cmp/trait.PartialEq.js +++ b/main/trait.impl/core/cmp/trait.PartialEq.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl PartialEq for CollectionStarting"]] +"refuse":[["impl PartialEq for CollectionStarting"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/convert/trait.AsMut.js b/main/trait.impl/core/convert/trait.AsMut.js index f36ac40..7bdaef0 100644 --- a/main/trait.impl/core/convert/trait.AsMut.js +++ b/main/trait.impl/core/convert/trait.AsMut.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl AsMut<CollectionGuard> for CollectionGuard"]] +"refuse":[["impl AsMut<CollectionGuard> for CollectionGuard"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/convert/trait.AsRef.js b/main/trait.impl/core/convert/trait.AsRef.js index efd4913..ef95a5e 100644 --- a/main/trait.impl/core/convert/trait.AsRef.js +++ b/main/trait.impl/core/convert/trait.AsRef.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl AsRef<CollectionGuard> for CollectionGuard"]] +"refuse":[["impl AsRef<CollectionGuard> for CollectionGuard"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/fmt/trait.Debug.js b/main/trait.impl/core/fmt/trait.Debug.js index dfca535..bfec073 100644 --- a/main/trait.impl/core/fmt/trait.Debug.js +++ b/main/trait.impl/core/fmt/trait.Debug.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl Debug for CollectionStarting"]] +"refuse":[["impl Debug for CollectionStarting"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/marker/trait.Copy.js b/main/trait.impl/core/marker/trait.Copy.js index 6061e46..62a3187 100644 --- a/main/trait.impl/core/marker/trait.Copy.js +++ b/main/trait.impl/core/marker/trait.Copy.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl Copy for CollectionStarting"],["impl<T> Copy for Ref<T>"]] +"refuse":[["impl Copy for CollectionStarting"],["impl<T> Copy for Ref<T>"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git 
a/main/trait.impl/core/marker/trait.Freeze.js b/main/trait.impl/core/marker/trait.Freeze.js index 0583fd7..2f2942a 100644 --- a/main/trait.impl/core/marker/trait.Freeze.js +++ b/main/trait.impl/core/marker/trait.Freeze.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl Freeze for AnyRef",1,["musegc::AnyRef"]],["impl Freeze for CollectionGuard",1,["musegc::CollectionGuard"]],["impl Freeze for CollectionStarting",1,["musegc::CollectionStarting"]],["impl<'a> Freeze for Tracer<'a>",1,["musegc::Tracer"]],["impl<T> Freeze for Ref<T>",1,["musegc::Ref"]],["impl<T> Freeze for Root<T>",1,["musegc::Root"]]] +"refuse":[["impl Freeze for AnyRef",1,["refuse::AnyRef"]],["impl Freeze for CollectionGuard",1,["refuse::CollectionGuard"]],["impl Freeze for CollectionStarting",1,["refuse::CollectionStarting"]],["impl<'a> Freeze for Tracer<'a>",1,["refuse::Tracer"]],["impl<T> Freeze for Ref<T>",1,["refuse::Ref"]],["impl<T> Freeze for Root<T>",1,["refuse::Root"]]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/marker/trait.Send.js b/main/trait.impl/core/marker/trait.Send.js index ff056e5..d5535bd 100644 --- a/main/trait.impl/core/marker/trait.Send.js +++ b/main/trait.impl/core/marker/trait.Send.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl Send for AnyRef",1,["musegc::AnyRef"]],["impl Send for CollectionGuard",1,["musegc::CollectionGuard"]],["impl Send for CollectionStarting",1,["musegc::CollectionStarting"]],["impl<'a> Send for Tracer<'a>",1,["musegc::Tracer"]],["impl<T> Send for Ref<T>
where\n T: Collectable,
"],["impl<T> Send for Root<T>
where\n T: Collectable,
"]] +"refuse":[["impl Send for AnyRef",1,["refuse::AnyRef"]],["impl Send for CollectionGuard",1,["refuse::CollectionGuard"]],["impl Send for CollectionStarting",1,["refuse::CollectionStarting"]],["impl<'a> Send for Tracer<'a>",1,["refuse::Tracer"]],["impl<T> Send for Ref<T>
where\n T: Collectable,
"],["impl<T> Send for Root<T>
where\n T: Collectable,
"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/marker/trait.StructuralPartialEq.js b/main/trait.impl/core/marker/trait.StructuralPartialEq.js index 8a09ea5..dd56bee 100644 --- a/main/trait.impl/core/marker/trait.StructuralPartialEq.js +++ b/main/trait.impl/core/marker/trait.StructuralPartialEq.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl StructuralPartialEq for CollectionStarting"]] +"refuse":[["impl StructuralPartialEq for CollectionStarting"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/marker/trait.Sync.js b/main/trait.impl/core/marker/trait.Sync.js index 76492aa..7f2b460 100644 --- a/main/trait.impl/core/marker/trait.Sync.js +++ b/main/trait.impl/core/marker/trait.Sync.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl Sync for AnyRef",1,["musegc::AnyRef"]],["impl Sync for CollectionGuard",1,["musegc::CollectionGuard"]],["impl Sync for CollectionStarting",1,["musegc::CollectionStarting"]],["impl<'a> Sync for Tracer<'a>",1,["musegc::Tracer"]],["impl<T> Sync for Ref<T>
where\n T: Collectable,
"],["impl<T> Sync for Root<T>
where\n T: Collectable,
"]] +"refuse":[["impl Sync for AnyRef",1,["refuse::AnyRef"]],["impl Sync for CollectionGuard",1,["refuse::CollectionGuard"]],["impl Sync for CollectionStarting",1,["refuse::CollectionStarting"]],["impl<'a> Sync for Tracer<'a>",1,["refuse::Tracer"]],["impl<T> Sync for Ref<T>
where\n T: Collectable,
"],["impl<T> Sync for Root<T>
where\n T: Collectable,
"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/marker/trait.Unpin.js b/main/trait.impl/core/marker/trait.Unpin.js index f66a9b7..542c07b 100644 --- a/main/trait.impl/core/marker/trait.Unpin.js +++ b/main/trait.impl/core/marker/trait.Unpin.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl Unpin for AnyRef",1,["musegc::AnyRef"]],["impl Unpin for CollectionGuard",1,["musegc::CollectionGuard"]],["impl Unpin for CollectionStarting",1,["musegc::CollectionStarting"]],["impl<'a> Unpin for Tracer<'a>",1,["musegc::Tracer"]],["impl<T> Unpin for Ref<T>",1,["musegc::Ref"]],["impl<T> Unpin for Root<T>",1,["musegc::Root"]]] +"refuse":[["impl Unpin for AnyRef",1,["refuse::AnyRef"]],["impl Unpin for CollectionGuard",1,["refuse::CollectionGuard"]],["impl Unpin for CollectionStarting",1,["refuse::CollectionStarting"]],["impl<'a> Unpin for Tracer<'a>",1,["refuse::Tracer"]],["impl<T> Unpin for Ref<T>",1,["refuse::Ref"]],["impl<T> Unpin for Root<T>",1,["refuse::Root"]]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/ops/deref/trait.Deref.js b/main/trait.impl/core/ops/deref/trait.Deref.js index 5e994d5..86fd3af 100644 --- a/main/trait.impl/core/ops/deref/trait.Deref.js +++ b/main/trait.impl/core/ops/deref/trait.Deref.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl<T> Deref for Root<T>
where\n T: Collectable,
"]] +"refuse":[["impl<T> Deref for Root<T>
where\n T: Collectable,
"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/ops/drop/trait.Drop.js b/main/trait.impl/core/ops/drop/trait.Drop.js index 997693b..47d00ab 100644 --- a/main/trait.impl/core/ops/drop/trait.Drop.js +++ b/main/trait.impl/core/ops/drop/trait.Drop.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl<T> Drop for Root<T>
where\n T: Collectable,
"]] +"refuse":[["impl<T> Drop for Root<T>
where\n T: Collectable,
"]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/panic/unwind_safe/trait.RefUnwindSafe.js b/main/trait.impl/core/panic/unwind_safe/trait.RefUnwindSafe.js index e28ccdb..38e250a 100644 --- a/main/trait.impl/core/panic/unwind_safe/trait.RefUnwindSafe.js +++ b/main/trait.impl/core/panic/unwind_safe/trait.RefUnwindSafe.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl !RefUnwindSafe for CollectionGuard",1,["musegc::CollectionGuard"]],["impl RefUnwindSafe for AnyRef",1,["musegc::AnyRef"]],["impl RefUnwindSafe for CollectionStarting",1,["musegc::CollectionStarting"]],["impl<'a> RefUnwindSafe for Tracer<'a>",1,["musegc::Tracer"]],["impl<T> RefUnwindSafe for Ref<T>",1,["musegc::Ref"]],["impl<T> RefUnwindSafe for Root<T>
where\n T: RefUnwindSafe,
",1,["musegc::Root"]]] +"refuse":[["impl !RefUnwindSafe for CollectionGuard",1,["refuse::CollectionGuard"]],["impl RefUnwindSafe for AnyRef",1,["refuse::AnyRef"]],["impl RefUnwindSafe for CollectionStarting",1,["refuse::CollectionStarting"]],["impl<'a> RefUnwindSafe for Tracer<'a>",1,["refuse::Tracer"]],["impl<T> RefUnwindSafe for Ref<T>",1,["refuse::Ref"]],["impl<T> RefUnwindSafe for Root<T>
where\n T: RefUnwindSafe,
",1,["refuse::Root"]]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/core/panic/unwind_safe/trait.UnwindSafe.js b/main/trait.impl/core/panic/unwind_safe/trait.UnwindSafe.js index 57191ec..a700cfb 100644 --- a/main/trait.impl/core/panic/unwind_safe/trait.UnwindSafe.js +++ b/main/trait.impl/core/panic/unwind_safe/trait.UnwindSafe.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[["impl !UnwindSafe for CollectionGuard",1,["musegc::CollectionGuard"]],["impl UnwindSafe for AnyRef",1,["musegc::AnyRef"]],["impl UnwindSafe for CollectionStarting",1,["musegc::CollectionStarting"]],["impl<'a> UnwindSafe for Tracer<'a>",1,["musegc::Tracer"]],["impl<T> UnwindSafe for Ref<T>",1,["musegc::Ref"]],["impl<T> UnwindSafe for Root<T>
where\n T: RefUnwindSafe,
",1,["musegc::Root"]]] +"refuse":[["impl !UnwindSafe for CollectionGuard",1,["refuse::CollectionGuard"]],["impl UnwindSafe for AnyRef",1,["refuse::AnyRef"]],["impl UnwindSafe for CollectionStarting",1,["refuse::CollectionStarting"]],["impl<'a> UnwindSafe for Tracer<'a>",1,["refuse::Tracer"]],["impl<T> UnwindSafe for Ref<T>",1,["refuse::Ref"]],["impl<T> UnwindSafe for Root<T>
where\n T: RefUnwindSafe,
",1,["refuse::Root"]]] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/musegc/trait.Collectable.js b/main/trait.impl/refuse/trait.Collectable.js similarity index 93% rename from main/trait.impl/musegc/trait.Collectable.js rename to main/trait.impl/refuse/trait.Collectable.js index 738a71c..934b065 100644 --- a/main/trait.impl/musegc/trait.Collectable.js +++ b/main/trait.impl/refuse/trait.Collectable.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[] +"refuse":[] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/musegc/trait.ContainsNoRefs.js b/main/trait.impl/refuse/trait.ContainsNoRefs.js similarity index 93% rename from main/trait.impl/musegc/trait.ContainsNoRefs.js rename to main/trait.impl/refuse/trait.ContainsNoRefs.js index 738a71c..934b065 100644 --- a/main/trait.impl/musegc/trait.ContainsNoRefs.js +++ b/main/trait.impl/refuse/trait.ContainsNoRefs.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[] +"refuse":[] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/musegc/trait.MapAs.js b/main/trait.impl/refuse/trait.MapAs.js similarity index 93% rename from main/trait.impl/musegc/trait.MapAs.js rename to main/trait.impl/refuse/trait.MapAs.js index 738a71c..934b065 100644 --- a/main/trait.impl/musegc/trait.MapAs.js +++ b/main/trait.impl/refuse/trait.MapAs.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[] +"refuse":[] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/musegc/trait.NoMapping.js b/main/trait.impl/refuse/trait.NoMapping.js similarity index 93% rename from main/trait.impl/musegc/trait.NoMapping.js rename to main/trait.impl/refuse/trait.NoMapping.js index 738a71c..934b065 100644 --- a/main/trait.impl/musegc/trait.NoMapping.js +++ b/main/trait.impl/refuse/trait.NoMapping.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[] +"refuse":[] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/musegc/trait.SimpleType.js b/main/trait.impl/refuse/trait.SimpleType.js similarity index 93% rename from main/trait.impl/musegc/trait.SimpleType.js rename to main/trait.impl/refuse/trait.SimpleType.js index 738a71c..934b065 100644 --- a/main/trait.impl/musegc/trait.SimpleType.js +++ b/main/trait.impl/refuse/trait.SimpleType.js @@ -1,3 +1,3 @@ (function() {var implementors = { -"musegc":[] +"refuse":[] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file diff --git a/main/trait.impl/musegc/trait.Trace.js b/main/trait.impl/refuse/trait.Trace.js similarity index 93% rename from main/trait.impl/musegc/trait.Trace.js rename to main/trait.impl/refuse/trait.Trace.js index 738a71c..934b065 100644 --- a/main/trait.impl/musegc/trait.Trace.js +++ b/main/trait.impl/refuse/trait.Trace.js @@ -1,3 +1,3 @@ (function() {var implementors = { 
-"musegc":[] +"refuse":[] };if (window.register_implementors) {window.register_implementors(implementors);} else {window.pending_implementors = implementors;}})() \ No newline at end of file