diff --git a/main/crates.js b/main/crates.js
index 889cf7b..7f164b8 100644
--- a/main/crates.js
+++ b/main/crates.js
@@ -1 +1 @@
-window.ALL_CRATES = ["musegc"];
\ No newline at end of file
+window.ALL_CRATES = ["refuse"];
\ No newline at end of file
diff --git a/main/help.html b/main/help.html
index 5b939e0..085d8c1 100644
--- a/main/help.html
+++ b/main/help.html
Architecture overview of the underlying design of Refuse.
Refuse is an incremental, tracing garbage collector. The collector can only run when it knows no threads are currently accessing collectable memory. This fits the access pattern of an RwLock: the collector can acquire a “write” lock to ensure that all other threads can’t read while it is running.
Originally, Refuse used an RwLock and a shared allocation arena. This did not perform well in multi-threaded benchmarks, so the global RwLock was replaced with atomics that track the number of currently acquired CollectionGuards and whether the collector is currently trying to start collection.
Each thread allocates its own independent allocation arena and stores a copy of it in thread-local storage. It also registers a copy with the global collector. Refuse’s public API ensures that no access is provided to the local thread’s data without first having acquired a CollectionGuard. This ensures that the collector can guarantee exclusive access to the underlying data.
Each thread is given its own allocation arena, which is a data structure designed for concurrently reading portions of its data while still being able to perform new allocations from the owning thread.
At the root of each arena is a map of types to type-erased Bin<T>s. A Bin<T> is the root of a linked list of Slabs<T>. Each Slabs<T> contains a list of Slab<T>s and an optional next Slabs<T>. Each Slab<T> holds 256 Slot<T>s. Each slot is a combination of the slot’s state and the slot’s data.
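The shape of that hierarchy can be sketched with ordinary Rust type definitions. This is only an illustration of the structure described above; the field names, the use of HashMap, and the packing of the slot state into a single atomic are assumptions, not the crate’s actual definitions.

use std::any::{Any, TypeId};
use std::cell::UnsafeCell;
use std::collections::HashMap;
use std::mem::MaybeUninit;
use std::sync::atomic::AtomicU64;

// One arena per thread: a map of types to type-erased bins.
struct Arena {
    bins: HashMap<TypeId, Box<dyn Any>>, // each entry holds a Bin<T>
}

// The root of a linked list of slab groups for a single type.
struct Bin<T> {
    slabs: Slabs<T>,
}

// A group of slabs plus an optional link to the next group; the owning
// thread only ever appends new groups.
struct Slabs<T> {
    slabs: Vec<Box<Slab<T>>>,
    next: Option<Box<Slabs<T>>>,
}

// A fixed-size block of 256 slots.
struct Slab<T> {
    slots: [Slot<T>; 256],
}

// A slot pairs the slot's state (allocated flag, generation, root count,
// marking bits; packed here into one atomic as an assumption) with storage
// for the slot's data.
struct Slot<T> {
    state: AtomicU64,
    data: UnsafeCell<MaybeUninit<T>>,
}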
The slot’s state is stored in an atomic that tracks, among other things, the slot’s generation: when data is loaded from a Ref, its generation is validated to ensure it is the same allocation.
The owning thread and the collector are the only ones that can modify non-atomic data in a Bin<T>. Other threads may need to load a reference to a Ref<T>’s underlying data while the owning thread is allocating. This is made safe in part by ensuring that the owning thread can update Slabs::next safely while other threads read the data structure.
Refuse is a naive, mark-and-sweep collector. Each collection is divided into three portions: acquiring exclusive access, marking, and sweeping, described below.
Refuse keeps track of two metrics: average_collection_locking, the average duration to acquire exclusive access; and average_collection, the average duration of a total collection process, including exclusive access acquisition.
Refuse’s goal is to be usable in nearly any application, including games. Games typically do not want to dip below 60 frames-per-second (FPS), which means that a garbage collection pause longer than 16ms will cause FPS drops.
Refuse tries to minimize pauses by waiting for exclusive access only for a multiple of average_collection_locking. If access isn’t acquired by the deadline, collection is rescheduled again in the near future with an increased multiple. If this process fails several times consecutively, garbage collection will be forced by waiting indefinitely.
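As a sketch of that scheduling policy, the deadline for each attempt can be derived from the running average and the number of consecutive failures. The names and the cutoff below are illustrative assumptions, not values taken from the crate.

use std::time::{Duration, Instant};

struct Schedule {
    average_collection_locking: Duration,
    consecutive_failures: u32,
}

impl Schedule {
    // Returns the deadline for acquiring exclusive access on the next attempt,
    // or None once collection should be forced by waiting indefinitely.
    fn next_deadline(&self, now: Instant) -> Option<Instant> {
        const FORCE_AFTER: u32 = 5; // illustrative cutoff
        if self.consecutive_failures >= FORCE_AFTER {
            None
        } else {
            let multiple = self.consecutive_failures + 1;
            Some(now + self.average_collection_locking * multiple)
        }
    }
}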
Access is controlled by a single AtomicUsize. A single bit keeps track of whether the collector is trying to collect or not. The remaining bits keep track of how many CollectionGuards are acquired and not yielding.
CollectionGuard::acquire() checks if the collection bit is set. If it is, it waits until the current collection finishes and checks again. If the bit is not set, the count is atomically incremented.
When the final CollectionGuard drops or yields, it notifies the collector thread so that it can begin collecting.
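The scheme can be modeled with a single atomic as below. The bit layout, names, and the busy-wait are illustrative assumptions; the real collector parks and wakes threads rather than spinning.

use std::sync::atomic::{AtomicUsize, Ordering};

const COLLECTING_BIT: usize = 1; // set while the collector wants exclusive access
const GUARD_ONE: usize = 2;      // the guard count lives in the remaining bits

static STATE: AtomicUsize = AtomicUsize::new(0);

fn acquire_guard() {
    loop {
        let current = STATE.load(Ordering::Acquire);
        if current & COLLECTING_BIT != 0 {
            // A collection is starting; wait for it to finish, then re-check.
            std::hint::spin_loop();
            continue;
        }
        // No collection pending: atomically increment the guard count.
        if STATE
            .compare_exchange_weak(
                current,
                current + GUARD_ONE,
                Ordering::AcqRel,
                Ordering::Acquire,
            )
            .is_ok()
        {
            return;
        }
    }
}

fn release_guard() {
    let previous = STATE.fetch_sub(GUARD_ONE, Ordering::AcqRel);
    let was_last_guard = previous >> 1 == 1;
    if was_last_guard && previous & COLLECTING_BIT != 0 {
        // The final guard released while a collection was pending:
        // this is where the collector thread would be notified.
    }
}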
The goal of this phase is to identify all allocations that can currently be reached by any Root<T>. When a slot is initially allocated, its marking bits are 0. Each time the collector runs, a new non-zero marking-bits value is selected by incrementing the previous value and skipping 0 on wrap.
All Bin<T>s of all threads are scanned for any Slot<T> that is allocated and has a non-zero root count. Each such slot is then marked. If the slot didn’t already contain the current marking bits, it is Traced, which allows any references found to be marked.
This process continues until all found references are marked and traced.
The goal of this phase is to free allocations that are no longer reachable. This is done by scanning all Bin<T>s of all threads, looking for any allocated Slot<T>s that do not contain the current marking bits. When found, the slot is deallocated and the contained data has its Drop implementation invoked.
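A much-simplified model of the mark and sweep passes over slot state is sketched below. It uses a flat slice instead of the Bin/Slabs/Slab hierarchy and omits tracing through Trace implementations; all names are illustrative assumptions.

struct SlotState {
    allocated: bool,
    root_count: usize,
    marking_bits: u8,
}

// Select a new non-zero marking value, skipping 0 on wrap so that freshly
// allocated slots (which start at 0) are never accidentally "marked".
fn next_marking_bits(previous: u8) -> u8 {
    match previous.wrapping_add(1) {
        0 => 1,
        value => value,
    }
}

// Mark phase: every allocated slot with a non-zero root count receives the
// current marking bits. The real collector also traces each newly marked
// value so the Refs it contains are marked in turn.
fn mark(slots: &mut [SlotState], current: u8) {
    for slot in slots.iter_mut() {
        if slot.allocated && slot.root_count > 0 {
            slot.marking_bits = current;
        }
    }
}

// Sweep phase: any allocated slot that did not receive the current marking
// bits is unreachable, so it is freed (and its data dropped).
fn sweep(slots: &mut [SlotState], current: u8) {
    for slot in slots.iter_mut() {
        if slot.allocated && slot.marking_bits != current {
            slot.allocated = false;
        }
    }
}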
pub fn collect()
Invokes the garbage collector.
This function will deadlock if any CollectionGuards are held by the current thread when invoked. If a guard is held, consider calling CollectionGuard::collect() instead.
pub fn collected<R>(wrapped: impl FnOnce() -> R) -> R
Executes wrapped with garbage collection available.
This function installs a garbage collector for this thread, if needed. Repeated and nested calls are allowed.
Invoking CollectionGuard::acquire() within wrapped will return a result, while invoking it outside of a collected context will panic.
This function utilizes Rust’s thread-local storage.
-This name is a placeholder. The design of this crate has nothing to do with Muse, so naming it muse-gc seems misleading.
An easy-to-use, incremental, multi-threaded garbage collector for Rust.
-use musegc::{CollectionGuard, Root, Ref};
+use refuse::{CollectionGuard, Root, Ref};
// Execute a closure with access to a garbage collector.
-musegc::collected(|| {
+refuse::collected(|| {
let guard = CollectionGuard::acquire();
// Allocate a vec![Ref(1), Ref(2), Ref(3)].
let values: Vec<Ref<u32>> = (1..=3).map(|value| Ref::new(value, &guard)).collect();
@@ -19,7 +15,7 @@
// Manually execute the garbage collector. Our data will not be freed,
// since `values` is a "root" reference.
- musegc::collect();
+ refuse::collect();
// Root references allow direct access to their data, even when a
// `CollectionGuard` isn't held.
@@ -36,6 +32,8 @@
guard.collect();
assert_eq!(one.load(&guard), None);
});
As the version number indicates, this crate is in early development. No semver compatibility will be provided until 0.1.0.
§Motivation
While working on Muse, @Ecton recognized the need for garbage
collection to prevent untrusted scripts from uncontrollably leaking
@@ -67,15 +65,25 @@
§Safety
MIRIFLAGS="-Zmiri-permissive-provenance -Zmiri-ignore-leaks" cargo +nightly miri test
-Zmiri-permissive-provenance: parking_lot internally casts a usize to a pointer, which breaks pointer provenance rules. Pointer provenance is currently only an experimental model, and nothing this collector is using from parking_lot couldn’t be implemented in a fashion that honors pointer provenance. Thus, this library’s author considers this an implementation detail that can be ignored.
--Zmiri-ignore-leaks: This crate uses thread local storage which is documented to not always run destructors for local keys on the main thread, as some platforms abort rather than performing cleanup code.
+-Zmiri-ignore-leaks: This crate spawns a global collector thread that never shuts down. Miri detects that the main thread does not wait for spawned threads to shut down and warns about this potential memory leak. When a thread is shut down and all of its data is no longer reachable, the thread storage will be cleaned up. However, the collector never shuts down and assumes that new threads could still be spawned at any given time. Additionally, on some platforms the main thread’s thread-local storage may not be cleaned up when the main thread exits, according to LocalKey’s documentation.
This crate exposes a safe API that guarantees no undefined behavior can be
triggered by incorrectly using the API or implementing the Collectable
trait
@@ -146,6 +154,6 @@
§1 thread
Structs§
- AnyRef: A type-erased garbage collected reference.
- CollectionGuard: A guard that prevents garbage collection while held.
- CollectionStarting: A lock has been established by the collector on data needed to resolve a reference.
- Ref: A reference to data stored in a garbage collector.
- Root: A root reference to a T that has been allocated in the garbage collector.
- Tracer: A tracer for the garbage collector.
Traits§
- Collectable: A type that can be garbage collected.
- ContainsNoRefs: A type that can be garbage collected that cannot contain any Ref<T>s.
- MapAs: A mapping from one type to another.
- NoMapping: A type that implements MapAs with an empty implementation.
- Trace: A type that can find and mark any references it has.
Functions§
- collect: Invokes the garbage collector.
- collected: Executes wrapped with garbage collection available.
A type-erased garbage collected reference.
-Returns a Ref<T>
if the underlying reference points to a T
.
Returns a [Strong<T>
] if the underlying reference points to a T
that
+
pub struct AnyRef { /* private fields */ }
A type-erased garbage collected reference.
+Returns a Ref<T>
if the underlying reference points to a T
.
Returns a Root<T>
if the underlying reference points to a T
that
has not been collected.
Returns a reference to the result of MapAs::map_as()
, if the value
-has not been collected and MapAs::Target
is T
.
Returns a reference to the result of MapAs::map_as()
, if the value
+has not been collected and MapAs::Target
is T
.
pub struct CollectionGuard { /* private fields */ }
A guard that prevents garbage collection while held.
To perform garbage collection, all threads must be paused to be traced. A CollectionGuard provides the ability to read garbage-collectable data by ensuring the garbage collector can’t run while it exists.
To ensure the garbage collector can run without long pauses, either:
- Hold CollectionGuards for short periods of time, dropping the guards when not needed.
- Call CollectionGuard::yield_to_collector() on a regular basis. This function is very cheap to invoke if the collector is not trying to acquire the lock.
This type should not be held across potentially blocking operations such as IO, reading from a channel, or any other operation that may pause the current thread. CollectionGuard::while_unlocked() can be used to temporarily release a guard during a long operation.
Acquires a lock that prevents the garbage collector from running.
This guard is used to provide read-only access to garbage collected allocations.
A panic will occur if this function is called outside of code executed by collected().
Manually invokes the garbage collector.
This method temporarily releases this guard’s lock and waits for a garbage collection to run. If a garbage collection is already in progress, this function will return when the in-progress collection completes. Otherwise, the collector is started and this function waits until the collection finishes.
Finally, the guard is reacquired before returning.
Yield to the garbage collector, if needed.
This function will not yield unless the garbage collector is trying to acquire this thread’s lock. Because of this, it is a fairly efficient function to invoke. To minimize collection pauses, long-held guards should call this function regularly.
Executes unlocked while this guard is temporarily released.
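A short usage sketch of this guard API, following the crate-level example above and using only the calls documented on this page and the module page:

use refuse::{CollectionGuard, Ref};

fn main() {
    refuse::collected(|| {
        let mut guard = CollectionGuard::acquire();
        let number = Ref::new(7_u32, &guard);
        assert_eq!(number.load(&guard), Some(&7));

        // In long-running code, give the collector a chance to run.
        guard.yield_to_collector();

        // Or run a collection explicitly while the guard is held.
        guard.collect();

        // Dropping the guard releases the lock entirely.
        drop(guard);
    });
}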
pub struct CollectionStarting;
A lock has been established by the collector on data needed to resolve a reference.
pub struct Ref<T> { /* private fields */ }
A reference to data stored in a garbage collector.
Unlike a Root<T>, this type is not guaranteed to have access to its underlying data. If no Collectable reachable via all active Roots marks this allocation, it will be collected.
Because of this, direct access to the data is not provided. To obtain a reference, call Ref::load().
Ref::load() is used to provide a reference to data stored in the garbage collector.
use refuse::{CollectionGuard, Ref};

refuse::collected(|| {
    let guard = CollectionGuard::acquire();
    let data = Ref::new(42, &guard);

    assert_eq!(data.load(&guard), Some(&42));
});
References returned from Ref::load() are tied to the lifetime of the guard. This ensures that a reference to data can only be held between moments when the garbage collector can be run. For example, these usages are prevented by the compiler:
let guard = CollectionGuard::acquire();
let data = Ref::new(42, &guard);
let reference = data.load(&guard).unwrap();

drop(guard);

// error[E0505]: cannot move out of `guard` because it is borrowed
assert_eq!(reference, &42);
let mut guard = CollectionGuard::acquire();
let data = Ref::new(42, &guard);
let reference = data.load(&guard).unwrap();

guard.yield_to_collector();

// error[E0502]: cannot borrow `guard` as mutable because it is also borrowed as immutable
assert_eq!(reference, &42);
Stores value in the garbage collector, returning a “weak” reference to it.
Loads a reference to the underlying data. Returns None if the data has been collected and is no longer available.
Returns CollectionStarting if self was created in another thread and that thread is currently locked by the garbage collector.
Loads a root reference to the underlying data. Returns None if the data has been collected and is no longer available.
Returns CollectionStarting if self was created in another thread and that thread is currently locked by the garbage collector.
pub struct Root<T>
where
    T: Collectable,
{ /* private fields */ }
A root reference to a T that has been allocated in the garbage collector.
This type behaves very similarly to Arc<T>. It implements Deref<Target = T>, and it is also cheap to clone, utilizing atomic reference counting to track the number of root references that currently exist to the underlying value.
While any root references exist for a given allocation, the garbage collector will not collect the allocation.
Stores value in the garbage collector, returning a root reference to the data.
Returns an untyped “weak” reference erased to this root.
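A usage sketch for Root<T>, assuming Root::new behaves as described above and that a downgrade() method returns a Ref<T>; treat the exact signatures as assumptions.

use refuse::{CollectionGuard, Ref, Root};

fn main() {
    refuse::collected(|| {
        let guard = CollectionGuard::acquire();
        let root = Root::new(42_u32, &guard);

        // Root<T> implements Deref<Target = T>, so the data is directly readable.
        assert_eq!(*root, 42);

        // Downgrading produces a "weak" Ref<T> that does not keep the data alive.
        let weak: Ref<u32> = root.downgrade();
        assert_eq!(weak.load(&guard), Some(&42));
    });
}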
pub struct Tracer<'a> { /* private fields */ }
A tracer for the garbage collector.
This type allows Collectable values to mark() any Ref<T>s they contain.
Marks collectable as being referenced, ensuring it is not garbage collected.
pub trait Collectable: Trace + MapAs + Send + Sync + 'static { }
A type that can be garbage collected.
A type needs to implement both Trace and MapAs to be collectable.
If a type can’t contain any Ref<T>s and no mapping functionality is desired, the SimpleType trait can be implemented instead of Trace and MapAs to enable collection.
If a type can’t contain any Ref<T>s, ContainsNoRefs can be implemented instead of Trace.
If no mapping functionality is desired, NoMapping can be implemented instead of MapAs.
pub trait ContainsNoRefs { }
A type that can be garbage collected that cannot contain any Ref<T>s.
Types that implement this trait automatically implement Collectable.
This trait reduces the boilerplate for implementing Collectable for self-contained types.
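A sketch of a collectable type that owns a Ref<T>, combining the Trace definition shown further below with NoMapping. The exact signature of Tracer::mark and the assumption that Ref<T> is cheaply copyable are not confirmed by the text above, so treat this as illustrative.

use refuse::{NoMapping, Ref, Trace, Tracer};

struct Node {
    value: u32,
    next: Option<Ref<Node>>,
}

impl Trace for Node {
    // Node can hold references, so it must be traced during the mark phase.
    const MAY_CONTAIN_REFERENCES: bool = true;

    fn trace(&self, tracer: &mut Tracer<'_>) {
        if let Some(next) = self.next {
            // Assumes Ref<T> is Copy and that Tracer::mark takes it by value.
            tracer.mark(next);
        }
    }
}

// No type-erased mapping is needed; NoMapping plus Trace makes Node Collectable.
impl NoMapping for Node {}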
pub trait MapAs {
    type Target: ?Sized + 'static;

    // Required method
    fn map_as(&self) -> &Self::Target;
}
A mapping from one type to another.
This trait is used by AnyRef::load_mapped() to enable type-erased loading of a secondary type.
If no mapping is desired, implement NoMapping instead.
pub trait NoMapping { }
A type that implements MapAs with an empty implementation.
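A sketch of implementing MapAs so that a value can later be loaded type-erased as its target via AnyRef::load_mapped(). Pairing it with ContainsNoRefs (since the type holds no Refs) supplies the Trace side of Collectable; the exact blanket-impl arrangement is assumed from the trait descriptions above.

use refuse::{ContainsNoRefs, MapAs};

struct Name {
    inner: String,
}

// Expose the contents as a type-erased str target.
impl MapAs for Name {
    type Target = str;

    fn map_as(&self) -> &Self::Target {
        &self.inner
    }
}

// Name contains no Ref<T>s, so ContainsNoRefs can stand in for Trace.
impl ContainsNoRefs for Name {}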
pub trait SimpleType { }
A type that can contain no Ref<T>s and has an empty MapAs implementation.
Implementing this trait for a type automatically implements NoMapping and ContainsNoRefs, which makes the type Collectable.
pub trait Trace {
    const MAY_CONTAIN_REFERENCES: bool;

    // Required method
    fn trace(&self, tracer: &mut Tracer<'_>);
}
A type that can find and mark any references it has.
If MAY_CONTAIN_REFERENCES is true, this type may contain references and should have its trace() function invoked during the collector’s “mark” phase.
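A sketch of opting a plain data type into collection via SimpleType, then allocating it with the documented Ref API; per the description above, SimpleType supplies the NoMapping and ContainsNoRefs implementations needed for Collectable.

use refuse::{CollectionGuard, Ref, SimpleType};

#[derive(Debug, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}

// Point holds no Ref<T>s and needs no mapping, so SimpleType is enough.
impl SimpleType for Point {}

fn main() {
    refuse::collected(|| {
        let guard = CollectionGuard::acquire();
        let point = Ref::new(Point { x: 1, y: 2 }, &guard);
        assert_eq!(point.load(&guard), Some(&Point { x: 1, y: 2 }));
    });
}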
diff --git a/main/settings.html b/main/settings.html
index b99d127..8fd19ac 100644
--- a/main/settings.html
+++ b/main/settings.html
@@ -1,2 +1,2 @@
-