author    Alan Wu <XrXr@users.noreply.github.com>    2023-03-16 15:39:27 -0400
committer Takashi Kokubun <takashikkbn@gmail.com>    2023-03-17 09:30:24 -0700
commit    10e4fa3a0f05f72733d132794cedd08906b40196 (patch)
tree      aaa4bb05559fc5216e76901044743d1cc59c29e0 /yjit/src/invariants.rs
parent    c62cf60d187fbe74da97f79148789cdd1bfa0332 (diff)
YJIT: Use raw pointers and shared references over `Rc<RefCell<_>>`
`Rc` and `RefCell` both incur runtime space costs. In addition, `RefCell` has given us some headaches with the non-obvious borrow panics it likes to throw out. The latest one started with 7fd53eeb46db261bbc20025cdab70096245a5cbe and is yet to be resolved.

Since we already rely on the GC to properly reclaim memory for `Block` and `Branch`, we might as well stop paying the overhead of `Rc` and `RefCell`. The `RefCell` panics go away with this change, too. On 25 iterations of `railsbench` with a stats build I got `yjit_alloc_size: 8,386,129 => 7,348,637`, with the new memory size 87.6% of the status quo. This makes the metadata and machine code size roughly line up one-to-one.

The general idea here is to use `&` shared references with [interior mutability][1] via `Cell`, which doesn't take any extra space. The `noalias` requirement that `&mut` imposes is way too hard to meet and verify. Imagine replacing places where we would've gotten `BorrowError` from `RefCell` with Rust/LLVM miscompiling us due to aliasing violations. With shared references, we don't have to think about subtle cases like the GC _sometimes_ calling the mark callback while codegen has an aliasing reference in a stack frame below. We mostly only need to worry about liveness, with which the GC already helps.

There is now a clean split between blocks and branches that are not yet fully constructed and ones that are "in service", so to speak. Working with `PendingBranch` and `JITState` doesn't really involve `unsafe` stuff. This change also lets `Branch` and `Block` drop many of their optional fields, since most of them were only optional during compilation. Fields that change post-compilation are wrapped in `Cell` to facilitate mutation through shared references.

I do some `unsafe` dances here. I've included just a couple of tests to run with Miri (`cargo +nightly miri test miri`). We can add more Miri tests if desired.

[1]: https://doc.rust-lang.org/std/cell/struct.UnsafeCell.html
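To make the `Cell` idea concrete, here is a minimal sketch of the pattern, assuming a simplified stand-in for YJIT's `Block` (the `start_addr`/`end_addr` fields here are hypothetical, not the real definitions): post-compilation mutation goes through `Cell` setters on `&` shared references, so aliasing references are fine and there is no borrow bookkeeping to panic.

```rust
use std::cell::Cell;

// Illustrative stand-in only; YJIT's real `Block` differs in detail.
struct Block {
    // Fixed once the block is fully initialized.
    start_addr: usize,
    // Changes post-compilation, so it is wrapped in `Cell` and can be
    // mutated through a shared reference.
    end_addr: Cell<Option<usize>>,
}

fn finish(block: &Block, end: usize) {
    // No borrow tracking and no possible panic: `Cell::set` replaces
    // the value through a `&` shared reference.
    block.end_addr.set(Some(end));
}

fn main() {
    let block = Block { start_addr: 0x1000, end_addr: Cell::new(None) };
    let alias_a = &block;
    let alias_b = &block; // aliasing shared references are fine with `Cell`
    finish(alias_a, 0x1040);
    assert_eq!(alias_b.end_addr.get(), Some(0x1040));
    let _ = alias_a.start_addr; // reads go through the same shared reference
}
```

Unlike `RefCell`, `Cell` stores no borrow flag, so it adds no space overhead and cannot panic at runtime; the trade-off is that values move in and out by copy or replacement rather than by holding a reference into the cell.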
Diffstat (limited to 'yjit/src/invariants.rs')
-rw-r--r--  yjit/src/invariants.rs  119
1 file changed, 58 insertions(+), 61 deletions(-)
diff --git a/yjit/src/invariants.rs b/yjit/src/invariants.rs
index dbeafe1969..0a969905dc 100644
--- a/yjit/src/invariants.rs
+++ b/yjit/src/invariants.rs
@@ -16,8 +16,8 @@ use std::mem;
// Invariants to track:
// assume_bop_not_redefined(jit, INTEGER_REDEFINED_OP_FLAG, BOP_PLUS)
// assume_method_lookup_stable(comptime_recv_klass, cme, jit);
-// assume_single_ractor_mode(jit)
-// assume_stable_global_constant_state(jit);
+// assume_single_ractor_mode()
+// track_stable_constant_names_assumption()
/// Used to track all of the various block references that contain assumptions
/// about the state of the virtual machine.
@@ -78,9 +78,9 @@ impl Invariants {
}
}
-/// A public function that can be called from within the code generation
-/// functions to ensure that the block being generated is invalidated when the
-/// basic operator is redefined.
+/// Mark the pending block as assuming that certain basic operators (e.g. Integer#==)
+/// have not been redefined.
+#[must_use]
pub fn assume_bop_not_redefined(
jit: &mut JITState,
ocb: &mut OutlinedCb,
@@ -89,18 +89,7 @@ pub fn assume_bop_not_redefined(
) -> bool {
if unsafe { BASIC_OP_UNREDEFINED_P(bop, klass) } {
jit_ensure_block_entry_exit(jit, ocb);
-
- let invariants = Invariants::get_instance();
- invariants
- .basic_operator_blocks
- .entry((klass, bop))
- .or_default()
- .insert(jit.get_block());
- invariants
- .block_basic_operators
- .entry(jit.get_block())
- .or_default()
- .insert((klass, bop));
+ jit.bop_assumptions.push((klass, bop));
return true;
} else {
@@ -108,28 +97,33 @@ pub fn assume_bop_not_redefined(
}
}
-// Remember that a block assumes that
-// `rb_callable_method_entry(receiver_klass, cme->called_id) == cme` and that
-// `cme` is valid.
-// When either of these assumptions becomes invalid, rb_yjit_method_lookup_change() or
-// rb_yjit_cme_invalidate() invalidates the block.
-//
-// @raise NoMemoryError
-pub fn assume_method_lookup_stable(
- jit: &mut JITState,
- ocb: &mut OutlinedCb,
+/// Track that a block is only valid when a certain basic operator has not been redefined
+/// since the block's inception.
+pub fn track_bop_assumption(uninit_block: BlockRef, bop: (RedefinitionFlag, ruby_basic_operators)) {
+ let invariants = Invariants::get_instance();
+ invariants
+ .basic_operator_blocks
+ .entry(bop)
+ .or_default()
+ .insert(uninit_block);
+ invariants
+ .block_basic_operators
+ .entry(uninit_block)
+ .or_default()
+ .insert(bop);
+}
+
+/// Track that a block will assume that `cme` is valid (false == METHOD_ENTRY_INVALIDATED(cme)).
+/// [rb_yjit_cme_invalidate] invalidates the block when `cme` is invalidated.
+pub fn track_method_lookup_stability_assumption(
+ uninit_block: BlockRef,
callee_cme: *const rb_callable_method_entry_t,
) {
- jit_ensure_block_entry_exit(jit, ocb);
-
- let block = jit.get_block();
- jit.push_cme_dependency(callee_cme);
-
Invariants::get_instance()
.cme_validity
.entry(callee_cme)
.or_default()
- .insert(block);
+ .insert(uninit_block);
}
// Checks rb_method_basic_definition_p and registers the current block for invalidation if method
@@ -141,10 +135,10 @@ pub fn assume_method_basic_definition(
ocb: &mut OutlinedCb,
klass: VALUE,
mid: ID
- ) -> bool {
+) -> bool {
if unsafe { rb_method_basic_definition_p(klass, mid) } != 0 {
let cme = unsafe { rb_callable_method_entry(klass, mid) };
- assume_method_lookup_stable(jit, ocb, cme);
+ jit.assume_method_lookup_stable(ocb, cme);
true
} else {
false
@@ -158,22 +152,24 @@ pub fn assume_single_ractor_mode(jit: &mut JITState, ocb: &mut OutlinedCb) -> bo
false
} else {
jit_ensure_block_entry_exit(jit, ocb);
- Invariants::get_instance()
- .single_ractor
- .insert(jit.get_block());
+ jit.block_assumes_single_ractor = true;
+
true
}
}
-/// Walk through the ISEQ to go from the current opt_getinlinecache to the
-/// subsequent opt_setinlinecache and find all of the name components that are
-/// associated with this constant (which correspond to the getconstant
-/// arguments).
-pub fn assume_stable_constant_names(jit: &mut JITState, ocb: &mut OutlinedCb, idlist: *const ID) {
- /// Tracks that a block is assuming that the name component of a constant
- /// has not changed since the last call to this function.
+/// Track that the block will assume single ractor mode.
+pub fn track_single_ractor_assumption(uninit_block: BlockRef) {
+ Invariants::get_instance()
+ .single_ractor
+ .insert(uninit_block);
+}
+
+/// Track that a block will assume that the name components of a constant path expression
+/// have not changed since the block's full initialization.
+pub fn track_stable_constant_names_assumption(uninit_block: BlockRef, idlist: *const ID) {
fn assume_stable_constant_name(
- jit: &mut JITState,
+ uninit_block: BlockRef,
id: ID,
) {
if id == idNULL as u64 {
@@ -186,10 +182,10 @@ pub fn assume_stable_constant_names(jit: &mut JITState, ocb: &mut OutlinedCb, id
.constant_state_blocks
.entry(id)
.or_default()
- .insert(jit.get_block());
+ .insert(uninit_block);
invariants
.block_constant_states
- .entry(jit.get_block())
+ .entry(uninit_block)
.or_default()
.insert(id);
}
@@ -198,12 +194,9 @@ pub fn assume_stable_constant_names(jit: &mut JITState, ocb: &mut OutlinedCb, id
for i in 0.. {
match unsafe { *idlist.offset(i) } {
0 => break, // End of NULL terminated list
- id => assume_stable_constant_name(jit, id),
+ id => assume_stable_constant_name(uninit_block, id),
}
}
-
- jit_ensure_block_entry_exit(jit, ocb);
-
}
/// Called when a basic operator is redefined. Note that all the blocks assuming
@@ -344,19 +337,22 @@ pub extern "C" fn rb_yjit_root_mark() {
/// Remove all invariant assumptions made by the block by removing the block
/// as a key in all of the relevant tables.
-pub fn block_assumptions_free(blockref: &BlockRef) {
+/// For safety, the block has to be initialized and the VM lock must be held.
+/// However, outgoing/incoming references to the block do _not_ need to be valid.
+pub fn block_assumptions_free(blockref: BlockRef) {
let invariants = Invariants::get_instance();
{
- let block = blockref.borrow();
+ // SAFETY: caller ensures that this reference is valid
+ let block = unsafe { blockref.as_ref() };
// For each method lookup dependency
for dep in block.iter_cme_deps() {
// Remove tracking for cme validity
- if let Some(blockset) = invariants.cme_validity.get_mut(dep) {
- blockset.remove(blockref);
+ if let Some(blockset) = invariants.cme_validity.get_mut(&dep) {
+ blockset.remove(&blockref);
if blockset.is_empty() {
- invariants.cme_validity.remove(dep);
+ invariants.cme_validity.remove(&dep);
}
}
}
@@ -506,17 +502,18 @@ pub extern "C" fn rb_yjit_tracing_invalidate_all() {
if on_stack_iseqs.contains(&iseq) {
// This ISEQ is running, so we can't free blocks immediately
for block in blocks {
- delayed_deallocation(&block);
+ delayed_deallocation(block);
}
payload.dead_blocks.shrink_to_fit();
} else {
// Safe to free dead blocks since the ISEQ isn't running
+ // Since we're freeing _all_ blocks, we don't need to keep the graph well formed
for block in blocks {
- free_block(&block);
+ unsafe { free_block(block, false) };
}
mem::take(&mut payload.dead_blocks)
- .iter()
- .for_each(free_block);
+ .into_iter()
+ .for_each(|block| unsafe { free_block(block, false) });
}
}
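As a closing illustration of the `block_assumptions_free` contract in the hunk above, here is a hedged, self-contained sketch assuming `BlockRef` is a `NonNull<Block>`-style raw pointer (the `Block` type and its field are hypothetical stand-ins, not YJIT's real definitions): the caller guarantees the block is alive and initialized, and the callee upgrades the pointer to a shared reference inside a narrow `unsafe` block with a SAFETY comment, mirroring the style the diff adopts.

```rust
use std::ptr::NonNull;

// Hypothetical stand-in for illustration; YJIT's real `Block` lives in
// core.rs and carries much more state.
struct Block {
    iseq_start_index: u32,
}

type BlockRef = NonNull<Block>;

fn block_assumptions_free(blockref: BlockRef) {
    // SAFETY: caller guarantees the block is fully initialized and alive
    // for the duration of this call (in YJIT's case, additionally that
    // the VM lock is held).
    let block = unsafe { blockref.as_ref() };
    // Read fields through the shared reference; no `&mut` is ever formed,
    // so other live aliases to the block cannot cause UB here.
    println!("freeing assumptions for block {}", block.iseq_start_index);
}

fn main() {
    // Leak a heap allocation to get a stable pointer, standing in for a
    // block whose lifetime is otherwise managed by the GC.
    let blockref = NonNull::from(Box::leak(Box::new(Block { iseq_start_index: 0 })));
    block_assumptions_free(blockref);
    // SAFETY: reclaim the leaked allocation exactly once, after all uses.
    unsafe { drop(Box::from_raw(blockref.as_ptr())) };
}
```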