diff options
Diffstat (limited to 'yjit/src/codegen.rs')
-rw-r--r-- | yjit/src/codegen.rs | 83 |
1 file changed, 49 insertions, 34 deletions
diff --git a/yjit/src/codegen.rs b/yjit/src/codegen.rs index 5c06b08b47..5de12e0420 100644 --- a/yjit/src/codegen.rs +++ b/yjit/src/codegen.rs @@ -310,9 +310,6 @@ fn jit_prepare_routine_call( jit.record_boundary_patch_point = true; jit_save_pc(jit, asm); gen_save_sp(asm); - // After jit_prepare_routine_call(), we often pop operands before asm.ccall(). - // They should be spilled here to let GC mark them. - asm.spill_temps(); // In case the routine calls Ruby methods, it can set local variables // through Kernel#binding and other means. @@ -1347,10 +1344,11 @@ fn gen_splatarray( jit_prepare_routine_call(jit, asm); // Get the operands from the stack - let ary_opnd = asm.stack_pop(1); + let ary_opnd = asm.stack_opnd(0); // Call rb_vm_splat_array(flag, ary) let ary = asm.ccall(rb_vm_splat_array as *const u8, vec![flag.into(), ary_opnd]); + asm.stack_pop(1); // Keep it on stack during ccall for GC let stack_ret = asm.stack_push(Type::TArray); asm.mov(stack_ret, ary); @@ -1369,11 +1367,12 @@ fn gen_concatarray( jit_prepare_routine_call(jit, asm); // Get the operands from the stack - let ary2st_opnd = asm.stack_pop(1); - let ary1_opnd = asm.stack_pop(1); + let ary2st_opnd = asm.stack_opnd(0); + let ary1_opnd = asm.stack_opnd(1); // Call rb_vm_concat_array(ary1, ary2st) let ary = asm.ccall(rb_vm_concat_array as *const u8, vec![ary1_opnd, ary2st_opnd]); + asm.stack_pop(2); // Keep them on stack during ccall for GC let stack_ret = asm.stack_push(Type::TArray); asm.mov(stack_ret, ary); @@ -1790,7 +1789,7 @@ fn gen_setlocal_generic( jit_prepare_routine_call(jit, asm); // Pop the value to write from the stack - let value_opnd = asm.stack_pop(1); + let value_opnd = asm.stack_opnd(0); // void rb_vm_env_write(const VALUE *ep, int index, VALUE v) let index = -(ep_offset as i64); @@ -1802,6 +1801,7 @@ fn gen_setlocal_generic( value_opnd, ] ); + asm.stack_pop(1); // Keep it on stack during ccall for GC return Some(KeepCompiling); } @@ -1958,13 +1958,14 @@ fn gen_checkmatch( 
jit_prepare_routine_call(jit, asm); } - let pattern = asm.stack_pop(1); - let target = asm.stack_pop(1); + let pattern = asm.stack_opnd(0); + let target = asm.stack_opnd(1); extern "C" { fn rb_vm_check_match(ec: EcPtr, target: VALUE, pattern: VALUE, num: u32) -> VALUE; } let result = asm.ccall(rb_vm_check_match as *const u8, vec![EC, target, pattern, flag.into()]); + asm.stack_pop(2); // Keep them on stack during ccall for GC let stack_ret = asm.stack_push(Type::Unknown); asm.mov(stack_ret, result); @@ -2087,8 +2088,8 @@ fn gen_set_ivar( jit_prepare_routine_call(jit, asm); // Get the operands from the stack - let val_opnd = asm.stack_pop(1); - let recv_opnd = asm.stack_pop(1); + let val_opnd = asm.stack_opnd(0); + let recv_opnd = asm.stack_opnd(1); // Call rb_vm_set_ivar_id with the receiver, the ivar name, and the value let val = asm.ccall( @@ -2099,6 +2100,7 @@ fn gen_set_ivar( val_opnd, ], ); + asm.stack_pop(2); // Keep them on stack during ccall for GC let out_opnd = asm.stack_push(Type::Unknown); asm.mov(out_opnd, val); @@ -2427,7 +2429,7 @@ fn gen_setinstancevariable( jit_prepare_routine_call(jit, asm); // Get the operands from the stack - let val_opnd = asm.stack_pop(1); + let val_opnd = asm.stack_opnd(0); // Call rb_vm_setinstancevariable(iseq, obj, id, val, ic); asm.ccall( @@ -2440,6 +2442,7 @@ fn gen_setinstancevariable( Opnd::const_ptr(ic as *const u8), ] ); + asm.stack_pop(1); // Keep it on stack during ccall for GC } else { // Get the receiver let mut recv = asm.load(Opnd::mem(64, CFP, RUBY_OFFSET_CFP_SELF)); @@ -2557,10 +2560,11 @@ fn gen_defined( jit_prepare_routine_call(jit, asm); // Get the operands from the stack - let v_opnd = asm.stack_pop(1); + let v_opnd = asm.stack_opnd(0); // Call vm_defined(ec, reg_cfp, op_type, obj, v) let def_result = asm.ccall(rb_vm_defined as *const u8, vec![EC, CFP, op_type.into(), obj.into(), v_opnd]); + asm.stack_pop(1); // Keep it on stack during ccall for GC // if (vm_defined(ec, GET_CFP(), op_type, obj, v)) { // 
val = pushval; @@ -3259,7 +3263,7 @@ fn gen_opt_aref_with( jit_prepare_routine_call(jit, asm); let key_opnd = Opnd::Value(jit.get_arg(0)); - let recv_opnd = asm.stack_pop(1); + let recv_opnd = asm.stack_opnd(0); extern "C" { fn rb_vm_opt_aref_with(recv: VALUE, key: VALUE) -> VALUE; @@ -3272,6 +3276,7 @@ fn gen_opt_aref_with( key_opnd ], ); + asm.stack_pop(1); // Keep it on stack during GC asm.cmp(val_opnd, Qundef.into()); asm.je(Target::side_exit(Counter::opt_aref_with_qundef)); @@ -4524,14 +4529,15 @@ fn jit_rb_int_div( jit_prepare_routine_call(jit, asm); asm_comment!(asm, "Integer#/"); - let obj = asm.stack_pop(1); - let recv = asm.stack_pop(1); + let obj = asm.stack_opnd(0); + let recv = asm.stack_opnd(1); // Check for arg0 % 0 asm.cmp(obj, VALUE::fixnum_from_usize(0).as_i64().into()); asm.je(Target::side_exit(Counter::opt_div_zero)); let ret = asm.ccall(rb_fix_div_fix as *const u8, vec![recv, obj]); + asm.stack_pop(2); // Keep them during ccall for GC let ret_opnd = asm.stack_push(Type::Unknown); asm.mov(ret_opnd, ret); @@ -4637,6 +4643,7 @@ fn jit_rb_str_uplus( // We allocate when we dup the string jit_prepare_routine_call(jit, asm); + asm.spill_temps(); // For ccall. Unconditionally spill them for RegTemps consistency. asm_comment!(asm, "Unary plus on string"); let recv_opnd = asm.stack_pop(1); @@ -4712,9 +4719,10 @@ fn jit_rb_str_getbyte( // Raises when non-integers are passed in jit_prepare_routine_call(jit, asm); - let index = asm.stack_pop(1); - let recv = asm.stack_pop(1); + let index = asm.stack_opnd(0); + let recv = asm.stack_opnd(1); let ret_opnd = asm.ccall(rb_str_getbyte as *const u8, vec![recv, index]); + asm.stack_pop(2); // Keep them on stack during ccall for GC // Can either return a FIXNUM or nil let out_opnd = asm.stack_push(Type::UnknownImm); @@ -4803,6 +4811,7 @@ fn jit_rb_str_concat( // other Ractors may trigger global invalidation, so we need ctx.clear_local_types(). 
// PC is used on errors like Encoding::CompatibilityError raised by rb_str_buf_append. jit_prepare_routine_call(jit, asm); + asm.spill_temps(); // For ccall. Unconditionally spill them for RegTemps consistency. let concat_arg = asm.stack_pop(1); let recv = asm.stack_pop(1); @@ -4826,7 +4835,6 @@ fn jit_rb_str_concat( asm.jnz(enc_mismatch); // If encodings match, call the simple append function and jump to return - asm.spill_temps(); // for ccall let ret_opnd = asm.ccall(rb_yjit_str_simple_append as *const u8, vec![recv, concat_arg]); let ret_label = asm.new_label("func_return"); let stack_ret = asm.stack_push(Type::TString); @@ -4836,7 +4844,7 @@ fn jit_rb_str_concat( // If encodings are different, use a slower encoding-aware concatenate asm.write_label(enc_mismatch); - asm.spill_temps(); // for ccall + asm.spill_temps(); // Ignore the register for the other local branch let ret_opnd = asm.ccall(rb_str_buf_append as *const u8, vec![recv, concat_arg]); let stack_ret = asm.stack_push(Type::TString); asm.mov(stack_ret, ret_opnd); @@ -4886,9 +4894,10 @@ fn jit_rb_ary_push( // rb_ary_push allocates memory for buffer extension jit_prepare_routine_call(jit, asm); - let item_opnd = asm.stack_pop(1); - let ary_opnd = asm.stack_pop(1); + let item_opnd = asm.stack_opnd(0); + let ary_opnd = asm.stack_opnd(1); let ret = asm.ccall(rb_ary_push as *const u8, vec![ary_opnd, item_opnd]); + asm.stack_pop(2); // Keep them on stack during ccall for GC let ret_opnd = asm.stack_push(Type::TArray); asm.mov(ret_opnd, ret); @@ -6079,8 +6088,8 @@ fn gen_send_iseq( let stack_opnd = asm.stack_opnd(builtin_argc - i); args.push(stack_opnd); } - asm.stack_pop((builtin_argc + 1).try_into().unwrap()); let val = asm.ccall(unsafe { (*builtin_info).func_ptr as *const u8 }, args); + asm.stack_pop((builtin_argc + 1).try_into().unwrap()); // Keep them on stack during ccall for GC // Push the return value let stack_ret = asm.stack_push(Type::Unknown); @@ -6848,12 +6857,12 @@ fn gen_send_dynamic<F: Fn(&mut 
Assembler) -> Opnd>( // Save PC and SP to prepare for dynamic dispatch jit_prepare_routine_call(jit, asm); - // Pop arguments and a receiver - asm.stack_pop(sp_pops); - // Dispatch a method let ret = vm_sendish(asm); + // Pop arguments and a receiver + asm.stack_pop(sp_pops); + // Push the return value let stack_ret = asm.stack_push(Type::Unknown); asm.mov(stack_ret, ret); @@ -7784,7 +7793,7 @@ fn gen_setglobal( // Kernel#set_trace_var jit_prepare_routine_call(jit, asm); - let val = asm.stack_pop(1); + let val = asm.stack_opnd(0); asm.ccall( rb_gvar_set as *const u8, vec![ @@ -7792,6 +7801,7 @@ fn gen_setglobal( val, ], ); + asm.stack_pop(1); // Keep it during ccall for GC Some(KeepCompiling) } @@ -7804,10 +7814,11 @@ fn gen_anytostring( // Save the PC and SP since we might call #to_s jit_prepare_routine_call(jit, asm); - let str = asm.stack_pop(1); - let val = asm.stack_pop(1); + let str = asm.stack_opnd(0); + let val = asm.stack_opnd(1); let val = asm.ccall(rb_obj_as_string_result as *const u8, vec![str, val]); + asm.stack_pop(2); // Keep them during ccall for GC // Push the return value let stack_ret = asm.stack_push(Type::TString); @@ -7858,8 +7869,9 @@ fn gen_intern( // Save the PC and SP because we might allocate jit_prepare_routine_call(jit, asm); - let str = asm.stack_pop(1); + let str = asm.stack_opnd(0); let sym = asm.ccall(rb_str_intern as *const u8, vec![str]); + asm.stack_pop(1); // Keep it during ccall for GC // Push the return value let stack_ret = asm.stack_push(Type::Unknown); @@ -7881,7 +7893,6 @@ fn gen_toregexp( jit_prepare_routine_call(jit, asm); let values_ptr = asm.lea(asm.ctx.sp_opnd(-((SIZEOF_VALUE as isize) * (cnt as isize)))); - asm.stack_pop(cnt); let ary = asm.ccall( rb_ary_tmp_new_from_values as *const u8, @@ -7891,6 +7902,7 @@ fn gen_toregexp( values_ptr, ] ); + asm.stack_pop(cnt); // Let ccall spill them // Save the array so we can clear it later asm.cpush(ary); @@ -8027,7 +8039,7 @@ fn gen_setclassvariable( // rb_vm_setclassvariable 
can raise exceptions. jit_prepare_routine_call(jit, asm); - let val = asm.stack_pop(1); + let val = asm.stack_opnd(0); asm.ccall( rb_vm_setclassvariable as *const u8, vec![ @@ -8038,6 +8050,7 @@ fn gen_setclassvariable( Opnd::UImm(jit.get_arg(1).as_u64()), ], ); + asm.stack_pop(1); // Keep it during ccall for GC Some(KeepCompiling) } @@ -8053,8 +8066,8 @@ fn gen_getconstant( // vm_get_ev_const can raise exceptions. jit_prepare_routine_call(jit, asm); - let allow_nil_opnd = asm.stack_pop(1); - let klass_opnd = asm.stack_pop(1); + let allow_nil_opnd = asm.stack_opnd(0); + let klass_opnd = asm.stack_opnd(1); extern "C" { fn rb_vm_get_ev_const(ec: EcPtr, klass: VALUE, id: ID, allow_nil: VALUE) -> VALUE; @@ -8069,6 +8082,7 @@ fn gen_getconstant( allow_nil_opnd ], ); + asm.stack_pop(2); // Keep them during ccall for GC let top = asm.stack_push(Type::Unknown); asm.mov(top, val_opnd); @@ -8290,6 +8304,7 @@ fn gen_getblockparam( // Save the PC and SP because we might allocate jit_prepare_routine_call(jit, asm); + asm.spill_temps(); // For ccall. Unconditionally spill them for RegTemps consistency. // A mirror of the interpreter code. Checking for the case // where it's pushing rb_block_param_proxy. |