@@ -108,6 +108,8 @@ void InterpreterMacroAssembler::dispatch_prolog(TosState state, int bcp_incr) {
108108// own dispatch. The dispatch address in R24_dispatch_addr is used for the
109109// dispatch.
110110void InterpreterMacroAssembler::dispatch_epilog (TosState state, int bcp_incr) {
111+ assert (nonvolatile_accross_vthread_preemtion (R24_dispatch_addr),
112+ " Requirement of field accesses (e.g. putstatic)" );
111113 if (bcp_incr) { addi (R14_bcp, R14_bcp, bcp_incr); }
112114 mtctr (R24_dispatch_addr);
113115 bcctr (bcondAlways, 0 , bhintbhBCCTRisNotPredictable);
@@ -862,6 +864,9 @@ void InterpreterMacroAssembler::remove_activation(TosState state,
862864 bool install_monitor_exception) {
863865 BLOCK_COMMENT (" remove_activation {" );
864866
867+ asm_assert_mem8_is_zero (in_bytes (JavaThread::preempt_alternate_return_offset ()), R16_thread,
868+ " remove_activation: should not have alternate return address set" );
869+
865870 unlock_if_synchronized_method (state, throw_monitor_exception, install_monitor_exception);
866871
867872 // The below poll is for the stack watermark barrier. It allows fixing up frames lazily,
@@ -2014,57 +2019,89 @@ void InterpreterMacroAssembler::call_VM(Register oop_result, address entry_point
20142019}
20152020
20162021void InterpreterMacroAssembler::call_VM_preemptable (Register oop_result, address entry_point,
2017- Register arg_1, bool check_exceptions) {
2022+ Register arg_1,
2023+ bool check_exceptions) {
20182024 if (!Continuations::enabled ()) {
20192025 call_VM (oop_result, entry_point, arg_1, check_exceptions);
20202026 return ;
20212027 }
2028+ call_VM_preemptable (oop_result, entry_point, arg_1, noreg /* arg_2 */ , check_exceptions);
2029+ }
2030+
2031+ void InterpreterMacroAssembler::call_VM_preemptable (Register oop_result, address entry_point,
2032+ Register arg_1, Register arg_2,
2033+ bool check_exceptions) {
2034+ if (!Continuations::enabled ()) {
2035+ call_VM (oop_result, entry_point, arg_1, arg_2, check_exceptions);
2036+ return ;
2037+ }
20222038
20232039 Label resume_pc, not_preempted;
2040+ Register tmp = R11_scratch1;
2041+ assert_different_registers (arg_1, tmp);
2042+ assert_different_registers (arg_2, tmp);
20242043
2025- DEBUG_ONLY (ld (R0, in_bytes (JavaThread::preempt_alternate_return_offset ()), R16_thread));
2026- DEBUG_ONLY (cmpdi (CR0, R0, 0 ));
2027- asm_assert_eq (" Should not have alternate return address set" );
2044+ #ifdef ASSERT
2045+ asm_assert_mem8_is_zero (in_bytes (JavaThread::preempt_alternate_return_offset ()), R16_thread,
2046+ " Should not have alternate return address set" );
2047+ // We check this counter in patch_return_pc_with_preempt_stub() during freeze.
2048+ lwa (tmp, in_bytes (JavaThread::interp_at_preemptable_vmcall_cnt_offset ()), R16_thread);
2049+ addi (tmp, tmp, 1 );
2050+ cmpwi (CR0, tmp, 0 );
2051+ stw (tmp, in_bytes (JavaThread::interp_at_preemptable_vmcall_cnt_offset ()), R16_thread);
2052+ asm_assert (gt, " call_VM_preemptable: should be > 0" );
2053+ #endif // ASSERT
20282054
20292055 // Preserve 2 registers
2030- assert (nonvolatile_accross_vthread_preemtion (R31) && nonvolatile_accross_vthread_preemtion (R22 ), " " );
2056+ assert (nonvolatile_accross_vthread_preemtion (R31) && nonvolatile_accross_vthread_preemtion (R24 ), " " );
20312057 ld (R3_ARG1, _abi0 (callers_sp), R1_SP); // load FP
20322058 std (R31, _ijava_state_neg (lresult), R3_ARG1);
2033- std (R22 , _ijava_state_neg (fresult), R3_ARG1);
2059+ std (R24 , _ijava_state_neg (fresult), R3_ARG1);
20342060
20352061 // We set resume_pc as last java pc. It will be saved if the vthread gets preempted.
20362062 // Later execution will continue right there.
20372063 mr_if_needed (R4_ARG2, arg_1);
2064+ assert (arg_2 != R4_ARG2, " smashed argument" );
2065+ mr_if_needed (R5_ARG3, arg_2, true /* allow_noreg */ );
20382066 push_cont_fastpath ();
2039- call_VM (oop_result, entry_point, false /* check_exceptions*/ , &resume_pc /* last_java_pc */ );
2067+ call_VM (noreg /* oop_result */ , entry_point, false /* check_exceptions*/ , &resume_pc /* last_java_pc */ );
20402068 pop_cont_fastpath ();
20412069
2070+ #ifdef ASSERT
2071+ lwa (tmp, in_bytes (JavaThread::interp_at_preemptable_vmcall_cnt_offset ()), R16_thread);
2072+ addi (tmp, tmp, -1 );
2073+ cmpwi (CR0, tmp, 0 );
2074+ stw (tmp, in_bytes (JavaThread::interp_at_preemptable_vmcall_cnt_offset ()), R16_thread);
2075+ asm_assert (ge, " call_VM_preemptable: should be >= 0" );
2076+ #endif // ASSERT
2077+
20422078 // Jump to handler if the call was preempted
20432079 ld (R0, in_bytes (JavaThread::preempt_alternate_return_offset ()), R16_thread);
20442080 cmpdi (CR0, R0, 0 );
20452081 beq (CR0, not_preempted);
2082+ // Preempted. Frames are already frozen on heap.
20462083 mtlr (R0);
20472084 li (R0, 0 );
20482085 std (R0, in_bytes (JavaThread::preempt_alternate_return_offset ()), R16_thread);
20492086 blr ();
20502087
20512088 bind (resume_pc); // Location to resume execution
20522089 restore_after_resume (noreg /* fp */ );
2090+
20532091 bind (not_preempted);
2092+ if (check_exceptions) {
2093+ check_and_forward_exception (R11_scratch1, R12_scratch2);
2094+ }
2095+ if (oop_result->is_valid ()) {
2096+ get_vm_result_oop (oop_result);
2097+ }
20542098}
20552099
20562100void InterpreterMacroAssembler::restore_after_resume (Register fp) {
2057- if (!Continuations::enabled ()) return ;
2058-
20592101 const address resume_adapter = TemplateInterpreter::cont_resume_interpreter_adapter ();
20602102 add_const_optimized (R31, R29_TOC, MacroAssembler::offset_to_global_toc (resume_adapter));
20612103 mtctr (R31);
20622104 bctrl ();
2063- // Restore registers that are preserved across vthread preemption
2064- assert (nonvolatile_accross_vthread_preemtion (R31) && nonvolatile_accross_vthread_preemtion (R22), " " );
2065- ld (R3_ARG1, _abi0 (callers_sp), R1_SP); // load FP
2066- ld (R31, _ijava_state_neg (lresult), R3_ARG1);
2067- ld (R22, _ijava_state_neg (fresult), R3_ARG1);
20682105#ifdef ASSERT
20692106 // Assert FP is in R11_scratch1 (see generate_cont_resume_interpreter_adapter())
20702107 {