@@ -1091,6 +1091,15 @@ void CallJavaNode::dump_compact_spec(outputStream* st) const {
 }
 #endif

+void CallJavaNode::register_for_late_inline() {
+  if (generator() != nullptr) {
+    Compile::current()->prepend_late_inline(generator());
+    set_generator(nullptr);
+  } else {
+    assert(false, "repeated inline attempt");
+  }
+}
+
 //=============================================================================
 uint CallStaticJavaNode::size_of() const { return sizeof(*this); }
 bool CallStaticJavaNode::cmp( const Node &n ) const {
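
The hunk above introduces CallJavaNode::register_for_late_inline(), centralizing the generator handoff that CallStaticJavaNode::Ideal() and CallDynamicJavaNode::Ideal() previously open-coded. The standalone sketch below is not part of the patch: Generator, Call, and late_inline_queue are made-up stand-ins for CallGenerator, CallJavaNode, and Compile's late-inline list. It models the one-shot ownership transfer the helper enforces: the generator is surrendered to the queue exactly once, and a second attempt trips the assert.

// Standalone sketch (not HotSpot code) of the one-shot handoff pattern.
#include <cassert>
#include <cstdio>
#include <deque>

struct Generator { const char* name; };

static std::deque<Generator*> late_inline_queue;  // stands in for Compile's list

struct Call {
  Generator* gen = nullptr;

  void register_for_late_inline() {
    if (gen != nullptr) {
      late_inline_queue.push_front(gen);  // prepend, as prepend_late_inline() does
      gen = nullptr;                      // detach so the call cannot be re-queued
    } else {
      assert(false && "repeated inline attempt");
    }
  }
};

int main() {
  Generator g{"cg"};
  Call call{&g};
  call.register_for_late_inline();        // first attempt: queued
  std::printf("queued: %s\n", late_inline_queue.front()->name);
  // call.register_for_late_inline();     // a second attempt would assert
}

Detaching the generator (set_generator(nullptr) in the real helper) is what prevents the same call node from being queued twice across IGVN iterations.
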
@@ -1101,26 +1110,35 @@ bool CallStaticJavaNode::cmp( const Node &n ) const {
 Node* CallStaticJavaNode::Ideal(PhaseGVN* phase, bool can_reshape) {
   CallGenerator* cg = generator();
   if (can_reshape && cg != nullptr) {
-    assert(IncrementalInlineMH, "required");
-    assert(cg->call_node() == this, "mismatch");
-    assert(cg->is_mh_late_inline(), "not virtual");
-
-    // Check whether this MH handle call becomes a candidate for inlining.
-    ciMethod* callee = cg->method();
-    vmIntrinsics::ID iid = callee->intrinsic_id();
-    if (iid == vmIntrinsics::_invokeBasic) {
-      if (in(TypeFunc::Parms)->Opcode() == Op_ConP) {
-        phase->C->prepend_late_inline(cg);
-        set_generator(nullptr);
+    if (cg->is_mh_late_inline()) {
+      assert(IncrementalInlineMH, "required");
+      assert(cg->call_node() == this, "mismatch");
+      assert(cg->method()->is_method_handle_intrinsic(), "required");
+
+      // Check whether this MH handle call becomes a candidate for inlining.
+      ciMethod* callee = cg->method();
+      vmIntrinsics::ID iid = callee->intrinsic_id();
+      if (iid == vmIntrinsics::_invokeBasic) {
+        if (in(TypeFunc::Parms)->Opcode() == Op_ConP) {
+          register_for_late_inline();
+        }
+      } else if (iid == vmIntrinsics::_linkToNative) {
+        // never retry
+      } else {
+        assert(callee->has_member_arg(), "wrong type of call?");
+        if (in(TypeFunc::Parms + callee->arg_size() - 1)->Opcode() == Op_ConP) {
+          register_for_late_inline();
+          phase->C->inc_number_of_mh_late_inlines();
+        }
       }
-    } else if (iid == vmIntrinsics::_linkToNative) {
-      // never retry
     } else {
-      assert(callee->has_member_arg(), "wrong type of call?");
-      if (in(TypeFunc::Parms + callee->arg_size() - 1)->Opcode() == Op_ConP) {
-        phase->C->prepend_late_inline(cg);
-        set_generator(nullptr);
+      assert(IncrementalInline, "required");
+      assert(!cg->method()->is_method_handle_intrinsic(), "required");
+      if (phase->C->print_inlining()) {
+        phase->C->inline_printer()->record(cg->method(), cg->call_node()->jvms(), InliningResult::FAILURE,
+                                           "static call node changed: trying again");
       }
+      register_for_late_inline();
     }
   }
   return CallNode::Ideal(phase, can_reshape);
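
In the reshaped CallStaticJavaNode::Ideal() above, an MH late-inline call is re-queued only once the argument that identifies the target becomes a compile-time constant (Op_ConP): the receiver for _invokeBasic, or the trailing MemberName argument for the member-arg linkTo* intrinsics, while _linkToNative is never retried. The new else branch handles the other case: a non-MH late-inline call whose node changed is simply re-registered. The decision table below is a standalone simplification, not HotSpot API; the Intrinsic enum and should_retry_inline() are illustrative names.

// Simplified model (not HotSpot code) of the MH inline-candidate check.
#include <cstdio>

enum class Intrinsic { InvokeBasic, LinkToVirtual, LinkToStatic, LinkToNative };

bool should_retry_inline(Intrinsic iid, bool key_arg_is_constant) {
  switch (iid) {
    case Intrinsic::InvokeBasic:  return key_arg_is_constant;  // constant receiver
    case Intrinsic::LinkToNative: return false;                // never retry
    default:                      return key_arg_is_constant;  // constant MemberName
  }
}

int main() {
  std::printf("%d\n", should_retry_inline(Intrinsic::InvokeBasic, true));   // 1: retry
  std::printf("%d\n", should_retry_inline(Intrinsic::LinkToNative, true));  // 0: never
}
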
@@ -1189,39 +1207,46 @@ bool CallDynamicJavaNode::cmp( const Node &n ) const {
 Node* CallDynamicJavaNode::Ideal(PhaseGVN* phase, bool can_reshape) {
   CallGenerator* cg = generator();
   if (can_reshape && cg != nullptr) {
-    assert(IncrementalInlineVirtual, "required");
-    assert(cg->call_node() == this, "mismatch");
-    assert(cg->is_virtual_late_inline(), "not virtual");
-
-    // Recover symbolic info for method resolution.
-    ciMethod* caller = jvms()->method();
-    ciBytecodeStream iter(caller);
-    iter.force_bci(jvms()->bci());
-
-    bool not_used1;
-    ciSignature* not_used2;
-    ciMethod* orig_callee = iter.get_method(not_used1, &not_used2);  // callee in the bytecode
-    ciKlass* holder = iter.get_declared_method_holder();
-    if (orig_callee->is_method_handle_intrinsic()) {
-      assert(_override_symbolic_info, "required");
-      orig_callee = method();
-      holder = method()->holder();
-    }
+    if (cg->is_virtual_late_inline()) {
+      assert(IncrementalInlineVirtual, "required");
+      assert(cg->call_node() == this, "mismatch");
+
+      // Recover symbolic info for method resolution.
+      ciMethod* caller = jvms()->method();
+      ciBytecodeStream iter(caller);
+      iter.force_bci(jvms()->bci());
+
+      bool not_used1;
+      ciSignature* not_used2;
+      ciMethod* orig_callee = iter.get_method(not_used1, &not_used2);  // callee in the bytecode
+      ciKlass* holder = iter.get_declared_method_holder();
+      if (orig_callee->is_method_handle_intrinsic()) {
+        assert(_override_symbolic_info, "required");
+        orig_callee = method();
+        holder = method()->holder();
+      }

-    ciInstanceKlass* klass = ciEnv::get_instance_klass_for_declared_method_holder(holder);
+      ciInstanceKlass* klass = ciEnv::get_instance_klass_for_declared_method_holder(holder);

-    Node* receiver_node = in(TypeFunc::Parms);
-    const TypeOopPtr* receiver_type = phase->type(receiver_node)->isa_oopptr();
+      Node* receiver_node = in(TypeFunc::Parms);
+      const TypeOopPtr* receiver_type = phase->type(receiver_node)->isa_oopptr();

-    int not_used3;
-    bool call_does_dispatch;
-    ciMethod* callee = phase->C->optimize_virtual_call(caller, klass, holder, orig_callee, receiver_type, true /*is_virtual*/,
-                                                       call_does_dispatch, not_used3);  // out-parameters
-    if (!call_does_dispatch) {
-      // Register for late inlining.
-      cg->set_callee_method(callee);
-      phase->C->prepend_late_inline(cg);  // MH late inlining prepends to the list, so do the same
-      set_generator(nullptr);
+      int not_used3;
+      bool call_does_dispatch;
+      ciMethod* callee = phase->C->optimize_virtual_call(caller, klass, holder, orig_callee, receiver_type, true /*is_virtual*/,
+                                                         call_does_dispatch, not_used3);  // out-parameters
+      if (!call_does_dispatch) {
+        // Register for late inlining.
+        cg->set_callee_method(callee);
+        register_for_late_inline();  // MH late inlining prepends to the list, so do the same
+      }
+    } else {
+      assert(IncrementalInline, "required");
+      if (phase->C->print_inlining()) {
+        phase->C->inline_printer()->record(cg->method(), cg->call_node()->jvms(), InliningResult::FAILURE,
+                                           "dynamic call node changed: trying again");
+      }
+      register_for_late_inline();
     }
   }
   return CallNode::Ideal(phase, can_reshape);
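
The dynamic-call hunk follows the same shape: a virtual late-inline call re-resolves its target through optimize_virtual_call() each time IGVN sharpens the receiver type, and hands itself to the late-inline queue once the call no longer dispatches, while a non-virtual late-inline generator whose node changed is re-registered unconditionally. Below is a minimal standalone sketch of the devirtualization condition; ReceiverType and devirtualizes() are illustrative, under the simplifying assumption that a call devirtualizes when the receiver klass is known exactly or the resolved target cannot be overridden.

// Minimal sketch (not HotSpot code) of the devirtualization retry condition.
#include <cstdio>

struct ReceiverType {
  bool exact;            // receiver klass known exactly?
  bool target_is_final;  // resolved target cannot be overridden?
};

// Returns true when the virtual call collapses to a direct call.
bool devirtualizes(const ReceiverType& t) {
  return t.exact || t.target_is_final;
}

int main() {
  std::printf("%d\n", devirtualizes({false, false}));  // 0: still dispatches
  std::printf("%d\n", devirtualizes({true,  false}));  // 1: direct call, inline later
}
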