@@ -243,7 +243,6 @@ void PhaseCFG::schedule_pinned_nodes(VectorSet &visited) {
   }
 }
 
-#ifdef ASSERT
 // Assert that new input b2 is dominated by all previous inputs.
 // Check this by by seeing that it is dominated by b1, the deepest
 // input observed until b2.
@@ -255,6 +254,7 @@ static void assert_dom(Block* b1, Block* b2, Node* n, const PhaseCFG* cfg) {
     tmp = tmp->_idom;
   }
   if (tmp != b1) {
+#ifdef ASSERT
     // Detected an unschedulable graph. Print some nice stuff and die.
     tty->print_cr("!!! Unschedulable graph !!!");
     for (uint j=0; j<n->len(); j++) { // For all inputs
@@ -267,10 +267,11 @@ static void assert_dom(Block* b1, Block* b2, Node* n, const PhaseCFG* cfg) {
     }
     tty->print("Failing node: ");
     n->dump();
-    assert(false, "unscheduable graph");
+    assert(false, "unschedulable graph");
+#endif
+    cfg->C->record_failure("unschedulable graph");
   }
 }
-#endif
 
 static Block* find_deepest_input(Node* n, const PhaseCFG* cfg) {
   // Find the last input dominated by all other inputs.
@@ -285,7 +286,10 @@ static Block* find_deepest_input(Node* n, const PhaseCFG* cfg) {
       // The new inb must be dominated by the previous deepb.
      // The various inputs must be linearly ordered in the dom
      // tree, or else there will not be a unique deepest block.
-      DEBUG_ONLY(assert_dom(deepb, inb, n, cfg));
+      assert_dom(deepb, inb, n, cfg);
+      if (cfg->C->failing()) {
+        return nullptr;
+      }
       deepb = inb;                      // Save deepest block
       deepb_dom_depth = deepb->_dom_depth;
     }
@@ -372,6 +376,9 @@ bool PhaseCFG::schedule_early(VectorSet &visited, Node_Stack &roots) {
       if (!parent_node->pinned()) {
         // Set earliest legal block.
         Block* earliest_block = find_deepest_input(parent_node, this);
+        if (C->failing()) {
+          return false;
+        }
         map_node_to_block(parent_node, earliest_block);
       } else {
         assert(get_block_for_node(parent_node) == get_block_for_node(parent_node->in(0)), "Pinned Node should be at the same block as its control edge");
@@ -523,7 +530,10 @@ static Block* memory_early_block(Node* load, Block* early, const PhaseCFG* cfg)
         // The new inb must be dominated by the previous deepb.
         // The various inputs must be linearly ordered in the dom
         // tree, or else there will not be a unique deepest block.
-        DEBUG_ONLY(assert_dom(deepb, inb, load, cfg));
+        assert_dom(deepb, inb, load, cfg);
+        if (cfg->C->failing()) {
+          return nullptr;
+        }
         deepb = inb;                      // Save deepest block
         deepb_dom_depth = deepb->_dom_depth;
       }
@@ -715,6 +725,9 @@ Block* PhaseCFG::insert_anti_dependences(Block* LCA, Node* load, bool verify) {
   // dominator tree, and allow for a broader discovery of anti-dependences.
   if (C->subsume_loads()) {
     early = memory_early_block(load, early, this);
+    if (C->failing()) {
+      return nullptr;
+    }
   }
 
   ResourceArea* area = Thread::current()->resource_area();
@@ -1519,6 +1532,9 @@ void PhaseCFG::schedule_late(VectorSet &visited, Node_Stack &stack) {
       // Hoist LCA above possible-defs and insert anti-dependences to
       // defs in new LCA block.
       LCA = insert_anti_dependences(LCA, self);
+      if (C->failing()) {
+        return;
+      }
     }
 
     if (early->_dom_depth > LCA->_dom_depth) {
@@ -1611,8 +1627,8 @@ void PhaseCFG::global_code_motion() {
   Node_Stack stack((C->live_nodes() >> 2) + 16); // pre-grow
   if (!schedule_early(visited, stack)) {
     // Bailout without retry
-    assert(false, "early schedule failed");
-    C->record_method_not_compilable("early schedule failed");
+    assert(C->failure_is_artificial(), "early schedule failed");
+    C->record_method_not_compilable("early schedule failed" DEBUG_ONLY(COMMA true));
     return;
   }
 
@@ -1657,6 +1673,9 @@ void PhaseCFG::global_code_motion() {
       // uncommon trap.  Combined with the too_many_traps guards
       // above, this prevents SEGV storms reported in 6366351,
      // by recompiling offending methods without this optimization.
+      if (C->failing()) {
+        return;
+      }
     }
   }
 
@@ -1726,6 +1745,9 @@ void PhaseCFG::global_code_motion() {
   for (uint i = 0; i < number_of_blocks(); i++) {
     Block* block = get_block(i);
     call_catch_cleanup(block);
+    if (C->failing()) {
+      return;
+    }
   }
 
 #ifndef PRODUCT
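The hunks above all apply one pattern: a dominator check that used to be debug-only (`DEBUG_ONLY(assert_dom(...))`) now also runs in product builds, records `"unschedulable graph"` on the compilation object instead of crashing, and every caller on the path tests `C->failing()` and unwinds with a sentinel value. Below is a minimal standalone sketch of that pattern; `CompilationState`, `check_dom`, and `deepest_of` are simplified stand-ins invented for illustration, not HotSpot's actual `Compile`, `assert_dom`, or `find_deepest_input`.

```cpp
#include <cassert>
#include <cstdio>

// Simplified stand-in for the compilation context. HotSpot's Compile keeps far
// more state; only the failure flag behind record_failure()/failing() is modelled.
struct CompilationState {
  const char* _failure_reason = nullptr;
  void record_failure(const char* reason) {
    if (_failure_reason == nullptr) {   // keep the first recorded reason
      _failure_reason = reason;
    }
  }
  bool failing() const { return _failure_reason != nullptr; }
};

// Stand-in for a CFG block: just the dominator-tree fields the walk needs.
struct Block {
  Block* _idom = nullptr;   // immediate dominator
  int _dom_depth = 0;       // depth in the dominator tree
};

// The formerly debug-only check: walk up the dominator tree from b2 and verify
// b1 is reached. In product mode, record a bailout instead of crashing.
static void check_dom(Block* b1, Block* b2, CompilationState* C) {
  if (b1 == nullptr || b1 == b2) return;
  Block* tmp = b2;
  while (tmp != b1 && tmp != nullptr) {
    tmp = tmp->_idom;
  }
  if (tmp != b1) {
#ifdef ASSERT                                  // debug builds still stop hard
    assert(false && "unschedulable graph");
#endif
    C->record_failure("unschedulable graph");  // product builds bail out
  }
}

// Caller pattern from the diff: after any call that may record a failure,
// test failing() and return a sentinel so the bailout propagates upward.
static Block* deepest_of(Block* a, Block* b, CompilationState* C) {
  check_dom(a, b, C);
  if (C->failing()) {
    return nullptr;
  }
  return (a == nullptr || b->_dom_depth > a->_dom_depth) ? b : a;
}

int main() {
  CompilationState C;
  Block root;                          // depth 0
  Block child;
  child._idom = &root;
  child._dom_depth = 1;                // dominated by root
  Block stray;                         // no idom path to root: unschedulable

  Block* ok = deepest_of(&root, &child, &C);
  std::printf("ok:  depth=%d failing=%d\n", ok->_dom_depth, (int)C.failing());

  Block* bad = deepest_of(&root, &stray, &C);
  std::printf("bad: %s, failing=%d, reason=%s\n",
              bad == nullptr ? "nullptr" : "block", (int)C.failing(),
              C._failure_reason != nullptr ? C._failure_reason : "none");
  return 0;
}
```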