forked from illumos/gcc
/
except.c
3808 lines (3124 loc) · 101 KB
/
except.c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
/* Implements exception handling.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA. */
/* An exception is an event that can be signaled from within a
function. This event can then be "caught" or "trapped" by the
callers of this function. This potentially allows program flow to
be transferred to any arbitrary code associated with a function call
several levels up the stack.
The intended use for this mechanism is for signaling "exceptional
events" in an out-of-band fashion, hence its name. The C++ language
(and many other OO-styled or functional languages) practically
requires such a mechanism, as otherwise it becomes very difficult
or even impossible to signal failure conditions in complex
situations. The traditional C++ example is when an error occurs in
the process of constructing an object; without such a mechanism, it
is impossible to signal that the error occurs without adding global
state variables and error checks around every object construction.
The act of causing this event to occur is referred to as "throwing
an exception". (Alternate terms include "raising an exception" or
"signaling an exception".) The term "throw" is used because control
is returned to the callers of the function that is signaling the
exception, and thus there is the concept of "throwing" the
exception up the call stack.
[ Add updated documentation on how to use this. ] */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "libfuncs.h"
#include "insn-config.h"
#include "except.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "dwarf2.h"
#include "toplev.h"
#include "hashtab.h"
#include "intl.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "cgraph.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif

/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  Set by the front end.  */
tree (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  Set by the front end.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  Set by the front end.  */
tree (*lang_eh_runtime_type) (tree);

/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  /* A label that begins an exception handler.  */
  rtx label;
  /* The region entered at LABEL.  */
  struct eh_region *region;
};

/* NOTE(review): presumably the running base for call-site numbering used
   when emitting the call site table — confirm against add_call_site.  */
static GTY(()) int call_site_base;

/* Types registered for the runtime, created by init_eh and filled in
   by add_type_for_runtime.  */
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
/* Byte offsets of the interesting fields of SjLj_Function_Context,
   cached by init_eh for easy access from rtl.  */
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  /* The next sibling in OUTER's list of contained regions.  */
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } GTY ((tag ("ERT_TRY"))) try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
      bool resolved;
    } GTY ((tag ("ERT_FIXUP"))) fixup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};
/* One record of the call site table: the landing pad to transfer
   control to and the associated action value.  */
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region ** GTY ((length ("%h.last_region_number"))) region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* Pseudo holding the exception dispatch filter; lazily created by
     get_exception_filter (word_mode).  */
  rtx filter;
  /* Pseudo holding a pointer to the exception object; lazily created
     by get_exception_pointer (ptr_mode).  */
  rtx exc_ptr;

  /* NOTE(review): presumably nonzero once landing pads have been
     built for this function — confirm against the landing pad code.  */
  int built_landing_pads;

  /* Highest region number assigned so far; numbering starts at 1.  */
  int last_region_number;

  /* Data accumulated while building the exception tables.  */
  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  /* Hash of ehl_map_entry: handler label -> region.  */
  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  /* Growable array of call site records, with its used/allocated
     element counts.  */
  struct call_site_record * GTY ((length ("%h.call_site_data_used")))
    call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  /* NOTE(review): set by code outside this excerpt; presumably the
     rtl used to expand __builtin_eh_return — confirm.  */
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  /* NOTE(review): the SjLj function context and the insn after which
     the exit sequence is placed — confirm against the sjlj routines.  */
  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);
static struct eh_region *expand_eh_region_end (void);
static rtx get_exception_filter (struct function *);
static void collect_eh_region_array (void);
static void resolve_fixup_regions (void);
static void remove_fixup_regions (void);
static void remove_unreachable_regions (rtx);
static void convert_from_eh_region_ranges_1 (rtx *, int *, int);
static struct eh_region *duplicate_eh_region_1 (struct eh_region *,
struct inline_remap *);
static void duplicate_eh_region_2 (struct eh_region *, struct eh_region **);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);
struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);
static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);
/* Forward declaration, used by the reachability prototypes below.  */
struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};
static int check_handled (tree, tree);
static void add_reachable_handler (struct reachable_info *,
struct eh_region *, struct eh_region *);
static enum reachable_code reachable_next_level (struct eh_region *, tree,
struct reachable_info *);
static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);
static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  /* Remember whether the diagnostic has already been issued, so the
     user sees it at most once per compilation.  */
  static int warned = 0;

  if (flag_exceptions)
    return 1;

  if (do_warn && ! warned)
    {
      error ("exception handling disabled, use -fexceptions to enable");
      warned = 1;
    }
  return 0;
}
/* One-time initialization of the exception handling machinery.  Does
   nothing unless -fexceptions is in effect.  */
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);

      /* __prev: link to the enclosing function context.  */
      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* __call_site: the call site index active at a throw.  */
      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* __data: an array of 4 words.  */
      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* __personality and __lsda: pointers for the unwinder.  */
      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_2 (5 * BITS_PER_WORD / POINTER_SIZE - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields together and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;
      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
/* Allocate and zero the per-function exception handling state.  */
void
init_eh_for_function (void)
{
  struct eh_status *eh = ggc_alloc_cleared (sizeof (struct eh_status));
  cfun->eh = eh;
}
/* Start an exception handling region. All instructions emitted
after this point are considered to be part of the region until
expand_eh_region_end is invoked. */
void
expand_eh_region_start (void)
{
struct eh_region *new_region;
struct eh_region *cur_region;
rtx note;
if (! doing_eh (0))
return;
/* Insert a new blank region as a leaf in the tree. */
new_region = ggc_alloc_cleared (sizeof (*new_region));
cur_region = cfun->eh->cur_region;
new_region->outer = cur_region;
if (cur_region)
{
new_region->next_peer = cur_region->inner;
cur_region->inner = new_region;
}
else
{
new_region->next_peer = cfun->eh->region_tree;
cfun->eh->region_tree = new_region;
}
cfun->eh->cur_region = new_region;
/* Create a note marking the start of this region. */
new_region->region_number = ++cfun->eh->last_region_number;
note = emit_note (NOTE_INSN_EH_REGION_BEG);
NOTE_EH_HANDLER (note) = new_region->region_number;
}
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end (void)
{
  struct eh_region *region = cfun->eh->cur_region;
  rtx note;

  /* Mark the end of the region in the insn stream.  */
  note = emit_note (NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = region->region_number;

  /* The enclosing region becomes current again.  */
  cfun->eh->cur_region = region->outer;

  return region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (tree handler)
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  /* Close the region and mark it as a cleanup; keep the expression so
     fixup regions can be matched up later.  */
  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;
  region->u.cleanup.prev_try = cfun->eh->try_region;

  /* Normal control flow skips over the cleanup body; only exception
     propagation enters at REGION->LABEL.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  if (flag_non_call_exceptions || region->may_contain_throw)
    {
      /* Give the language a chance to specify an action to be taken if an
	 exception is thrown that would propagate out of the HANDLER.  */
      protect_cleanup_actions
	= (lang_protect_cleanup_actions
	   ? (*lang_protect_cleanup_actions) ()
	   : NULL_TREE);

      if (protect_cleanup_actions)
	expand_eh_region_start ();

      /* In case this cleanup involves an inline destructor with a try block in
	 it, we need to save the EH return data registers around it.  */
      data_save[0] = gen_reg_rtx (ptr_mode);
      emit_move_insn (data_save[0], get_exception_pointer (cfun));
      data_save[1] = gen_reg_rtx (word_mode);
      emit_move_insn (data_save[1], get_exception_filter (cfun));

      expand_expr (handler, const0_rtx, VOIDmode, 0);

      /* Restore the saved exception pointer and filter after the body.  */
      emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
      emit_move_insn (cfun->eh->filter, data_save[1]);

      if (protect_cleanup_actions)
	expand_eh_region_end_must_not_throw (protect_cleanup_actions);

      /* We need any stack adjustment complete before the around_label.  */
      do_pending_stack_adjust ();
    }

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
/* End an exception handling region for a try block, and prepares
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch (void)
{
  struct eh_region *try_region;

  if (! doing_eh (1))
    return;

  /* Close the region opened for the try body and make it the region
     against which the following catch clauses are collected.  */
  try_region = expand_eh_region_end ();
  try_region->type = ERT_TRY;
  try_region->u.try.prev_try = cfun->eh->try_region;
  try_region->u.try.continue_label = gen_label_rtx ();
  cfun->eh->try_region = try_region;

  /* Normal control flow continues past the handlers.  */
  emit_jump (try_region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE_OR_LIST is the type caught, a list of
   such types, or null if this is a catch-all clause.  Providing a
   type list makes it possible to associate the catch region with
   several exception types at once, which is useful e.g. for Ada.  */

void
expand_start_catch (tree type_or_list)
{
  struct eh_region *try_region, *catch_region, *last;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;
  if (type_or_list)
    {
      tree iter;

      /* Normalize a single type into a one-element list, then make
	 each caught type known to the runtime types map.  */
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
      for (iter = type_list; iter; iter = TREE_CHAIN (iter))
	add_type_for_runtime (TREE_VALUE (iter));
    }

  /* Open a fresh region for the handler body.  */
  expand_eh_region_start ();

  try_region = cfun->eh->try_region;
  catch_region = cfun->eh->cur_region;
  catch_region->type = ERT_CATCH;
  catch_region->u.catch.type_list = type_list;
  catch_region->label = gen_label_rtx ();

  /* Append this handler at the tail of the try region's catch list.  */
  last = try_region->u.try.last_catch;
  catch_region->u.catch.prev_catch = last;
  if (last == NULL)
    try_region->u.try.catch = catch_region;
  else
    last->u.catch.next_catch = catch_region;
  try_region->u.try.last_catch = catch_region;

  emit_label (catch_region->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch (void)
{
  if (! doing_eh (0))
    return;

  /* Close the handler's region, then rejoin normal control flow at
     the label following the whole try/catch construct.  */
  expand_eh_region_end ();
  emit_jump (cfun->eh->try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch (void)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  /* Pop the try region and drop the label at which normal control
     flow resumes after the try/catch construct.  */
  region = cfun->eh->try_region;
  cfun->eh->try_region = region->u.try.prev_try;

  emit_label (region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (tree allowed, tree failure)
{
  struct eh_region *region;
  rtx around_label;
  tree iter;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  /* Register each permitted type with the runtime types map.  */
  for (iter = allowed; iter; iter = TREE_CHAIN (iter))
    add_type_for_runtime (TREE_VALUE (iter));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, that it will be processed by the
     correct region.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);
  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (tree failure)
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, that it will be processed by the
     correct region.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (tree type)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  /* Close the region and record the thrown type on it.  */
  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (tree handler)
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  /* Close the region and stash the cleanup expression; the real
     enclosing region is determined later when fixups are resolved.  */
  region = expand_eh_region_end ();
  region->type = ERT_FIXUP;
  region->u.fixup.cleanup_exp = handler;
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (void)
{
  struct eh_region *r;

  /* Propagate the flag outward, stopping at the first region that is
     already marked -- everything above it was marked earlier.  */
  for (r = cfun->eh->cur_region; r && ! r->may_contain_throw; r = r->outer)
    r->may_contain_throw = 1;
}
/* Return an rtl expression for a pointer to the exception object
within a handler. */
rtx
get_exception_pointer (struct function *fun)
{
rtx exc_ptr = fun->eh->exc_ptr;
if (fun == cfun && ! exc_ptr)
{
exc_ptr = gen_reg_rtx (ptr_mode);
fun->eh->exc_ptr = exc_ptr;
}
return exc_ptr;
}
/* Return an rtl expression for the exception dispatch filter
within a handler. */
static rtx
get_exception_filter (struct function *fun)
{
rtx filter = fun->eh->filter;
if (fun == cfun && ! filter)
{
filter = gen_reg_rtx (word_mode);
fun->eh->filter = filter;
}
return filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array (void)
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  /* Region numbers start at 1, so allocate one extra slot and index
     the array directly by region number.  */
  array = ggc_alloc_cleared ((cfun->eh->last_region_number + 1)
			     * sizeof (*array));
  cfun->eh->region_array = array;

  /* Iterative pre-order walk over the region tree.  */
  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_one_fixup_region (struct eh_region *fixup)
{
struct eh_region *cleanup, *real;
int j, n;
n = cfun->eh->last_region_number;
cleanup = 0;
for (j = 1; j <= n; ++j)
{
cleanup = cfun->eh->region_array[j];
if (cleanup && cleanup->type == ERT_CLEANUP
&& cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
break;
}
if (j > n)
abort ();
real = cleanup->outer;
if (real && real->type == ERT_FIXUP)
{
if (!real->u.fixup.resolved)
resolve_one_fixup_region (real);
real = real->u.fixup.real_region;
}
fixup->u.fixup.real_region = real;
fixup->u.fixup.resolved = true;
}
/* Resolve every not-yet-resolved fixup region in the function.  */
static void
resolve_fixup_regions (void)
{
  int i, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      if (r && r->type == ERT_FIXUP && ! r->u.fixup.resolved)
	resolve_one_fixup_region (r);
    }
}
/* Now that we've discovered what region actually encloses a fixup,
we can shuffle pointers and remove them from the tree. */
static void
remove_fixup_regions (void)
{
int i;
rtx insn, note;
struct eh_region *fixup;
/* Walk the insn chain and adjust the REG_EH_REGION numbers
for instructions referencing fixup regions. This is only
strictly necessary for fixup regions with no parent, but
doesn't hurt to do it for all regions. */
for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
if (INSN_P (insn)
&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
&& INTVAL (XEXP (note, 0)) > 0
&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
&& fixup->type == ERT_FIXUP)
{
if (fixup->u.fixup.real_region)
XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
else
remove_note (insn, note);
}
/* Remove the fixup regions from the tree. */
for (i = cfun->eh->last_region_number; i > 0; --i)
{
fixup = cfun->eh->region_array[i];
if (! fixup)
continue;
/* Allow GC to maybe free some memory. */
if (fixup->type == ERT_CLEANUP)
fixup->u.cleanup.exp = NULL_TREE;
if (fixup->type != ERT_FIXUP)
continue;
if (fixup->inner)
{
struct eh_region *parent, *p, **pp;
parent = fixup->u.fixup.real_region;
/* Fix up the children's parent pointers; find the end of
the list. */
for (p = fixup->inner; ; p = p->next_peer)
{
p->outer = parent;
if (! p->next_peer)
break;
}
/* In the tree of cleanups, only outer-inner ordering matters.
So link the children back in anywhere at the correct level. */
if (parent)
pp = &parent->inner;
else
pp = &cfun->eh->region_tree;
p->next_peer = *pp;