Bug Summary

File: build/gcc/vec.h
Warning: line 815, column 10
Called C++ object pointer is null
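
The flagged call is GCC's vec_safe_push helper from vec.h. For orientation, a simplified sketch of its shape (paraphrased from memory of vec.h, not copied from the checked tree; the exact signature and the line/column of the quick_push call may differ):

/* vec_safe_push reserves room in the GC-managed embedded vector V, then
   pushes OBJ.  Along the path the analyzer models, it believes V can
   still be null when quick_push is invoked, i.e. a method call through
   a null object pointer, which is what the warning at vec.h:815 says.  */
template<typename T, typename A>
inline T *
vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj)
{
  vec_safe_reserve (v, 1, false);   /* may allocate or grow *v */
  return v->quick_push (obj);       /* <- flagged dereference */
}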

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name except.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-WyAKBp.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c

1/* Implements exception handling.
2 Copyright (C) 1989-2021 Free Software Foundation, Inc.
3 Contributed by Mike Stump <mrs@cygnus.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22/* An exception is an event that can be "thrown" from within a
23 function. This event can then be "caught" by the callers of
24 the function.
25
26 The representation of exceptions changes several times during
27 the compilation process:
28
29 In the beginning, in the front end, we have the GENERIC trees
30 TRY_CATCH_EXPR, TRY_FINALLY_EXPR, EH_ELSE_EXPR, WITH_CLEANUP_EXPR,
31 CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
32
33 During initial gimplification (gimplify.c) these are lowered to the
34 GIMPLE_TRY, GIMPLE_CATCH, GIMPLE_EH_ELSE, and GIMPLE_EH_FILTER
35 nodes. The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are
36 converted into GIMPLE_TRY_FINALLY nodes; the others are a more
37 direct 1-1 conversion.
38
39 During pass_lower_eh (tree-eh.c) we record the nested structure
40 of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
41 We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
42 regions at this time. We can then flatten the statements within
43 the TRY nodes to straight-line code. Statements that had been within
44 TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
45 so that we may remember what action is supposed to be taken if
46 a given statement does throw. During this lowering process,
47 we create an EH_LANDING_PAD node for each EH_REGION that has
48 some code within the function that needs to be executed if a
49 throw does happen. We also create RESX statements that are
50 used to transfer control from an inner EH_REGION to an outer
51 EH_REGION. We also create EH_DISPATCH statements as placeholders
52 for a runtime type comparison that should be made in order to
53 select the action to perform among different CATCH and EH_FILTER
54 regions.
55
56 During pass_lower_eh_dispatch (tree-eh.c), which is run after
57 all inlining is complete, we are able to run assign_filter_values,
58 which allows us to map the set of types manipulated by all of the
59 CATCH and EH_FILTER regions to a set of integers. This set of integers
60 will be how the exception runtime communicates with the code generated
61 within the function. We then expand the GIMPLE_EH_DISPATCH statements
62 to a switch or conditional branches that use the argument provided by
63 the runtime (__builtin_eh_filter) and the set of integers we computed
64 in assign_filter_values.
65
66 During pass_lower_resx (tree-eh.c), which is run near the end
67 of optimization, we expand RESX statements. If the eh region
68 that is outer to the RESX statement is a MUST_NOT_THROW, then
69 the RESX expands to some form of abort statement. If the eh
70 region that is outer to the RESX statement is within the current
71 function, then the RESX expands to a bookkeeping call
72 (__builtin_eh_copy_values) and a goto. Otherwise, the next
73 handler for the exception must be within a function somewhere
74 up the call chain, so we call back into the exception runtime
75 (__builtin_unwind_resume).
76
77 During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
78 that create an rtl to eh_region mapping that corresponds to the
79 gimple to eh_region mapping that had been recorded in the
80 THROW_STMT_TABLE.
81
82 Then, via finish_eh_generation, we generate the real landing pads
83 to which the runtime will actually transfer control. These new
84 landing pads perform whatever bookkeeping is needed by the target
85 backend in order to resume execution within the current function.
86 Each of these new landing pads falls through into the post_landing_pad
87 label which had been used within the CFG up to this point. All
88 exception edges within the CFG are redirected to the new landing pads.
89 If the target uses setjmp to implement exceptions, the various extra
90 calls into the runtime to register and unregister the current stack
91 frame are emitted at this time.
92
93 During pass_convert_to_eh_region_ranges (except.c), we transform
94 the REG_EH_REGION notes attached to individual insns into
95 non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
96 and NOTE_INSN_EH_REGION_END. Each insn within such ranges has the
97 same associated action within the exception region tree, meaning
98 that (1) the exception is caught by the same landing pad within the
99 current function, (2) the exception is blocked by the runtime with
100 a MUST_NOT_THROW region, or (3) the exception is not handled at all
101 within the current function.
102
103 Finally, during assembly generation, we call
104 output_function_exception_table (except.c) to emit the tables with
105 which the exception runtime can determine if a given stack frame
106 handles a given exception, and if so what filter value to provide
107 to the function when the non-local control transfer is effected.
108 If the target uses dwarf2 unwinding to implement exceptions, then
109 output_call_frame_info (dwarf2out.c) emits the required unwind data. */
110
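
As a concrete anchor for the pipeline above, here is an illustrative fragment (not from except.c; may_throw, Error, and handle are hypothetical names): it begins as a TRY_CATCH_EXPR containing a CATCH_EXPR in GENERIC, is lowered to a GIMPLE_TRY/GIMPLE_CATCH pair during gimplification, and after pass_lower_eh contributes an ERT_TRY region with a landing pad to CFUN->EH->REGION_TREE.

void
f (void)
{
  try
    {
      may_throw ();             /* throwing statements end up in THROW_STMT_TABLE */
    }
  catch (const Error &e)        /* CATCH_EXPR -> GIMPLE_CATCH */
    {
      handle (e);               /* reached via the region's landing pad */
    }
}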
111
112#include "config.h"
113#include "system.h"
114#include "coretypes.h"
115#include "backend.h"
116#include "target.h"
117#include "rtl.h"
118#include "tree.h"
119#include "cfghooks.h"
120#include "tree-pass.h"
121#include "memmodel.h"
122#include "tm_p.h"
123#include "stringpool.h"
124#include "expmed.h"
125#include "optabs.h"
126#include "emit-rtl.h"
127#include "cgraph.h"
128#include "diagnostic.h"
129#include "fold-const.h"
130#include "stor-layout.h"
131#include "explow.h"
132#include "stmt.h"
133#include "expr.h"
134#include "calls.h"
135#include "libfuncs.h"
136#include "except.h"
137#include "output.h"
138#include "dwarf2asm.h"
139#include "dwarf2out.h"
140#include "common/common-target.h"
141#include "langhooks.h"
142#include "cfgrtl.h"
143#include "tree-pretty-print.h"
144#include "cfgloop.h"
145#include "builtins.h"
146#include "tree-hash-traits.h"
147
148static GTY(()) int call_site_base;
149
150static GTY(()) hash_map<tree_hash, tree> *type_to_runtime_map;
151
152static GTY(()) tree setjmp_fn;
153
154/* Describe the SjLj_Function_Context structure. */
155static GTY(()) tree sjlj_fc_type_node;
156static int sjlj_fc_call_site_ofs;
157static int sjlj_fc_data_ofs;
158static int sjlj_fc_personality_ofs;
159static int sjlj_fc_lsda_ofs;
160static int sjlj_fc_jbuf_ofs;
161
162
163struct GTY(()) call_site_record_d
164{
165 rtx landing_pad;
166 int action;
167};
168
169/* In the following structure and associated functions,
170 we represent entries in the action table as 1-based indices.
171 Special cases are:
172
173 0: null action record, non-null landing pad; implies cleanups
174 -1: null action record, null landing pad; implies no action
175 -2: no call-site entry; implies must_not_throw
176 -3: we have yet to process outer regions
177
178 Further, no special cases apply to the "next" field of the record.
179 For next, 0 means end of list. */
180
181struct action_record
182{
183 int offset;
184 int filter;
185 int next;
186};
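
Since the encoding above is easy to misread, a small illustrative decoder (not part of except.c) restating the special cases:

/* Illustration only: spell out the special action-table indices
   documented in the comment above.  */
static const char *
classify_action_index (int idx)
{
  switch (idx)
    {
    case 0:  return "null action record, non-null landing pad: cleanups";
    case -1: return "null action record, null landing pad: no action";
    case -2: return "no call-site entry: must_not_throw";
    case -3: return "outer regions not yet processed";
    default: return "ordinary 1-based index into the action table";
    }
}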
187
188/* Hashtable helpers. */
189
190struct action_record_hasher : free_ptr_hash <action_record>
191{
192 static inline hashval_t hash (const action_record *);
193 static inline bool equal (const action_record *, const action_record *);
194};
195
196inline hashval_t
197action_record_hasher::hash (const action_record *entry)
198{
199 return entry->next * 1009 + entry->filter;
200}
201
202inline bool
203action_record_hasher::equal (const action_record *entry,
204 const action_record *data)
205{
206 return entry->filter == data->filter && entry->next == data->next;
207}
208
209typedef hash_table<action_record_hasher> action_hash_type;
210
211static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
212 eh_landing_pad *);
213
214static void dw2_build_landing_pads (void);
215
216static int collect_one_action_chain (action_hash_type *, eh_region);
217static int add_call_site (rtx, int, int);
218
219static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
220static void push_sleb128 (vec<uchar, va_gc> **, int);
221static int dw2_size_of_call_site_table (int);
222static int sjlj_size_of_call_site_table (void);
223static void dw2_output_call_site_table (int, int);
224static void sjlj_output_call_site_table (void);
225
226
227void
228init_eh (void)
229{
230 if (! flag_exceptions)
231 return;
232
233 type_to_runtime_map = hash_map<tree_hash, tree>::create_ggc (31);
234
235 /* Create the SjLj_Function_Context structure. This should match
236 the definition in unwind-sjlj.c. */
237 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
238 {
239 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
240
241 sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
242
243 f_prev = build_decl (BUILTINS_LOCATION,
244 FIELD_DECL, get_identifier ("__prev"),
245 build_pointer_type (sjlj_fc_type_node));
246 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
247
248 f_cs = build_decl (BUILTINS_LOCATION,
249 FIELD_DECL, get_identifier ("__call_site"),
250 integer_type_node);
251 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
252
253 tmp = build_index_type (size_int (4 - 1));
254 tmp = build_array_type (lang_hooks.types.type_for_mode
255 (targetm.unwind_word_mode (), 1),
256 tmp);
257 f_data = build_decl (BUILTINS_LOCATION,
258 FIELD_DECL, get_identifier ("__data"), tmp);
259 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
260
261 f_per = build_decl (BUILTINS_LOCATION,
262 FIELD_DECL, get_identifier ("__personality"),
263 ptr_type_node);
264 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
265
266 f_lsda = build_decl (BUILTINS_LOCATION,
267 FIELD_DECL, get_identifier ("__lsda"),
268 ptr_type_node);
269 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
270
271#ifdef DONT_USE_BUILTIN_SETJMP
272#ifdef JMP_BUF_SIZE
273 tmp = size_int (JMP_BUF_SIZE - 1);
274#else
275 /* Should be large enough for most systems, if it is not,
276 JMP_BUF_SIZE should be defined with the proper value. It will
277 also tend to be larger than necessary for most systems, a more
278 optimal port will define JMP_BUF_SIZE. */
279 tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
280#endif
281#else
282 /* Compute a minimally sized jump buffer. We need room to store at
283 least 3 pointers - stack pointer, frame pointer and return address.
284 Plus for some targets we need room for an extra pointer - in the
285 case of MIPS this is the global pointer. This makes a total of four
286 pointers, but to be safe we actually allocate room for 5.
287
288 If pointers are smaller than words then we allocate enough room for
289 5 words, just in case the backend needs this much room. For more
290 discussion on this issue see:
291 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html. */
292 if (POINTER_SIZE > BITS_PER_WORD)
293 tmp = size_int (5 - 1);
294 else
295 tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
296#endif
297
298 tmp = build_index_type (tmp);
299 tmp = build_array_type (ptr_type_node, tmp);
300 f_jbuf = build_decl (BUILTINS_LOCATION,
301 FIELD_DECL, get_identifier ("__jbuf"), tmp);
302#ifdef DONT_USE_BUILTIN_SETJMP
303 /* We don't know what alignment requirements the runtime's
304 jmp_buf has. Overestimate. */
305 SET_DECL_ALIGN (f_jbuf, BIGGEST_ALIGNMENT);
306 DECL_USER_ALIGN (f_jbuf) = 1;
307#endif
308 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
309
310 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
311 TREE_CHAIN (f_prev) = f_cs;
312 TREE_CHAIN (f_cs) = f_data;
313 TREE_CHAIN (f_data) = f_per;
314 TREE_CHAIN (f_per) = f_lsda;
315 TREE_CHAIN (f_lsda) = f_jbuf;
316
317 layout_type (sjlj_fc_type_node);
318
319 /* Cache the interesting field offsets so that we have
320 easy access from rtl. */
321 sjlj_fc_call_site_ofs
322 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
323 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
324 sjlj_fc_data_ofs
325 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
326 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
327 sjlj_fc_personality_ofs
328 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
329 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
330 sjlj_fc_lsda_ofs
331 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
332 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
333 sjlj_fc_jbuf_ofs
334 = (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
335 + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
336
337#ifdef DONT_USE_BUILTIN_SETJMP
338 tmp = build_function_type_list (integer_type_node, TREE_TYPE (f_jbuf),
339 NULL);
340 setjmp_fn = build_decl (BUILTINS_LOCATION, FUNCTION_DECL,
341 get_identifier ("setjmp"), tmp);
342 TREE_PUBLIC (setjmp_fn) = 1;
343 DECL_EXTERNAL (setjmp_fn) = 1;
344 DECL_ASSEMBLER_NAME (setjmp_fn);
345#endif
346 }
347}
348
349void
350init_eh_for_function (void)
351{
352 cfun->eh = ggc_cleared_alloc<eh_status> ();
353
354 /* Make sure zero'th entries are used. */
355 vec_safe_push (cfun->eh->region_array, (eh_region)0);
356 vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
357}
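
Pushing the zero'th entries makes region and landing-pad indices effectively 1-based, so index 0 can safely mean "none" (compare get_eh_region_from_lp_number_fn below, which maps 0 to NULL). A hypothetical sanity check, not present in except.c, stating the resulting invariant:

/* Illustration only: slot 0 of each array is reserved, and every live
   region sits at the slot named by its own index.  */
static void
verify_eh_index_invariant (struct function *fun)
{
  gcc_assert ((*fun->eh->region_array)[0] == NULL);
  gcc_assert ((*fun->eh->lp_array)[0] == NULL);
  for (unsigned i = 1; i < vec_safe_length (fun->eh->region_array); ++i)
    {
      eh_region r = (*fun->eh->region_array)[i];
      if (r)
	gcc_assert (r->index == (int) i);
    }
}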
358
359/* Routines to generate the exception tree somewhat directly.
360 These are used from tree-eh.c when processing exception related
361 nodes during tree optimization. */
362
363static eh_region
364gen_eh_region (enum eh_region_type type, eh_region outer)
365{
366 eh_region new_eh;
367
368 /* Insert a new blank region as a leaf in the tree. */
369 new_eh = ggc_cleared_alloc<eh_region_d> ();
370 new_eh->type = type;
371 new_eh->outer = outer;
372 if (outer)
    (2) Assuming 'outer' is non-null
    (3) Taking true branch
373 {
374 new_eh->next_peer = outer->inner;
375 outer->inner = new_eh;
376 }
377 else
378 {
379 new_eh->next_peer = cfun->eh->region_tree;
380 cfun->eh->region_tree = new_eh;
381 }
382
383 new_eh->index = vec_safe_length (cfun->eh->region_array);
384 vec_safe_push (cfun->eh->region_array, new_eh);
    (4) Passing value via 1st parameter 'v'
    (5) Calling 'vec_safe_push<eh_region_d *, va_gc>'
385
386 /* Copy the language's notion of whether to use __cxa_end_cleanup. */
387 if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
388 new_eh->use_cxa_end_cleanup = true;
389
390 return new_eh;
391}
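
gen_eh_region links the new region into an intrusive n-ary tree: outer points to the parent, inner to the first child, and next_peer chains siblings (duplicate_eh_regions_1 below walks exactly these links). A minimal preorder-walk sketch, for illustration only:

/* Illustration only: visit R and everything below it, following the
   inner/next_peer links maintained by gen_eh_region.  */
static void
walk_eh_region_tree (eh_region r, void (*visit) (eh_region))
{
  for (; r; r = r->next_peer)
    {
      visit (r);
      walk_eh_region_tree (r->inner, visit);
    }
}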
392
393eh_region
394gen_eh_region_cleanup (eh_region outer)
395{
396 return gen_eh_region (ERT_CLEANUP, outer);
397}
398
399eh_region
400gen_eh_region_try (eh_region outer)
401{
402 return gen_eh_region (ERT_TRY, outer);
403}
404
405eh_catch
406gen_eh_region_catch (eh_region t, tree type_or_list)
407{
408 eh_catch c, l;
409 tree type_list, type_node;
410
411 gcc_assert (t->type == ERT_TRY);
412
413 /* Ensure to always end up with a type list to normalize further
414 processing, then register each type against the runtime types map. */
415 type_list = type_or_list;
416 if (type_or_list)
417 {
418 if (TREE_CODE (type_or_list) != TREE_LIST)
419 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
420
421 type_node = type_list;
422 for (; type_node; type_node = TREE_CHAIN (type_node))
423 add_type_for_runtime (TREE_VALUE (type_node));
424 }
425
426 c = ggc_cleared_alloc<eh_catch_d> ();
427 c->type_list = type_list;
428 l = t->u.eh_try.last_catch;
429 c->prev_catch = l;
430 if (l)
431 l->next_catch = c;
432 else
433 t->u.eh_try.first_catch = c;
434 t->u.eh_try.last_catch = c;
435
436 return c;
437}
438
439eh_region
440gen_eh_region_allowed (eh_region outer, tree allowed)
441{
442 eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
443 region->u.allowed.type_list = allowed;
444
445 for (; allowed ; allowed = TREE_CHAIN (allowed))
446 add_type_for_runtime (TREE_VALUE (allowed));
447
448 return region;
449}
450
451eh_region
452gen_eh_region_must_not_throw (eh_region outer)
453{
454 return gen_eh_region (ERT_MUST_NOT_THROW, outer);
    (1) Calling 'gen_eh_region'
455}
456
457eh_landing_pad
458gen_eh_landing_pad (eh_region region)
459{
460 eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();
461
462 lp->next_lp = region->landing_pads;
463 lp->region = region;
464 lp->index = vec_safe_length (cfun->eh->lp_array);
465 region->landing_pads = lp;
466
467 vec_safe_push (cfun->eh->lp_array, lp);
468
469 return lp;
470}
471
472eh_region
473get_eh_region_from_number_fn (struct function *ifun, int i)
474{
475 return (*ifun->eh->region_array)[i];
476}
477
478eh_region
479get_eh_region_from_number (int i)
480{
481 return get_eh_region_from_number_fn (cfun, i);
482}
483
484eh_landing_pad
485get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
486{
487 return (*ifun->eh->lp_array)[i];
488}
489
490eh_landing_pad
491get_eh_landing_pad_from_number (int i)
492{
493 return get_eh_landing_pad_from_number_fn (cfun, i);
494}
495
496eh_region
497get_eh_region_from_lp_number_fn (struct function *ifun, int i)
498{
499 if (i < 0)
500 return (*ifun->eh->region_array)[-i];
501 else if (i == 0)
502 return NULL;
503 else
504 {
505 eh_landing_pad lp;
506 lp = (*ifun->eh->lp_array)[i];
507 return lp->region;
508 }
509}
510
511eh_region
512get_eh_region_from_lp_number (int i)
513{
514 return get_eh_region_from_lp_number_fn (cfun, i);
515}
516
517/* Returns true if the current function has exception handling regions. */
518
519bool
520current_function_has_exception_handlers (void)
521{
522 return cfun->eh->region_tree != NULL;
523}
524
525/* A subroutine of duplicate_eh_regions. Copy the eh_region tree at OLD.
526 Root it at OUTER, and apply LP_OFFSET to the lp numbers. */
527
528struct duplicate_eh_regions_data
529{
530 duplicate_eh_regions_map label_map;
531 void *label_map_data;
532 hash_map<void *, void *> *eh_map;
533};
534
535static void
536duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
537 eh_region old_r, eh_region outer)
538{
539 eh_landing_pad old_lp, new_lp;
540 eh_region new_r;
541
542 new_r = gen_eh_region (old_r->type, outer);
543 gcc_assert (!data->eh_map->put (old_r, new_r));
544
545 switch (old_r->type)
546 {
547 case ERT_CLEANUP:
548 break;
549
550 case ERT_TRY:
551 {
552 eh_catch oc, nc;
553 for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
554 {
555 /* We should be doing all our region duplication before and
556 during inlining, which is before filter lists are created. */
557 gcc_assert (oc->filter_list == NULL);
558 nc = gen_eh_region_catch (new_r, oc->type_list);
559 nc->label = data->label_map (oc->label, data->label_map_data);
560 }
561 }
562 break;
563
564 case ERT_ALLOWED_EXCEPTIONS:
565 new_r->u.allowed.type_list = old_r->u.allowed.type_list;
566 if (old_r->u.allowed.label)
567 new_r->u.allowed.label
568 = data->label_map (old_r->u.allowed.label, data->label_map_data);
569 else
570 new_r->u.allowed.label = NULL_TREE;
571 break;
572
573 case ERT_MUST_NOT_THROW:
574 new_r->u.must_not_throw.failure_loc =
575 LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
576 new_r->u.must_not_throw.failure_decl =
577 old_r->u.must_not_throw.failure_decl;
578 break;
579 }
580
581 for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
582 {
583 /* Don't bother copying unused landing pads. */
584 if (old_lp->post_landing_pad == NULL)
585 continue;
586
587 new_lp = gen_eh_landing_pad (new_r);
588 gcc_assert (!data->eh_map->put (old_lp, new_lp));
589
590 new_lp->post_landing_pad
591 = data->label_map (old_lp->post_landing_pad, data->label_map_data);
592 EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
593 }
594
595 /* Make sure to preserve the original use of __cxa_end_cleanup. */
596 new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;
597
598 for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
599 duplicate_eh_regions_1 (data, old_r, new_r);
600}
601
602/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
603 the current function and root the tree below OUTER_REGION.
604 The special case of COPY_REGION of NULL means all regions.
605 Remap labels using MAP/MAP_DATA callback. Return a pointer map
606 that allows the caller to remap uses of both EH regions and
607 EH landing pads. */
608
609hash_map<void *, void *> *
610duplicate_eh_regions (struct function *ifun,
611 eh_region copy_region, int outer_lp,
612 duplicate_eh_regions_map map, void *map_data)
613{
614 struct duplicate_eh_regions_data data;
615 eh_region outer_region;
616
617 if (flag_checking)
618 verify_eh_tree (ifun);
619
620 data.label_map = map;
621 data.label_map_data = map_data;
622 data.eh_map = new hash_map<void *, void *>;
623
624 outer_region = get_eh_region_from_lp_number_fn (cfun, outer_lp);
625
626 /* Copy all the regions in the subtree. */
627 if (copy_region)
628 duplicate_eh_regions_1 (&data, copy_region, outer_region);
629 else
630 {
631 eh_region r;
632 for (r = ifun->eh->region_tree; r ; r = r->next_peer)
633 duplicate_eh_regions_1 (&data, r, outer_region);
634 }
635
636 if (flag_checking)
637 verify_eh_tree (cfun);
638
639 return data.eh_map;
640}
641
642/* Return the region that is outer to both REGION_A and REGION_B in IFUN. */
643
644eh_region
645eh_region_outermost (struct function *ifun, eh_region region_a,
646 eh_region region_b)
647{
648 gcc_assert (ifun->eh->region_array);
649 gcc_assert (ifun->eh->region_tree);
650
651 auto_sbitmap b_outer (ifun->eh->region_array->length ());
652 bitmap_clear (b_outer);
653
654 do
655 {
656 bitmap_set_bit (b_outer, region_b->index);
657 region_b = region_b->outer;
658 }
659 while (region_b);
660
661 do
662 {
663 if (bitmap_bit_p (b_outer, region_a->index))
664 break;
665 region_a = region_a->outer;
666 }
667 while (region_a);
668
669 return region_a;
670}
671
672void
673add_type_for_runtime (tree type)
674{
675 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
676 if (TREE_CODE (type) == NOP_EXPR)
677 return;
678
679 bool existed = false;
680 tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
681 if (!existed)
682 *slot = lang_hooks.eh_runtime_type (type);
683}
684
685tree
686lookup_type_for_runtime (tree type)
687{
688 /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
689 if (TREE_CODE (type) == NOP_EXPR)
690 return type;
691
692 /* We should have always inserted the data earlier. */
693 return *type_to_runtime_map->get (type);
694}
695
696
697/* Represent an entry in @TTypes for either catch actions
698 or exception filter actions. */
699struct ttypes_filter {
700 tree t;
701 int filter;
702};
703
704/* Helper for ttypes_filter hashing. */
705
706struct ttypes_filter_hasher : free_ptr_hash <ttypes_filter>
707{
708 typedef tree_node *compare_type;
709 static inline hashval_t hash (const ttypes_filter *);
710 static inline bool equal (const ttypes_filter *, const tree_node *);
711};
712
713/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
714 (a tree) for a @TTypes type node we are thinking about adding. */
715
716inline bool
717ttypes_filter_hasher::equal (const ttypes_filter *entry, const tree_node *data)
718{
719 return entry->t == data;
720}
721
722inline hashval_t
723ttypes_filter_hasher::hash (const ttypes_filter *entry)
724{
725 return TREE_HASH (entry->t);
726}
727
728typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
729
730
731/* Helper for ehspec hashing. */
732
733struct ehspec_hasher : free_ptr_hash <ttypes_filter>
734{
735 static inline hashval_t hash (const ttypes_filter *);
736 static inline bool equal (const ttypes_filter *, const ttypes_filter *);
737};
738
739/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
740 exception specification list we are thinking about adding. */
741/* ??? Currently we use the type lists in the order given. Someone
742 should put these in some canonical order. */
743
744inline bool
745ehspec_hasher::equal (const ttypes_filter *entry, const ttypes_filter *data)
746{
747 return type_list_equal (entry->t, data->t);
748}
749
750/* Hash function for exception specification lists. */
751
752inline hashval_t
753ehspec_hasher::hash (const ttypes_filter *entry)
754{
755 hashval_t h = 0;
756 tree list;
757
758 for (list = entry->t; list ; list = TREE_CHAIN (list))
759 h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
760 return h;
761}
762
763typedef hash_table<ehspec_hasher> ehspec_hash_type;
764
765
766/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
767 to speed up the search. Return the filter value to be used. */
768
769static int
770add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
771{
772 struct ttypes_filter **slot, *n;
773
774 slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
775 INSERT);
776
777 if ((n = *slot) == NULL)
778 {
779 /* Filter value is a 1 based table index. */
780
781 n = XNEW (struct ttypes_filter);
782 n->t = type;
783 n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
784 *slot = n;
785
786 vec_safe_push (cfun->eh->ttype_data, type);
787 }
788
789 return n->filter;
790}
791
792/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
793 to speed up the search. Return the filter value to be used. */
794
795static int
796add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
797 tree list)
798{
799 struct ttypes_filter **slot, *n;
800 struct ttypes_filter dummy;
801
802 dummy.t = list;
803 slot = ehspec_hash->find_slot (&dummy, INSERT);
804
805 if ((n = *slot) == NULL)
806 {
807 int len;
808
809 if (targetm.arm_eabi_unwinder)
810 len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
811 else
812 len = vec_safe_length (cfun->eh->ehspec_data.other);
813
814 /* Filter value is a -1 based byte index into a uleb128 buffer. */
815
816 n = XNEW (struct ttypes_filter);
817 n->t = list;
818 n->filter = -(len + 1);
819 *slot = n;
820
821 /* Generate a 0 terminated list of filter values. */
822 for (; list ; list = TREE_CHAIN (list))
823 {
824 if (targetm.arm_eabi_unwinder)
825 vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
826 else
827 {
828 /* Look up each type in the list and encode its filter
829 value as a uleb128. */
830 push_uleb128 (&cfun->eh->ehspec_data.other,
831 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
832 }
833 }
834 if (targetm.arm_eabi_unwinder)
835 vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
836 else
837 vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
838 }
839
840 return n->filter;
841}
842
843/* Generate the action filter values to be used for CATCH and
844 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
845 we use lots of landing pads, and so every type or list can share
846 the same filter value, which saves table space. */
847
848void
849assign_filter_values (void)
850{
851 int i;
852 eh_region r;
853 eh_catch c;
854
855 vec_alloc (cfun->eh->ttype_data, 16);
856 if (targetm.arm_eabi_unwinder)
857 vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
858 else
859 vec_alloc (cfun->eh->ehspec_data.other, 64);
860
861 ehspec_hash_type ehspec (31);
862 ttypes_hash_type ttypes (31);
863
864 for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
865 {
866 if (r == NULL)
867 continue;
868
869 switch (r->type)
870 {
871 case ERT_TRY:
872 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
873 {
874 /* Whatever type_list is (NULL or true list), we build a list
875 of filters for the region. */
876 c->filter_list = NULL_TREE;
877
878 if (c->type_list != NULL)
879 {
880 /* Get a filter value for each of the types caught and store
881 them in the region's dedicated list. */
882 tree tp_node = c->type_list;
883
884 for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
885 {
886 int flt
887 = add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
888 tree flt_node = build_int_cst (integer_type_node, flt);
889
890 c->filter_list
891 = tree_cons (NULL_TREE, flt_node, c->filter_list);
892 }
893 }
894 else
895 {
896 /* Get a filter value for the NULL list also since it
897 will need an action record anyway. */
898 int flt = add_ttypes_entry (&ttypes, NULL);
899 tree flt_node = build_int_cst (integer_type_node, flt);
900
901 c->filter_list
902 = tree_cons (NULL_TREE, flt_node, NULL);
903 }
904 }
905 break;
906
907 case ERT_ALLOWED_EXCEPTIONS:
908 r->u.allowed.filter
909 = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
910 break;
911
912 default:
913 break;
914 }
915 }
916}
917
918/* Emit SEQ into basic block just before INSN (that is assumed to be
919 the first instruction of some existing BB) and return the newly
920 produced block. */
921static basic_block
922emit_to_new_bb_before (rtx_insn *seq, rtx_insn *insn)
923{
924 rtx_insn *next, *last;
925 basic_block bb;
926 edge e;
927 edge_iterator ei;
928
929 /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
930 call), we don't want it to go into newly created landing pad or other EH
931 construct. */
932 for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
933 if (e->flags & EDGE_FALLTHRU)
934 force_nonfallthru (e);
935 else
936 ei_next (&ei);
937
938 /* Make sure to put the location of INSN or a subsequent instruction on SEQ
939 to avoid inheriting the location of the previous instruction. */
940 next = insn;
941 while (next && !NONDEBUG_INSN_P (next))
942 next = NEXT_INSN (next);
943 if (next)
944 last = emit_insn_before_setloc (seq, insn, INSN_LOCATION (next));
945 else
946 last = emit_insn_before (seq, insn);
947 if (BARRIER_P (last))
948 last = PREV_INSN (last);
949 bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
950 update_bb_for_insn (bb);
951 bb->flags |= BB_SUPERBLOCK;
952 return bb;
953}
954
955/* A subroutine of dw2_build_landing_pads, also used for edge splitting
956 at the rtl level. Emit the code required by the target at a landing
957 pad for the given region. */
958
959static void
960expand_dw2_landing_pad_for_region (eh_region region)
961{
962 if (targetm.have_exception_receiver ())
963 emit_insn (targetm.gen_exception_receiver ());
964 else if (targetm.have_nonlocal_goto_receiver ())
965 emit_insn (targetm.gen_nonlocal_goto_receiver ());
966 else
967 { /* Nothing */ }
968
969 if (region->exc_ptr_reg)
970 emit_move_insn (region->exc_ptr_reg,
971 gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
972 if (region->filter_reg)
973 emit_move_insn (region->filter_reg,
974 gen_rtx_REG (targetm.eh_return_filter_mode (),
975 EH_RETURN_DATA_REGNO (1)));
976}
977
978/* Expand the extra code needed at landing pads for dwarf2 unwinding. */
979
980static void
981dw2_build_landing_pads (void)
982{
983 int i;
984 eh_landing_pad lp;
985 int e_flags = EDGE_FALLTHRU;
986
987 /* If we're going to partition blocks, we need to be able to add
988 new landing pads later, which means that we need to hold on to
989 the post-landing-pad block. Prevent it from being merged away.
990 We'll remove this bit after partitioning. */
991 if (flag_reorder_blocks_and_partition)
992 e_flags |= EDGE_PRESERVE;
993
994 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
995 {
996 basic_block bb;
997 rtx_insn *seq;
998
999 if (lp == NULL || lp->post_landing_pad == NULL)
1000 continue;
1001
1002 start_sequence ();
1003
1004 lp->landing_pad = gen_label_rtx ();
1005 emit_label (lp->landing_pad);
1006 LABEL_PRESERVE_P (lp->landing_pad) = 1;
1007
1008 expand_dw2_landing_pad_for_region (lp->region);
1009
1010 seq = get_insns ();
1011 end_sequence ();
1012
1013 bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
1014 bb->count = bb->next_bb->count;
1015 make_single_succ_edge (bb, bb->next_bb, e_flags);
1016 if (current_loops)
1017 {
1018 class loop *loop = bb->next_bb->loop_father;
1019 /* If we created a pre-header block, add the new block to the
1020 outer loop, otherwise to the loop itself. */
1021 if (bb->next_bb == loop->header)
1022 add_bb_to_loop (bb, loop_outer (loop));
1023 else
1024 add_bb_to_loop (bb, loop);
1025 }
1026 }
1027}
1028
1029
1030static vec<int> sjlj_lp_call_site_index;
1031
1032/* Process all active landing pads. Assign each one a compact dispatch
1033 index, and a call-site index. */
1034
1035static int
1036sjlj_assign_call_site_values (void)
1037{
1038 action_hash_type ar_hash (31);
1039 int i, disp_index;
1040 eh_landing_pad lp;
1041
1042 vec_alloc (crtl->eh.action_record_data, 64);
1043
1044 disp_index = 0;
1045 call_site_base = 1;
1046 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1047 if (lp && lp->post_landing_pad)
1048 {
1049 int action, call_site;
1050
1051 /* First: build the action table. */
1052 action = collect_one_action_chain (&ar_hash, lp->region);
1053
1054 /* Next: assign call-site values. In dwarf2 terms, this would be
1055 the region number assigned by convert_to_eh_region_ranges, but
1056 handles no-action and must-not-throw differently. */
1057 /* Map must-not-throw to otherwise unused call-site index 0. */
1058 if (action == -2)
1059 call_site = 0;
1060 /* Map no-action to otherwise unused call-site index -1. */
1061 else if (action == -1)
1062 call_site = -1;
1063 /* Otherwise, look it up in the table. */
1064 else
1065 call_site = add_call_site (GEN_INT (disp_index), action, 0);
1066 sjlj_lp_call_site_index[i] = call_site;
1067
1068 disp_index++;
1069 }
1070
1071 return disp_index;
1072}
1073
1074/* Emit code to record the current call-site index before every
1075 insn that can throw. */
1076
1077static void
1078sjlj_mark_call_sites (void)
1079{
1080 int last_call_site = -2;
1081 rtx_insn *insn;
1082 rtx mem;
1083
1084 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1085 {
1086 eh_landing_pad lp;
1087 eh_region r;
1088 bool nothrow;
1089 int this_call_site;
1090 rtx_insn *before, *p;
1091
1092 /* Reset value tracking at extended basic block boundaries. */
1093 if (LABEL_P (insn))
1094 last_call_site = -2;
1095
1096 /* If the function allocates dynamic stack space, the context must
1097 be updated after every allocation/deallocation accordingly. */
1098 if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_UPDATE_SJLJ_CONTEXT)
1099 {
1100 rtx buf_addr;
1101
1102 start_sequence ();
1103 buf_addr = plus_constant (Pmode, XEXP (crtl->eh.sjlj_fc, 0),
1104 sjlj_fc_jbuf_ofs);
1105 expand_builtin_update_setjmp_buf (buf_addr);
1106 p = get_insns ();
1107 end_sequence ();
1108 emit_insn_before (p, insn);
1109 }
1110
1111 if (! INSN_P (insn))
1112 continue;
1113
1114 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1115 if (nothrow)
1116 continue;
1117 if (lp)
1118 this_call_site = sjlj_lp_call_site_index[lp->index];
1119 else if (r == NULL)
1120 {
1121 /* Calls (and trapping insns) without notes are outside any
1122 exception handling region in this function. Mark them as
1123 no action. */
1124 this_call_site = -1;
1125 }
1126 else
1127 {
1128 gcc_assert (r->type == ERT_MUST_NOT_THROW);
1129 this_call_site = 0;
1130 }
1131
1132 if (this_call_site != -1)
1133 crtl->uses_eh_lsda = 1;
1134
1135 if (this_call_site == last_call_site)
1136 continue;
1137
1138 /* Don't separate a call from its argument loads. */
1139 before = insn;
1140 if (CALL_P (insn))
1141 before = find_first_parameter_load (insn, NULL);
1142
1143 start_sequence ();
1144 mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
1145 sjlj_fc_call_site_ofs);
1146 emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
1147 p = get_insns ();
1148 end_sequence ();
1149
1150 emit_insn_before (p, before);
1151 last_call_site = this_call_site;
1152 }
1153}
1154
1155/* Construct the SjLj_Function_Context. */
1156
1157static void
1158sjlj_emit_function_enter (rtx_code_label *dispatch_label)
1159{
1160 rtx_insn *fn_begin, *seq;
1161 rtx fc, mem;
1162 bool fn_begin_outside_block;
1163 rtx personality = get_personality_function (current_function_decl);
1164
1165 fc = crtl->eh.sjlj_fc;
1166
1167 start_sequence ();
1168
1169 /* We're storing this libcall's address into memory instead of
1170 calling it directly. Thus, we must call assemble_external_libcall
1171 here, as we cannot depend on emit_library_call to do it for us. */
1172 assemble_external_libcall (personality);
1173 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
1174 emit_move_insn (mem, personality);
1175
1176 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
1177 if (crtl->uses_eh_lsda)
1178 {
1179 char buf[20];
1180 rtx sym;
1181
1182 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
1183 sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
1184 SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
1185 emit_move_insn (mem, sym);
1186 }
1187 else
1188 emit_move_insn (mem, const0_rtx);
1189
1190 if (dispatch_label)
1191 {
1192 rtx addr = plus_constant (Pmode, XEXP (fc, 0), sjlj_fc_jbuf_ofs);
1193
1194#ifdef DONT_USE_BUILTIN_SETJMP
1195 addr = copy_addr_to_reg (addr);
1196 addr = convert_memory_address (ptr_mode, addr);
1197 tree addr_tree = make_tree (ptr_type_node, addr);
1198
1199 tree call_expr = build_call_expr (setjmp_fn, 1, addr_tree);
1200 rtx x = expand_call (call_expr, NULL_RTX, false);
1201
1202 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
1203 TYPE_MODE (integer_type_node), 0,
1204 dispatch_label,
1205 profile_probability::unlikely ());
1206#else
1207 expand_builtin_setjmp_setup (addr, dispatch_label);
1208#endif
1209 }
1210
1211 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1212 XEXP (fc, 0), Pmode);
1213
1214 seq = get_insns ();
1215 end_sequence ();
1216
1217 /* ??? Instead of doing this at the beginning of the function,
1218 do this in a block that is at loop level 0 and dominates all
1219 can_throw_internal instructions. */
1220
1221 fn_begin_outside_block = true;
1222 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
1223 if (NOTE_P (fn_begin))
1224 {
1225 if (NOTE_KIND (fn_begin)(((fn_begin)->u.fld[4]).rt_int) == NOTE_INSN_FUNCTION_BEG)
1226 break;
1227      else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
1228 fn_begin_outside_block = false;
1229 }
1230
1231#ifdef DONT_USE_BUILTIN_SETJMP
1232 if (dispatch_label)
1233 {
1234 /* The sequence contains a branch in the middle so we need to force
1235 the creation of a new basic block by means of BB_SUPERBLOCK. */
1236 if (fn_begin_outside_block)
1237 {
1238 basic_block bb
1239	    = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
1240	  if (JUMP_P (BB_END (bb)))
1241	    emit_insn_before (seq, BB_END (bb));
1242	  else
1243	    emit_insn_after (seq, BB_END (bb));
1244 }
1245 else
1246 emit_insn_after (seq, fn_begin);
1247
1248 single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr))->flags |= BB_SUPERBLOCK;
1249 return;
1250 }
1251#endif
1252
1253 if (fn_begin_outside_block)
1254 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr)));
1255 else
1256 emit_insn_after (seq, fn_begin);
1257}
1258
1259/* Call back from expand_function_end to know where we should put
1260 the call to unwind_sjlj_unregister_libfunc if needed. */
1261
1262void
1263sjlj_emit_function_exit_after (rtx_insn *after)
1264{
1265 crtl(&x_rtl)->eh.sjlj_exit_after = after;
1266}
1267
1268static void
1269sjlj_emit_function_exit (void)
1270{
1271 rtx_insn *seq, *insn;
1272
1273 start_sequence ();
1274
1275  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
1276		     XEXP (crtl->eh.sjlj_fc, 0), Pmode);
1277
1278 seq = get_insns ();
1279 end_sequence ();
1280
1281 /* ??? Really this can be done in any block at loop level 0 that
1282 post-dominates all can_throw_internal instructions. This is
1283 the last possible moment. */
1284
1285 insn = crtl(&x_rtl)->eh.sjlj_exit_after;
1286 if (LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL))
1287 insn = NEXT_INSN (insn);
1288
1289 emit_insn_after (seq, insn);
1290}
1291
1292static void
1293sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
1294{
1295 scalar_int_mode unwind_word_mode = targetm.unwind_word_mode ();
1296 scalar_int_mode filter_mode = targetm.eh_return_filter_mode ();
1297 eh_landing_pad lp;
1298 rtx mem, fc, exc_ptr_reg, filter_reg;
1299 rtx_insn *seq;
1300 basic_block bb;
1301 eh_region r;
1302 int i, disp_index;
1303 vec<tree> dispatch_labels = vNULL;
1304
1305 fc = crtl(&x_rtl)->eh.sjlj_fc;
1306
1307 start_sequence ();
1308
1309 emit_label (dispatch_label);
1310
1311#ifndef DONT_USE_BUILTIN_SETJMP
1312 expand_builtin_setjmp_receiver (dispatch_label);
1313
1314 /* The caller of expand_builtin_setjmp_receiver is responsible for
1315 making sure that the label doesn't vanish. The only other caller
1316 is the expander for __builtin_setjmp_receiver, which places this
1317 label on the nonlocal_goto_label list. Since we're modeling these
1318 CFG edges more exactly, we can use the forced_labels list instead. */
1319  LABEL_PRESERVE_P (dispatch_label) = 1;
1320  vec_safe_push<rtx_insn *> (forced_labels, dispatch_label);
1321#endif
1322
1323 /* Load up exc_ptr and filter values from the function context. */
1324  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
1325 if (unwind_word_mode != ptr_mode)
1326 {
1327#ifdef POINTERS_EXTEND_UNSIGNED
1328      mem = convert_memory_address (ptr_mode, mem);
1329#else
1330 mem = convert_to_mode (ptr_mode, mem, 0);
1331#endif
1332 }
1333 exc_ptr_reg = force_reg (ptr_mode, mem);
1334
1335  mem = adjust_address (fc, unwind_word_mode,
1336			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
1337 if (unwind_word_mode != filter_mode)
1338 mem = convert_to_mode (filter_mode, mem, 0);
1339 filter_reg = force_reg (filter_mode, mem);
1340
1341 /* Jump to one of the directly reachable regions. */
1342
1343 disp_index = 0;
1344 rtx_code_label *first_reachable_label = NULLnullptr;
1345
1346 /* If there's exactly one call site in the function, don't bother
1347 generating a switch statement. */
1348 if (num_dispatch > 1)
1349 dispatch_labels.create (num_dispatch);
1350
1351 for (i = 1; vec_safe_iterate (cfun(cfun + 0)->eh->lp_array, i, &lp); ++i)
1352 if (lp && lp->post_landing_pad)
1353 {
1354 rtx_insn *seq2;
1355 rtx_code_label *label;
1356
1357 start_sequence ();
1358
1359 lp->landing_pad = dispatch_label;
1360
1361 if (num_dispatch > 1)
1362 {
1363 tree t_label, case_elt, t;
1364
1365 t_label = create_artificial_label (UNKNOWN_LOCATION((location_t) 0));
1366 t = build_int_cst (integer_type_nodeinteger_types[itk_int], disp_index);
1367 case_elt = build_case_label (t, NULLnullptr, t_label);
1368 dispatch_labels.quick_push (case_elt);
1369 label = jump_target_rtx (t_label);
1370 }
1371 else
1372 label = gen_label_rtx ();
1373
1374 if (disp_index == 0)
1375 first_reachable_label = label;
1376 emit_label (label);
1377
1378 r = lp->region;
1379 if (r->exc_ptr_reg)
1380 emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
1381 if (r->filter_reg)
1382 emit_move_insn (r->filter_reg, filter_reg);
1383
1384 seq2 = get_insns ();
1385 end_sequence ();
1386
1387 rtx_insn *before = label_rtx (lp->post_landing_pad);
1388 bb = emit_to_new_bb_before (seq2, before);
1389 make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1390 if (current_loops((cfun + 0)->x_current_loops))
1391 {
1392 class loop *loop = bb->next_bb->loop_father;
1393 /* If we created a pre-header block, add the new block to the
1394 outer loop, otherwise to the loop itself. */
1395 if (bb->next_bb == loop->header)
1396 add_bb_to_loop (bb, loop_outer (loop));
1397 else
1398 add_bb_to_loop (bb, loop);
1399 /* ??? For multiple dispatches we will end up with edges
1400 from the loop tree root into this loop, making it a
1401 multiple-entry loop. Discard all affected loops. */
1402 if (num_dispatch > 1)
1403 {
1404 for (loop = bb->loop_father;
1405 loop_outer (loop); loop = loop_outer (loop))
1406 mark_loop_for_removal (loop);
1407 }
1408 }
1409
1410 disp_index++;
1411 }
1412  gcc_assert (disp_index == num_dispatch);
1413
1414 if (num_dispatch > 1)
1415 {
1416      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
1417				 sjlj_fc_call_site_ofs);
1418 expand_sjlj_dispatch_table (disp, dispatch_labels);
1419 }
1420
1421 seq = get_insns ();
1422 end_sequence ();
1423
1424 bb = emit_to_new_bb_before (seq, first_reachable_label);
1425 if (num_dispatch == 1)
1426 {
1427 make_single_succ_edge (bb, bb->next_bb, EDGE_FALLTHRU);
1428 if (current_loops((cfun + 0)->x_current_loops))
1429 {
1430 class loop *loop = bb->next_bb->loop_father;
1431 /* If we created a pre-header block, add the new block to the
1432 outer loop, otherwise to the loop itself. */
1433 if (bb->next_bb == loop->header)
1434 add_bb_to_loop (bb, loop_outer (loop));
1435 else
1436 add_bb_to_loop (bb, loop);
1437 }
1438 }
1439 else
1440 {
1441      /* We are not wiring up edges here, but as the dispatcher call
1442	 is at the beginning of the function, simply associate the block
1443	 with the outermost (non-)loop.  */
1444 if (current_loops((cfun + 0)->x_current_loops))
1445 add_bb_to_loop (bb, current_loops((cfun + 0)->x_current_loops)->tree_root);
1446 }
1447}
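/* Illustrative sketch (not part of except.c): the dispatcher emitted
   above behaves roughly like the pseudo-C below.  Once the setjmp in
   the prologue returns nonzero, control lands at dispatch_label; the
   exception pointer and filter are loaded from the function context,
   and the call-site index selects the landing pad.  All names here are
   made up for illustration.

     exc_ptr = fc.data[0];
     filter  = fc.data[1];
     switch (fc.call_site)
       {
       case 0: r0_exc = exc_ptr; r0_filter = filter; goto post_landing_pad_0;
       case 1: r1_exc = exc_ptr; r1_filter = filter; goto post_landing_pad_1;
       ...
       }

   With num_dispatch == 1 the switch is omitted and control falls
   through into the single reachable region.  */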
1448
1449static void
1450sjlj_build_landing_pads (void)
1451{
1452 int num_dispatch;
1453
1454 num_dispatch = vec_safe_length (cfun(cfun + 0)->eh->lp_array);
1455 if (num_dispatch == 0)
1456 return;
1457 sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch, true);
1458
1459 num_dispatch = sjlj_assign_call_site_values ();
1460 if (num_dispatch > 0)
1461 {
1462 rtx_code_label *dispatch_label = gen_label_rtx ();
1463      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1464					TYPE_MODE (sjlj_fc_type_node),
1465					TYPE_ALIGN (sjlj_fc_type_node));
1466      crtl->eh.sjlj_fc
1467	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1468 int_size_in_bytes (sjlj_fc_type_node),
1469 align);
1470
1471 sjlj_mark_call_sites ();
1472 sjlj_emit_function_enter (dispatch_label);
1473 sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
1474 sjlj_emit_function_exit ();
1475 }
1476
1477 /* If we do not have any landing pads, we may still need to register a
1478 personality routine and (empty) LSDA to handle must-not-throw regions. */
1479 else if (function_needs_eh_personality (cfun(cfun + 0)) != eh_personality_none)
1480 {
1481      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
1482					TYPE_MODE (sjlj_fc_type_node),
1483					TYPE_ALIGN (sjlj_fc_type_node));
1484      crtl->eh.sjlj_fc
1485	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
1486 int_size_in_bytes (sjlj_fc_type_node),
1487 align);
1488
1489 sjlj_mark_call_sites ();
1490 sjlj_emit_function_enter (NULLnullptr);
1491 sjlj_emit_function_exit ();
1492 }
1493
1494 sjlj_lp_call_site_index.release ();
1495}
1496
1497/* Update the sjlj function context. This function should be called
1498 whenever we allocate or deallocate dynamic stack space. */
1499
1500void
1501update_sjlj_context (void)
1502{
1503 if (!flag_exceptionsglobal_options.x_flag_exceptions)
1504 return;
1505
1506 emit_note (NOTE_INSN_UPDATE_SJLJ_CONTEXT);
1507}
1508
1509/* After initial rtl generation, call back to finish generating
1510 exception support code. */
1511
1512void
1513finish_eh_generation (void)
1514{
1515 basic_block bb;
1516
1517 /* Construct the landing pads. */
1518 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1519 sjlj_build_landing_pads ();
1520 else
1521 dw2_build_landing_pads ();
1522
1523 break_superblocks ();
1524
1525 /* Redirect all EH edges from the post_landing_pad to the landing pad. */
1526  FOR_EACH_BB_FN (bb, cfun)
1527 {
1528 eh_landing_pad lp;
1529 edge_iterator ei;
1530 edge e;
1531
1532 lp = get_eh_landing_pad_from_rtx (BB_END (bb)(bb)->il.x.rtl->end_);
1533
1534      FOR_EACH_EDGE (e, ei, bb->succs)
1535 if (e->flags & EDGE_EH)
1536 break;
1537
1538 /* We should not have generated any new throwing insns during this
1539 pass, and we should not have lost any EH edges, so we only need
1540 to handle two cases here:
1541 (1) reachable handler and an existing edge to post-landing-pad,
1542 (2) no reachable handler and no edge. */
1543      gcc_assert ((lp != NULL) == (e != NULL));
1544 if (lp != NULLnullptr)
1545 {
1546	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));
1547
1548 redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
1549	  e->flags |= (CALL_P (BB_END (bb))
1550		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
1551		       : EDGE_ABNORMAL);
1552 }
1553 }
1554
1555 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
1556 /* Kludge for Alpha (see alpha_gp_save_rtx). */
1557 || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr))->insns.r)
1558 commit_edge_insertions ();
1559}
1560
1561/* This section handles removing dead code for flow. */
1562
1563void
1564remove_eh_landing_pad (eh_landing_pad lp)
1565{
1566 eh_landing_pad *pp;
1567
1568 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1569 continue;
1570 *pp = lp->next_lp;
1571
1572 if (lp->post_landing_pad)
1573    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1574 (*cfun(cfun + 0)->eh->lp_array)[lp->index] = NULLnullptr;
1575}
1576
1577/* Splice the EH region at PP from the region tree. */
1578
1579static void
1580remove_eh_handler_splicer (eh_region *pp)
1581{
1582 eh_region region = *pp;
1583 eh_landing_pad lp;
1584
1585 for (lp = region->landing_pads; lp ; lp = lp->next_lp)
1586 {
1587 if (lp->post_landing_pad)
1588	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1589 (*cfun(cfun + 0)->eh->lp_array)[lp->index] = NULLnullptr;
1590 }
1591
1592 if (region->inner)
1593 {
1594 eh_region p, outer;
1595 outer = region->outer;
1596
1597 *pp = p = region->inner;
1598 do
1599 {
1600 p->outer = outer;
1601 pp = &p->next_peer;
1602 p = *pp;
1603 }
1604 while (p);
1605 }
1606 *pp = region->next_peer;
1607
1608 (*cfun(cfun + 0)->eh->region_array)[region->index] = NULLnullptr;
1609}
1610
1611/* Splice a single EH region REGION from the region tree.
1612
1613 To unlink REGION, we need to find the pointer to it with a relatively
1614 expensive search in REGION's outer region. If you are going to
1615 remove a number of handlers, using remove_unreachable_eh_regions may
1616 be a better option. */
1617
1618void
1619remove_eh_handler (eh_region region)
1620{
1621 eh_region *pp, *pp_start, p, outer;
1622
1623 outer = region->outer;
1624 if (outer)
1625 pp_start = &outer->inner;
1626 else
1627 pp_start = &cfun(cfun + 0)->eh->region_tree;
1628 for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1629 continue;
1630
1631 remove_eh_handler_splicer (pp);
1632}
1633
1634/* Worker for remove_unreachable_eh_regions.
1635 PP is a pointer to the region to start a region tree depth-first
1636 search from. R_REACHABLE is the set of regions that have to be
1637 preserved. */
1638
1639static void
1640remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
1641{
1642 while (*pp)
1643 {
1644 eh_region region = *pp;
1645 remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
1646 if (!bitmap_bit_p (r_reachable, region->index))
1647 remove_eh_handler_splicer (pp);
1648 else
1649 pp = &region->next_peer;
1650 }
1651}
1652
1653/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
1654 Do this by traversing the EH tree top-down and splice out regions that
1655 are not marked. By removing regions from the leaves, we avoid costly
1656 searches in the region tree. */
1657
1658void
1659remove_unreachable_eh_regions (sbitmap r_reachable)
1660{
1661 remove_unreachable_eh_regions_worker (&cfun(cfun + 0)->eh->region_tree, r_reachable);
1662}
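/* Illustrative caller sketch, assuming GCC's sbitmap API: mark the
   regions to preserve, then splice out everything else.  "r" is a
   hypothetical region known to be reachable.

     sbitmap r_reachable
       = sbitmap_alloc (vec_safe_length (cfun->eh->region_array));
     bitmap_clear (r_reachable);
     bitmap_set_bit (r_reachable, r->index);
     remove_unreachable_eh_regions (r_reachable);
     sbitmap_free (r_reachable);  */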
1663
1664/* Invokes CALLBACK for every exception handler landing pad label.
1665 Only used by reload hackery; should not be used by new code. */
1666
1667void
1668for_each_eh_label (void (*callback) (rtx))
1669{
1670 eh_landing_pad lp;
1671 int i;
1672
1673 for (i = 1; vec_safe_iterate (cfun(cfun + 0)->eh->lp_array, i, &lp); ++i)
1674 {
1675 if (lp)
1676 {
1677 rtx_code_label *lab = lp->landing_pad;
1678 if (lab && LABEL_P (lab)(((enum rtx_code) (lab)->code) == CODE_LABEL))
1679 (*callback) (lab);
1680 }
1681 }
1682}
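/* Illustrative usage sketch: dump every landing-pad label via a
   hypothetical callback; debug_rtx is GCC's RTL pretty-printer.

     static void
     dump_eh_label (rtx lab)
     {
       debug_rtx (lab);
     }
     ...
     for_each_eh_label (dump_eh_label);  */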
1683
1684/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1685 call insn.
1686
1687 At the gimple level, we use LP_NR
1688 > 0 : The statement transfers to landing pad LP_NR
1689 = 0 : The statement is outside any EH region
1690 < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1691
1692 At the rtl level, we use LP_NR
1693 > 0 : The insn transfers to landing pad LP_NR
1694 = 0 : The insn cannot throw
1695 < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1696 = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1697 missing note: The insn is outside any EH region.
1698
1699 ??? This difference probably ought to be avoided. We could stand
1700 to record nothrow for arbitrary gimple statements, and so avoid
1701 some moderately complex lookups in stmt_could_throw_p. Perhaps
1702 NOTHROW should be mapped on both sides to INT_MIN. Perhaps the
1703 no-nonlocal-goto property should be recorded elsewhere as a bit
1704 on the call_insn directly. Perhaps we should make more use of
1705 attaching the trees to call_insns (reachable via symbol_ref in
1706 direct call cases) and just pull the data out of the trees. */
1707
1708void
1709make_reg_eh_region_note (rtx_insn *insn, int ecf_flags, int lp_nr)
1710{
1711 rtx value;
1712 if (ecf_flags & ECF_NOTHROW(1 << 6))
1713 value = const0_rtx(const_int_rtx[64]);
1714 else if (lp_nr != 0)
1715 value = GEN_INT (lp_nr)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (lp_nr));
1716 else
1717 return;
1718 add_reg_note (insn, REG_EH_REGION, value);
1719}
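/* Illustrative sketch of the RTL-level LP_NR encoding documented above;
   describe_lp_nr is a hypothetical helper, not part of GCC.

     static const char *
     describe_lp_nr (int lp_nr)
     {
       if (lp_nr == INT_MIN)
	 return "cannot throw or execute a nonlocal-goto";
       if (lp_nr > 0)
	 return "transfers to landing pad LP_NR";
       if (lp_nr == 0)
	 return "cannot throw";
       return "within MUST_NOT_THROW region -LP_NR";
     }  */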
1720
1721/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1722 nor perform a non-local goto. Replace the region note if it
1723 already exists. */
1724
1725void
1726make_reg_eh_region_note_nothrow_nononlocal (rtx_insn *insn)
1727{
1728 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX(rtx) 0);
1729  rtx intmin = GEN_INT (INT_MIN);
1730
1731 if (note != 0)
1732 XEXP (note, 0)(((note)->u.fld[0]).rt_rtx) = intmin;
1733 else
1734 add_reg_note (insn, REG_EH_REGION, intmin);
1735}
1736
1737/* Return true if INSN could throw, assuming no REG_EH_REGION note
1738 to the contrary. */
1739
1740bool
1741insn_could_throw_p (const_rtx insn)
1742{
1743 if (!flag_exceptionsglobal_options.x_flag_exceptions)
1744 return false;
1745 if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN))
1746 return true;
1747  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1748 return may_trap_p (PATTERN (insn));
1749 return false;
1750}
1751
1752/* Copy a REG_EH_REGION note to each insn that might throw, beginning
1753 at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
1754 to look for a note, or the note itself. */
1755
1756void
1757copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1758{
1759 rtx_insn *insn;
1760 rtx note = note_or_insn;
1761
1762  if (INSN_P (note_or_insn))
1763 {
1764 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX(rtx) 0);
1765 if (note == NULLnullptr)
1766 return;
1767 }
1768 else if (is_a <rtx_insn *> (note_or_insn))
1769 return;
1770 note = XEXP (note, 0)(((note)->u.fld[0]).rt_rtx);
1771
1772 for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1773 if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX(rtx) 0)
1774 && insn_could_throw_p (insn))
1775 add_reg_note (insn, REG_EH_REGION, note);
1776}
1777
1778/* Likewise, but iterate backward. */
1779
1780void
1781copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
1782{
1783 rtx_insn *insn;
1784 rtx note = note_or_insn;
1785
1786  if (INSN_P (note_or_insn))
1787 {
1788 note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX(rtx) 0);
1789 if (note == NULLnullptr)
1790 return;
1791 }
1792 else if (is_a <rtx_insn *> (note_or_insn))
1793 return;
1794 note = XEXP (note, 0)(((note)->u.fld[0]).rt_rtx);
1795
1796 for (insn = last; insn != first; insn = PREV_INSN (insn))
1797 if (insn_could_throw_p (insn))
1798 add_reg_note (insn, REG_EH_REGION, note);
1799}
1800
1801
1802/* Extract all EH information from INSN. Return true if the insn
1803 was marked NOTHROW. */
1804
1805static bool
1806get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1807 eh_landing_pad *plp)
1808{
1809 eh_landing_pad lp = NULLnullptr;
1810 eh_region r = NULLnullptr;
1811 bool ret = false;
1812 rtx note;
1813 int lp_nr;
1814
1815  if (! INSN_P (insn))
1816 goto egress;
1817
1818  if (NONJUMP_INSN_P (insn)
1819      && GET_CODE (PATTERN (insn)) == SEQUENCE)
1820    insn = XVECEXP (PATTERN (insn), 0, 0);
1821
1822 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX(rtx) 0);
1823 if (!note)
1824 {
1825 ret = !insn_could_throw_p (insn);
1826 goto egress;
1827 }
1828
1829  lp_nr = INTVAL (XEXP (note, 0));
1830  if (lp_nr == 0 || lp_nr == INT_MIN)
1831 {
1832 ret = true;
1833 goto egress;
1834 }
1835
1836 if (lp_nr < 0)
1837 r = (*cfun(cfun + 0)->eh->region_array)[-lp_nr];
1838 else
1839 {
1840 lp = (*cfun(cfun + 0)->eh->lp_array)[lp_nr];
1841 r = lp->region;
1842 }
1843
1844 egress:
1845 *plp = lp;
1846 *pr = r;
1847 return ret;
1848}
1849
1850/* Return the landing pad to which INSN may go, or NULL if it does not
1851 have a reachable landing pad within this function. */
1852
1853eh_landing_pad
1854get_eh_landing_pad_from_rtx (const_rtx insn)
1855{
1856 eh_landing_pad lp;
1857 eh_region r;
1858
1859 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1860 return lp;
1861}
1862
1863/* Return the region to which INSN may go, or NULL if it does not
1864 have a reachable region within this function. */
1865
1866eh_region
1867get_eh_region_from_rtx (const_rtx insn)
1868{
1869 eh_landing_pad lp;
1870 eh_region r;
1871
1872 get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1873 return r;
1874}
1875
1876/* Return true if INSN throws and is caught by something in this function. */
1877
1878bool
1879can_throw_internal (const_rtx insn)
1880{
1881 return get_eh_landing_pad_from_rtx (insn) != NULLnullptr;
1882}
1883
1884/* Return true if INSN throws and escapes from the current function. */
1885
1886bool
1887can_throw_external (const_rtx insn)
1888{
1889 eh_landing_pad lp;
1890 eh_region r;
1891 bool nothrow;
1892
1893  if (! INSN_P (insn))
1894 return false;
1895
1896  if (NONJUMP_INSN_P (insn)
1897      && GET_CODE (PATTERN (insn)) == SEQUENCE)
1898 {
1899 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1900 int i, n = seq->len ();
1901
1902 for (i = 0; i < n; i++)
1903 if (can_throw_external (seq->element (i)))
1904 return true;
1905
1906 return false;
1907 }
1908
1909 nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1910
1911 /* If we can't throw, we obviously can't throw external. */
1912 if (nothrow)
1913 return false;
1914
1915 /* If we have an internal landing pad, then we're not external. */
1916 if (lp != NULLnullptr)
1917 return false;
1918
1919 /* If we're not within an EH region, then we are external. */
1920 if (r == NULLnullptr)
1921 return true;
1922
1923 /* The only thing that ought to be left is MUST_NOT_THROW regions,
1924 which don't always have landing pads. */
1925 gcc_assert (r->type == ERT_MUST_NOT_THROW)((void)(!(r->type == ERT_MUST_NOT_THROW) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 1925, __FUNCTION__), 0 : 0))
;
1926 return false;
1927}
1928
1929/* Return true if INSN cannot throw at all. */
1930
1931bool
1932insn_nothrow_p (const_rtx insn)
1933{
1934 eh_landing_pad lp;
1935 eh_region r;
1936
1937  if (! INSN_P (insn))
1938 return true;
1939
1940  if (NONJUMP_INSN_P (insn)
1941      && GET_CODE (PATTERN (insn)) == SEQUENCE)
1942 {
1943 rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1944 int i, n = seq->len ();
1945
1946 for (i = 0; i < n; i++)
1947 if (!insn_nothrow_p (seq->element (i)))
1948 return false;
1949
1950 return true;
1951 }
1952
1953 return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1954}
1955
1956/* Return true if INSN can perform a non-local goto. */
1957/* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
1958
1959bool
1960can_nonlocal_goto (const rtx_insn *insn)
1961{
1962  if (nonlocal_goto_handler_labels && CALL_P (insn))
1963    {
1964      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1965      if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1966 return true;
1967 }
1968 return false;
1969}
1970
1971/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
1972
1973static unsigned int
1974set_nothrow_function_flags (void)
1975{
1976 rtx_insn *insn;
1977
1978 crtl(&x_rtl)->nothrow = 1;
1979
1980 /* Assume crtl->all_throwers_are_sibcalls until we encounter
1981 something that can throw an exception. We specifically exempt
1982 CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
1983 and can't throw. Most CALL_INSNs are not SIBLING_CALL_P, so this
1984 is optimistic. */
1985
1986 crtl(&x_rtl)->all_throwers_are_sibcalls = 1;
1987
1988 /* If we don't know that this implementation of the function will
1989 actually be used, then we must not set TREE_NOTHROW, since
1990 callers must not assume that this function does not throw. */
1991 if (TREE_NOTHROW (current_function_decl)((current_function_decl)->base.nothrow_flag))
1992 return 0;
1993
1994 if (! flag_exceptionsglobal_options.x_flag_exceptions)
1995 return 0;
1996
1997 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1998 if (can_throw_external (insn))
1999 {
2000 crtl(&x_rtl)->nothrow = 0;
2001
2002	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
2003 {
2004 crtl(&x_rtl)->all_throwers_are_sibcalls = 0;
2005 return 0;
2006 }
2007 }
2008
2009 if (crtl(&x_rtl)->nothrow
2010 && (cgraph_node::get (current_function_decl)->get_availability ()
2011 >= AVAIL_AVAILABLE))
2012 {
2013 struct cgraph_node *node = cgraph_node::get (current_function_decl);
2014 struct cgraph_edge *e;
2015 for (e = node->callers; e; e = e->next_caller)
2016 e->can_throw_external = false;
2017 node->set_nothrow_flag (true);
2018
2019 if (dump_file)
2020 fprintf (dump_file, "Marking function nothrow: %s\n\n",
2021 current_function_name ());
2022 }
2023 return 0;
2024}
2025
2026namespace {
2027
2028const pass_data pass_data_set_nothrow_function_flags =
2029{
2030 RTL_PASS, /* type */
2031 "nothrow", /* name */
2032 OPTGROUP_NONE, /* optinfo_flags */
2033 TV_NONE, /* tv_id */
2034 0, /* properties_required */
2035 0, /* properties_provided */
2036 0, /* properties_destroyed */
2037 0, /* todo_flags_start */
2038 0, /* todo_flags_finish */
2039};
2040
2041class pass_set_nothrow_function_flags : public rtl_opt_pass
2042{
2043public:
2044 pass_set_nothrow_function_flags (gcc::context *ctxt)
2045 : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
2046 {}
2047
2048 /* opt_pass methods: */
2049 virtual unsigned int execute (function *)
2050 {
2051 return set_nothrow_function_flags ();
2052 }
2053
2054}; // class pass_set_nothrow_function_flags
2055
2056} // anon namespace
2057
2058rtl_opt_pass *
2059make_pass_set_nothrow_function_flags (gcc::context *ctxt)
2060{
2061 return new pass_set_nothrow_function_flags (ctxt);
2062}
2063
2064
2065/* Various hooks for unwind library. */
2066
2067/* Expand the EH support builtin functions:
2068 __builtin_eh_pointer and __builtin_eh_filter. */
2069
2070static eh_region
2071expand_builtin_eh_common (tree region_nr_t)
2072{
2073  HOST_WIDE_INT region_nr;
2074 eh_region region;
2075
2076  gcc_assert (tree_fits_shwi_p (region_nr_t));
2077 region_nr = tree_to_shwi (region_nr_t);
2078
2079 region = (*cfun(cfun + 0)->eh->region_array)[region_nr];
2080
2081  /* ??? We shouldn't have been able to delete an EH region without
2082     deleting all the code that depended on it.  */
2083  gcc_assert (region != NULL);
2084
2085 return region;
2086}
2087
2088/* Expand to the exc_ptr value from the given eh region. */
2089
2090rtx
2091expand_builtin_eh_pointer (tree exp)
2092{
2093 eh_region region
2094    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2095 if (region->exc_ptr_reg == NULLnullptr)
2096 region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2097 return region->exc_ptr_reg;
2098}
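/* Illustrative sketch: by the time this expander runs, gimple EH
   lowering has reduced a resx into something like the following
   (the region number 1 is made up):

     _1 = __builtin_eh_pointer (1);
     __builtin_unwind_resume (_1);  */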
2099
2100/* Expand to the filter value from the given eh region. */
2101
2102rtx
2103expand_builtin_eh_filter (tree exp)
2104{
2105 eh_region region
2106    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2107 if (region->filter_reg == NULLnullptr)
2108 region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2109 return region->filter_reg;
2110}
2111
2112/* Copy the exc_ptr and filter values from one landing pad's registers
2113 to another. This is used to inline the resx statement. */
2114
2115rtx
2116expand_builtin_eh_copy_values (tree exp)
2117{
2118 eh_region dst
2119    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2120  eh_region src
2121    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2122 scalar_int_mode fmode = targetm.eh_return_filter_mode ();
2123
2124 if (dst->exc_ptr_reg == NULLnullptr)
2125 dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2126 if (src->exc_ptr_reg == NULLnullptr)
2127 src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2128
2129 if (dst->filter_reg == NULLnullptr)
2130 dst->filter_reg = gen_reg_rtx (fmode);
2131 if (src->filter_reg == NULLnullptr)
2132 src->filter_reg = gen_reg_rtx (fmode);
2133
2134 emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2135 emit_move_insn (dst->filter_reg, src->filter_reg);
2136
2137 return const0_rtx(const_int_rtx[64]);
2138}
2139
2140/* Do any necessary initialization to access arbitrary stack frames.
2141 On the SPARC, this means flushing the register windows. */
2142
2143void
2144expand_builtin_unwind_init (void)
2145{
2146 /* Set this so all the registers get saved in our frame; we need to be
2147 able to copy the saved values for any registers from frames we unwind. */
2148 crtl(&x_rtl)->saves_all_registers = 1;
2149
2150  SETUP_FRAME_ADDRESSES ();
2151}
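/* Illustrative user-level usage: a setjmp-style unwinder or a
   conservative stack scanner forces all call-saved registers into the
   frame before walking it:

     __builtin_unwind_init ();  */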
2152
2153/* Map a non-negative number to an eh return data register number; expands
2154 to -1 if no return data register is associated with the input number.
2155 At least the inputs 0 and 1 must be mapped; the target may provide more. */
2156
2157rtx
2158expand_builtin_eh_return_data_regno (tree exp)
2159{
2160  tree which = CALL_EXPR_ARG (exp, 0);
2161  unsigned HOST_WIDE_INT iwhich;
2162
2163  if (TREE_CODE (which) != INTEGER_CST)
2164 {
2165 error ("argument of %<__builtin_eh_return_regno%> must be constant");
2166      return constm1_rtx;
2167 }
2168
2169 iwhich = tree_to_uhwi (which);
2170  iwhich = EH_RETURN_DATA_REGNO (iwhich);
2171  if (iwhich == INVALID_REGNUM)
2172    return constm1_rtx;
2173
2174#ifdef DWARF_FRAME_REGNUM
2175  iwhich = DWARF_FRAME_REGNUM (iwhich);
2176#else
2177  iwhich = DBX_REGISTER_NUMBER (iwhich);
2178#endif
2179
2180  return GEN_INT (iwhich);
2181}
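/* Illustrative user-level usage, in the style of libgcc's unwinders;
   the argument must be a compile-time constant, and -1 comes back when
   the target has no such register:

     int reg0 = __builtin_eh_return_data_regno (0);  */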
2182
2183/* Given a value extracted from the return address register or stack slot,
2184 return the actual address encoded in that value. */
2185
2186rtx
2187expand_builtin_extract_return_addr (tree addr_tree)
2188{
2189  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
2190
2191  if (GET_MODE (addr) != Pmode
2192      && GET_MODE (addr) != VOIDmode)
2193 {
2194#ifdef POINTERS_EXTEND_UNSIGNED
2195      addr = convert_memory_address (Pmode, addr);
2196#else
2197      addr = convert_to_mode (Pmode, addr, 0);
2198#endif
2199 }
2200
2201 /* First mask out any unwanted bits. */
2202  rtx mask = MASK_RETURN_ADDR;
2203  if (mask)
2204    expand_and (Pmode, addr, mask, addr);
2205
2206  /* Then adjust to find the real return address.  */
2207  if (RETURN_ADDR_OFFSET)
2208    addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
2209
2210 return addr;
2211}
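/* Illustrative user-level usage: normalize a raw return address the
   way the unwinder does before comparing it against call-site tables:

     void *ra = __builtin_extract_return_addr (__builtin_return_address (0));  */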
2212
2213/* Given an actual address in addr_tree, do any necessary encoding
2214 and return the value to be stored in the return address register or
2215 stack slot so the epilogue will return to that address. */
2216
2217rtx
2218expand_builtin_frob_return_addr (tree addr_tree)
2219{
2220  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2221
2222  addr = convert_memory_address (Pmode, addr);
2223
2224  if (RETURN_ADDR_OFFSET)
2225    {
2226      addr = force_reg (Pmode, addr);
2227      addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
2228    }
2229
2230 return addr;
2231}
2232
2233/* Set up the epilogue with the magic bits we'll need to return to the
2234 exception handler. */
2235
2236void
2237expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED__attribute__ ((__unused__)),
2238 tree handler_tree)
2239{
2240 rtx tmp;
2241
2242#ifdef EH_RETURN_STACKADJ_RTX
2243  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
2244		     VOIDmode, EXPAND_NORMAL);
2245  tmp = convert_memory_address (Pmode, tmp);
2246  if (!crtl->eh.ehr_stackadj)
2247    crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
2248  else if (tmp != crtl->eh.ehr_stackadj)
2249    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
2250#endif
2251
2252  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
2253		     VOIDmode, EXPAND_NORMAL);
2254  tmp = convert_memory_address (Pmode, tmp);
2255  if (!crtl->eh.ehr_handler)
2256    crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
2257  else if (tmp != crtl->eh.ehr_handler)
2258    emit_move_insn (crtl->eh.ehr_handler, tmp);
2259
2260  if (!crtl->eh.ehr_label)
2261    crtl->eh.ehr_label = gen_label_rtx ();
2262  emit_jump (crtl->eh.ehr_label);
2263}
2264
2265/* Expand __builtin_eh_return. This exit path from the function loads up
2266 the eh return data registers, adjusts the stack, and branches to a
2267 given PC other than the normal return address. */
2268
2269void
2270expand_eh_return (void)
2271{
2272 rtx_code_label *around_label;
2273
2274 if (! crtl(&x_rtl)->eh.ehr_label)
2275 return;
2276
2277 crtl(&x_rtl)->calls_eh_return = 1;
2278
2279#ifdef EH_RETURN_STACKADJ_RTX
2280  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
2281#endif
2282
2283 around_label = gen_label_rtx ();
2284 emit_jump (around_label);
2285
2286 emit_label (crtl(&x_rtl)->eh.ehr_label);
2287 clobber_return_register ();
2288
2289#ifdef EH_RETURN_STACKADJ_RTX
2290  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
2291#endif
2292
2293 if (targetm.have_eh_return ())
2294 emit_insn (targetm.gen_eh_return (crtl(&x_rtl)->eh.ehr_handler));
2295 else
2296 {
2297      if (rtx handler = EH_RETURN_HANDLER_RTX)
2298 emit_move_insn (handler, crtl(&x_rtl)->eh.ehr_handler);
2299 else
2300 error ("%<__builtin_eh_return%> not supported on this target");
2301 }
2302
2303 emit_label (around_label);
2304}
2305
2306/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
2307 POINTERS_EXTEND_UNSIGNED and return it. */
2308
2309rtx
2310expand_builtin_extend_pointer (tree addr_tree)
2311{
2312 rtx addr = expand_expr (addr_tree, NULL_RTX(rtx) 0, ptr_mode, EXPAND_NORMAL);
2313 int extend;
2314
2315#ifdef POINTERS_EXTEND_UNSIGNED
2316  extend = POINTERS_EXTEND_UNSIGNED;
2317#else
2318 /* The previous EH code did an unsigned extend by default, so we do this also
2319 for consistency. */
2320 extend = 1;
2321#endif
2322
2323 return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
2324}
2325
2326static int
2327add_action_record (action_hash_type *ar_hash, int filter, int next)
2328{
2329 struct action_record **slot, *new_ar, tmp;
2330
2331 tmp.filter = filter;
2332 tmp.next = next;
2333 slot = ar_hash->find_slot (&tmp, INSERT);
2334
2335 if ((new_ar = *slot) == NULLnullptr)
2336 {
2337      new_ar = XNEW (struct action_record);
2338 new_ar->offset = crtl(&x_rtl)->eh.action_record_data->length () + 1;
2339 new_ar->filter = filter;
2340 new_ar->next = next;
2341 *slot = new_ar;
2342
2343      /* The filter value goes in untouched.  The link to the next
2344	 record is a "self-relative" byte offset, or zero to indicate
2345	 that there is no next record.  So convert the absolute 1-based
2346	 indices we've been carrying around into a displacement.  */
2347
2348 push_sleb128 (&crtl(&x_rtl)->eh.action_record_data, filter);
2349 if (next)
2350 next -= crtl(&x_rtl)->eh.action_record_data->length () + 1;
2351 push_sleb128 (&crtl(&x_rtl)->eh.action_record_data, next);
2352 }
2353
2354 return new_ar->offset;
2355}
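/* Illustrative sketch of the signed-LEB128 encoding used for the
   action table, assuming a plain byte buffer in place of GCC's vec;
   this mirrors what push_sleb128 does.

     static void
     sleb128_sketch (unsigned char *buf, size_t *len, long value)
     {
       bool more;
       do
	 {
	   unsigned char byte = value & 0x7f;
	   value >>= 7;
	   more = !((value == 0 && (byte & 0x40) == 0)
		    || (value == -1 && (byte & 0x40) != 0));
	   if (more)
	     byte |= 0x80;
	   buf[(*len)++] = byte;
	 }
       while (more);
     }  */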
2356
2357static int
2358collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
2359{
2360 int next;
2361
2362 /* If we've reached the top of the region chain, then we have
2363 no actions, and require no landing pad. */
2364 if (region == NULLnullptr)
2365 return -1;
2366
2367 switch (region->type)
2368 {
2369 case ERT_CLEANUP:
2370 {
2371 eh_region r;
2372 /* A cleanup adds a zero filter to the beginning of the chain, but
2373 there are special cases to look out for. If there are *only*
2374 cleanups along a path, then it compresses to a zero action.
2375 Further, if there are multiple cleanups along a path, we only
2376 need to represent one of them, as that is enough to trigger
2377 entry to the landing pad at runtime. */
2378 next = collect_one_action_chain (ar_hash, region->outer);
2379 if (next <= 0)
2380 return 0;
2381 for (r = region->outer; r ; r = r->outer)
2382 if (r->type == ERT_CLEANUP)
2383 return next;
2384 return add_action_record (ar_hash, 0, next);
2385 }
2386
2387 case ERT_TRY:
2388 {
2389 eh_catch c;
2390
2391 /* Process the associated catch regions in reverse order.
2392 If there's a catch-all handler, then we don't need to
2393 search outer regions. Use a magic -3 value to record
2394 that we haven't done the outer search. */
2395 next = -3;
2396 for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
2397 {
2398 if (c->type_list == NULLnullptr)
2399 {
2400 /* Retrieve the filter from the head of the filter list
2401 where we have stored it (see assign_filter_values). */
2402	      int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
2403 next = add_action_record (ar_hash, filter, 0);
2404 }
2405 else
2406 {
2407 /* Once the outer search is done, trigger an action record for
2408 each filter we have. */
2409 tree flt_node;
2410
2411 if (next == -3)
2412 {
2413 next = collect_one_action_chain (ar_hash, region->outer);
2414
2415 /* If there is no next action, terminate the chain. */
2416 if (next == -1)
2417 next = 0;
2418 /* If all outer actions are cleanups or must_not_throw,
2419 we'll have no action record for it, since we had wanted
2420 to encode these states in the call-site record directly.
2421 Add a cleanup action to the chain to catch these. */
2422 else if (next <= 0)
2423 next = add_action_record (ar_hash, 0, 0);
2424 }
2425
2426 flt_node = c->filter_list;
2427	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
2428		{
2429		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
2430 next = add_action_record (ar_hash, filter, next);
2431 }
2432 }
2433 }
2434 return next;
2435 }
2436
2437 case ERT_ALLOWED_EXCEPTIONS:
2438 /* An exception specification adds its filter to the
2439 beginning of the chain. */
2440 next = collect_one_action_chain (ar_hash, region->outer);
2441
2442 /* If there is no next action, terminate the chain. */
2443 if (next == -1)
2444 next = 0;
2445 /* If all outer actions are cleanups or must_not_throw,
2446 we'll have no action record for it, since we had wanted
2447 to encode these states in the call-site record directly.
2448 Add a cleanup action to the chain to catch these. */
2449 else if (next <= 0)
2450 next = add_action_record (ar_hash, 0, 0);
2451
2452 return add_action_record (ar_hash, region->u.allowed.filter, next);
2453
2454 case ERT_MUST_NOT_THROW:
2455 /* A must-not-throw region with no inner handlers or cleanups
2456 requires no call-site entry. Note that this differs from
2457 the no handler or cleanup case in that we do require an lsda
2458 to be generated. Return a magic -2 value to record this. */
2459 return -2;
2460 }
2461
2462  gcc_unreachable ();
2463}
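/* Worked example (illustrative): for a catch handler with filter 1
   nested inside an outer cleanup, the recursion above reaches the
   cleanup first, which contributes an action record with filter 0;
   the try region then prepends a record with filter 1 whose next
   link is the self-relative offset of the cleanup record.  The value
   returned is the 1-based offset of that catch record, which later
   lands in the call-site table.  */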
2464
2465static int
2466add_call_site (rtx landing_pad, int action, int section)
2467{
2468 call_site_record record;
2469
2470 record = ggc_alloc<call_site_record_d> ();
2471 record->landing_pad = landing_pad;
2472 record->action = action;
2473
2474 vec_safe_push (crtl(&x_rtl)->eh.call_site_record_v[section], record);
2475
2476 return call_site_base + crtl(&x_rtl)->eh.call_site_record_v[section]->length () - 1;
2477}
2478
2479static rtx_note *
2480emit_note_eh_region_end (rtx_insn *insn)
2481{
2482 return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
2483}
2484
2485/* Add a NOP after NOTE_INSN_SWITCH_TEXT_SECTIONS when the cold section
2486   starts with a landing pad.
2487   With the landing pad at offset 0 from the start label of the section,
2488   we would miss EH delivery, because 0 is special and means no landing pad.  */
2489
2490static bool
2491maybe_add_nop_after_section_switch (void)
2492{
2493 if (!crtl(&x_rtl)->uses_eh_lsda
2494 || !crtl(&x_rtl)->eh.call_site_record_v[1])
2495 return false;
2496 int n = vec_safe_length (crtl(&x_rtl)->eh.call_site_record_v[1]);
2497 hash_set<rtx_insn *> visited;
2498
2499 for (int i = 0; i < n; ++i)
2500 {
2501 struct call_site_record_d *cs
2502 = (*crtl(&x_rtl)->eh.call_site_record_v[1])[i];
2503 if (cs->landing_pad)
2504 {
2505 rtx_insn *insn = as_a <rtx_insn *> (cs->landing_pad);
2506 while (true)
2507 {
2508	      /* Landing pads have the LABEL_PRESERVE_P flag set.  This check
2509		 makes sure that we do not walk past a landing pad visited
2510		 earlier, which would result in possible quadratic behaviour.  */
2511	      if (LABEL_P (insn) && LABEL_PRESERVE_P (insn)
2512 && visited.add (insn))
2513 break;
2514
2515	      /* Conservatively assume that an ASM insn may be empty.  We
2516		 have no way to tell what it contains.  */
2517 if (active_insn_p (insn)
2518		  && GET_CODE (PATTERN (insn)) != ASM_INPUT
2519		  && GET_CODE (PATTERN (insn)) != ASM_OPERANDS)
2520 break;
2521
2522 /* If we reached the start of hot section, then NOP will be
2523 needed. */
2524 if (GET_CODE (insn)((enum rtx_code) (insn)->code) == NOTE
2525 && NOTE_KIND (insn)(((insn)->u.fld[4]).rt_int) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2526 {
2527 emit_insn_after (gen_nop (), insn);
2528 break;
2529 }
2530
2531	      /* We visit only labels from the cold section.  We should
2532		 never hit the beginning of the insn stream here.  */
2533 insn = PREV_INSN (insn);
2534 }
2535 }
2536 }
2537 return false;
2538}
2539
2540/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
2541 The new note numbers will not refer to region numbers, but
2542 instead to call site entries. */
2543
2544static unsigned int
2545convert_to_eh_region_ranges (void)
2546{
2547 rtx insn;
2548 rtx_insn *iter;
2549 rtx_note *note;
2550 action_hash_type ar_hash (31);
2551 int last_action = -3;
2552 rtx_insn *last_action_insn = NULLnullptr;
2553 rtx last_landing_pad = NULL_RTX(rtx) 0;
2554 rtx_insn *first_no_action_insn = NULLnullptr;
2555 int call_site = 0;
2556 int cur_sec = 0;
2557 rtx_insn *section_switch_note = NULLnullptr;
2558 rtx_insn *first_no_action_insn_before_switch = NULLnullptr;
2559 rtx_insn *last_no_action_insn_before_switch = NULLnullptr;
2560 int saved_call_site_base = call_site_base;
2561
2562 vec_alloc (crtl(&x_rtl)->eh.action_record_data, 64);
2563
2564 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
2565 if (INSN_P (iter)(((((enum rtx_code) (iter)->code) == INSN) || (((enum rtx_code
) (iter)->code) == JUMP_INSN) || (((enum rtx_code) (iter)->
code) == CALL_INSN)) || (((enum rtx_code) (iter)->code) ==
DEBUG_INSN))
)
2566 {
2567 eh_landing_pad lp;
2568 eh_region region;
2569 bool nothrow;
2570 int this_action;
2571 rtx_code_label *this_landing_pad;
2572
2573 insn = iter;
2574 if (NONJUMP_INSN_P (insn)(((enum rtx_code) (insn)->code) == INSN)
2575 && GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == SEQUENCE)
2576 insn = XVECEXP (PATTERN (insn), 0, 0)(((((PATTERN (insn))->u.fld[0]).rt_rtvec))->elem[0]);
2577
2578 nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
2579 if (nothrow)
2580 continue;
2581 if (region)
2582 this_action = collect_one_action_chain (&ar_hash, region);
2583 else
2584 this_action = -1;
2585
2586 /* Existence of catch handlers, or must-not-throw regions
2587 implies that an lsda is needed (even if empty). */
2588 if (this_action != -1)
2589 crtl(&x_rtl)->uses_eh_lsda = 1;
2590
2591 /* Delay creation of region notes for no-action regions
2592 until we're sure that an lsda will be required. */
2593 else if (last_action == -3)
2594 {
2595 first_no_action_insn = iter;
2596 last_action = -1;
2597 }
2598
2599 if (this_action >= 0)
2600 this_landing_pad = lp->landing_pad;
2601 else
2602 this_landing_pad = NULLnullptr;
2603
2604 /* Differing actions or landing pads imply a change in call-site
2605 info, which implies some EH_REGION note should be emitted. */
2606 if (last_action != this_action
2607 || last_landing_pad != this_landing_pad)
2608 {
2609 /* If there is a queued no-action region in the other section
2610 with hot/cold partitioning, emit it now. */
2611 if (first_no_action_insn_before_switch)
2612 {
2613 gcc_assert (this_action != -1((void)(!(this_action != -1 && last_action == (first_no_action_insn
? -1 : -3)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2615, __FUNCTION__), 0 : 0))
2614 && last_action == (first_no_action_insn((void)(!(this_action != -1 && last_action == (first_no_action_insn
? -1 : -3)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2615, __FUNCTION__), 0 : 0))
2615 ? -1 : -3))((void)(!(this_action != -1 && last_action == (first_no_action_insn
? -1 : -3)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2615, __FUNCTION__), 0 : 0))
;
2616 call_site = add_call_site (NULL_RTX(rtx) 0, 0, 0);
2617 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2618 first_no_action_insn_before_switch);
2619 NOTE_EH_HANDLER (note)(((note)->u.fld[3]).rt_int) = call_site;
2620 note
2621 = emit_note_eh_region_end (last_no_action_insn_before_switch);
2622 NOTE_EH_HANDLER (note)(((note)->u.fld[3]).rt_int) = call_site;
2623 gcc_assert (last_action != -3((void)(!(last_action != -3 || (last_action_insn == last_no_action_insn_before_switch
)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2625, __FUNCTION__), 0 : 0))
2624 || (last_action_insn((void)(!(last_action != -3 || (last_action_insn == last_no_action_insn_before_switch
)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2625, __FUNCTION__), 0 : 0))
2625 == last_no_action_insn_before_switch))((void)(!(last_action != -3 || (last_action_insn == last_no_action_insn_before_switch
)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2625, __FUNCTION__), 0 : 0))
;
2626 first_no_action_insn_before_switch = NULLnullptr;
2627 last_no_action_insn_before_switch = NULLnullptr;
2628 call_site_base++;
2629 }
2630 /* If we'd not seen a previous action (-3) or the previous
2631 action was must-not-throw (-2), then we do not need an
2632 end note. */
2633 if (last_action >= -1)
2634 {
2635 /* If we delayed the creation of the begin, do it now. */
2636 if (first_no_action_insn)
2637 {
2638 call_site = add_call_site (NULL_RTX(rtx) 0, 0, cur_sec);
2639 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
2640 first_no_action_insn);
2641 NOTE_EH_HANDLER (note)(((note)->u.fld[3]).rt_int) = call_site;
2642 first_no_action_insn = NULLnullptr;
2643 }
2644
2645 note = emit_note_eh_region_end (last_action_insn);
2646 NOTE_EH_HANDLER (note)(((note)->u.fld[3]).rt_int) = call_site;
2647 }
2648
2649 /* If the new action is must-not-throw, then no region notes
2650 are created. */
2651 if (this_action >= -1)
2652 {
2653 call_site = add_call_site (this_landing_pad,
2654 this_action < 0 ? 0 : this_action,
2655 cur_sec);
2656 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
2657 NOTE_EH_HANDLER (note)(((note)->u.fld[3]).rt_int) = call_site;
2658 }
2659
2660 last_action = this_action;
2661 last_landing_pad = this_landing_pad;
2662 }
2663 last_action_insn = iter;
2664 }
2665 else if (NOTE_P (iter)(((enum rtx_code) (iter)->code) == NOTE)
2666 && NOTE_KIND (iter)(((iter)->u.fld[4]).rt_int) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
2667 {
2668 gcc_assert (section_switch_note == NULL_RTX)((void)(!(section_switch_note == (rtx) 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2668, __FUNCTION__), 0 : 0))
;
2669 gcc_assert (flag_reorder_blocks_and_partition)((void)(!(global_options.x_flag_reorder_blocks_and_partition)
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2669, __FUNCTION__), 0 : 0))
;
2670 section_switch_note = iter;
2671 if (first_no_action_insn)
2672 {
2673 first_no_action_insn_before_switch = first_no_action_insn;
2674 last_no_action_insn_before_switch = last_action_insn;
2675 first_no_action_insn = NULLnullptr;
2676 gcc_assert (last_action == -1)((void)(!(last_action == -1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2676, __FUNCTION__), 0 : 0))
;
2677 last_action = -3;
2678 }
2679 /* Force closing of the current EH region before the section switch
2680 and opening a new one afterwards. */
2681 else if (last_action != -3)
2682 last_landing_pad = pc_rtx;
2683 if (crtl(&x_rtl)->eh.call_site_record_v[cur_sec])
2684 call_site_base += crtl(&x_rtl)->eh.call_site_record_v[cur_sec]->length ();
2685 cur_sec++;
2686 gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL)((void)(!((&x_rtl)->eh.call_site_record_v[cur_sec] == nullptr
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2686, __FUNCTION__), 0 : 0))
;
2687 vec_alloc (crtl(&x_rtl)->eh.call_site_record_v[cur_sec], 10);
2688 }
2689
2690 if (last_action >= -1 && ! first_no_action_insn)
2691 {
2692 note = emit_note_eh_region_end (last_action_insn);
2693 NOTE_EH_HANDLER (note)(((note)->u.fld[3]).rt_int) = call_site;
2694 }
2695
2696 call_site_base = saved_call_site_base;
2697
2698 return 0;
2699}
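
/* For reference, the magic values threaded through this pass (collected
   from the comments above): THIS_ACTION >= 0 names an action chain built
   by add_action_record; -1 means no action (a call-site entry with a
   zero action and no landing pad); -2 means must-not-throw (an lsda is
   required, but no region notes are emitted); -3 means no action has
   been seen yet, so emission of region notes is still delayed. */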
2700
2701namespace {
2702
2703const pass_data pass_data_convert_to_eh_region_ranges =
2704{
2705 RTL_PASS, /* type */
2706 "eh_ranges", /* name */
2707 OPTGROUP_NONE, /* optinfo_flags */
2708 TV_NONE, /* tv_id */
2709 0, /* properties_required */
2710 0, /* properties_provided */
2711 0, /* properties_destroyed */
2712 0, /* todo_flags_start */
2713 0, /* todo_flags_finish */
2714};
2715
2716class pass_convert_to_eh_region_ranges : public rtl_opt_pass
2717{
2718public:
2719 pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2720 : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
2721 {}
2722
2723 /* opt_pass methods: */
2724 virtual bool gate (function *);
2725 virtual unsigned int execute (function *)
2726 {
2727 int ret = convert_to_eh_region_ranges ();
2728 maybe_add_nop_after_section_switch ();
2729 return ret;
2730 }
2731
2732}; // class pass_convert_to_eh_region_ranges
2733
2734bool
2735pass_convert_to_eh_region_ranges::gate (function *)
2736{
2737 /* Nothing to do for SJLJ exceptions or if no regions created. */
2738 if (cfun(cfun + 0)->eh->region_tree == NULLnullptr)
2739 return false;
2740 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2741 return false;
2742 return true;
2743}
2744
2745} // anon namespace
2746
2747rtl_opt_pass *
2748make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
2749{
2750 return new pass_convert_to_eh_region_ranges (ctxt);
2751}
2752
2753static void
2754push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2755{
2756 do
2757 {
2758 unsigned char byte = value & 0x7f;
2759 value >>= 7;
2760 if (value)
2761 byte |= 0x80;
2762 vec_safe_push (*data_area, byte);
2763 }
2764 while (value);
2765}
2766
2767static void
2768push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2769{
2770 unsigned char byte;
2771 int more;
2772
2773 do
2774 {
2775 byte = value & 0x7f;
2776 value >>= 7;
2777 more = ! ((value == 0 && (byte & 0x40) == 0)
2778 || (value == -1 && (byte & 0x40) != 0));
2779 if (more)
2780 byte |= 0x80;
2781 vec_safe_push (*data_area, byte);
2782 }
2783 while (more);
2784}
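
/* A worked example of the encodings above (the byte values follow the
   LEB128 definition in the DWARF specification and are not taken from a
   GCC run): push_uleb128 with VALUE == 624485 (0x98765) appends the
   bytes 0xe5 0x8e 0x26, i.e. the 7-bit groups 0x65, 0x0e, 0x26 from
   least to most significant, with the high bit set on every byte but
   the last. push_sleb128 with VALUE == -2 appends the single byte 0x7e:
   after one 7-bit group VALUE is -1 and bit 0x40 of the byte is set, so
   MORE becomes false. */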
2785
2786
2787static int
2788dw2_size_of_call_site_table (int section)
2789{
2790 int n = vec_safe_length (crtl(&x_rtl)->eh.call_site_record_v[section]);
2791 int size = n * (4 + 4 + 4);
2792 int i;
2793
2794 for (i = 0; i < n; ++i)
2795 {
2796 struct call_site_record_d *cs =
2797 (*crtl(&x_rtl)->eh.call_site_record_v[section])[i];
2798 size += size_of_uleb128 (cs->action);
2799 }
2800
2801 return size;
2802}
2803
2804static int
2805sjlj_size_of_call_site_table (void)
2806{
2807 int n = vec_safe_length (crtl(&x_rtl)->eh.call_site_record_v[0]);
2808 int size = 0;
2809 int i;
2810
2811 for (i = 0; i < n; ++i)
2812 {
2813 struct call_site_record_d *cs =
2814 (*crtl(&x_rtl)->eh.call_site_record_v[0])[i];
2815 size += size_of_uleb128 (INTVAL (cs->landing_pad)((cs->landing_pad)->u.hwint[0]));
2816 size += size_of_uleb128 (cs->action);
2817 }
2818
2819 return size;
2820}
2821
2822static void
2823dw2_output_call_site_table (int cs_format, int section)
2824{
2825 int n = vec_safe_length (crtl(&x_rtl)->eh.call_site_record_v[section]);
2826 int i;
2827 const char *begin;
2828
2829 if (section == 0)
2830 begin = current_function_func_begin_label;
2831 else if (first_function_block_is_cold)
2832 begin = crtl(&x_rtl)->subsections.hot_section_label;
2833 else
2834 begin = crtl(&x_rtl)->subsections.cold_section_label;
2835
2836 for (i = 0; i < n; ++i)
2837 {
2838 struct call_site_record_d *cs = (*crtl(&x_rtl)->eh.call_site_record_v[section])[i];
2839 char reg_start_lab[32];
2840 char reg_end_lab[32];
2841 char landing_pad_lab[32];
2842
2843 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i)do { char *__p; (reg_start_lab)[0] = '*'; (reg_start_lab)[1] =
'.'; __p = stpcpy (&(reg_start_lab)[2], "LEHB"); sprint_ul
(__p, (unsigned long) (call_site_base + i)); } while (0)
;
2844 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i)do { char *__p; (reg_end_lab)[0] = '*'; (reg_end_lab)[1] = '.'
; __p = stpcpy (&(reg_end_lab)[2], "LEHE"); sprint_ul (__p
, (unsigned long) (call_site_base + i)); } while (0)
;
2845
2846 if (cs->landing_pad)
2847 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",do { char *__p; (landing_pad_lab)[0] = '*'; (landing_pad_lab)
[1] = '.'; __p = stpcpy (&(landing_pad_lab)[2], "L"); sprint_ul
(__p, (unsigned long) ((((cs->landing_pad)->u.fld[5]).
rt_int))); } while (0)
2848 CODE_LABEL_NUMBER (cs->landing_pad))do { char *__p; (landing_pad_lab)[0] = '*'; (landing_pad_lab)
[1] = '.'; __p = stpcpy (&(landing_pad_lab)[2], "L"); sprint_ul
(__p, (unsigned long) ((((cs->landing_pad)->u.fld[5]).
rt_int))); } while (0)
;
2849
2850 /* ??? Perhaps use insn length scaling if the assembler supports
2851 generic arithmetic. */
2852 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
2853 data4 if the function is small enough. */
2854 if (cs_format == DW_EH_PE_uleb1280x01)
2855 {
2856 dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
2857 "region %d start", i);
2858 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
2859 "length");
2860 if (cs->landing_pad)
2861 dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
2862 "landing pad");
2863 else
2864 dw2_asm_output_data_uleb128 (0, "landing pad");
2865 }
2866 else
2867 {
2868 dw2_asm_output_delta (4, reg_start_lab, begin,
2869 "region %d start", i);
2870 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
2871 if (cs->landing_pad)
2872 dw2_asm_output_delta (4, landing_pad_lab, begin,
2873 "landing pad");
2874 else
2875 dw2_asm_output_data (4, 0, "landing pad");
2876 }
2877 dw2_asm_output_data_uleb128 (cs->action, "action");
2878 }
2879
2880 call_site_base += n;
2881}
2882
2883static void
2884sjlj_output_call_site_table (void)
2885{
2886 int n = vec_safe_length (crtl(&x_rtl)->eh.call_site_record_v[0]);
2887 int i;
2888
2889 for (i = 0; i < n; ++i)
2890 {
2891 struct call_site_record_d *cs = (*crtl(&x_rtl)->eh.call_site_record_v[0])[i];
2892
2893 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad)((cs->landing_pad)->u.hwint[0]),
2894 "region %d landing pad", i);
2895 dw2_asm_output_data_uleb128 (cs->action, "action");
2896 }
2897
2898 call_site_base += n;
2899}
2900
2901/* Switch to the section that should be used for exception tables. */
2902
2903static void
2904switch_to_exception_section (const char * ARG_UNUSED (fnname)fnname __attribute__ ((__unused__)))
2905{
2906 section *s;
2907
2908 if (exception_section)
2909 s = exception_section;
2910 else
2911 {
2912 int flags;
2913
2914 if (EH_TABLES_CAN_BE_READ_ONLY1)
2915 {
2916 int tt_format =
2917 ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1)asm_preferred_eh_data_format ((0), (1));
2918 flags = ((! flag_picglobal_options.x_flag_pic
2919 || ((tt_format & 0x70) != DW_EH_PE_absptr0x00
2920 && (tt_format & 0x70) != DW_EH_PE_aligned0x50))
2921 ? 0 : SECTION_WRITE0x00200);
2922 }
2923 else
2924 flags = SECTION_WRITE0x00200;
2925
2926 /* Compute the section and cache it into exception_section,
2927 unless it depends on the function name. */
2928 if (targetm_common.have_named_sections)
2929 {
2930#ifdef HAVE_LD_EH_GC_SECTIONS1
2931 if (flag_function_sectionsglobal_options.x_flag_function_sections
2932 || (DECL_COMDAT_GROUP (current_function_decl)decl_comdat_group (current_function_decl) && HAVE_COMDAT_GROUP1))
2933 {
2934 char *section_name = XNEWVEC (char, strlen (fnname) + 32)((char *) xmalloc (sizeof (char) * (strlen (fnname) + 32)));
2935 /* The EH table must match the code section, so only mark
2936 it linkonce if we have COMDAT groups to tie them together. */
2937 if (DECL_COMDAT_GROUP (current_function_decl)decl_comdat_group (current_function_decl) && HAVE_COMDAT_GROUP1)
2938 flags |= SECTION_LINKONCE0x00800;
2939 sprintf (section_name, ".gcc_except_table.%s", fnname);
2940 s = get_section (section_name, flags, current_function_decl);
2941 free (section_name);
2942 }
2943 else
2944#endif
2945 exception_section
2946 = s = get_section (".gcc_except_table", flags, NULLnullptr);
2947 }
2948 else
2949 exception_section
2950 = s = flags == SECTION_WRITE0x00200 ? data_section : readonly_data_section;
2951 }
2952
2953 switch_to_section (s);
2954}
2955
2956/* Output a reference from an exception table to the type_info object TYPE.
2957 TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
2958 the value. */
2959
2960static void
2961output_ttype (tree type, int tt_format, int tt_format_size)
2962{
2963 rtx value;
2964 bool is_public = true;
2965
2966 if (type == NULL_TREE(tree) nullptr)
2967 value = const0_rtx(const_int_rtx[64]);
2968 else
2969 {
2970 /* FIXME lto. pass_ipa_free_lang_data changes all types to
2971 runtime types so TYPE should already be a runtime type
2972 reference. When pass_ipa_free_lang data is made a default
2973 pass, we can then remove the call to lookup_type_for_runtime
2974 below. */
2975 if (TYPE_P (type)(tree_code_type[(int) (((enum tree_code) (type)->base.code
))] == tcc_type)
)
2976 type = lookup_type_for_runtime (type);
2977
2978 value = expand_expr (type, NULL_RTX(rtx) 0, VOIDmode((void) 0, E_VOIDmode), EXPAND_INITIALIZER);
2979
2980 /* Let cgraph know that the rtti decl is used. Not all of the
2981 paths below go through assemble_integer, which would take
2982 care of this for us. */
2983 STRIP_NOPS (type)(type) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((type)))))
;
2984 if (TREE_CODE (type)((enum tree_code) (type)->base.code) == ADDR_EXPR)
2985 {
2986 type = TREE_OPERAND (type, 0)(*((const_cast<tree*> (tree_operand_check ((type), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2986, __FUNCTION__)))))
;
2987 if (VAR_P (type)(((enum tree_code) (type)->base.code) == VAR_DECL))
2988 is_public = TREE_PUBLIC (type)((type)->base.public_flag);
2989 }
2990 else
2991 gcc_assert (TREE_CODE (type) == INTEGER_CST)((void)(!(((enum tree_code) (type)->base.code) == INTEGER_CST
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/except.c"
, 2991, __FUNCTION__), 0 : 0))
;
2992 }
2993
2994 /* Allow the target to override the type table entry format. */
2995 if (targetm.asm_out.ttype (value))
2996 return;
2997
2998 if (tt_format == DW_EH_PE_absptr0x00 || tt_format == DW_EH_PE_aligned0x50)
2999 assemble_integer (value, tt_format_size,
3000 tt_format_size * BITS_PER_UNIT(8), 1);
3001 else
3002 dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULLnullptr);
3003}
3004
3005/* Output an exception table for the current function according to SECTION.
3006
3007 If the function has been partitioned into hot and cold parts, value 0 for
3008 SECTION refers to the table associated with the hot part while value 1
3009 refers to the table associated with the cold part. If the function has
3010 not been partitioned, value 0 refers to the single exception table. */
3011
3012static void
3013output_one_function_exception_table (int section)
3014{
3015 int tt_format, cs_format, lp_format, i;
3016 char ttype_label[32];
3017 char cs_after_size_label[32];
3018 char cs_end_label[32];
3019 int call_site_len;
3020 int have_tt_data;
3021 int tt_format_size = 0;
3022
3023 have_tt_data = (vec_safe_length (cfun(cfun + 0)->eh->ttype_data)
3024 || (targetm.arm_eabi_unwinder
3025 ? vec_safe_length (cfun(cfun + 0)->eh->ehspec_data.arm_eabi)
3026 : vec_safe_length (cfun(cfun + 0)->eh->ehspec_data.other)));
3027
3028 /* Indicate the format of the @TType entries. */
3029 if (! have_tt_data)
3030 tt_format = DW_EH_PE_omit0xff;
3031 else
3032 {
3033 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1)asm_preferred_eh_data_format ((0), (1));
3034 if (HAVE_AS_LEB1281)
3035 ASM_GENERATE_INTERNAL_LABEL (ttype_label,do { char *__p; (ttype_label)[0] = '*'; (ttype_label)[1] = '.'
; __p = stpcpy (&(ttype_label)[2], section ? "LLSDATTC" :
"LLSDATT"); sprint_ul (__p, (unsigned long) (((cfun + 0)->
funcdef_no))); } while (0)
3036 section ? "LLSDATTC" : "LLSDATT",do { char *__p; (ttype_label)[0] = '*'; (ttype_label)[1] = '.'
; __p = stpcpy (&(ttype_label)[2], section ? "LLSDATTC" :
"LLSDATT"); sprint_ul (__p, (unsigned long) (((cfun + 0)->
funcdef_no))); } while (0)
3037 current_function_funcdef_no)do { char *__p; (ttype_label)[0] = '*'; (ttype_label)[1] = '.'
; __p = stpcpy (&(ttype_label)[2], section ? "LLSDATTC" :
"LLSDATT"); sprint_ul (__p, (unsigned long) (((cfun + 0)->
funcdef_no))); } while (0)
;
3038
3039 tt_format_size = size_of_encoded_value (tt_format);
3040
3041 assemble_align (tt_format_size * BITS_PER_UNIT(8));
3042 }
3043
3044 targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
3045 current_function_funcdef_no((cfun + 0)->funcdef_no));
3046
3047 /* The LSDA header. */
3048
3049 /* Indicate the format of the landing pad start pointer. An omitted
3050 field implies @LPStart == @Start. */
3051 /* Currently we always put @LPStart == @Start. This field would
3052 be most useful in moving the landing pads completely out of
3053 line to another section, but it could also be used to minimize
3054 the size of uleb128 landing pad offsets. */
3055 lp_format = DW_EH_PE_omit0xff;
3056 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3057 eh_data_format_name (lp_format));
3058
3059 /* @LPStart pointer would go here. */
3060
3061 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3062 eh_data_format_name (tt_format));
3063
3064 if (!HAVE_AS_LEB1281)
3065 {
3066 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3067 call_site_len = sjlj_size_of_call_site_table ();
3068 else
3069 call_site_len = dw2_size_of_call_site_table (section);
3070 }
3071
3072 /* A pc-relative 4-byte displacement to the @TType data. */
3073 if (have_tt_data)
3074 {
3075 if (HAVE_AS_LEB1281)
3076 {
3077 char ttype_after_disp_label[32];
3078 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,do { char *__p; (ttype_after_disp_label)[0] = '*'; (ttype_after_disp_label
)[1] = '.'; __p = stpcpy (&(ttype_after_disp_label)[2], section
? "LLSDATTDC" : "LLSDATTD"); sprint_ul (__p, (unsigned long)
(((cfun + 0)->funcdef_no))); } while (0)
3079 section ? "LLSDATTDC" : "LLSDATTD",do { char *__p; (ttype_after_disp_label)[0] = '*'; (ttype_after_disp_label
)[1] = '.'; __p = stpcpy (&(ttype_after_disp_label)[2], section
? "LLSDATTDC" : "LLSDATTD"); sprint_ul (__p, (unsigned long)
(((cfun + 0)->funcdef_no))); } while (0)
3080 current_function_funcdef_no)do { char *__p; (ttype_after_disp_label)[0] = '*'; (ttype_after_disp_label
)[1] = '.'; __p = stpcpy (&(ttype_after_disp_label)[2], section
? "LLSDATTDC" : "LLSDATTD"); sprint_ul (__p, (unsigned long)
(((cfun + 0)->funcdef_no))); } while (0)
;
3081 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3082 "@TType base offset");
3083 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label)do { assemble_name ((asm_out_file), (ttype_after_disp_label))
; fputs (":\n", (asm_out_file)); } while (0)
;
3084 }
3085 else
3086 {
3087 /* Ug. Alignment queers things. */
3088 unsigned int before_disp, after_disp, last_disp, disp;
3089
3090 before_disp = 1 + 1;
3091 after_disp = (1 + size_of_uleb128 (call_site_len)
3092 + call_site_len
3093 + vec_safe_length (crtl(&x_rtl)->eh.action_record_data)
3094 + (vec_safe_length (cfun(cfun + 0)->eh->ttype_data)
3095 * tt_format_size));
3096
3097 disp = after_disp;
3098 do
3099 {
3100 unsigned int disp_size, pad;
3101
3102 last_disp = disp;
3103 disp_size = size_of_uleb128 (disp);
3104 pad = before_disp + disp_size + after_disp;
3105 if (pad % tt_format_size)
3106 pad = tt_format_size - (pad % tt_format_size);
3107 else
3108 pad = 0;
3109 disp = after_disp + pad;
3110 }
3111 while (disp != last_disp);
3112
3113 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3114 }
3115 }
3116
3117 /* Indicate the format of the call-site offsets. */
3118 if (HAVE_AS_LEB1281)
3119 cs_format = DW_EH_PE_uleb1280x01;
3120 else
3121 cs_format = DW_EH_PE_udata40x03;
3122
3123 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3124 eh_data_format_name (cs_format));
3125
3126 if (HAVE_AS_LEB1281)
3127 {
3128 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,do { char *__p; (cs_after_size_label)[0] = '*'; (cs_after_size_label
)[1] = '.'; __p = stpcpy (&(cs_after_size_label)[2], section
? "LLSDACSBC" : "LLSDACSB"); sprint_ul (__p, (unsigned long)
(((cfun + 0)->funcdef_no))); } while (0)
3129 section ? "LLSDACSBC" : "LLSDACSB",do { char *__p; (cs_after_size_label)[0] = '*'; (cs_after_size_label
)[1] = '.'; __p = stpcpy (&(cs_after_size_label)[2], section
? "LLSDACSBC" : "LLSDACSB"); sprint_ul (__p, (unsigned long)
(((cfun + 0)->funcdef_no))); } while (0)
3130 current_function_funcdef_no)do { char *__p; (cs_after_size_label)[0] = '*'; (cs_after_size_label
)[1] = '.'; __p = stpcpy (&(cs_after_size_label)[2], section
? "LLSDACSBC" : "LLSDACSB"); sprint_ul (__p, (unsigned long)
(((cfun + 0)->funcdef_no))); } while (0)
;
3131 ASM_GENERATE_INTERNAL_LABEL (cs_end_label,do { char *__p; (cs_end_label)[0] = '*'; (cs_end_label)[1] = '.'
; __p = stpcpy (&(cs_end_label)[2], section ? "LLSDACSEC"
: "LLSDACSE"); sprint_ul (__p, (unsigned long) (((cfun + 0)->
funcdef_no))); } while (0)
3132 section ? "LLSDACSEC" : "LLSDACSE",do { char *__p; (cs_end_label)[0] = '*'; (cs_end_label)[1] = '.'
; __p = stpcpy (&(cs_end_label)[2], section ? "LLSDACSEC"
: "LLSDACSE"); sprint_ul (__p, (unsigned long) (((cfun + 0)->
funcdef_no))); } while (0)
3133 current_function_funcdef_no)do { char *__p; (cs_end_label)[0] = '*'; (cs_end_label)[1] = '.'
; __p = stpcpy (&(cs_end_label)[2], section ? "LLSDACSEC"
: "LLSDACSE"); sprint_ul (__p, (unsigned long) (((cfun + 0)->
funcdef_no))); } while (0)
;
3134 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3135 "Call-site table length");
3136 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label)do { assemble_name ((asm_out_file), (cs_after_size_label)); fputs
(":\n", (asm_out_file)); } while (0)
;
3137 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3138 sjlj_output_call_site_table ();
3139 else
3140 dw2_output_call_site_table (cs_format, section);
3141 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label)do { assemble_name ((asm_out_file), (cs_end_label)); fputs (":\n"
, (asm_out_file)); } while (0)
;
3142 }
3143 else
3144 {
3145 dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3146 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3147 sjlj_output_call_site_table ();
3148 else
3149 dw2_output_call_site_table (cs_format, section);
3150 }
3151
3152 /* ??? Decode and interpret the data for flag_debug_asm. */
3153 {
3154 uchar uc;
3155 FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)for (i = 0; (*(&x_rtl)->eh.action_record_data).iterate
((i), &(uc)); ++(i))
3156 dw2_asm_output_data (1, uc, i ? NULLnullptr : "Action record table");
3157 }
3158
3159 if (have_tt_data)
3160 assemble_align (tt_format_size * BITS_PER_UNIT(8));
3161
3162 i = vec_safe_length (cfun(cfun + 0)->eh->ttype_data);
3163 while (i-- > 0)
3164 {
3165 tree type = (*cfun(cfun + 0)->eh->ttype_data)[i];
3166 output_ttype (type, tt_format, tt_format_size);
3167 }
3168
3169 if (HAVE_AS_LEB1281 && have_tt_data)
3170 ASM_OUTPUT_LABEL (asm_out_file, ttype_label)do { assemble_name ((asm_out_file), (ttype_label)); fputs (":\n"
, (asm_out_file)); } while (0)
;
3171
3172 /* ??? Decode and interpret the data for flag_debug_asm. */
3173 if (targetm.arm_eabi_unwinder)
3174 {
3175 tree type;
3176 for (i = 0;
3177 vec_safe_iterate (cfun(cfun + 0)->eh->ehspec_data.arm_eabi, i, &type); ++i)
3178 output_ttype (type, tt_format, tt_format_size);
3179 }
3180 else
3181 {
3182 uchar uc;
3183 for (i = 0;
3184 vec_safe_iterate (cfun(cfun + 0)->eh->ehspec_data.other, i, &uc); ++i)
3185 dw2_asm_output_data (1, uc,
3186 i ? NULLnullptr : "Exception specification table");
3187 }
3188}
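
/* For orientation, a sketch of the LSDA layout as emitted above (the
   ordering is read directly from this function; field sizes follow the
   DWARF encodings chosen):

     1 byte    @LPStart format (DW_EH_PE_omit here, so @LPStart == @Start)
     1 byte    @TType format (DW_EH_PE_omit when there is no type data)
     uleb128   @TType base offset (only when type data is present)
     1 byte    call-site format (uleb128 with LEB128 support, else udata4)
     uleb128   call-site table length
     ...       call-site table (region start, length, landing pad, action)
     ...       action record table
     ...       @TType table, emitted in reverse index order
     ...       exception specification table  */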
3189
3190/* Output an exception table for the current function according to SECTION,
3191 switching back and forth from the function section appropriately.
3192
3193 If the function has been partitioned into hot and cold parts, value 0 for
3194 SECTION refers to the table associated with the hot part while value 1
3195 refers to the table associated with the cold part. If the function has
3196 not been partitioned, value 0 refers to the single exception table. */
3197
3198void
3199output_function_exception_table (int section)
3200{
3201 const char *fnname = get_fnname_from_decl (current_function_decl);
3202 rtx personality = get_personality_function (current_function_decl);
3203
3204 /* Not all functions need anything. */
3205 if (!crtl(&x_rtl)->uses_eh_lsda
3206 || targetm_common.except_unwind_info (&global_options) == UI_NONE)
3207 return;
3208
3209 /* No need to emit any boilerplate stuff for the cold part. */
3210 if (section == 1 && !crtl(&x_rtl)->eh.call_site_record_v[1])
3211 return;
3212
3213 if (personality)
3214 {
3215 assemble_external_libcall (personality);
3216
3217 if (targetm.asm_out.emit_except_personality)
3218 targetm.asm_out.emit_except_personality (personality);
3219 }
3220
3221 switch_to_exception_section (fnname);
3222
3223 /* If the target wants a label to begin the table, emit it here. */
3224 targetm.asm_out.emit_except_table_label (asm_out_file);
3225
3226 /* Do the real work. */
3227 output_one_function_exception_table (section);
3228
3229 switch_to_section (current_function_section ());
3230}
3231
3232void
3233set_eh_throw_stmt_table (function *fun, hash_map<gimple *, int> *table)
3234{
3235 fun->eh->throw_stmt_table = table;
3236}
3237
3238hash_map<gimple *, int> *
3239get_eh_throw_stmt_table (struct function *fun)
3240{
3241 return fun->eh->throw_stmt_table;
3242}
3243
3244/* Determine if the function needs an EH personality function. */
3245
3246enum eh_personality_kind
3247function_needs_eh_personality (struct function *fn)
3248{
3249 enum eh_personality_kind kind = eh_personality_none;
3250 eh_region i;
3251
3252 FOR_ALL_EH_REGION_FN (i, fn)for ((i) = (fn)->eh->region_tree; (i) != nullptr; (i) =
ehr_next (i, nullptr))
3253 {
3254 switch (i->type)
3255 {
3256 case ERT_CLEANUP:
3257 /* Can do with any personality including the generic C one. */
3258 kind = eh_personality_any;
3259 break;
3260
3261 case ERT_TRY:
3262 case ERT_ALLOWED_EXCEPTIONS:
3263 /* Always needs an EH personality function. The generic C
3264 personality doesn't handle these even for empty type lists. */
3265 return eh_personality_lang;
3266
3267 case ERT_MUST_NOT_THROW:
3268 /* Always needs an EH personality function. The language may specify
3269 which abort routine must be used, e.g. std::terminate. */
3270 return eh_personality_lang;
3271 }
3272 }
3273
3274 return kind;
3275}
3276
3277/* Dump EH information to OUT. */
3278
3279void
3280dump_eh_tree (FILE * out, struct function *fun)
3281{
3282 eh_region i;
3283 int depth = 0;
3284 static const char *const type_name[] = {
3285 "cleanup", "try", "allowed_exceptions", "must_not_throw"
3286 };
3287
3288 i = fun->eh->region_tree;
3289 if (!i)
3290 return;
3291
3292 fprintf (out, "Eh tree:\n");
3293 while (1)
3294 {
3295 fprintf (out, " %*s %i %s", depth * 2, "",
3296 i->index, type_name[(int) i->type]);
3297
3298 if (i->landing_pads)
3299 {
3300 eh_landing_pad lp;
3301
3302 fprintf (out, " land:");
3303 if (current_ir_type () == IR_GIMPLE)
3304 {
3305 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3306 {
3307 fprintf (out, "{%i,", lp->index);
3308 print_generic_expr (out, lp->post_landing_pad);
3309 fputc ('}', out);
3310 if (lp->next_lp)
3311 fputc (',', out);
3312 }
3313 }
3314 else
3315 {
3316 for (lp = i->landing_pads; lp ; lp = lp->next_lp)
3317 {
3318 fprintf (out, "{%i,", lp->index);
3319 if (lp->landing_pad)
3320 fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
3321 NOTE_P (lp->landing_pad)(((enum rtx_code) (lp->landing_pad)->code) == NOTE) ? "(del)" : "");
3322 else
3323 fprintf (out, "(nil),");
3324 if (lp->post_landing_pad)
3325 {
3326 rtx_insn *lab = label_rtx (lp->post_landing_pad);
3327 fprintf (out, "%i%s}", INSN_UID (lab),
3328 NOTE_P (lab)(((enum rtx_code) (lab)->code) == NOTE) ? "(del)" : "");
3329 }
3330 else
3331 fprintf (out, "(nil)}");
3332 if (lp->next_lp)
3333 fputc (',', out);
3334 }
3335 }
3336 }
3337
3338 switch (i->type)
3339 {
3340 case ERT_CLEANUP:
3341 case ERT_MUST_NOT_THROW:
3342 break;
3343
3344 case ERT_TRY:
3345 {
3346 eh_catch c;
3347 fprintf (out, " catch:");
3348 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
3349 {
3350 fputc ('{', out);
3351 if (c->label)
3352 {
3353 fprintf (out, "lab:");
3354 print_generic_expr (out, c->label);
3355 fputc (';', out);
3356 }
3357 print_generic_expr (out, c->type_list);
3358 fputc ('}', out);
3359 if (c->next_catch)
3360 fputc (',', out);
3361 }
3362 }
3363 break;
3364
3365 case ERT_ALLOWED_EXCEPTIONS:
3366 fprintf (out, " filter :%i types:", i->u.allowed.filter);
3367 print_generic_expr (out, i->u.allowed.type_list);
3368 break;
3369 }
3370 fputc ('\n', out);
3371
3372 /* If there are sub-regions, process them. */
3373 if (i->inner)
3374 i = i->inner, depth++;
3375 /* If there are peers, process them. */
3376 else if (i->next_peer)
3377 i = i->next_peer;
3378 /* Otherwise, step back up the tree to the next peer. */
3379 else
3380 {
3381 do
3382 {
3383 i = i->outer;
3384 depth--;
3385 if (i == NULLnullptr)
3386 return;
3387 }
3388 while (i->next_peer == NULLnullptr);
3389 i = i->next_peer;
3390 }
3391 }
3392}
3393
3394/* Dump the EH tree for FN on stderr. */
3395
3396DEBUG_FUNCTION__attribute__ ((__used__)) void
3397debug_eh_tree (struct function *fn)
3398{
3399 dump_eh_tree (stderrstderr, fn);
3400}
3401
3402/* Verify invariants on EH data structures. */
3403
3404DEBUG_FUNCTION__attribute__ ((__used__)) void
3405verify_eh_tree (struct function *fun)
3406{
3407 eh_region r, outer;
3408 int nvisited_lp, nvisited_r;
3409 int count_lp, count_r, depth, i;
3410 eh_landing_pad lp;
3411 bool err = false;
3412
3413 if (!fun->eh->region_tree)
3414 return;
3415
3416 count_r = 0;
3417 for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
3418 if (r)
3419 {
3420 if (r->index == i)
3421 count_r++;
3422 else
3423 {
3424 error ("%<region_array%> is corrupted for region %i", r->index);
3425 err = true;
3426 }
3427 }
3428
3429 count_lp = 0;
3430 for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
3431 if (lp)
3432 {
3433 if (lp->index == i)
3434 count_lp++;
3435 else
3436 {
3437 error ("%<lp_array%> is corrupted for lp %i", lp->index);
3438 err = true;
3439 }
3440 }
3441
3442 depth = nvisited_lp = nvisited_r = 0;
3443 outer = NULLnullptr;
3444 r = fun->eh->region_tree;
3445 while (1)
3446 {
3447 if ((*fun->eh->region_array)[r->index] != r)
3448 {
3449 error ("%<region_array%> is corrupted for region %i", r->index);
3450 err = true;
3451 }
3452 if (r->outer != outer)
3453 {
3454 error ("outer block of region %i is wrong", r->index);
3455 err = true;
3456 }
3457 if (depth < 0)
3458 {
3459 error ("negative nesting depth of region %i", r->index);
3460 err = true;
3461 }
3462 nvisited_r++;
3463
3464 for (lp = r->landing_pads; lp ; lp = lp->next_lp)
3465 {
3466 if ((*fun->eh->lp_array)[lp->index] != lp)
3467 {
3468 error ("%<lp_array%> is corrupted for lp %i", lp->index);
3469 err = true;
3470 }
3471 if (lp->region != r)
3472 {
3473 error ("region of lp %i is wrong", lp->index);
3474 err = true;
3475 }
3476 nvisited_lp++;
3477 }
3478
3479 if (r->inner)
3480 outer = r, r = r->inner, depth++;
3481 else if (r->next_peer)
3482 r = r->next_peer;
3483 else
3484 {
3485 do
3486 {
3487 r = r->outer;
3488 if (r == NULLnullptr)
3489 goto region_done;
3490 depth--;
3491 outer = r->outer;
3492 }
3493 while (r->next_peer == NULLnullptr);
3494 r = r->next_peer;
3495 }
3496 }
3497 region_done:
3498 if (depth != 0)
3499 {
3500 error ("tree list ends on depth %i", depth);
3501 err = true;
3502 }
3503 if (count_r != nvisited_r)
3504 {
3505 error ("%<region_array%> does not match %<region_tree%>");
3506 err = true;
3507 }
3508 if (count_lp != nvisited_lp)
3509 {
3510 error ("%<lp_array%> does not match %<region_tree%>");
3511 err = true;
3512 }
3513
3514 if (err)
3515 {
3516 dump_eh_tree (stderrstderr, fun);
3517 internal_error ("%qs failed", __func__);
3518 }
3519}
3520
3521#include "gt-except.h"

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h

1/* Vector API for GNU compiler.
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 Contributed by Nathan Sidwell <nathan@codesourcery.com>
4 Re-implemented in C++ by Diego Novillo <dnovillo@google.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#ifndef GCC_VEC_H
23#define GCC_VEC_H
24
25/* Some gen* files have no ggc support as the header file gtype-desc.h is
26 missing. Provide these definitions in case ggc.h has not been included.
27 This is not a problem because any code that runs before gengtype is built
28 will never need to use GC vectors. */
29
30extern void ggc_free (void *);
31extern size_t ggc_round_alloc_size (size_t requested_size);
32extern void *ggc_realloc (void *, size_t MEM_STAT_DECL);
33
34/* Templated vector type and associated interfaces.
35
36 The interface functions are typesafe and use inline functions,
37 sometimes backed by out-of-line generic functions. The vectors are
38 designed to interoperate with the GTY machinery.
39
40 There are both 'index' and 'iterate' accessors. The index accessor
41 is implemented by operator[]. The iterator returns a boolean
42 iteration condition and updates the iteration variable passed by
43 reference. Because the iterator will be inlined, the address-of
44 can be optimized away.
45
46 Each operation that increases the number of active elements is
47 available in 'quick' and 'safe' variants. The former presumes that
48 there is sufficient allocated space for the operation to succeed
49 (it dies if there is not). The latter will reallocate the
50 vector, if needed. Reallocation causes an exponential increase in
51 vector size. If you know you will be adding N elements, it would
52 be more efficient to use the reserve operation before adding the
53 elements with the 'quick' operation. This will ensure there are at
54 least as many slots as you ask for; the allocation will grow
55 exponentially if there are too few spare slots. If you want to
56 reserve a specific number of slots, but do not want the exponential increase
57 (for instance, you know this is the last allocation), use the
58 reserve_exact operation. You can also create a vector of a
59 specific size from the get go.
60
61 You should prefer the push and pop operations, as they append and
62 remove from the end of the vector. If you need to remove several
63 items in one go, use the truncate operation. The insert and remove
64 operations allow you to change elements in the middle of the
65 vector. There are two remove operations, one which preserves the
66 element ordering 'ordered_remove', and one which does not
67 'unordered_remove'. The latter function copies the end element
68 into the removed slot, rather than invoke a memmove operation. The
69 'lower_bound' function will determine where to insert an item in
70 the array so that sorted order is maintained.
71
72 Vectors are template types with three arguments: the type of the
73 elements in the vector, the allocation strategy, and the physical
74 layout to use.
75
76 Three allocation strategies are supported:
77
78 - Heap: allocation is done using malloc/free. This is the
79 default allocation strategy.
80
81 - GC: allocation is done using ggc_alloc/ggc_free.
82
83 - GC atomic: same as GC with the exception that the elements
84 themselves are assumed to be of an atomic type that does
85 not need to be garbage collected. This means that marking
86 routines do not need to traverse the array marking the
87 individual elements. This increases the performance of
88 GC activities.
89
90 Two physical layouts are supported:
91
92 - Embedded: The vector is structured using the trailing array
93 idiom. The last member of the structure is an array of size
94 1. When the vector is initially allocated, a single memory
95 block is created to hold the vector's control data and the
96 array of elements. These vectors cannot grow without
97 reallocation (see discussion on embeddable vectors below).
98
99 - Space efficient: The vector is structured as a pointer to an
100 embedded vector. This is the default layout. It means that
101 vectors occupy a single word of storage before initial
102 allocation. Vectors are allowed to grow (the internal
103 pointer is reallocated but the main vector instance does not
104 need to relocate).
105
106 The type, allocation and layout are specified when the vector is
107 declared.
108
109 If you need to directly manipulate a vector, then the 'address'
110 accessor will return the address of the start of the vector. Also
111 the 'space' predicate will tell you whether there is spare capacity
112 in the vector. You will not normally need to use these two functions.
113
114 Notes on the different layout strategies
115
116 * Embeddable vectors (vec<T, A, vl_embed>)
117
118 These vectors are suitable to be embedded in other data
119 structures so that they can be pre-allocated in a contiguous
120 memory block.
121
122 Embeddable vectors are implemented using the trailing array
123 idiom, thus they are not resizeable without changing the address
124 of the vector object itself. This means you cannot have
125 variables or fields of embeddable vector type -- always use a
126 pointer to a vector. The one exception is the final field of a
127 structure, which could be a vector type.
128
129 You will have to use the embedded_size & embedded_init calls to
130 create such objects, and they will not be resizeable (so the
131 'safe' allocation variants are not available).
132
133 Properties of embeddable vectors:
134
135 - The whole vector and control data are allocated in a single
136 contiguous block. It uses the trailing-vector idiom, so
137 allocation must reserve enough space for all the elements
138 in the vector plus its control data.
139 - The vector cannot be re-allocated.
140 - The vector cannot grow or shrink.
141 - No indirections needed for access/manipulation.
142 - It requires 2 words of storage (prior to vector allocation).
143
144
145 * Space efficient vector (vec<T, A, vl_ptr>)
146
147 These vectors can grow dynamically and are allocated together
148 with their control data. They are suited to be included in data
149 structures. Prior to initial allocation, they only take a single
150 word of storage.
151
152 These vectors are implemented as a pointer to embeddable vectors.
153 The semantics allow for this pointer to be NULL to represent
154 empty vectors. This way, empty vectors occupy minimal space in
155 the structure containing them.
156
157 Properties:
158
159 - The whole vector and control data are allocated in a single
160 contiguous block.
161 - The whole vector may be re-allocated.
162 - Vector data may grow and shrink.
163 - Access and manipulation requires a pointer test and
164 indirection.
165 - It requires 1 word of storage (prior to vector allocation).
166
167 An example of their use would be,
168
169 struct my_struct {
170 // A space-efficient vector of tree pointers in GC memory.
171 vec<tree, va_gc, vl_ptr> v;
172 };
173
174 struct my_struct *s;
175
176 if (s->v.length ()) { we have some contents }
177 s->v.safe_push (decl); // append some decl onto the end
178 for (ix = 0; s->v.iterate (ix, &elt); ix++)
179 { do something with elt }
180*/
181
182/* Support function for statistics. */
183extern void dump_vec_loc_statistics (void);
184
185/* Hashtable mapping vec addresses to descriptors. */
186extern htab_t vec_mem_usage_hash;
187
188/* Control data for vectors. This contains the number of allocated
189 and used slots inside a vector. */
190
191struct vec_prefix
192{
193 /* FIXME - These fields should be private, but we need to cater to
194 compilers that have stricter notions of PODness for types. */
195
196 /* Memory allocation support routines in vec.c. */
197 void register_overhead (void *, size_t, size_t CXX_MEM_STAT_INFO);
198 void release_overhead (void *, size_t, size_t, bool CXX_MEM_STAT_INFO);
199 static unsigned calculate_allocation (vec_prefix *, unsigned, bool);
200 static unsigned calculate_allocation_1 (unsigned, unsigned);
201
202 /* Note that vec_prefix should be a base class for vec, but we use
203 offsetof() on vector fields of tree structures (e.g.,
204 tree_binfo::base_binfos), and offsetof only supports base types.
205
206 To compensate, we make vec_prefix a field inside vec and make
207 vec a friend class of vec_prefix so it can access its fields. */
208 template <typename, typename, typename> friend struct vec;
209
210 /* The allocator types also need access to our internals. */
211 friend struct va_gc;
212 friend struct va_gc_atomic;
213 friend struct va_heap;
214
215 unsigned m_alloc : 31;
216 unsigned m_using_auto_storage : 1;
217 unsigned m_num;
218};
219
220/* Calculate the number of slots to reserve for a vector, making sure
221 that RESERVE slots are free. If EXACT, grow exactly; otherwise grow
222 exponentially. PFX is the control data for the vector. */
223
224inline unsigned
225vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve,
226 bool exact)
227{
228 if (exact)
229 return (pfx ? pfx->m_num : 0) + reserve;
230 else if (!pfx)
231 return MAX (4, reserve)((4) > (reserve) ? (4) : (reserve));
232 return calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve);
233}
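
/* To make the policy concrete (the values follow directly from the code
   above): reserving 1 slot in an unallocated vector yields
   MAX (4, 1) == 4; an exact reservation of N slots on a vector holding
   M elements yields exactly M + N; any other case defers to
   calculate_allocation_1, which per the comment above grows the
   allocation exponentially. */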
234
235template<typename, typename, typename> struct vec;
236
237/* Valid vector layouts
238
239 vl_embed - Embeddable vector that uses the trailing array idiom.
240 vl_ptr - Space efficient vector that uses a pointer to an
241 embeddable vector. */
242struct vl_embed { };
243struct vl_ptr { };
244
245
246/* Types of supported allocations
247
248 va_heap - Allocation uses malloc/free.
249 va_gc - Allocation uses ggc_alloc.
250 va_gc_atomic - Same as GC, but individual elements of the array
251 do not need to be marked during collection. */
252
253/* Allocator type for heap vectors. */
254struct va_heap
255{
256 /* Heap vectors are frequently regular instances, so use the vl_ptr
257 layout for them. */
258 typedef vl_ptr default_layout;
259
260 template<typename T>
261 static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool
262 CXX_MEM_STAT_INFO);
263
264 template<typename T>
265 static void release (vec<T, va_heap, vl_embed> *&);
266};
267
268
269/* Allocator for heap memory. Ensure there are at least RESERVE free
270 slots in V. If EXACT is true, grow exactly, else grow
271 exponentially. As a special case, if the vector had not been
272 allocated and RESERVE is 0, no vector will be created. */
273
274template<typename T>
275inline void
276va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact
277 MEM_STAT_DECL)
278{
279 size_t elt_size = sizeof (T);
280 unsigned alloc
281 = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
282 gcc_checking_assert (alloc)((void)(!(alloc) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h"
, 282, __FUNCTION__), 0 : 0))
;
283
284 if (GATHER_STATISTICS0 && v)
285 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
286 v->allocated (), false);
287
288 size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc);
289 unsigned nelem = v ? v->length () : 0;
290 v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size));
291 v->embedded_init (alloc, nelem);
292
293 if (GATHER_STATISTICS0)
294 v->m_vecpfx.register_overhead (v, alloc, elt_size PASS_MEM_STAT);
295}
296
297
298#if GCC_VERSION(4 * 1000 + 2) >= 4007
299#pragma GCC diagnostic push
300#pragma GCC diagnostic ignored "-Wfree-nonheap-object"
301#endif
302
303/* Free the heap space allocated for vector V. */
304
305template<typename T>
306void
307va_heap::release (vec<T, va_heap, vl_embed> *&v)
308{
309 size_t elt_size = sizeof (T);
310 if (v == NULLnullptr)
311 return;
312
313 if (GATHER_STATISTICS0)
314 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
315 v->allocated (), true);
316 ::free (v);
317 v = NULLnullptr;
318}
319
320#if GCC_VERSION(4 * 1000 + 2) >= 4007
321#pragma GCC diagnostic pop
322#endif
323
324/* Allocator type for GC vectors. Notice that we need the structure
325 declaration even if GC is not enabled. */
326
327struct va_gc
328{
329 /* Use vl_embed as the default layout for GC vectors. Due to GTY
330 limitations, GC vectors must always be pointers, so it is more
331 efficient to use a pointer to the vl_embed layout, rather than
332 using a pointer to a pointer as would be the case with vl_ptr. */
333 typedef vl_embed default_layout;
334
335 template<typename T, typename A>
336 static void reserve (vec<T, A, vl_embed> *&, unsigned, bool
337 CXX_MEM_STAT_INFO);
338
339 template<typename T, typename A>
340 static void release (vec<T, A, vl_embed> *&v);
341};
342
343
344/* Free GC memory used by V and reset V to NULL. */
345
346template<typename T, typename A>
347inline void
348va_gc::release (vec<T, A, vl_embed> *&v)
349{
350 if (v)
351 ::ggc_free (v);
352 v = NULLnullptr;
353}
354
355
356/* Allocator for GC memory. Ensure there are at least RESERVE free
357 slots in V. If EXACT is true, grow exactly, else grow
358 exponentially. As a special case, if the vector had not been
359 allocated and RESERVE is 0, no vector will be created. */
360
361template<typename T, typename A>
362void
363va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact
364 MEM_STAT_DECL)
365{
366 unsigned alloc
367 = vec_prefix::calculate_allocation (v
10.1
'v' is non-null
? &v->m_vecpfx : 0, reserve, exact);
11
'?' condition is true
368 if (!alloc)
12
Assuming 'alloc' is 0
13
Taking true branch
369 {
370 ::ggc_free (v);
371 v = NULLnullptr;
14
Null pointer value stored to field 'region_array'
372 return;
373 }
374
375 /* Calculate the amount of space we want. */
376 size_t size = vec<T, A, vl_embed>::embedded_size (alloc);
377
378 /* Ask the allocator how much space it will really give us. */
379 size = ::ggc_round_alloc_size (size);
380
381 /* Adjust the number of slots accordingly. */
382 size_t vec_offset = sizeof (vec_prefix);
383 size_t elt_size = sizeof (T);
384 alloc = (size - vec_offset) / elt_size;
385
386 /* And finally, recalculate the amount of space we ask for. */
387 size = vec_offset + alloc * elt_size;
388
389 unsigned nelem = v ? v->length () : 0;
390 v = static_cast <vec<T, A, vl_embed> *> (::ggc_realloc (v, size
391 PASS_MEM_STAT));
392 v->embedded_init (alloc, nelem);
393}
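
/* A minimal sketch of how the analyzer's flagged path can arise (the
   caller below is hypothetical; the analyzer's own trace runs through
   fun->eh->region_array): entering reserve () with RESERVE == 0 and
   EXACT true on an allocated vector of length 0 makes
   calculate_allocation return 0, so the vector is freed and V is set
   to NULL above. Any later unchecked use of the stored pointer, e.g.

     va_gc::reserve (v, 0, true);  // may free v and null it out
     unsigned n = v->length ();    // null 'this' if it did

   is the "Called C++ object pointer is null" dereference reported at
   vec.h:815. Note the contrast with va_heap::reserve, which instead
   asserts that ALLOC is nonzero. */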
394
395
396/* Allocator type for GC vectors. This is for vectors of types that
397 are atomic w.r.t. collection, so allocation and deallocation are
398 completely inherited from va_gc. */
399struct va_gc_atomic : va_gc
400{
401};
402
403
404/* Generic vector template. Default values for A and L indicate the
405 most commonly used strategies.
406
407 FIXME - Ideally, they would all be vl_ptr to encourage using regular
408 instances for vectors, but the existing GTY machinery is limited
409 in that it can only deal with GC objects that are pointers
410 themselves.
411
412 This means that vector operations that need to deal with
413 potentially NULL pointers, must be provided as free
414 functions (see the vec_safe_* functions above). */
415template<typename T,
416 typename A = va_heap,
417 typename L = typename A::default_layout>
418struct GTY((user)) vec
419{
420};
421
422/* Allow C++11 range-based 'for' to work directly on vec<T>*. */
423template<typename T, typename A, typename L>
424T* begin (vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
425template<typename T, typename A, typename L>
426T* end (vec<T,A,L> *v) { return v ? v->end () : nullptr; }
427template<typename T, typename A, typename L>
428const T* begin (const vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
429template<typename T, typename A, typename L>
430const T* end (const vec<T,A,L> *v) { return v ? v->end () : nullptr; }
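
/* A minimal usage sketch (the vector V and callee are illustrative):
   with the overloads above, a possibly-null vec pointer can be used
   directly as a range, yielding an empty [nullptr, nullptr) range when
   V is null:

     vec<tree, va_gc> *v = ...;
     for (tree t : v)   // no explicit null check needed
       walk (t);        // hypothetical callee
*/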
431
432/* Generic vec<> debug helpers.
433
434 These need to be instantiated for each vec<TYPE> used throughout
435 the compiler like this:
436
437 DEFINE_DEBUG_VEC (TYPE)
438
439 The reason we have a debug_helper() is because GDB can't
440 disambiguate a plain call to debug(some_vec), and it must be called
441 like debug<TYPE>(some_vec). */
442
443template<typename T>
444void
445debug_helper (vec<T> &ref)
446{
447 unsigned i;
448 for (i = 0; i < ref.length (); ++i)
449 {
450 fprintf (stderrstderr, "[%d] = ", i);
451 debug_slim (ref[i]);
452 fputc ('\n', stderrstderr);
453 }
454}
455
456/* We need a separate va_gc variant here because default template
457 arguments for functions cannot be used in C++98. Once this
458 restriction is removed, this variant should be folded into the
459 debug_helper above. */
460
461template<typename T>
462void
463debug_helper (vec<T, va_gc> &ref)
464{
465 unsigned i;
466 for (i = 0; i < ref.length (); ++i)
467 {
468 fprintf (stderrstderr, "[%d] = ", i);
469 debug_slim (ref[i]);
470 fputc ('\n', stderrstderr);
471 }
472}
473
474/* Macro to define debug(vec<T>) and debug(vec<T, va_gc>) helper
475 functions for a type T. */
476
477#define DEFINE_DEBUG_VEC(T)template void debug_helper (vec<T> &); template void
debug_helper (vec<T, va_gc> &); __attribute__ ((__used__
)) void debug (vec<T> &ref) { debug_helper <T>
(ref); } __attribute__ ((__used__)) void debug (vec<T>
*ptr) { if (ptr) debug (*ptr); else fprintf (stderr, "<nil>\n"
); } __attribute__ ((__used__)) void debug (vec<T, va_gc>
&ref) { debug_helper <T> (ref); } __attribute__ ((
__used__)) void debug (vec<T, va_gc> *ptr) { if (ptr) debug
(*ptr); else fprintf (stderr, "<nil>\n"); }
\
478 template void debug_helper (vec<T> &); \
479 template void debug_helper (vec<T, va_gc> &); \
480 /* Define the vec<T> debug functions. */ \
481 DEBUG_FUNCTION__attribute__ ((__used__)) void \
482 debug (vec<T> &ref) \
483 { \
484 debug_helper <T> (ref); \
485 } \
486 DEBUG_FUNCTION__attribute__ ((__used__)) void \
487 debug (vec<T> *ptr) \
488 { \
489 if (ptr) \
490 debug (*ptr); \
491 else \
492 fprintf (stderrstderr, "<nil>\n"); \
493 } \
494 /* Define the vec<T, va_gc> debug functions. */ \
495 DEBUG_FUNCTION__attribute__ ((__used__)) void \
496 debug (vec<T, va_gc> &ref) \
497 { \
498 debug_helper <T> (ref); \
499 } \
500 DEBUG_FUNCTION__attribute__ ((__used__)) void \
501 debug (vec<T, va_gc> *ptr) \
502 { \
503 if (ptr) \
504 debug (*ptr); \
505 else \
506 fprintf (stderrstderr, "<nil>\n"); \
507 }
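
/* A usage sketch (the element type is illustrative): a source file
   instantiates the helpers once, e.g.

     DEFINE_DEBUG_VEC (tree)

   which defines debug (vec<tree> &), debug (vec<tree> *) and their
   va_gc counterparts, so vectors of that type can be printed from the
   debugger as described above. */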
508
509/* Default-construct N elements in DST. */
510
511template <typename T>
512inline void
513vec_default_construct (T *dst, unsigned n)
514{
515#ifdef BROKEN_VALUE_INITIALIZATION
516 /* Versions of GCC before 4.4 sometimes leave certain objects
517 uninitialized when value initialized, though if the type has a
518 user defined default ctor, that ctor is invoked. As a workaround,
519 perform clearing first and then the value initialization, which
520 fixes the case when value initialization doesn't initialize due to
521 the bugs and should initialize to all zeros, but still allows
522 vectors for types with a user defined default ctor that initializes
523 some or all elements to non-zero. If T has no user defined
524 default ctor and some non-static data members have user defined
525 default ctors that initialize to non-zero, the workaround will
526 still not work properly; in that case we just need to provide a
527 user defined default ctor. */
528 memset (dst, '\0', sizeof (T) * n);
529#endif
530 for ( ; n; ++dst, --n)
531 ::new (static_cast<void*>(dst)) T ();
532}
533
534/* Copy-construct N elements in DST from *SRC. */
535
536template <typename T>
537inline void
538vec_copy_construct (T *dst, const T *src, unsigned n)
539{
540 for ( ; n; ++dst, ++src, --n)
541 ::new (static_cast<void*>(dst)) T (*src);
542}
543
544/* Type to provide NULL values for vec<T, A, L>. This is used to
545 provide nil initializers for vec instances. Since vec must be
546 a POD, we cannot have proper ctor/dtor for it. To initialize
547 a vec instance, you can assign it the value vNULL. This isn't
548 needed for file-scope and function-local static vectors, which
549 are zero-initialized by default. */
550struct vnull
551{
552 template <typename T, typename A, typename L>
553 CONSTEXPR operator vec<T, A, L> () const { return vec<T, A, L> (); }
554};
555extern vnull vNULL;
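/* Illustrative sketch, not part of vec.h: using vNULL to nil-initialize a
   local heap vector, which a file-scope or static vector would not need.

     vec<int> v = vNULL;   // empty; no storage allocated yet
     v.safe_push (1);      // first push allocates the embedded vector
     v.release ();         // free the heap storage when done
*/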
556
557
558/* Embeddable vector. These vectors are suitable to be embedded
559 in other data structures so that they can be pre-allocated in a
560 contiguous memory block.
561
562 Embeddable vectors are implemented using the trailing array idiom,
563 thus they are not resizeable without changing the address of the
564 vector object itself. This means you cannot have variables or
565 fields of embeddable vector type -- always use a pointer to a
566 vector. The one exception is the final field of a structure, which
567 could be a vector type.
568
569 You will have to use the embedded_size & embedded_init calls to
570 create such objects, and they will not be resizeable (so the 'safe'
571 allocation variants are not available).
572
573 Properties:
574
575 - The whole vector and control data are allocated in a single
576 contiguous block. It uses the trailing-vector idiom, so
577 allocation must reserve enough space for all the elements
578 in the vector plus its control data.
579 - The vector cannot be re-allocated.
580 - The vector cannot grow nor shrink.
581 - No indirections needed for access/manipulation.
582 - It requires 2 words of storage (prior to vector allocation). */
583
584template<typename T, typename A>
585struct GTY((user)) vec<T, A, vl_embed>
586{
587public:
588 unsigned allocated (void) const { return m_vecpfx.m_alloc; }
589 unsigned length (void) const { return m_vecpfx.m_num; }
590 bool is_empty (void) const { return m_vecpfx.m_num == 0; }
591 T *address (void) { return m_vecdata; }
592 const T *address (void) const { return m_vecdata; }
593 T *begin () { return address (); }
594 const T *begin () const { return address (); }
595 T *end () { return address () + length (); }
596 const T *end () const { return address () + length (); }
597 const T &operator[] (unsigned) const;
598 T &operator[] (unsigned);
599 T &last (void);
600 bool space (unsigned) const;
601 bool iterate (unsigned, T *) const;
602 bool iterate (unsigned, T **) const;
603 vec *copy (ALONE_CXX_MEM_STAT_INFO) const;
604 void splice (const vec &);
605 void splice (const vec *src);
606 T *quick_push (const T &);
607 T &pop (void);
608 void truncate (unsigned);
609 void quick_insert (unsigned, const T &);
610 void ordered_remove (unsigned);
611 void unordered_remove (unsigned);
612 void block_remove (unsigned, unsigned);
613 void qsort (int (*) (const void *, const void *));
614 void sort (int (*) (const void *, const void *, void *), void *);
615 T *bsearch (const void *key, int (*compar)(const void *, const void *));
616 T *bsearch (const void *key,
617 int (*compar)(const void *, const void *, void *), void *);
618 unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
619 bool contains (const T &search) const;
620 static size_t embedded_size (unsigned);
621 void embedded_init (unsigned, unsigned = 0, unsigned = 0);
622 void quick_grow (unsigned len);
623 void quick_grow_cleared (unsigned len);
624
625 /* vec class can access our internal data and functions. */
626 template <typename, typename, typename> friend struct vec;
627
628 /* The allocator types also need access to our internals. */
629 friend struct va_gc;
630 friend struct va_gc_atomic;
631 friend struct va_heap;
632
633 /* FIXME - These fields should be private, but we need to cater to
634 compilers that have stricter notions of PODness for types. */
635 vec_prefix m_vecpfx;
636 T m_vecdata[1];
637};
638
639
640/* Convenience wrapper functions to use when dealing with pointers to
641 embedded vectors. Some functionality for these vectors must be
642 provided via free functions for these reasons:
643
644 1- The pointer may be NULL (e.g., before initial allocation).
645
646 2- When the vector needs to grow, it must be reallocated, so
647 the pointer will change its value.
648
649 Because of limitations with the current GC machinery, all vectors
650 in GC memory *must* be pointers. */
651
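/* Illustrative sketch, not part of vec.h: because the wrappers below accept
   a NULL pointer, a GC vector needs no explicit allocation before first use
   (`t' here is a hypothetical tree node).

     vec<tree, va_gc> *v = NULL;
     vec_safe_push (v, t);                // allocates on first push
     unsigned n = vec_safe_length (v);    // now 1
*/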
652
653/* If V contains no room for NELEMS elements, return false. Otherwise,
654 return true. */
655template<typename T, typename A>
656inline bool
657vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems)
658{
659 return v ? v->space (nelems) : nelems == 0;
660}
661
662
663/* If V is NULL, return 0. Otherwise, return V->length(). */
664template<typename T, typename A>
665inline unsigned
666vec_safe_length (const vec<T, A, vl_embed> *v)
667{
668 return v ? v->length () : 0;
669}
670
671
672/* If V is NULL, return NULL. Otherwise, return V->address(). */
673template<typename T, typename A>
674inline T *
675vec_safe_address (vec<T, A, vl_embed> *v)
676{
677 return v ? v->address () : NULL;
678}
679
680
681/* If V is NULL, return true. Otherwise, return V->is_empty(). */
682template<typename T, typename A>
683inline bool
684vec_safe_is_empty (vec<T, A, vl_embed> *v)
685{
686 return v ? v->is_empty () : true;
687}
688
689/* If V does not have space for NELEMS elements, call
690 V->reserve(NELEMS, EXACT). */
691template<typename T, typename A>
692inline bool
693vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false
694 CXX_MEM_STAT_INFO)
695{
696 bool extend = nelems ? !vec_safe_space (v, nelems) : false;
    [6.1] 'nelems' is 1
    [7] '?' condition is true
    [8] Assuming the condition is true
697 if (extend)
    [8.1] 'extend' is true
    [9] Taking true branch
698 A::reserve (v, nelems, exact PASS_MEM_STAT);
    [10] Calling 'va_gc::reserve'
    [15] Returning from 'va_gc::reserve'
699 return extend;
700}
701
702template<typename T, typename A>
703inline bool
704vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems
705 CXX_MEM_STAT_INFO)
706{
707 return vec_safe_reserve (v, nelems, true PASS_MEM_STAT);
708}
709
710
711/* Allocate GC memory for V with space for NELEMS slots. If NELEMS
712 is 0, V is initialized to NULL. */
713
714template<typename T, typename A>
715inline void
716vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems CXX_MEM_STAT_INFO)
717{
718 v = NULL;
719 vec_safe_reserve (v, nelems, false PASS_MEM_STAT);
720}
721
722
723/* Free the GC memory allocated by vector V and set it to NULL. */
724
725template<typename T, typename A>
726inline void
727vec_free (vec<T, A, vl_embed> *&v)
728{
729 A::release (v);
730}
731
732
733/* Grow V to length LEN. Allocate it, if necessary. */
734template<typename T, typename A>
735inline void
736vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len,
737 bool exact = false CXX_MEM_STAT_INFO)
738{
739 unsigned oldlen = vec_safe_length (v);
740 gcc_checking_assert (len >= oldlen);
741 vec_safe_reserve (v, len - oldlen, exact PASS_MEM_STAT);
742 v->quick_grow (len);
743}
744
745
746/* If V is NULL, allocate it. Call V->safe_grow_cleared(LEN). */
747template<typename T, typename A>
748inline void
749vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len,
750 bool exact = false CXX_MEM_STAT_INFO)
751{
752 unsigned oldlen = vec_safe_length (v);
753 vec_safe_grow (v, len, exact PASS_MEM_STAT);
754 vec_default_construct (v->address () + oldlen, len - oldlen);
755}
756
757
758/* Assume V is not NULL. */
759
760template<typename T>
761inline void
762vec_safe_grow_cleared (vec<T, va_heap, vl_ptr> *&v,
763 unsigned len, bool exact = false CXX_MEM_STAT_INFO)
764{
765 v->safe_grow_cleared (len, exact PASS_MEM_STAT);
766}
767
768/* If V does not have space for NELEMS elements, call
769 V->reserve(NELEMS, EXACT). */
770
771template<typename T>
772inline bool
773vec_safe_reserve (vec<T, va_heap, vl_ptr> *&v, unsigned nelems, bool exact = false
774 CXX_MEM_STAT_INFO)
775{
776 return v->reserve (nelems, exact);
777}
778
779
780/* If V is NULL return false, otherwise return V->iterate(IX, PTR). */
781template<typename T, typename A>
782inline bool
783vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr)
784{
785 if (v)
786 return v->iterate (ix, ptr);
787 else
788 {
789 *ptr = 0;
790 return false;
791 }
792}
793
794template<typename T, typename A>
795inline bool
796vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr)
797{
798 if (v)
799 return v->iterate (ix, ptr);
800 else
801 {
802 *ptr = 0;
803 return false;
804 }
805}
806
807
808/* If V has no room for one more element, reallocate it. Then call
809 V->quick_push(OBJ). */
810template<typename T, typename A>
811inline T *
812vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj CXX_MEM_STAT_INFO)
813{
814 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
    [6] Calling 'vec_safe_reserve<eh_region_d *, va_gc>'
    [16] Returning from 'vec_safe_reserve<eh_region_d *, va_gc>'
815 return v->quick_push (obj);
    [17] Called C++ object pointer is null
816}
817
818
819/* if V has no room for one more element, reallocate it. Then call
820 V->quick_insert(IX, OBJ). */
821template<typename T, typename A>
822inline void
823vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj
824 CXX_MEM_STAT_INFO)
825{
826 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
827 v->quick_insert (ix, obj);
828}
829
830
831/* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */
832template<typename T, typename A>
833inline void
834vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size)
835{
836 if (v)
837 v->truncate (size);
838}
839
840
841/* If SRC is not NULL, return a pointer to a copy of it. */
842template<typename T, typename A>
843inline vec<T, A, vl_embed> *
844vec_safe_copy (vec<T, A, vl_embed> *src CXX_MEM_STAT_INFO)
845{
846 return src ? src->copy (ALONE_PASS_MEM_STAT) : NULL;
847}
848
849/* Copy the elements from SRC to the end of DST as if by memcpy.
850 Reallocate DST, if necessary. */
851template<typename T, typename A>
852inline void
853vec_safe_splice (vec<T, A, vl_embed> *&dst, const vec<T, A, vl_embed> *src
854 CXX_MEM_STAT_INFO)
855{
856 unsigned src_len = vec_safe_length (src);
857 if (src_len)
858 {
859 vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len
860 PASS_MEM_STAT);
861 dst->splice (*src);
862 }
863}
864
865/* Return true if SEARCH is an element of V. Note that this is O(N) in the
866 size of the vector and so should be used with care. */
867
868template<typename T, typename A>
869inline bool
870vec_safe_contains (vec<T, A, vl_embed> *v, const T &search)
871{
872 return v ? v->contains (search) : false;
873}
874
875/* Index into vector. Return the IX'th element. IX must be in the
876 domain of the vector. */
877
878template<typename T, typename A>
879inline const T &
880vec<T, A, vl_embed>::operator[] (unsigned ix) const
881{
882 gcc_checking_assert (ix < m_vecpfx.m_num);
883 return m_vecdata[ix];
884}
885
886template<typename T, typename A>
887inline T &
888vec<T, A, vl_embed>::operator[] (unsigned ix)
889{
890 gcc_checking_assert (ix < m_vecpfx.m_num);
891 return m_vecdata[ix];
892}
893
894
895/* Get the final element of the vector, which must not be empty. */
896
897template<typename T, typename A>
898inline T &
899vec<T, A, vl_embed>::last (void)
900{
901 gcc_checking_assert (m_vecpfx.m_num > 0);
902 return (*this)[m_vecpfx.m_num - 1];
903}
904
905
906/* If this vector has space for NELEMS additional entries, return
907 true. You usually only need to use this if you are doing your
908 own vector reallocation, for instance on an embedded vector. This
909 returns true in exactly the same circumstances that vec::reserve
910 will. */
911
912template<typename T, typename A>
913inline bool
914vec<T, A, vl_embed>::space (unsigned nelems) const
915{
916 return m_vecpfx.m_alloc - m_vecpfx.m_num >= nelems;
917}
918
919
920/* Return iteration condition and update PTR to point to the IX'th
921 element of this vector. Use this to iterate over the elements of a
922 vector as follows,
923
924 for (ix = 0; vec<T, A>::iterate (v, ix, &ptr); ix++)
925 continue; */
926
927template<typename T, typename A>
928inline bool
929vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const
930{
931 if (ix < m_vecpfx.m_num)
932 {
933 *ptr = m_vecdata[ix];
934 return true;
935 }
936 else
937 {
938 *ptr = 0;
939 return false;
940 }
941}
942
943
944/* Return iteration condition and update *PTR to point to the
945 IX'th element of this vector. Use this to iterate over the
946 elements of a vector as follows,
947
948 for (ix = 0; v->iterate (ix, &ptr); ix++)
949 continue;
950
951 This variant is for vectors of objects. */
952
953template<typename T, typename A>
954inline bool
955vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const
956{
957 if (ix < m_vecpfx.m_num)
958 {
959 *ptr = CONST_CAST (T *, &m_vecdata[ix]);
960 return true;
961 }
962 else
963 {
964 *ptr = 0;
965 return false;
966 }
967}
968
969
970/* Return a pointer to a copy of this vector. */
971
972template<typename T, typename A>
973inline vec<T, A, vl_embed> *
974vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECL) const
975{
976 vec<T, A, vl_embed> *new_vec = NULL;
977 unsigned len = length ();
978 if (len)
979 {
980 vec_alloc (new_vec, len PASS_MEM_STAT);
981 new_vec->embedded_init (len, len);
982 vec_copy_construct (new_vec->address (), m_vecdata, len);
983 }
984 return new_vec;
985}
986
987
988/* Copy the elements from SRC to the end of this vector as if by memcpy.
989 The vector must have sufficient headroom available. */
990
991template<typename T, typename A>
992inline void
993vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> &src)
994{
995 unsigned len = src.length ();
996 if (len)
997 {
998 gcc_checking_assert (space (len));
999 vec_copy_construct (end (), src.address (), len);
1000 m_vecpfx.m_num += len;
1001 }
1002}
1003
1004template<typename T, typename A>
1005inline void
1006vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> *src)
1007{
1008 if (src)
1009 splice (*src);
1010}
1011
1012
1013/* Push OBJ (a new element) onto the end of the vector. There must be
1014 sufficient space in the vector. Return a pointer to the slot
1015 where OBJ was inserted. */
1016
1017template<typename T, typename A>
1018inline T *
1019vec<T, A, vl_embed>::quick_push (const T &obj)
1020{
1021 gcc_checking_assert (space (1));
1022 T *slot = &m_vecdata[m_vecpfx.m_num++];
1023 *slot = obj;
1024 return slot;
1025}
1026
1027
1028/* Pop and return the last element off the end of the vector. */
1029
1030template<typename T, typename A>
1031inline T &
1032vec<T, A, vl_embed>::pop (void)
1033{
1034 gcc_checking_assert (length () > 0);
1035 return m_vecdata[--m_vecpfx.m_num];
1036}
1037
1038
1039/* Set the length of the vector to SIZE. The new length must be less
1040 than or equal to the current length. This is an O(1) operation. */
1041
1042template<typename T, typename A>
1043inline void
1044vec<T, A, vl_embed>::truncate (unsigned size)
1045{
1046 gcc_checking_assert (length () >= size);
1047 m_vecpfx.m_num = size;
1048}
1049
1050
1051/* Insert an element, OBJ, at the IXth position of this vector. There
1052 must be sufficient space. */
1053
1054template<typename T, typename A>
1055inline void
1056vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj)
1057{
1058 gcc_checking_assert (length () < allocated ());
1059 gcc_checking_assert (ix <= length ());
1060 T *slot = &m_vecdata[ix];
1061 memmove (slot + 1, slot, (m_vecpfx.m_num++ - ix) * sizeof (T));
1062 *slot = obj;
1063}
1064
1065
1066/* Remove an element from the IXth position of this vector. Ordering of
1067 remaining elements is preserved. This is an O(N) operation due to
1068 memmove. */
1069
1070template<typename T, typename A>
1071inline void
1072vec<T, A, vl_embed>::ordered_remove (unsigned ix)
1073{
1074 gcc_checking_assert (ix < length ());
1075 T *slot = &m_vecdata[ix];
1076 memmove (slot, slot + 1, (--m_vecpfx.m_num - ix) * sizeof (T));
1077}
1078
1079
1080/* Remove elements in [START, END) from VEC for which COND holds. Ordering of
1081 remaining elements is preserved. This is an O(N) operation. */
1082
1083#define VEC_ORDERED_REMOVE_IF_FROM_TO(vec, read_index, write_index, \
1084 elem_ptr, start, end, cond) \
1085 { \
1086 gcc_assert ((end) <= (vec).length ()); \
1087 for (read_index = write_index = (start); read_index < (end); \
1088 ++read_index) \
1089 { \
1090 elem_ptr = &(vec)[read_index]; \
1091 bool remove_p = (cond); \
1092 if (remove_p) \
1093 continue; \
1094 \
1095 if (read_index != write_index) \
1096 (vec)[write_index] = (vec)[read_index]; \
1097 \
1098 write_index++; \
1099 } \
1100 \
1101 if (read_index - write_index > 0) \
1102 (vec).block_remove (write_index, read_index - write_index); \
1103 }
1104
1105
1106/* Remove elements from VEC for which COND holds. Ordering of remaining
1107 elements is preserved. This is an O(N) operation. */
1108
1109#define VEC_ORDERED_REMOVE_IF(vec, read_index, write_index, elem_ptr, \
1110 cond) \
1111 VEC_ORDERED_REMOVE_IF_FROM_TO ((vec), read_index, write_index, \
1112 elem_ptr, 0, (vec).length (), (cond))
1113
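/* Illustrative sketch, not part of vec.h: removing every zero element from
   a vec<int> in order; the caller supplies the scratch index and pointer
   variables the macro expects.

     unsigned rd, wr;
     int *slot;
     VEC_ORDERED_REMOVE_IF (v, rd, wr, slot, *slot == 0);
*/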
1114/* Remove an element from the IXth position of this vector. Ordering of
1115 remaining elements is destroyed. This is an O(1) operation. */
1116
1117template<typename T, typename A>
1118inline void
1119vec<T, A, vl_embed>::unordered_remove (unsigned ix)
1120{
1121 gcc_checking_assert (ix < length ());
1122 m_vecdata[ix] = m_vecdata[--m_vecpfx.m_num];
1123}
1124
1125
1126/* Remove LEN elements starting at the IXth. Ordering is retained.
1127 This is an O(N) operation due to memmove. */
1128
1129template<typename T, typename A>
1130inline void
1131vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len)
1132{
1133 gcc_checking_assert (ix + len <= length ());
1134 T *slot = &m_vecdata[ix];
1135 m_vecpfx.m_num -= len;
1136 memmove (slot, slot + len, (m_vecpfx.m_num - ix) * sizeof (T));
1137}
1138
1139
1140/* Sort the contents of this vector with qsort. CMP is the comparison
1141 function to pass to qsort. */
1142
1143template<typename T, typename A>
1144inline void
1145vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *))
1146{
1147 if (length () > 1)
1148 gcc_qsort (address (), length (), sizeof (T), cmp);
1149}
1150
1151/* Sort the contents of this vector with qsort. CMP is the comparison
1152 function to pass to qsort. */
1153
1154template<typename T, typename A>
1155inline void
1156vec<T, A, vl_embed>::sort (int (*cmp) (const void *, const void *, void *),
1157 void *data)
1158{
1159 if (length () > 1)
1160 gcc_sort_r (address (), length (), sizeof (T), cmp, data);
1161}
1162
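/* Illustrative sketch, not part of vec.h: a qsort-style comparator for a
   vec<int>; as with C qsort, elements are passed by address.

     static int
     cmp_int (const void *pa, const void *pb)
     {
       int a = *(const int *) pa, b = *(const int *) pb;
       return (a > b) - (a < b);
     }

     v.qsort (cmp_int);
*/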
1163
1164/* Search the contents of the sorted vector with a binary search.
1165 CMP is the comparison function to pass to bsearch. */
1166
1167template<typename T, typename A>
1168inline T *
1169vec<T, A, vl_embed>::bsearch (const void *key,
1170 int (*compar) (const void *, const void *))
1171{
1172 const void *base = this->address ();
1173 size_t nmemb = this->length ();
1174 size_t size = sizeof (T);
1175 /* The following is a copy of glibc stdlib-bsearch.h. */
1176 size_t l, u, idx;
1177 const void *p;
1178 int comparison;
1179
1180 l = 0;
1181 u = nmemb;
1182 while (l < u)
1183 {
1184 idx = (l + u) / 2;
1185 p = (const void *) (((const char *) base) + (idx * size));
1186 comparison = (*compar) (key, p);
1187 if (comparison < 0)
1188 u = idx;
1189 else if (comparison > 0)
1190 l = idx + 1;
1191 else
1192 return (T *)const_cast<void *>(p);
1193 }
1194
1195 return NULL;
1196}
1197
1198/* Search the contents of the sorted vector with a binary search.
1199 CMP is the comparison function to pass to bsearch. */
1200
1201template<typename T, typename A>
1202inline T *
1203vec<T, A, vl_embed>::bsearch (const void *key,
1204 int (*compar) (const void *, const void *,
1205 void *), void *data)
1206{
1207 const void *base = this->address ();
1208 size_t nmemb = this->length ();
1209 size_t size = sizeof (T);
1210 /* The following is a copy of glibc stdlib-bsearch.h. */
1211 size_t l, u, idx;
1212 const void *p;
1213 int comparison;
1214
1215 l = 0;
1216 u = nmemb;
1217 while (l < u)
1218 {
1219 idx = (l + u) / 2;
1220 p = (const void *) (((const char *) base) + (idx * size));
1221 comparison = (*compar) (key, p, data);
1222 if (comparison < 0)
1223 u = idx;
1224 else if (comparison > 0)
1225 l = idx + 1;
1226 else
1227 return (T *)const_cast<void *>(p);
1228 }
1229
1230 return NULL;
1231}
1232
1233/* Return true if SEARCH is an element of V. Note that this is O(N) in the
1234 size of the vector and so should be used with care. */
1235
1236template<typename T, typename A>
1237inline bool
1238vec<T, A, vl_embed>::contains (const T &search) const
1239{
1240 unsigned int len = length ();
1241 for (unsigned int i = 0; i < len; i++)
1242 if ((*this)[i] == search)
1243 return true;
1244
1245 return false;
1246}
1247
1248/* Find and return the first position in which OBJ could be inserted
1249 without changing the ordering of this vector. LESSTHAN is a
1250 function that returns true if the first argument is strictly less
1251 than the second. */
1252
1253template<typename T, typename A>
1254unsigned
1255vec<T, A, vl_embed>::lower_bound (T obj, bool (*lessthan)(const T &, const T &))
1256 const
1257{
1258 unsigned int len = length ();
1259 unsigned int half, middle;
1260 unsigned int first = 0;
1261 while (len > 0)
1262 {
1263 half = len / 2;
1264 middle = first;
1265 middle += half;
1266 T middle_elem = (*this)[middle];
1267 if (lessthan (middle_elem, obj))
1268 {
1269 first = middle;
1270 ++first;
1271 len = len - half - 1;
1272 }
1273 else
1274 len = half;
1275 }
1276 return first;
1277}
1278
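/* Illustrative sketch, not part of vec.h: lower_bound on a sorted vec<int>
   with a strictly-less-than predicate; the result is the first index at
   which 42 could be inserted without breaking the order.

     static bool
     int_lt (const int &a, const int &b) { return a < b; }

     unsigned pos = v.lower_bound (42, int_lt);
*/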
1279
1280/* Return the number of bytes needed to embed an instance of an
1281 embeddable vec inside another data structure.
1282
1283 Use these methods to determine the required size and initialization
1284 of a vector V of type T embedded within another structure (as the
1285 final member):
1286
1287 size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc);
1288 void v->embedded_init (unsigned alloc, unsigned num);
1289
1290 These allow the caller to perform the memory allocation. */
1291
1292template<typename T, typename A>
1293inline size_t
1294vec<T, A, vl_embed>::embedded_size (unsigned alloc)
1295{
1296 struct alignas (T) U { char data[sizeof (T)]; };
1297 typedef vec<U, A, vl_embed> vec_embedded;
1298 typedef typename std::conditional<std::is_standard_layout<T>::value,
1299 vec, vec_embedded>::type vec_stdlayout;
1300 static_assert (sizeof (vec_stdlayout) == sizeof (vec), "");
1301 static_assert (alignof (vec_stdlayout) == alignof (vec), "");
1302 return offsetof (vec_stdlayout, m_vecdata) + alloc * sizeof (T);
1303}
1304
1305
1306/* Initialize the vector to contain room for ALLOC elements and
1307 NUM active elements. */
1308
1309template<typename T, typename A>
1310inline void
1311vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num, unsigned aut)
1312{
1313 m_vecpfx.m_alloc = alloc;
1314 m_vecpfx.m_using_auto_storage = aut;
1315 m_vecpfx.m_num = num;
1316}
1317
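/* Illustrative sketch, not part of vec.h: carving an embedded vector out of
   caller-provided memory, as the embedded_size comment above describes
   (my_alloc is a hypothetical allocator returning suitably aligned storage).

     size_t sz = vec<int, va_heap, vl_embed>::embedded_size (16);
     vec<int, va_heap, vl_embed> *v
       = static_cast<vec<int, va_heap, vl_embed> *> (my_alloc (sz));
     v->embedded_init (16);   // room for 16 elements, 0 active
     v->quick_push (1);
*/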
1318
1319/* Grow the vector to a specific length. LEN must be as long or longer than
1320 the current length. The new elements are uninitialized. */
1321
1322template<typename T, typename A>
1323inline void
1324vec<T, A, vl_embed>::quick_grow (unsigned len)
1325{
1326 gcc_checking_assert (length () <= len && len <= m_vecpfx.m_alloc);
1327 m_vecpfx.m_num = len;
1328}
1329
1330
1331/* Grow the vector to a specific length. LEN must be as long or longer than
1332 the current length. The new elements are initialized to zero. */
1333
1334template<typename T, typename A>
1335inline void
1336vec<T, A, vl_embed>::quick_grow_cleared (unsigned len)
1337{
1338 unsigned oldlen = length ();
1339 size_t growby = len - oldlen;
1340 quick_grow (len);
1341 if (growby != 0)
1342 vec_default_construct (address () + oldlen, growby);
1343}
1344
1345/* Garbage collection support for vec<T, A, vl_embed>. */
1346
1347template<typename T>
1348void
1349gt_ggc_mx (vec<T, va_gc> *v)
1350{
1351 extern void gt_ggc_mx (T &);
1352 for (unsigned i = 0; i < v->length (); i++)
1353 gt_ggc_mx ((*v)[i]);
1354}
1355
1356template<typename T>
1357void
1358gt_ggc_mx (vec<T, va_gc_atomic, vl_embed> *v ATTRIBUTE_UNUSED)
1359{
1360 /* Nothing to do. Vectors of atomic types wrt GC do not need to
1361 be traversed. */
1362}
1363
1364
1365/* PCH support for vec<T, A, vl_embed>. */
1366
1367template<typename T, typename A>
1368void
1369gt_pch_nx (vec<T, A, vl_embed> *v)
1370{
1371 extern void gt_pch_nx (T &);
1372 for (unsigned i = 0; i < v->length (); i++)
1373 gt_pch_nx ((*v)[i]);
1374}
1375
1376template<typename T, typename A>
1377void
1378gt_pch_nx (vec<T *, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1379{
1380 for (unsigned i = 0; i < v->length (); i++)
1381 op (&((*v)[i]), cookie);
1382}
1383
1384template<typename T, typename A>
1385void
1386gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1387{
1388 extern void gt_pch_nx (T *, gt_pointer_operator, void *);
1389 for (unsigned i = 0; i < v->length (); i++)
1390 gt_pch_nx (&((*v)[i]), op, cookie);
1391}
1392
1393
1394/* Space efficient vector. These vectors can grow dynamically and are
1395 allocated together with their control data. They are suited to be
1396 included in data structures. Prior to initial allocation, they
1397 only take a single word of storage.
1398
1399 These vectors are implemented as a pointer to an embeddable vector.
1400 The semantics allow for this pointer to be NULL to represent empty
1401 vectors. This way, empty vectors occupy minimal space in the
1402 structure containing them.
1403
1404 Properties:
1405
1406 - The whole vector and control data are allocated in a single
1407 contiguous block.
1408 - The whole vector may be re-allocated.
1409 - Vector data may grow and shrink.
1410 - Access and manipulation requires a pointer test and
1411 indirection.
1412 - It requires 1 word of storage (prior to vector allocation).
1413
1414
1415 Limitations:
1416
1417 These vectors must be PODs because they are stored in unions.
1418 (http://en.wikipedia.org/wiki/Plain_old_data_structures).
1419 As long as we use C++03, we cannot have constructors nor
1420 destructors in classes that are stored in unions. */
1421
1422template<typename T>
1423struct vec<T, va_heap, vl_ptr>
1424{
1425public:
1426 /* Memory allocation and deallocation for the embedded vector.
1427 Needed because we cannot have proper ctors/dtors defined. */
1428 void create (unsigned nelems CXX_MEM_STAT_INFO);
1429 void release (void);
1430
1431 /* Vector operations. */
1432 bool exists (void) const
1433 { return m_vec != NULL; }
1434
1435 bool is_empty (void) const
1436 { return m_vec ? m_vec->is_empty () : true; }
1437
1438 unsigned length (void) const
1439 { return m_vec ? m_vec->length () : 0; }
1440
1441 T *address (void)
1442 { return m_vec ? m_vec->m_vecdata : NULL; }
1443
1444 const T *address (void) const
1445 { return m_vec ? m_vec->m_vecdata : NULL; }
1446
1447 T *begin () { return address (); }
1448 const T *begin () const { return address (); }
1449 T *end () { return begin () + length (); }
1450 const T *end () const { return begin () + length (); }
1451 const T &operator[] (unsigned ix) const
1452 { return (*m_vec)[ix]; }
1453
1454 bool operator!=(const vec &other) const
1455 { return !(*this == other); }
1456
1457 bool operator==(const vec &other) const
1458 { return address () == other.address (); }
1459
1460 T &operator[] (unsigned ix)
1461 { return (*m_vec)[ix]; }
1462
1463 T &last (void)
1464 { return m_vec->last (); }
1465
1466 bool space (int nelems) const
1467 { return m_vec ? m_vec->space (nelems) : nelems == 0; }
1468
1469 bool iterate (unsigned ix, T *p) const;
1470 bool iterate (unsigned ix, T **p) const;
1471 vec copy (ALONE_CXX_MEM_STAT_INFO) const;
1472 bool reserve (unsigned, bool = false CXX_MEM_STAT_INFO);
1473 bool reserve_exact (unsigned CXX_MEM_STAT_INFO);
1474 void splice (const vec &);
1475 void safe_splice (const vec & CXX_MEM_STAT_INFO);
1476 T *quick_push (const T &);
1477 T *safe_push (const T &CXX_MEM_STAT_INFO);
1478 T &pop (void);
1479 void truncate (unsigned);
1480 void safe_grow (unsigned, bool = false CXX_MEM_STAT_INFO);
1481 void safe_grow_cleared (unsigned, bool = false CXX_MEM_STAT_INFO);
1482 void quick_grow (unsigned);
1483 void quick_grow_cleared (unsigned);
1484 void quick_insert (unsigned, const T &);
1485 void safe_insert (unsigned, const T & CXX_MEM_STAT_INFO);
1486 void ordered_remove (unsigned);
1487 void unordered_remove (unsigned);
1488 void block_remove (unsigned, unsigned);
1489 void qsort (int (*) (const void *, const void *));
1490 void sort (int (*) (const void *, const void *, void *), void *);
1491 T *bsearch (const void *key, int (*compar)(const void *, const void *));
1492 T *bsearch (const void *key,
1493 int (*compar)(const void *, const void *, void *), void *);
1494 unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
1495 bool contains (const T &search) const;
1496 void reverse (void);
1497
1498 bool using_auto_storage () const;
1499
1500 /* FIXME - This field should be private, but we need to cater to
1501 compilers that have stricter notions of PODness for types. */
1502 vec<T, va_heap, vl_embed> *m_vec;
1503};
1504
1505
1506/* auto_vec is a subclass of vec that automatically manages creating and
1507 releasing the internal vector. If N is non-zero then it has N elements of
1508 internal storage. The default is no internal storage; you probably only
1509 want to ask for internal storage for vectors on the stack, because if the
1510 size of the vector exceeds the internal storage, that space is wasted.
1511 */
1512template<typename T, size_t N = 0>
1513class auto_vec : public vec<T, va_heap>
1514{
1515public:
1516 auto_vec ()
1517 {
1518 m_auto.embedded_init (MAX (N, 2), 0, 1);
1519 this->m_vec = &m_auto;
1520 }
1521
1522 auto_vec (size_t s)
1523 {
1524 if (s > N)
1525 {
1526 this->create (s);
1527 return;
1528 }
1529
1530 m_auto.embedded_init (MAX (N, 2), 0, 1);
1531 this->m_vec = &m_auto;
1532 }
1533
1534 ~auto_vec ()
1535 {
1536 this->release ();
1537 }
1538
1539private:
1540 vec<T, va_heap, vl_embed> m_auto;
1541 T m_data[MAX (N - 1, 1)];
1542};
1543
1544/* auto_vec is a subclass of vec whose storage is released when it is
1545 destroyed. */
1546template<typename T>
1547class auto_vec<T, 0> : public vec<T, va_heap>
1548{
1549public:
1550 auto_vec () { this->m_vec = NULL; }
1551 auto_vec (size_t n) { this->create (n); }
1552 ~auto_vec () { this->release (); }
1553
1554 auto_vec (vec<T, va_heap>&& r)
1555 {
1556 gcc_assert (!r.using_auto_storage ());
1557 this->m_vec = r.m_vec;
1558 r.m_vec = NULL;
1559 }
1560 auto_vec& operator= (vec<T, va_heap>&& r)
1561 {
1562 gcc_assert (!r.using_auto_storage ());
1563 this->release ();
1564 this->m_vec = r.m_vec;
1565 r.m_vec = NULL;
1566 return *this;
1567 }
1568};
1569
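/* Illustrative sketch, not part of vec.h: an auto_vec with 8 elements of
   internal (stack) storage; the heap is only touched if the vector outgrows
   that, and the destructor releases everything.

     auto_vec<int, 8> v;
     for (int i = 0; i < 100; i++)
       v.safe_push (i);   // spills to heap storage past 8 elements
*/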
1570
1571/* Allocate heap memory for pointer V and create the internal vector
1572 with space for NELEMS elements. If NELEMS is 0, the internal
1573 vector is initialized to empty. */
1574
1575template<typename T>
1576inline void
1577vec_alloc (vec<T> *&v, unsigned nelems CXX_MEM_STAT_INFO)
1578{
1579 v = new vec<T>;
1580 v->create (nelems PASS_MEM_STAT);
1581}
1582
1583
1584/* A subclass of auto_vec <char *> that frees all of its elements on
1585 deletion. */
1586
1587class auto_string_vec : public auto_vec <char *>
1588{
1589 public:
1590 ~auto_string_vec ();
1591};
1592
1593/* A subclass of auto_vec <T *> that deletes all of its elements on
1594 destruction.
1595
1596 This is a crude way for a vec to "own" the objects it points to
1597 and clean up automatically.
1598
1599 For example, no attempt is made to delete elements when an item
1600 within the vec is overwritten.
1601
1602 We can't rely on gnu::unique_ptr within a container,
1603 since we can't rely on move semantics in C++98. */
1604
1605template <typename T>
1606class auto_delete_vec : public auto_vec <T *>
1607{
1608 public:
1609 auto_delete_vec () {}
1610 auto_delete_vec (size_t s) : auto_vec <T *> (s) {}
1611
1612 ~auto_delete_vec ();
1613
1614private:
1615 DISABLE_COPY_AND_ASSIGN(auto_delete_vec);
1616};
1617
1618/* Conditionally allocate heap memory for VEC and its internal vector. */
1619
1620template<typename T>
1621inline void
1622vec_check_alloc (vec<T, va_heap> *&vec, unsigned nelems CXX_MEM_STAT_INFO)
1623{
1624 if (!vec)
1625 vec_alloc (vec, nelems PASS_MEM_STAT);
1626}
1627
1628
1629/* Free the heap memory allocated by vector V and set it to NULL. */
1630
1631template<typename T>
1632inline void
1633vec_free (vec<T> *&v)
1634{
1635 if (v == NULL)
1636 return;
1637
1638 v->release ();
1639 delete v;
1640 v = NULL;
1641}
1642
1643
1644/* Return iteration condition and update PTR to point to the IX'th
1645 element of this vector. Use this to iterate over the elements of a
1646 vector as follows,
1647
1648 for (ix = 0; v.iterate (ix, &ptr); ix++)
1649 continue; */
1650
1651template<typename T>
1652inline bool
1653vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T *ptr) const
1654{
1655 if (m_vec)
1656 return m_vec->iterate (ix, ptr);
1657 else
1658 {
1659 *ptr = 0;
1660 return false;
1661 }
1662}
1663
1664
1665/* Return iteration condition and update *PTR to point to the
1666 IX'th element of this vector. Use this to iterate over the
1667 elements of a vector as follows,
1668
1669 for (ix = 0; v->iterate (ix, &ptr); ix++)
1670 continue;
1671
1672 This variant is for vectors of objects. */
1673
1674template<typename T>
1675inline bool
1676vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T **ptr) const
1677{
1678 if (m_vec)
1679 return m_vec->iterate (ix, ptr);
1680 else
1681 {
1682 *ptr = 0;
1683 return false;
1684 }
1685}
1686
1687
1688/* Convenience macro for forward iteration. */
1689#define FOR_EACH_VEC_ELT(V, I, P) \
1690 for (I = 0; (V).iterate ((I), &(P)); ++(I))
1691
1692#define FOR_EACH_VEC_SAFE_ELT(V, I, P) \
1693 for (I = 0; vec_safe_iterate ((V), (I), &(P)); ++(I))
1694
1695/* Likewise, but start from FROM rather than 0. */
1696#define FOR_EACH_VEC_ELT_FROM(V, I, P, FROM) \
1697 for (I = (FROM); (V).iterate ((I), &(P)); ++(I))
1698
1699/* Convenience macro for reverse iteration. */
1700#define FOR_EACH_VEC_ELT_REVERSE(V, I, P) \
1701 for (I = (V).length () - 1; \
1702 (V).iterate ((I), &(P)); \
1703 (I)--)
1704
1705#define FOR_EACH_VEC_SAFE_ELT_REVERSE(V, I, P) \
1706 for (I = vec_safe_length (V) - 1; \
1707 vec_safe_iterate ((V), (I), &(P)); \
1708 (I)--)
1709
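/* Illustrative sketch, not part of vec.h: forward iteration with the macro
   above; I receives each index and P a copy of each element.

     unsigned ix;
     int x;
     FOR_EACH_VEC_ELT (v, ix, x)
       fprintf (stderr, "[%u] = %d\n", ix, x);
*/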
1710/* auto_string_vec's dtor, freeing all contained strings, automatically
1711 chaining up to ~auto_vec <char *>, which frees the internal buffer. */
1712
1713inline
1714auto_string_vec::~auto_string_vec ()
1715{
1716 int i;
1717 char *str;
1718 FOR_EACH_VEC_ELT (*this, i, str)
1719 free (str);
1720}
1721
1722/* auto_delete_vec's dtor, deleting all contained items, automatically
1723 chaining up to ~auto_vec <T*>, which frees the internal buffer. */
1724
1725template <typename T>
1726inline
1727auto_delete_vec<T>::~auto_delete_vec ()
1728{
1729 int i;
1730 T *item;
1731 FOR_EACH_VEC_ELT (*this, i, item)
1732 delete item;
1733}
1734
1735
1736/* Return a copy of this vector. */
1737
1738template<typename T>
1739inline vec<T, va_heap, vl_ptr>
1740vec<T, va_heap, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const
1741{
1742 vec<T, va_heap, vl_ptr> new_vec = vNULL;
1743 if (length ())
1744 new_vec.m_vec = m_vec->copy (ALONE_PASS_MEM_STAT);
1745 return new_vec;
1746}
1747
1748
1749/* Ensure that the vector has at least RESERVE slots available (if
1750 EXACT is false), or exactly RESERVE slots available (if EXACT is
1751 true).
1752
1753 This may create additional headroom if EXACT is false.
1754
1755 Note that this can cause the embedded vector to be reallocated.
1756 Returns true iff reallocation actually occurred. */
1757
1758template<typename T>
1759inline bool
1760vec<T, va_heap, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL)
1761{
1762 if (space (nelems))
1763 return false;
1764
1765 /* For now play a game with va_heap::reserve to hide our auto storage if any,
1766 this is necessary because it doesn't have enough information to know the
1767 embedded vector is in auto storage, and so should not be freed. */
1768 vec<T, va_heap, vl_embed> *oldvec = m_vec;
1769 unsigned int oldsize = 0;
1770 bool handle_auto_vec = m_vec && using_auto_storage ();
1771 if (handle_auto_vec)
1772 {
1773 m_vec = NULL;
1774 oldsize = oldvec->length ();
1775 nelems += oldsize;
1776 }
1777
1778 va_heap::reserve (m_vec, nelems, exact PASS_MEM_STAT);
1779 if (handle_auto_vec)
1780 {
1781 vec_copy_construct (m_vec->address (), oldvec->address (), oldsize);
1782 m_vec->m_vecpfx.m_num = oldsize;
1783 }
1784
1785 return true;
1786}
1787
1788
1789/* Ensure that this vector has exactly NELEMS slots available. This
1790 will not create additional headroom. Note this can cause the
1791 embedded vector to be reallocated. Returns true iff reallocation
1792 actually occurred. */
1793
1794template<typename T>
1795inline bool
1796vec<T, va_heap, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL)
1797{
1798 return reserve (nelems, true PASS_MEM_STAT);
1799}
1800
1801
1802/* Create the internal vector and reserve NELEMS for it. This is
1803 exactly like vec::reserve, but the internal vector is
1804 unconditionally allocated from scratch. The old one, if it
1805 existed, is lost. */
1806
1807template<typename T>
1808inline void
1809vec<T, va_heap, vl_ptr>::create (unsigned nelems MEM_STAT_DECL)
1810{
1811 m_vec = NULL;
1812 if (nelems > 0)
1813 reserve_exact (nelems PASS_MEM_STAT);
1814}
1815
1816
1817/* Free the memory occupied by the embedded vector. */
1818
1819template<typename T>
1820inline void
1821vec<T, va_heap, vl_ptr>::release (void)
1822{
1823 if (!m_vec)
1824 return;
1825
1826 if (using_auto_storage ())
1827 {
1828 m_vec->m_vecpfx.m_num = 0;
1829 return;
1830 }
1831
1832 va_heap::release (m_vec);
1833}
1834
1835/* Copy the elements from SRC to the end of this vector as if by memcpy.
1836 SRC and this vector must be allocated with the same memory
1837 allocation mechanism. This vector is assumed to have sufficient
1838 headroom available. */
1839
1840template<typename T>
1841inline void
1842vec<T, va_heap, vl_ptr>::splice (const vec<T, va_heap, vl_ptr> &src)
1843{
1844 if (src.length ())
1845 m_vec->splice (*(src.m_vec));
1846}
1847
1848
1849/* Copy the elements in SRC to the end of this vector as if by memcpy.
1850 SRC and this vector must be allocated with the same mechanism.
1851 If there is not enough headroom in this vector, it will be reallocated
1852 as needed. */
1853
1854template<typename T>
1855inline void
1856vec<T, va_heap, vl_ptr>::safe_splice (const vec<T, va_heap, vl_ptr> &src
1857 MEM_STAT_DECL)
1858{
1859 if (src.length ())
1860 {
1861 reserve_exact (src.length ());
1862 splice (src);
1863 }
1864}
1865
1866
1867/* Push OBJ (a new element) onto the end of the vector. There must be
1868 sufficient space in the vector. Return a pointer to the slot
1869 where OBJ was inserted. */
1870
1871template<typename T>
1872inline T *
1873vec<T, va_heap, vl_ptr>::quick_push (const T &obj)
1874{
1875 return m_vec->quick_push (obj);
1876}
1877
1878
1879/* Push a new element OBJ onto the end of this vector. Reallocates
1880 the embedded vector, if needed. Return a pointer to the slot where
1881 OBJ was inserted. */
1882
1883template<typename T>
1884inline T *
1885vec<T, va_heap, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL)
1886{
1887 reserve (1, false PASS_MEM_STAT);
1888 return quick_push (obj);
1889}
1890
1891
1892/* Pop and return the last element off the end of the vector. */
1893
1894template<typename T>
1895inline T &
1896vec<T, va_heap, vl_ptr>::pop (void)
1897{
1898 return m_vec->pop ();
1899}
1900
1901
1902/* Set the length of the vector to LEN. The new length must be less
1903 than or equal to the current length. This is an O(1) operation. */
1904
1905template<typename T>
1906inline void
1907vec<T, va_heap, vl_ptr>::truncate (unsigned size)
1908{
1909 if (m_vec)
1910 m_vec->truncate (size);
1911 else
1912 gcc_checking_assert (size == 0);
1913}
1914
1915
1916/* Grow the vector to a specific length. LEN must be as long or
1917 longer than the current length. The new elements are
1918 uninitialized. Reallocate the internal vector, if needed. */
1919
1920template<typename T>
1921inline void
1922vec<T, va_heap, vl_ptr>::safe_grow (unsigned len, bool exact MEM_STAT_DECL)
1923{
1924 unsigned oldlen = length ();
1925 gcc_checking_assert (oldlen <= len);
1926 reserve (len - oldlen, exact PASS_MEM_STAT);
1927 if (m_vec)
1928 m_vec->quick_grow (len);
1929 else
1930 gcc_checking_assert (len == 0);
1931}
1932
1933
1934/* Grow the embedded vector to a specific length. LEN must be as
1935 long or longer than the current length. The new elements are
1936 initialized to zero. Reallocate the internal vector, if needed. */
1937
1938template<typename T>
1939inline void
1940vec<T, va_heap, vl_ptr>::safe_grow_cleared (unsigned len, bool exact
1941 MEM_STAT_DECL)
1942{
1943 unsigned oldlen = length ();
1944 size_t growby = len - oldlen;
1945 safe_grow (len, exact PASS_MEM_STAT);
1946 if (growby != 0)
1947 vec_default_construct (address () + oldlen, growby);
1948}
1949
1950
1951/* Same as vec::safe_grow but without reallocation of the internal vector.
1952 If the vector cannot be extended, a runtime assertion will be triggered. */
1953
1954template<typename T>
1955inline void
1956vec<T, va_heap, vl_ptr>::quick_grow (unsigned len)
1957{
1958 gcc_checking_assert (m_vec);
1959 m_vec->quick_grow (len);
1960}
1961
1962
1963/* Same as vec::quick_grow_cleared but without reallocation of the
1964 internal vector. If the vector cannot be extended, a runtime
1965 assertion will be triggered. */
1966
1967template<typename T>
1968inline void
1969vec<T, va_heap, vl_ptr>::quick_grow_cleared (unsigned len)
1970{
1971 gcc_checking_assert (m_vec);
1972 m_vec->quick_grow_cleared (len);
1973}
1974
1975
1976/* Insert an element, OBJ, at the IXth position of this vector. There
1977 must be sufficient space. */
1978
1979template<typename T>
1980inline void
1981vec<T, va_heap, vl_ptr>::quick_insert (unsigned ix, const T &obj)
1982{
1983 m_vec->quick_insert (ix, obj);
1984}
1985
1986
1987/* Insert an element, OBJ, at the IXth position of the vector.
1988 Reallocate the embedded vector, if necessary. */
1989
1990template<typename T>
1991inline void
1992vec<T, va_heap, vl_ptr>::safe_insert (unsigned ix, const T &obj MEM_STAT_DECL)
1993{
1994 reserve (1, false PASS_MEM_STAT);
1995 quick_insert (ix, obj);
1996}
1997
1998
1999/* Remove an element from the IXth position of this vector. Ordering of
2000 remaining elements is preserved. This is an O(N) operation due to
2001 a memmove. */
2002
2003template<typename T>
2004inline void
2005vec<T, va_heap, vl_ptr>::ordered_remove (unsigned ix)
2006{
2007 m_vec->ordered_remove (ix);
2008}
2009
2010
2011/* Remove an element from the IXth position of this vector. Ordering
2012 of remaining elements is destroyed. This is an O(1) operation. */
2013
2014template<typename T>
2015inline void
2016vec<T, va_heap, vl_ptr>::unordered_remove (unsigned ix)
2017{
2018 m_vec->unordered_remove (ix);
2019}
2020
2021
2022/* Remove LEN elements starting at the IXth. Ordering is retained.
2023 This is an O(N) operation due to memmove. */
2024
2025template<typename T>
2026inline void
2027vec<T, va_heap, vl_ptr>::block_remove (unsigned ix, unsigned len)
2028{
2029 m_vec->block_remove (ix, len);
2030}
2031
2032
2033/* Sort the contents of this vector with qsort. CMP is the comparison
2034 function to pass to qsort. */
2035
2036template<typename T>
2037inline void
2038vec<T, va_heap, vl_ptr>::qsort (int (*cmp) (const void *, const void *))
2039{
2040 if (m_vec)
2041 m_vec->qsort (cmp);
2042}
2043
2044/* Sort the contents of this vector with qsort. CMP is the comparison
2045 function to pass to qsort. */
2046
2047template<typename T>
2048inline void
2049vec<T, va_heap, vl_ptr>::sort (int (*cmp) (const void *, const void *,
2050 void *), void *data)
2051{
2052 if (m_vec)
2053 m_vec->sort (cmp, data);
2054}
2055
2056
2057/* Search the contents of the sorted vector with a binary search.
2058 CMP is the comparison function to pass to bsearch. */
2059
2060template<typename T>
2061inline T *
2062vec<T, va_heap, vl_ptr>::bsearch (const void *key,
2063 int (*cmp) (const void *, const void *))
2064{
2065 if (m_vec)
2066 return m_vec->bsearch (key, cmp);
2067 return NULL;
2068}
2069
2070/* Search the contents of the sorted vector with a binary search.
2071 CMP is the comparison function to pass to bsearch. */
2072
2073template<typename T>
2074inline T *
2075vec<T, va_heap, vl_ptr>::bsearch (const void *key,
2076 int (*cmp) (const void *, const void *,
2077 void *), void *data)
2078{
2079 if (m_vec)
2080 return m_vec->bsearch (key, cmp, data);
2081 return NULL;
2082}
2083
2084
2085/* Find and return the first position in which OBJ could be inserted
2086 without changing the ordering of this vector. LESSTHAN is a
2087 function that returns true if the first argument is strictly less
2088 than the second. */
2089
2090template<typename T>
2091inline unsigned
2092vec<T, va_heap, vl_ptr>::lower_bound (T obj,
2093 bool (*lessthan)(const T &, const T &))
2094 const
2095{
2096 return m_vec ? m_vec->lower_bound (obj, lessthan) : 0;
2097}
2098
2099/* Return true if SEARCH is an element of V. Note that this is O(N) in the
2100 size of the vector and so should be used with care. */
2101
2102template<typename T>
2103inline bool
2104vec<T, va_heap, vl_ptr>::contains (const T &search) const
2105{
2106 return m_vec ? m_vec->contains (search) : false;
2107}
2108
2109/* Reverse content of the vector. */
2110
2111template<typename T>
2112inline void
2113vec<T, va_heap, vl_ptr>::reverse (void)
2114{
2115 unsigned l = length ();
2116 T *ptr = address ();
2117
2118 for (unsigned i = 0; i < l / 2; i++)
2119 std::swap (ptr[i], ptr[l - i - 1]);
2120}
2121
2122template<typename T>
2123inline bool
2124vec<T, va_heap, vl_ptr>::using_auto_storage () const
2125{
2126 return m_vec->m_vecpfx.m_using_auto_storage;
2127}
2128
2129/* Release VEC and call release of all element vectors. */
2130
2131template<typename T>
2132inline void
2133release_vec_vec (vec<vec<T> > &vec)
2134{
2135 for (unsigned i = 0; i < vec.length (); i++)
2136 vec[i].release ();
2137
2138 vec.release ();
2139}
2140
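/* Illustrative sketch, not part of vec.h: releasing a vector of vectors in
   one call.

     vec<vec<int> > nested = vNULL;
     vec<int> inner = vNULL;
     inner.safe_push (1);
     nested.safe_push (inner);
     release_vec_vec (nested);   // releases each inner vec, then nested
*/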
2141// Provide a subset of the std::span functionality. (We can't use std::span
2142// itself because it's a C++20 feature.)
2143//
2144// In addition, provide an invalid value that is distinct from all valid
2145// sequences (including the empty sequence). This can be used to return
2146// failure without having to use std::optional.
2147//
2148// There is no operator bool because it would be ambiguous whether it is
2149// testing for a valid value or an empty sequence.
2150template<typename T>
2151class array_slice
2152{
2153 template<typename OtherT> friend class array_slice;
2154
2155public:
2156 using value_type = T;
2157 using iterator = T *;
2158 using const_iterator = const T *;
2159
2160 array_slice () : m_base (nullptr), m_size (0) {}
2161
2162 template<typename OtherT>
2163 array_slice (array_slice<OtherT> other)
2164 : m_base (other.m_base), m_size (other.m_size) {}
2165
2166 array_slice (iterator base, unsigned int size)
2167 : m_base (base), m_size (size) {}
2168
2169 template<size_t N>
2170 array_slice (T (&array)[N]) : m_base (array), m_size (N) {}
2171
2172 template<typename OtherT>
2173 array_slice (const vec<OtherT> &v)
2174 : m_base (v.address ()), m_size (v.length ()) {}
2175
2176 iterator begin () { return m_base; }
2177 iterator end () { return m_base + m_size; }
2178
2179 const_iterator begin () const { return m_base; }
2180 const_iterator end () const { return m_base + m_size; }
2181
2182 value_type &front ();
2183 value_type &back ();
2184 value_type &operator[] (unsigned int i);
2185
2186 const value_type &front () const;
2187 const value_type &back () const;
2188 const value_type &operator[] (unsigned int i) const;
2189
2190 size_t size () const { return m_size; }
2191 size_t size_bytes () const { return m_size * sizeof (T); }
2192 bool empty () const { return m_size == 0; }
2193
2194 // An invalid array_slice that represents a failed operation. This is
2195 // distinct from an empty slice, which is a valid result in some contexts.
2196 static array_slice invalid () { return { nullptr, ~0U }; }
2197
2198 // True if the array is valid, false if it is an array like INVALID.
2199 bool is_valid () const { return m_base || m_size == 0; }
2200
2201private:
2202 iterator m_base;
2203 unsigned int m_size;
2204};
2205
2206template<typename T>
2207inline typename array_slice<T>::value_type &
2208array_slice<T>::front ()
2209{
2210 gcc_checking_assert (m_size);
2211 return m_base[0];
2212}
2213
2214template<typename T>
2215inline const typename array_slice<T>::value_type &
2216array_slice<T>::front () const
2217{
2218 gcc_checking_assert (m_size);
2219 return m_base[0];
2220}
2221
2222template<typename T>
2223inline typename array_slice<T>::value_type &
2224array_slice<T>::back ()
2225{
2226 gcc_checking_assert (m_size);
2227 return m_base[m_size - 1];
2228}
2229
2230template<typename T>
2231inline const typename array_slice<T>::value_type &
2232array_slice<T>::back () const
2233{
2234 gcc_checking_assert (m_size);
2235 return m_base[m_size - 1];
2236}
2237
2238template<typename T>
2239inline typename array_slice<T>::value_type &
2240array_slice<T>::operator[] (unsigned int i)
2241{
2242 gcc_checking_assert (i < m_size);
2243 return m_base[i];
2244}
2245
2246template<typename T>
2247inline const typename array_slice<T>::value_type &
2248array_slice<T>::operator[] (unsigned int i) const
2249{
2250 gcc_checking_assert (i < m_size);
2251 return m_base[i];
2252}
2253
2254template<typename T>
2255array_slice<T>
2256make_array_slice (T *base, unsigned int size)
2257{
2258 return array_slice<T> (base, size);
2259}
2260
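/* Illustrative sketch, not part of vec.h: wrapping a C array in an
   array_slice and distinguishing the invalid value from an empty slice.

     int data[] = { 1, 2, 3 };
     array_slice<int> s = make_array_slice (data, 3);
     if (s.is_valid () && !s.empty ())
       fprintf (stderr, "last = %d\n", s.back ());
*/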
2261#if GCC_VERSION >= 3000
2262# pragma GCC poison m_vec m_vecpfx m_vecdata
2263#endif
2264
2265#endif // GCC_VEC_H