File: build/gcc/vec.h
Warning: line 742, column 3: Called C++ object pointer is null
1 | /* Gimple IR support functions. | |||
2 | ||||
3 | Copyright (C) 2007-2021 Free Software Foundation, Inc. | |||
4 | Contributed by Aldy Hernandez <aldyh@redhat.com> | |||
5 | ||||
6 | This file is part of GCC. | |||
7 | ||||
8 | GCC is free software; you can redistribute it and/or modify it under | |||
9 | the terms of the GNU General Public License as published by the Free | |||
10 | Software Foundation; either version 3, or (at your option) any later | |||
11 | version. | |||
12 | ||||
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |||
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | |||
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |||
16 | for more details. | |||
17 | ||||
18 | You should have received a copy of the GNU General Public License | |||
19 | along with GCC; see the file COPYING3. If not see | |||
20 | <http://www.gnu.org/licenses/>. */ | |||
21 | ||||
22 | #include "config.h" | |||
23 | #include "system.h" | |||
24 | #include "coretypes.h" | |||
25 | #include "backend.h" | |||
26 | #include "tree.h" | |||
27 | #include "gimple.h" | |||
28 | #include "ssa.h" | |||
29 | #include "cgraph.h" | |||
30 | #include "diagnostic.h" | |||
31 | #include "alias.h" | |||
32 | #include "fold-const.h" | |||
33 | #include "calls.h" | |||
34 | #include "stor-layout.h" | |||
35 | #include "internal-fn.h" | |||
36 | #include "tree-eh.h" | |||
37 | #include "gimple-iterator.h" | |||
38 | #include "gimple-walk.h" | |||
39 | #include "gimplify.h" | |||
40 | #include "target.h" | |||
41 | #include "builtins.h" | |||
42 | #include "selftest.h" | |||
43 | #include "gimple-pretty-print.h" | |||
44 | #include "stringpool.h" | |||
45 | #include "attribs.h" | |||
46 | #include "asan.h" | |||
47 | #include "langhooks.h" | |||
48 | #include "attr-fnspec.h" | |||
49 | #include "ipa-modref-tree.h" | |||
50 | #include "ipa-modref.h" | |||
51 | ||||
52 | ||||
53 | /* All the tuples have their operand vector (if present) at the very bottom | |||
54 | of the structure. Therefore, the offset required to find the | |||
55 | operands vector is the size of the structure minus the size of the 1 | |||
56 | element tree array at the end (see gimple_ops). */ | |||
57 | #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \ | |||
58 | (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0), | |||
59 | EXPORTED_CONST size_t gimple_ops_offset_[] = { | |||
60 | #include "gsstruct.def" | |||
61 | }; | |||
62 | #undef DEFGSSTRUCT | |||
63 | ||||
64 | #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT), | |||
65 | static const size_t gsstruct_code_size[] = { | |||
66 | #include "gsstruct.def" | |||
67 | }; | |||
68 | #undef DEFGSSTRUCT | |||
69 | ||||
70 | #define DEFGSCODE(SYM, NAME, GSSCODE) NAME, | |||
71 | const char *const gimple_code_name[] = { | |||
72 | #include "gimple.def" | |||
73 | }; | |||
74 | #undef DEFGSCODE | |||
75 | ||||
76 | #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE, | |||
77 | EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = { | |||
78 | #include "gimple.def" | |||
79 | }; | |||
80 | #undef DEFGSCODE | |||
81 | ||||
82 | /* Gimple stats. */ | |||
83 | ||||
84 | uint64_t gimple_alloc_counts[(int) gimple_alloc_kind_all]; | |||
85 | uint64_t gimple_alloc_sizes[(int) gimple_alloc_kind_all]; | |||
86 | ||||
87 | /* Keep in sync with gimple.h:enum gimple_alloc_kind. */ | |||
88 | static const char * const gimple_alloc_kind_names[] = { | |||
89 | "assignments", | |||
90 | "phi nodes", | |||
91 | "conditionals", | |||
92 | "everything else" | |||
93 | }; | |||
94 | ||||
95 | /* Static gimple tuple members. */ | |||
96 | const enum gimple_code gassign::code_; | |||
97 | const enum gimple_code gcall::code_; | |||
98 | const enum gimple_code gcond::code_; | |||
99 | ||||
100 | ||||
101 | /* Gimple tuple constructors. | |||
102 | Note: Any constructor taking a ``gimple_seq'' as a parameter can | |||
103 | be passed a NULL to start with an empty sequence. */ | |||
104 | ||||
105 | /* Set the code for statement G to CODE. */ | |||
106 | ||||
107 | static inline void | |||
108 | gimple_set_code (gimple *g, enum gimple_code code) | |||
109 | { | |||
110 | g->code = code; | |||
111 | } | |||
112 | ||||
113 | /* Return the number of bytes needed to hold a GIMPLE statement with | |||
114 | code CODE. */ | |||
115 | ||||
116 | size_t | |||
117 | gimple_size (enum gimple_code code, unsigned num_ops) | |||
118 | { | |||
119 | size_t size = gsstruct_code_size[gss_for_code (code)]; | |||
120 | if (num_ops > 0) | |||
121 | size += (sizeof (tree) * (num_ops - 1)); | |||
122 | return size; | |||
123 | } | |||
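/* Editor's note: a small worked example of the size computation above
   (an illustrative sketch, not part of the original file).  A statement
   with three operands only pays for the two operands beyond the
   one-element trailing array already embedded in its tuple structure:

     size_t sz = gimple_size (GIMPLE_ASSIGN, 3);
     // sz == gsstruct_code_size[gss_for_code (GIMPLE_ASSIGN)]
     //       + 2 * sizeof (tree)
*/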
124 | ||||
125 | /* Initialize GIMPLE statement G with CODE and NUM_OPS. */ | |||
126 | ||||
127 | void | |||
128 | gimple_init (gimple *g, enum gimple_code code, unsigned num_ops) | |||
129 | { | |||
130 | gimple_set_code (g, code); | |||
131 | gimple_set_num_ops (g, num_ops); | |||
132 | ||||
133 | /* Do not call gimple_set_modified here as it has other side | |||
134 | effects and this tuple is still not completely built. */ | |||
135 | g->modified = 1; | |||
136 | gimple_init_singleton (g); | |||
137 | } | |||
138 | ||||
139 | /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS | |||
140 | operands. */ | |||
141 | ||||
142 | gimple * | |||
143 | gimple_alloc (enum gimple_code code, unsigned num_ops MEM_STAT_DECL) | |||
144 | { | |||
145 | size_t size; | |||
146 | gimple *stmt; | |||
147 | ||||
148 | size = gimple_size (code, num_ops); | |||
149 | if (GATHER_STATISTICS) | |||
150 | { | |||
151 | enum gimple_alloc_kind kind = gimple_alloc_kind (code); | |||
152 | gimple_alloc_counts[(int) kind]++; | |||
153 | gimple_alloc_sizes[(int) kind] += size; | |||
154 | } | |||
155 | ||||
156 | stmt = ggc_alloc_cleared_gimple_statement_stat (size PASS_MEM_STAT); | |||
157 | gimple_init (stmt, code, num_ops); | |||
158 | return stmt; | |||
159 | } | |||
160 | ||||
161 | /* Set SUBCODE to be the code of the expression computed by statement G. */ | |||
162 | ||||
163 | static inline void | |||
164 | gimple_set_subcode (gimple *g, unsigned subcode) | |||
165 | { | |||
166 | /* We only have 16 bits for the RHS code. Assert that we are not | |||
167 | overflowing it. */ | |||
168 | gcc_assert (subcode < (1 << 16)); | |||
169 | g->subcode = subcode; | |||
170 | } | |||
171 | ||||
172 | ||||
173 | ||||
174 | /* Build a tuple with operands. CODE is the statement to build (which | |||
175 | must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the subcode | |||
176 | for the new tuple. NUM_OPS is the number of operands to allocate. */ | |||
177 | ||||
178 | #define gimple_build_with_ops(c, s, n) \ | |||
179 | gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO) | |||
180 | ||||
181 | static gimple * | |||
182 | gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode, | |||
183 | unsigned num_ops MEM_STAT_DECL) | |||
184 | { | |||
185 | gimple *s = gimple_alloc (code, num_ops PASS_MEM_STAT); | |||
186 | gimple_set_subcode (s, subcode); | |||
187 | ||||
188 | return s; | |||
189 | } | |||
190 | ||||
191 | ||||
192 | /* Build a GIMPLE_RETURN statement returning RETVAL. */ | |||
193 | ||||
194 | greturn * | |||
195 | gimple_build_return (tree retval) | |||
196 | { | |||
197 | greturn *s | |||
198 | = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, | |||
199 | 2)); | |||
200 | if (retval) | |||
201 | gimple_return_set_retval (s, retval); | |||
202 | return s; | |||
203 | } | |||
204 | ||||
205 | /* Reset alias information on call S. */ | |||
206 | ||||
207 | void | |||
208 | gimple_call_reset_alias_info (gcall *s) | |||
209 | { | |||
210 | if (gimple_call_flags (s) & ECF_CONST) | |||
211 | memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution)); | |||
212 | else | |||
213 | pt_solution_reset (gimple_call_use_set (s)); | |||
214 | if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS)) | |||
215 | memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution)); | |||
216 | else | |||
217 | pt_solution_reset (gimple_call_clobber_set (s)); | |||
218 | } | |||
219 | ||||
220 | /* Helper for gimple_build_call, gimple_build_call_valist, | |||
221 | gimple_build_call_vec and gimple_build_call_from_tree. Build the basic | |||
222 | components of a GIMPLE_CALL statement to function FN with NARGS | |||
223 | arguments. */ | |||
224 | ||||
225 | static inline gcall * | |||
226 | gimple_build_call_1 (tree fn, unsigned nargs) | |||
227 | { | |||
228 | gcall *s | |||
229 | = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, | |||
230 | nargs + 3)); | |||
231 | if (TREE_CODE (fn) == FUNCTION_DECL) | |||
232 | fn = build_fold_addr_expr (fn); | |||
233 | gimple_set_op (s, 1, fn); | |||
234 | gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn))); | |||
235 | gimple_call_reset_alias_info (s); | |||
236 | return s; | |||
237 | } | |||
238 | ||||
239 | ||||
240 | /* Build a GIMPLE_CALL statement to function FN with the arguments | |||
241 | specified in vector ARGS. */ | |||
242 | ||||
243 | gcall * | |||
244 | gimple_build_call_vec (tree fn, vec<tree> args) | |||
245 | { | |||
246 | unsigned i; | |||
247 | unsigned nargs = args.length (); | |||
248 | gcall *call = gimple_build_call_1 (fn, nargs); | |||
249 | ||||
250 | for (i = 0; i < nargs; i++) | |||
251 | gimple_call_set_arg (call, i, args[i]); | |||
252 | ||||
253 | return call; | |||
254 | } | |||
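/* Editor's note: illustrative use of gimple_build_call_vec (a sketch, not
   part of the original file; `fndecl', `arg0', `arg1' and `lhs' are
   assumed trees supplied by the caller):

     auto_vec<tree> args;
     args.safe_push (arg0);
     args.safe_push (arg1);
     gcall *call = gimple_build_call_vec (fndecl, args);
     gimple_call_set_lhs (call, lhs);
*/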
255 | ||||
256 | ||||
257 | /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of | |||
258 | arguments. The ... are the arguments. */ | |||
259 | ||||
260 | gcall * | |||
261 | gimple_build_call (tree fn, unsigned nargs, ...) | |||
262 | { | |||
263 | va_list ap; | |||
264 | gcall *call; | |||
265 | unsigned i; | |||
266 | ||||
267 | gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn)); | |||
268 | ||||
269 | call = gimple_build_call_1 (fn, nargs); | |||
270 | ||||
271 | va_start (ap, nargs); | |||
272 | for (i = 0; i < nargs; i++) | |||
273 | gimple_call_set_arg (call, i, va_arg (ap, tree)); | |||
274 | va_end (ap); | |||
275 | ||||
276 | return call; | |||
277 | } | |||
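/* Editor's note: illustrative use of the varargs overload (a sketch, not
   part of the original file; `dst', `src' and `len' are assumed trees):

     tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
     gcall *call = gimple_build_call (fn, 3, dst, src, len);
*/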
278 | ||||
279 | ||||
280 | /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of | |||
281 | arguments. AP contains the arguments. */ | |||
282 | ||||
283 | gcall * | |||
284 | gimple_build_call_valist (tree fn, unsigned nargs, va_list ap) | |||
285 | { | |||
286 | gcall *call; | |||
287 | unsigned i; | |||
288 | ||||
289 | gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn)); | |||
290 | ||||
291 | call = gimple_build_call_1 (fn, nargs); | |||
292 | ||||
293 | for (i = 0; i < nargs; i++) | |||
294 | gimple_call_set_arg (call, i, va_arg (ap, tree)); | |||
295 | ||||
296 | return call; | |||
297 | } | |||
298 | ||||
299 | ||||
300 | /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec. | |||
301 | Build the basic components of a GIMPLE_CALL statement to internal | |||
302 | function FN with NARGS arguments. */ | |||
303 | ||||
304 | static inline gcall * | |||
305 | gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs) | |||
306 | { | |||
307 | gcall *s | |||
308 | = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, | |||
309 | nargs + 3)); | |||
310 | s->subcode |= GF_CALL_INTERNAL; | |||
311 | gimple_call_set_internal_fn (s, fn); | |||
312 | gimple_call_reset_alias_info (s); | |||
313 | return s; | |||
314 | } | |||
315 | ||||
316 | ||||
317 | /* Build a GIMPLE_CALL statement to internal function FN. NARGS is | |||
318 | the number of arguments. The ... are the arguments. */ | |||
319 | ||||
320 | gcall * | |||
321 | gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...) | |||
322 | { | |||
323 | va_list ap; | |||
324 | gcall *call; | |||
325 | unsigned i; | |||
326 | ||||
327 | call = gimple_build_call_internal_1 (fn, nargs); | |||
328 | va_start (ap, nargs); | |||
329 | for (i = 0; i < nargs; i++) | |||
330 | gimple_call_set_arg (call, i, va_arg (ap, tree)); | |||
331 | va_end (ap); | |||
332 | ||||
333 | return call; | |||
334 | } | |||
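/* Editor's note: illustrative use for an internal function (a sketch, not
   part of the original file; `res' and `op' are assumed trees):

     gcall *call = gimple_build_call_internal (IFN_SQRT, 1, op);
     gimple_call_set_lhs (call, res);
*/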
335 | ||||
336 | ||||
337 | /* Build a GIMPLE_CALL statement to internal function FN with the arguments | |||
338 | specified in vector ARGS. */ | |||
339 | ||||
340 | gcall * | |||
341 | gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args) | |||
342 | { | |||
343 | unsigned i, nargs; | |||
344 | gcall *call; | |||
345 | ||||
346 | nargs = args.length (); | |||
347 | call = gimple_build_call_internal_1 (fn, nargs); | |||
348 | for (i = 0; i < nargs; i++) | |||
349 | gimple_call_set_arg (call, i, args[i]); | |||
350 | ||||
351 | return call; | |||
352 | } | |||
353 | ||||
354 | ||||
355 | /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is | |||
356 | assumed to be in GIMPLE form already. Minimal checking is done of | |||
357 | this fact. */ | |||
358 | ||||
359 | gcall * | |||
360 | gimple_build_call_from_tree (tree t, tree fnptrtype) | |||
361 | { | |||
362 | unsigned i, nargs; | |||
363 | gcall *call; | |||
364 | ||||
365 | gcc_assert (TREE_CODE (t) == CALL_EXPR); | |||
366 | ||||
367 | nargs = call_expr_nargs (t); | |||
368 | ||||
369 | tree fndecl = NULL_TREE; | |||
370 | if (CALL_EXPR_FN (t) == NULL_TREE) | |||
371 | call = gimple_build_call_internal_1 (CALL_EXPR_IFN (t), nargs); | |||
372 | else | |||
373 | { | |||
374 | fndecl = get_callee_fndecl (t); | |||
375 | call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs); | |||
376 | } | |||
377 | ||||
378 | for (i = 0; i < nargs; i++) | |||
379 | gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i)); | |||
380 | ||||
381 | gimple_set_block (call, TREE_BLOCK (t)); | |||
382 | gimple_set_location (call, EXPR_LOCATION (t)); | |||
383 | ||||
384 | /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */ | |||
385 | gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t)); | |||
386 | gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t)); | |||
387 | gimple_call_set_must_tail (call, CALL_EXPR_MUST_TAIL_CALL (t)); | |||
388 | gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t)); | |||
389 | if (fndecl | |||
390 | && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL) | |||
391 | && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl))) | |||
392 | gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t)); | |||
393 | else if (fndecl | |||
394 | && (DECL_IS_OPERATOR_NEW_P (fndecl) | |||
395 | || DECL_IS_OPERATOR_DELETE_P (fndecl))) | |||
396 | gimple_call_set_from_new_or_delete (call, CALL_FROM_NEW_OR_DELETE_P (t)); | |||
397 | else | |||
398 | gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t)); | |||
399 | gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t)); | |||
400 | gimple_call_set_nothrow (call, TREE_NOTHROW (t)); | |||
401 | gimple_call_set_by_descriptor (call, CALL_EXPR_BY_DESCRIPTOR (t)); | |||
402 | gimple_set_no_warning (call, TREE_NO_WARNING (t)); | |||
403 | ||||
404 | if (fnptrtype) | |||
405 | { | |||
406 | gimple_call_set_fntype (call, TREE_TYPE (fnptrtype)); | |||
407 | ||||
408 | /* Check if it's an indirect CALL and the type has the | |||
409 | nocf_check attribute. In that case propagate the information | |||
410 | to the gimple CALL insn. */ | |||
411 | if (!fndecl) | |||
412 | { | |||
413 | gcc_assert (POINTER_TYPE_P (fnptrtype)); | |||
414 | tree fntype = TREE_TYPE (fnptrtype); | |||
415 | ||||
416 | if (lookup_attribute ("nocf_check", TYPE_ATTRIBUTES (fntype))) | |||
417 | gimple_call_set_nocf_check (call, TRUE); | |||
418 | } | |||
419 | } | |||
420 | ||||
421 | return call; | |||
422 | } | |||
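/* Editor's note: illustrative use during gimplification (a sketch, not
   part of the original file; `fndecl' and `arg' are assumed trees).  The
   second argument carries the function pointer type so the fntype is
   preserved even for indirect calls:

     tree ce = build_call_expr (fndecl, 1, arg);
     gcall *call
       = gimple_build_call_from_tree (ce, TREE_TYPE (CALL_EXPR_FN (ce)));
*/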
423 | ||||
424 | ||||
425 | /* Build a GIMPLE_ASSIGN statement. | |||
426 | ||||
427 | LHS of the assignment. | |||
428 | RHS of the assignment which can be unary or binary. */ | |||
429 | ||||
430 | gassign * | |||
431 | gimple_build_assign (tree lhs, tree rhs MEM_STAT_DECL) | |||
432 | { | |||
433 | enum tree_code subcode; | |||
434 | tree op1, op2, op3; | |||
435 | ||||
436 | extract_ops_from_tree (rhs, &subcode, &op1, &op2, &op3); | |||
437 | return gimple_build_assign (lhs, subcode, op1, op2, op3 PASS_MEM_STAT); | |||
438 | } | |||
439 | ||||
440 | ||||
441 | /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands | |||
442 | OP1, OP2 and OP3. */ | |||
443 | ||||
444 | static inline gassign * | |||
445 | gimple_build_assign_1 (tree lhs, enum tree_code subcode, tree op1, | |||
446 | tree op2, tree op3 MEM_STAT_DECL) | |||
447 | { | |||
448 | unsigned num_ops; | |||
449 | gassign *p; | |||
450 | ||||
451 | /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the | |||
452 | code). */ | |||
453 | num_ops = get_gimple_rhs_num_ops (subcode) + 1; | |||
454 | ||||
455 | p = as_a <gassign *> ( | |||
456 | gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops | |||
457 | PASS_MEM_STAT)); | |||
458 | gimple_assign_set_lhs (p, lhs); | |||
459 | gimple_assign_set_rhs1 (p, op1); | |||
460 | if (op2) | |||
461 | { | |||
462 | gcc_assert (num_ops > 2); | |||
463 | gimple_assign_set_rhs2 (p, op2); | |||
464 | } | |||
465 | ||||
466 | if (op3) | |||
467 | { | |||
468 | gcc_assert (num_ops > 3); | |||
469 | gimple_assign_set_rhs3 (p, op3); | |||
470 | } | |||
471 | ||||
472 | return p; | |||
473 | } | |||
474 | ||||
475 | /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands | |||
476 | OP1, OP2 and OP3. */ | |||
477 | ||||
478 | gassign * | |||
479 | gimple_build_assign (tree lhs, enum tree_code subcode, tree op1, | |||
480 | tree op2, tree op3 MEM_STAT_DECL) | |||
481 | { | |||
482 | return gimple_build_assign_1 (lhs, subcode, op1, op2, op3 PASS_MEM_STAT); | |||
483 | } | |||
484 | ||||
485 | /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands | |||
486 | OP1 and OP2. */ | |||
487 | ||||
488 | gassign * | |||
489 | gimple_build_assign (tree lhs, enum tree_code subcode, tree op1, | |||
490 | tree op2 MEM_STAT_DECL) | |||
491 | { | |||
492 | return gimple_build_assign_1 (lhs, subcode, op1, op2, NULL_TREE | |||
493 | PASS_MEM_STAT); | |||
494 | } | |||
495 | ||||
496 | /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operand OP1. */ | |||
497 | ||||
498 | gassign * | |||
499 | gimple_build_assign (tree lhs, enum tree_code subcode, tree op1 MEM_STAT_DECL) | |||
500 | { | |||
501 | return gimple_build_assign_1 (lhs, subcode, op1, NULL_TREE, NULL_TREE | |||
502 | PASS_MEM_STAT); | |||
503 | } | |||
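/* Editor's note: illustrative uses of the gimple_build_assign overloads
   (a sketch, not part of the original file; `x', `a' and `b' are assumed
   trees):

     gassign *copy = gimple_build_assign (x, a);               // x = a
     gassign *sum  = gimple_build_assign (x, PLUS_EXPR, a, b); // x = a + b
*/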
504 | ||||
505 | ||||
506 | /* Build a GIMPLE_COND statement. | |||
507 | ||||
508 | PRED is the condition used to compare LHS and the RHS. | |||
509 | T_LABEL is the label to jump to if the condition is true. | |||
510 | F_LABEL is the label to jump to otherwise. */ | |||
511 | ||||
512 | gcond * | |||
513 | gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs, | |||
514 | tree t_label, tree f_label) | |||
515 | { | |||
516 | gcond *p; | |||
517 | ||||
518 | gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison); | |||
519 | p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4)); | |||
520 | gimple_cond_set_lhs (p, lhs); | |||
521 | gimple_cond_set_rhs (p, rhs); | |||
522 | gimple_cond_set_true_label (p, t_label); | |||
523 | gimple_cond_set_false_label (p, f_label); | |||
524 | return p; | |||
525 | } | |||
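/* Editor's note: illustrative use (a sketch, not part of the original
   file; `a' and `b' are assumed trees).  In a lowered CFG the labels are
   typically left NULL and the outgoing edges carry the targets:

     gcond *cmp = gimple_build_cond (LT_EXPR, a, b, NULL_TREE, NULL_TREE);
*/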
526 | ||||
527 | /* Build a GIMPLE_COND statement from the conditional expression tree | |||
528 | COND. T_LABEL and F_LABEL are as in gimple_build_cond. */ | |||
529 | ||||
530 | gcond * | |||
531 | gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label) | |||
532 | { | |||
533 | enum tree_code code; | |||
534 | tree lhs, rhs; | |||
535 | ||||
536 | gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs); | |||
537 | return gimple_build_cond (code, lhs, rhs, t_label, f_label); | |||
538 | } | |||
539 | ||||
540 | /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable | |||
541 | boolean expression tree COND. */ | |||
542 | ||||
543 | void | |||
544 | gimple_cond_set_condition_from_tree (gcond *stmt, tree cond) | |||
545 | { | |||
546 | enum tree_code code; | |||
547 | tree lhs, rhs; | |||
548 | ||||
549 | gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs); | |||
550 | gimple_cond_set_condition (stmt, code, lhs, rhs); | |||
551 | } | |||
552 | ||||
553 | /* Build a GIMPLE_LABEL statement for LABEL. */ | |||
554 | ||||
555 | glabel * | |||
556 | gimple_build_label (tree label) | |||
557 | { | |||
558 | glabel *p | |||
559 | = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1)); | |||
560 | gimple_label_set_label (p, label); | |||
561 | return p; | |||
562 | } | |||
563 | ||||
564 | /* Build a GIMPLE_GOTO statement to label DEST. */ | |||
565 | ||||
566 | ggoto * | |||
567 | gimple_build_goto (tree dest) | |||
568 | { | |||
569 | ggoto *p | |||
570 | = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1)); | |||
571 | gimple_goto_set_dest (p, dest); | |||
572 | return p; | |||
573 | } | |||
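/* Editor's note: an illustrative label/goto pair (a sketch, not part of
   the original file):

     tree lab = create_artificial_label (UNKNOWN_LOCATION);
     glabel *ldef = gimple_build_label (lab);
     ggoto *jump = gimple_build_goto (lab);
*/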
574 | ||||
575 | ||||
576 | /* Build a GIMPLE_NOP statement. */ | |||
577 | ||||
578 | gimple * | |||
579 | gimple_build_nop (void) | |||
580 | { | |||
581 | return gimple_alloc (GIMPLE_NOP, 0); | |||
582 | } | |||
583 | ||||
584 | ||||
585 | /* Build a GIMPLE_BIND statement. | |||
586 | VARS are the variables in BODY. | |||
587 | BLOCK is the containing block. */ | |||
588 | ||||
589 | gbind * | |||
590 | gimple_build_bind (tree vars, gimple_seq body, tree block) | |||
591 | { | |||
592 | gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0)); | |||
593 | gimple_bind_set_vars (p, vars); | |||
594 | if (body) | |||
595 | gimple_bind_set_body (p, body); | |||
596 | if (block) | |||
597 | gimple_bind_set_block (p, block); | |||
598 | return p; | |||
599 | } | |||
600 | ||||
601 | /* Helper function to set the simple fields of an asm stmt. | |||
602 | ||||
603 | STRING is a pointer to a string that is the asm block's assembly code. | |||
604 | NINPUTS is the number of register inputs. | |||
605 | NOUTPUTS is the number of register outputs. | |||
606 | NCLOBBERS is the number of clobbered registers. | |||
607 | NLABELS is the number of labels for an asm goto. */ | |||
608 | ||||
609 | static inline gasm * | |||
610 | gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs, | |||
611 | unsigned nclobbers, unsigned nlabels) | |||
612 | { | |||
613 | gasm *p; | |||
614 | int size = strlen (string); | |||
615 | ||||
616 | p = as_a <gasm *> ( | |||
617 | gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK, | |||
618 | ninputs + noutputs + nclobbers + nlabels)); | |||
619 | ||||
620 | p->ni = ninputs; | |||
621 | p->no = noutputs; | |||
622 | p->nc = nclobbers; | |||
623 | p->nl = nlabels; | |||
624 | p->string = ggc_alloc_string (string, size); | |||
625 | ||||
626 | if (GATHER_STATISTICS) | |||
627 | gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size; | |||
628 | ||||
629 | return p; | |||
630 | } | |||
631 | ||||
632 | /* Build a GIMPLE_ASM statement. | |||
633 | ||||
634 | STRING is the assembly code. | |||
635 | NINPUTS is the number of register inputs. | |||
636 | NOUTPUTS is the number of register outputs. | |||
637 | NCLOBBERS is the number of clobbered registers. | |||
638 | INPUTS is a vector of the input register parameters. | |||
639 | OUTPUTS is a vector of the output register parameters. | |||
640 | CLOBBERS is a vector of the clobbered register parameters. | |||
641 | LABELS is a vector of destination labels. */ | |||
642 | ||||
643 | gasm * | |||
644 | gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs, | |||
645 | vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers, | |||
646 | vec<tree, va_gc> *labels) | |||
647 | { | |||
648 | gasm *p; | |||
649 | unsigned i; | |||
650 | ||||
651 | p = gimple_build_asm_1 (string, | |||
652 | vec_safe_length (inputs), | |||
653 | vec_safe_length (outputs), | |||
654 | vec_safe_length (clobbers), | |||
655 | vec_safe_length (labels)); | |||
656 | ||||
657 | for (i = 0; i < vec_safe_length (inputs); i++) | |||
658 | gimple_asm_set_input_op (p, i, (*inputs)[i]); | |||
659 | ||||
660 | for (i = 0; i < vec_safe_length (outputs); i++) | |||
661 | gimple_asm_set_output_op (p, i, (*outputs)[i]); | |||
662 | ||||
663 | for (i = 0; i < vec_safe_length (clobbers); i++) | |||
664 | gimple_asm_set_clobber_op (p, i, (*clobbers)[i]); | |||
665 | ||||
666 | for (i = 0; i < vec_safe_length (labels); i++) | |||
667 | gimple_asm_set_label_op (p, i, (*labels)[i]); | |||
668 | ||||
669 | return p; | |||
670 | } | |||
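/* Editor's note: illustrative use for a bare asm with no operands (a
   sketch, not part of the original file).  Passing NULL vectors is fine
   because the lengths are read with vec_safe_length:

     gasm *nop = gimple_build_asm_vec ("nop", NULL, NULL, NULL, NULL);
     gimple_asm_set_volatile (nop, true);
*/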
671 | ||||
672 | /* Build a GIMPLE_CATCH statement. | |||
673 | ||||
674 | TYPES are the catch types. | |||
675 | HANDLER is the exception handler. */ | |||
676 | ||||
677 | gcatch * | |||
678 | gimple_build_catch (tree types, gimple_seq handler) | |||
679 | { | |||
680 | gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0)); | |||
681 | gimple_catch_set_types (p, types); | |||
682 | if (handler) | |||
683 | gimple_catch_set_handler (p, handler); | |||
684 | ||||
685 | return p; | |||
686 | } | |||
687 | ||||
688 | /* Build a GIMPLE_EH_FILTER statement. | |||
689 | ||||
690 | TYPES are the filter's types. | |||
691 | FAILURE is the filter's failure action. */ | |||
692 | ||||
693 | geh_filter * | |||
694 | gimple_build_eh_filter (tree types, gimple_seq failure) | |||
695 | { | |||
696 | geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0)); | |||
697 | gimple_eh_filter_set_types (p, types); | |||
698 | if (failure) | |||
699 | gimple_eh_filter_set_failure (p, failure); | |||
700 | ||||
701 | return p; | |||
702 | } | |||
703 | ||||
704 | /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */ | |||
705 | ||||
706 | geh_mnt * | |||
707 | gimple_build_eh_must_not_throw (tree decl) | |||
708 | { | |||
709 | geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0)); | |||
710 | ||||
711 | gcc_assert (TREE_CODE (decl) == FUNCTION_DECL); | |||
712 | gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN); | |||
713 | gimple_eh_must_not_throw_set_fndecl (p, decl); | |||
714 | ||||
715 | return p; | |||
716 | } | |||
717 | ||||
718 | /* Build a GIMPLE_EH_ELSE statement. */ | |||
719 | ||||
720 | geh_else * | |||
721 | gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body) | |||
722 | { | |||
723 | geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0)); | |||
724 | gimple_eh_else_set_n_body (p, n_body); | |||
725 | gimple_eh_else_set_e_body (p, e_body); | |||
726 | return p; | |||
727 | } | |||
728 | ||||
729 | /* Build a GIMPLE_TRY statement. | |||
730 | ||||
731 | EVAL is the expression to evaluate. | |||
732 | CLEANUP is the cleanup expression. | |||
733 | KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on | |||
734 | whether this is a try/catch or a try/finally respectively. */ | |||
735 | ||||
736 | gtry * | |||
737 | gimple_build_try (gimple_seq eval, gimple_seq cleanup, | |||
738 | enum gimple_try_flags kind) | |||
739 | { | |||
740 | gtry *p; | |||
741 | ||||
742 | gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY); | |||
743 | p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0)); | |||
744 | gimple_set_subcode (p, kind); | |||
745 | if (eval) | |||
746 | gimple_try_set_eval (p, eval); | |||
747 | if (cleanup) | |||
748 | gimple_try_set_cleanup (p, cleanup); | |||
749 | ||||
750 | return p; | |||
751 | } | |||
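/* Editor's note: illustrative use (a sketch, not part of the original
   file; `body' and `cleanup' are assumed gimple_seqs built elsewhere):

     gtry *t = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
*/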
752 | ||||
753 | /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement. | |||
754 | ||||
755 | CLEANUP is the cleanup expression. */ | |||
756 | ||||
757 | gimple * | |||
758 | gimple_build_wce (gimple_seq cleanup) | |||
759 | { | |||
760 | gimple *p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0); | |||
761 | if (cleanup) | |||
762 | gimple_wce_set_cleanup (p, cleanup); | |||
763 | ||||
764 | return p; | |||
765 | } | |||
766 | ||||
767 | ||||
768 | /* Build a GIMPLE_RESX statement. */ | |||
769 | ||||
770 | gresx * | |||
771 | gimple_build_resx (int region) | |||
772 | { | |||
773 | gresx *p | |||
774 | = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0)); | |||
775 | p->region = region; | |||
776 | return p; | |||
777 | } | |||
778 | ||||
779 | ||||
780 | /* The helper for constructing a gimple switch statement. | |||
781 | INDEX is the switch's index. | |||
782 | NLABELS is the number of labels in the switch excluding the default. | |||
783 | DEFAULT_LABEL is the default label for the switch statement. */ | |||
784 | ||||
785 | gswitch * | |||
786 | gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label) | |||
787 | { | |||
788 | /* nlabels + 1 default label + 1 index. */ | |||
789 | gcc_checking_assert (default_label); | |||
790 | gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH, | |||
791 | ERROR_MARK, | |||
792 | 1 + 1 + nlabels)); | |||
793 | gimple_switch_set_index (p, index); | |||
794 | gimple_switch_set_default_label (p, default_label); | |||
795 | return p; | |||
796 | } | |||
797 | ||||
798 | /* Build a GIMPLE_SWITCH statement. | |||
799 | ||||
800 | INDEX is the switch's index. | |||
801 | DEFAULT_LABEL is the default label | |||
802 | ARGS is a vector of labels excluding the default. */ | |||
803 | ||||
804 | gswitch * | |||
805 | gimple_build_switch (tree index, tree default_label, vec<tree> args) | |||
806 | { | |||
807 | unsigned i, nlabels = args.length (); | |||
808 | ||||
809 | gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label); | |||
810 | ||||
811 | /* Copy the labels from the vector to the switch statement. */ | |||
812 | for (i = 0; i < nlabels; i++) | |||
813 | gimple_switch_set_label (p, i + 1, args[i]); | |||
814 | ||||
815 | return p; | |||
816 | } | |||
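/* Editor's note: illustrative use (a sketch, not part of the original
   file; `idx' is an assumed tree and the label decls are assumed to come
   from create_artificial_label).  Case labels are CASE_LABEL_EXPRs built
   with build_case_label:

     tree def = build_case_label (NULL_TREE, NULL_TREE, default_decl);
     tree c1  = build_case_label (build_int_cst (integer_type_node, 1),
                                  NULL_TREE, case1_decl);
     auto_vec<tree> cases;
     cases.safe_push (c1);
     gswitch *sw = gimple_build_switch (idx, def, cases);
*/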
817 | ||||
818 | /* Build a GIMPLE_EH_DISPATCH statement. */ | |||
819 | ||||
820 | geh_dispatch * | |||
821 | gimple_build_eh_dispatch (int region) | |||
822 | { | |||
823 | geh_dispatch *p | |||
824 | = as_a <geh_dispatch *> ( | |||
825 | gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0)); | |||
826 | p->region = region; | |||
827 | return p; | |||
828 | } | |||
829 | ||||
830 | /* Build a new GIMPLE_DEBUG_BIND statement. | |||
831 | ||||
832 | VAR is bound to VALUE; block and location are taken from STMT. */ | |||
833 | ||||
834 | gdebug * | |||
835 | gimple_build_debug_bind (tree var, tree value, gimple *stmt MEM_STAT_DECL) | |||
836 | { | |||
837 | gdebug *p | |||
838 | = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG, | |||
839 | (unsigned)GIMPLE_DEBUG_BIND, 2 | |||
840 | PASS_MEM_STAT)); | |||
841 | gimple_debug_bind_set_var (p, var); | |||
842 | gimple_debug_bind_set_value (p, value); | |||
843 | if (stmt) | |||
844 | gimple_set_location (p, gimple_location (stmt)); | |||
845 | ||||
846 | return p; | |||
847 | } | |||
848 | ||||
849 | ||||
850 | /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement. | |||
851 | ||||
852 | VAR is bound to VALUE; block and location are taken from STMT. */ | |||
853 | ||||
854 | gdebug * | |||
855 | gimple_build_debug_source_bind (tree var, tree value, | |||
856 | gimple *stmt MEM_STAT_DECL) | |||
857 | { | |||
858 | gdebug *p | |||
859 | = as_a <gdebug *> ( | |||
860 | gimple_build_with_ops_stat (GIMPLE_DEBUG, | |||
861 | (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2 | |||
862 | PASS_MEM_STAT)); | |||
863 | ||||
864 | gimple_debug_source_bind_set_var (p, var); | |||
865 | gimple_debug_source_bind_set_value (p, value); | |||
866 | if (stmt) | |||
867 | gimple_set_location (p, gimple_location (stmt)); | |||
868 | ||||
869 | return p; | |||
870 | } | |||
871 | ||||
872 | ||||
873 | /* Build a new GIMPLE_DEBUG_BEGIN_STMT statement in BLOCK at | |||
874 | LOCATION. */ | |||
875 | ||||
876 | gdebug * | |||
877 | gimple_build_debug_begin_stmt (tree block, location_t location | |||
878 | MEM_STAT_DECL) | |||
879 | { | |||
880 | gdebug *p | |||
881 | = as_a <gdebug *> ( | |||
882 | gimple_build_with_ops_stat (GIMPLE_DEBUG, | |||
883 | (unsigned)GIMPLE_DEBUG_BEGIN_STMT, 0 | |||
884 | PASS_MEM_STAT)); | |||
885 | ||||
886 | gimple_set_location (p, location); | |||
887 | gimple_set_block (p, block); | |||
888 | cfun->debug_marker_count++; | |||
889 | ||||
890 | return p; | |||
891 | } | |||
892 | ||||
893 | ||||
894 | /* Build a new GIMPLE_DEBUG_INLINE_ENTRY statement in BLOCK at | |||
895 | LOCATION. The BLOCK links to the inlined function. */ | |||
896 | ||||
897 | gdebug * | |||
898 | gimple_build_debug_inline_entry (tree block, location_t location | |||
899 | MEM_STAT_DECL) | |||
900 | { | |||
901 | gdebug *p | |||
902 | = as_a <gdebug *> ( | |||
903 | gimple_build_with_ops_stat (GIMPLE_DEBUG, | |||
904 | (unsigned)GIMPLE_DEBUG_INLINE_ENTRY, 0 | |||
905 | PASS_MEM_STAT)); | |||
906 | ||||
907 | gimple_set_location (p, location); | |||
908 | gimple_set_block (p, block); | |||
909 | cfun->debug_marker_count++; | |||
910 | ||||
911 | return p; | |||
912 | } | |||
913 | ||||
914 | ||||
915 | /* Build a GIMPLE_OMP_CRITICAL statement. | |||
916 | ||||
917 | BODY is the sequence of statements for which only one thread can execute. | |||
918 | NAME is an optional identifier for this critical block. | |||
919 | CLAUSES are clauses for this critical block. */ | |||
920 | ||||
921 | gomp_critical * | |||
922 | gimple_build_omp_critical (gimple_seq body, tree name, tree clauses) | |||
923 | { | |||
924 | gomp_critical *p | |||
925 | = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0)); | |||
926 | gimple_omp_critical_set_name (p, name); | |||
927 | gimple_omp_critical_set_clauses (p, clauses); | |||
928 | if (body) | |||
929 | gimple_omp_set_body (p, body); | |||
930 | ||||
931 | return p; | |||
932 | } | |||
933 | ||||
934 | /* Build a GIMPLE_OMP_FOR statement. | |||
935 | ||||
936 | BODY is the sequence of statements inside the for loop. | |||
937 | KIND is the `for' variant. | |||
938 | CLAUSES are any of the construct's clauses. | |||
939 | COLLAPSE is the collapse count. | |||
940 | PRE_BODY is the sequence of statements that are loop invariant. */ | |||
941 | ||||
942 | gomp_for * | |||
943 | gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse, | |||
944 | gimple_seq pre_body) | |||
945 | { | |||
946 | gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0)); | |||
947 | if (body) | |||
948 | gimple_omp_set_body (p, body); | |||
949 | gimple_omp_for_set_clauses (p, clauses); | |||
950 | gimple_omp_for_set_kind (p, kind); | |||
951 | p->collapse = collapse; | |||
952 | p->iter = ggc_cleared_vec_alloc<gimple_omp_for_iter> (collapse); | |||
953 | ||||
954 | if (pre_body) | |||
955 | gimple_omp_for_set_pre_body (p, pre_body); | |||
956 | ||||
957 | return p; | |||
958 | } | |||
959 | ||||
960 | ||||
961 | /* Build a GIMPLE_OMP_PARALLEL statement. | |||
962 | ||||
963 | BODY is the sequence of statements which are executed in parallel. | |||
964 | CLAUSES are the OMP parallel construct's clauses. | |||
965 | CHILD_FN is the function created for the parallel threads to execute. | |||
966 | DATA_ARG are the shared data argument(s). */ | |||
967 | ||||
968 | gomp_parallel * | |||
969 | gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn, | |||
970 | tree data_arg) | |||
971 | { | |||
972 | gomp_parallel *p | |||
973 | = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0)); | |||
974 | if (body) | |||
975 | gimple_omp_set_body (p, body); | |||
976 | gimple_omp_parallel_set_clauses (p, clauses); | |||
977 | gimple_omp_parallel_set_child_fn (p, child_fn); | |||
978 | gimple_omp_parallel_set_data_arg (p, data_arg); | |||
979 | ||||
980 | return p; | |||
981 | } | |||
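/* Editor's note: illustrative use (a sketch, not part of the original
   file).  At gimplification time the child function and data argument are
   not known yet, so they are passed as NULL_TREE and filled in later by
   the OMP lowering/expansion passes:

     gomp_parallel *par
       = gimple_build_omp_parallel (body, clauses, NULL_TREE, NULL_TREE);
*/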
982 | ||||
983 | ||||
984 | /* Build a GIMPLE_OMP_TASK statement. | |||
985 | ||||
986 | BODY is the sequence of statements which are executed by the explicit task. | |||
987 | CLAUSES are the OMP task construct's clauses. | |||
988 | CHILD_FN is the function created for the parallel threads to execute. | |||
989 | DATA_ARG are the shared data argument(s). | |||
990 | COPY_FN is the optional function for firstprivate initialization. | |||
991 | ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */ | |||
992 | ||||
993 | gomp_task * | |||
994 | gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn, | |||
995 | tree data_arg, tree copy_fn, tree arg_size, | |||
996 | tree arg_align) | |||
997 | { | |||
998 | gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0)); | |||
999 | if (body) | |||
1000 | gimple_omp_set_body (p, body); | |||
1001 | gimple_omp_task_set_clauses (p, clauses); | |||
1002 | gimple_omp_task_set_child_fn (p, child_fn); | |||
1003 | gimple_omp_task_set_data_arg (p, data_arg); | |||
1004 | gimple_omp_task_set_copy_fn (p, copy_fn); | |||
1005 | gimple_omp_task_set_arg_size (p, arg_size); | |||
1006 | gimple_omp_task_set_arg_align (p, arg_align); | |||
1007 | ||||
1008 | return p; | |||
1009 | } | |||
1010 | ||||
1011 | ||||
1012 | /* Build a GIMPLE_OMP_SECTION statement for a sections statement. | |||
1013 | ||||
1014 | BODY is the sequence of statements in the section. */ | |||
1015 | ||||
1016 | gimple * | |||
1017 | gimple_build_omp_section (gimple_seq body) | |||
1018 | { | |||
1019 | gimple *p = gimple_alloc (GIMPLE_OMP_SECTION, 0); | |||
1020 | if (body) | |||
1021 | gimple_omp_set_body (p, body); | |||
1022 | ||||
1023 | return p; | |||
1024 | } | |||
1025 | ||||
1026 | ||||
1027 | /* Build a GIMPLE_OMP_MASTER statement. | |||
1028 | ||||
1029 | BODY is the sequence of statements to be executed by just the master. */ | |||
1030 | ||||
1031 | gimple * | |||
1032 | gimple_build_omp_master (gimple_seq body) | |||
1033 | { | |||
1034 | gimple *p = gimple_alloc (GIMPLE_OMP_MASTER, 0); | |||
1035 | if (body) | |||
1036 | gimple_omp_set_body (p, body); | |||
1037 | ||||
1038 | return p; | |||
1039 | } | |||
1040 | ||||
1041 | /* Build a GIMPLE_OMP_TASKGROUP statement. | |||
1042 | ||||
1043 | BODY is the sequence of statements to be executed by the taskgroup | |||
1044 | construct. | |||
1045 | CLAUSES are any of the construct's clauses. */ | |||
1046 | ||||
1047 | gimple * | |||
1048 | gimple_build_omp_taskgroup (gimple_seq body, tree clauses) | |||
1049 | { | |||
1050 | gimple *p = gimple_alloc (GIMPLE_OMP_TASKGROUP, 0); | |||
1051 | gimple_omp_taskgroup_set_clauses (p, clauses); | |||
1052 | if (body) | |||
1053 | gimple_omp_set_body (p, body); | |||
1054 | ||||
1055 | return p; | |||
1056 | } | |||
1057 | ||||
1058 | ||||
1059 | /* Build a GIMPLE_OMP_CONTINUE statement. | |||
1060 | ||||
1061 | CONTROL_DEF is the definition of the control variable. | |||
1062 | CONTROL_USE is the use of the control variable. */ | |||
1063 | ||||
1064 | gomp_continue * | |||
1065 | gimple_build_omp_continue (tree control_def, tree control_use) | |||
1066 | { | |||
1067 | gomp_continue *p | |||
1068 | = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0)); | |||
1069 | gimple_omp_continue_set_control_def (p, control_def); | |||
1070 | gimple_omp_continue_set_control_use (p, control_use); | |||
1071 | return p; | |||
1072 | } | |||
1073 | ||||
1074 | /* Build a GIMPLE_OMP_ORDERED statement. | |||
1075 | ||||
1076 | BODY is the sequence of statements inside a loop that will be executed in | |||
1077 | sequence. | |||
1078 | CLAUSES are clauses for this statement. */ | |||
1079 | ||||
1080 | gomp_ordered * | |||
1081 | gimple_build_omp_ordered (gimple_seq body, tree clauses) | |||
1082 | { | |||
1083 | gomp_ordered *p | |||
1084 | = as_a <gomp_ordered *> (gimple_alloc (GIMPLE_OMP_ORDERED, 0)); | |||
1085 | gimple_omp_ordered_set_clauses (p, clauses); | |||
1086 | if (body) | |||
1087 | gimple_omp_set_body (p, body); | |||
1088 | ||||
1089 | return p; | |||
1090 | } | |||
1091 | ||||
1092 | ||||
1093 | /* Build a GIMPLE_OMP_RETURN statement. | |||
1094 | WAIT_P is true if this is a non-waiting return. */ | |||
1095 | ||||
1096 | gimple * | |||
1097 | gimple_build_omp_return (bool wait_p) | |||
1098 | { | |||
1099 | gimple *p = gimple_alloc (GIMPLE_OMP_RETURN, 0); | |||
1100 | if (wait_p) | |||
1101 | gimple_omp_return_set_nowait (p); | |||
1102 | ||||
1103 | return p; | |||
1104 | } | |||
1105 | ||||
1106 | ||||
1107 | /* Build a GIMPLE_OMP_SCAN statement. | |||
1108 | ||||
1109 | BODY is the sequence of statements to be executed by the scan | |||
1110 | construct. | |||
1111 | CLAUSES are any of the construct's clauses. */ | |||
1112 | ||||
1113 | gomp_scan * | |||
1114 | gimple_build_omp_scan (gimple_seq body, tree clauses) | |||
1115 | { | |||
1116 | gomp_scan *p | |||
1117 | = as_a <gomp_scan *> (gimple_alloc (GIMPLE_OMP_SCAN, 0)); | |||
1118 | gimple_omp_scan_set_clauses (p, clauses); | |||
1119 | if (body) | |||
1120 | gimple_omp_set_body (p, body); | |||
1121 | ||||
1122 | return p; | |||
1123 | } | |||
1124 | ||||
1125 | ||||
1126 | /* Build a GIMPLE_OMP_SECTIONS statement. | |||
1127 | ||||
1128 | BODY is a sequence of section statements. | |||
1129 | CLAUSES are any of the OMP sections construct's clauses: private, | |||
1130 | firstprivate, lastprivate, reduction, and nowait. */ | |||
1131 | ||||
1132 | gomp_sections * | |||
1133 | gimple_build_omp_sections (gimple_seq body, tree clauses) | |||
1134 | { | |||
1135 | gomp_sections *p | |||
1136 | = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0)); | |||
1137 | if (body) | |||
1138 | gimple_omp_set_body (p, body); | |||
1139 | gimple_omp_sections_set_clauses (p, clauses); | |||
1140 | ||||
1141 | return p; | |||
1142 | } | |||
1143 | ||||
1144 | ||||
1145 | /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */ | |||
1146 | ||||
1147 | gimple * | |||
1148 | gimple_build_omp_sections_switch (void) | |||
1149 | { | |||
1150 | return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0); | |||
1151 | } | |||
1152 | ||||
1153 | ||||
1154 | /* Build a GIMPLE_OMP_SINGLE statement. | |||
1155 | ||||
1156 | BODY is the sequence of statements that will be executed once. | |||
1157 | CLAUSES are any of the OMP single construct's clauses: private, firstprivate, | |||
1158 | copyprivate, nowait. */ | |||
1159 | ||||
1160 | gomp_single * | |||
1161 | gimple_build_omp_single (gimple_seq body, tree clauses) | |||
1162 | { | |||
1163 | gomp_single *p | |||
1164 | = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0)); | |||
1165 | if (body) | |||
1166 | gimple_omp_set_body (p, body); | |||
1167 | gimple_omp_single_set_clauses (p, clauses); | |||
1168 | ||||
1169 | return p; | |||
1170 | } | |||
1171 | ||||
1172 | ||||
1173 | /* Build a GIMPLE_OMP_TARGET statement. | |||
1174 | ||||
1175 | BODY is the sequence of statements that will be executed. | |||
1176 | KIND is the kind of the region. | |||
1177 | CLAUSES are any of the construct's clauses. */ | |||
1178 | ||||
1179 | gomp_target * | |||
1180 | gimple_build_omp_target (gimple_seq body, int kind, tree clauses) | |||
1181 | { | |||
1182 | gomp_target *p | |||
1183 | = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0)); | |||
1184 | if (body) | |||
1185 | gimple_omp_set_body (p, body); | |||
1186 | gimple_omp_target_set_clauses (p, clauses); | |||
1187 | gimple_omp_target_set_kind (p, kind); | |||
1188 | ||||
1189 | return p; | |||
1190 | } | |||
1191 | ||||
1192 | ||||
1193 | /* Build a GIMPLE_OMP_TEAMS statement. | |||
1194 | ||||
1195 | BODY is the sequence of statements that will be executed. | |||
1196 | CLAUSES are any of the OMP teams construct's clauses. */ | |||
1197 | ||||
1198 | gomp_teams * | |||
1199 | gimple_build_omp_teams (gimple_seq body, tree clauses) | |||
1200 | { | |||
1201 | gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0)); | |||
1202 | if (body) | |||
1203 | gimple_omp_set_body (p, body); | |||
1204 | gimple_omp_teams_set_clauses (p, clauses); | |||
1205 | ||||
1206 | return p; | |||
1207 | } | |||
1208 | ||||
1209 | ||||
1210 | /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */ | |||
1211 | ||||
1212 | gomp_atomic_load * | |||
1213 | gimple_build_omp_atomic_load (tree lhs, tree rhs, enum omp_memory_order mo) | |||
1214 | { | |||
1215 | gomp_atomic_load *p | |||
1216 | = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0)); | |||
1217 | gimple_omp_atomic_load_set_lhs (p, lhs); | |||
1218 | gimple_omp_atomic_load_set_rhs (p, rhs); | |||
1219 | gimple_omp_atomic_set_memory_order (p, mo); | |||
1220 | return p; | |||
1221 | } | |||
1222 | ||||
1223 | /* Build a GIMPLE_OMP_ATOMIC_STORE statement. | |||
1224 | ||||
1225 | VAL is the value we are storing. */ | |||
1226 | ||||
1227 | gomp_atomic_store * | |||
1228 | gimple_build_omp_atomic_store (tree val, enum omp_memory_order mo) | |||
1229 | { | |||
1230 | gomp_atomic_store *p | |||
1231 | = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0)); | |||
1232 | gimple_omp_atomic_store_set_val (p, val); | |||
1233 | gimple_omp_atomic_set_memory_order (p, mo); | |||
1234 | return p; | |||
1235 | } | |||
1236 | ||||
1237 | /* Build a GIMPLE_TRANSACTION statement. */ | |||
1238 | ||||
1239 | gtransaction * | |||
1240 | gimple_build_transaction (gimple_seq body) | |||
1241 | { | |||
1242 | gtransaction *p | |||
1243 | = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0)); | |||
1244 | gimple_transaction_set_body (p, body); | |||
1245 | gimple_transaction_set_label_norm (p, 0); | |||
1246 | gimple_transaction_set_label_uninst (p, 0); | |||
1247 | gimple_transaction_set_label_over (p, 0); | |||
1248 | return p; | |||
1249 | } | |||
1250 | ||||
1251 | #if defined ENABLE_GIMPLE_CHECKING | |||
1252 | /* Complain of a gimple type mismatch and die. */ | |||
1253 | ||||
1254 | void | |||
1255 | gimple_check_failed (const gimple *gs, const char *file, int line, | |||
1256 | const char *function, enum gimple_code code, | |||
1257 | enum tree_code subcode) | |||
1258 | { | |||
1259 | internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d", | |||
1260 | gimple_code_name[code], | |||
1261 | get_tree_code_name (subcode), | |||
1262 | gimple_code_name[gimple_code (gs)], | |||
1263 | gs->subcode > 0 | |||
1264 | ? get_tree_code_name ((enum tree_code) gs->subcode) | |||
1265 | : "", | |||
1266 | function, trim_filename (file), line); | |||
1267 | } | |||
1268 | #endif /* ENABLE_GIMPLE_CHECKING */ | |||
1269 | ||||
1270 | ||||
1271 | /* Link gimple statement GS to the end of the sequence *SEQ_P. If | |||
1272 | *SEQ_P is NULL, a new sequence is allocated. */ | |||
1273 | ||||
1274 | void | |||
1275 | gimple_seq_add_stmt (gimple_seq *seq_p, gimple *gs) | |||
1276 | { | |||
1277 | gimple_stmt_iterator si; | |||
1278 | if (gs == NULL) | |||
1279 | return; | |||
1280 | ||||
1281 | si = gsi_last (*seq_p); | |||
1282 | gsi_insert_after (&si, gs, GSI_NEW_STMT); | |||
1283 | } | |||
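/* Editor's note: illustrative sequence construction (a sketch, not part
   of the original file; `x' and `y' are assumed trees):

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_assign (x, y));
     gimple_seq_add_stmt (&seq, gimple_build_return (x));
*/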
1284 | ||||
1285 | /* Link gimple statement GS to the end of the sequence *SEQ_P. If | |||
1286 | *SEQ_P is NULL, a new sequence is allocated. This function is | |||
1287 | similar to gimple_seq_add_stmt, but does not scan the operands. | |||
1288 | During gimplification, we need to manipulate statement sequences | |||
1289 | before the def/use vectors have been constructed. */ | |||
1290 | ||||
1291 | void | |||
1292 | gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple *gs) | |||
1293 | { | |||
1294 | gimple_stmt_iterator si; | |||
1295 | ||||
1296 | if (gs == NULL) | |||
1297 | return; | |||
1298 | ||||
1299 | si = gsi_last (*seq_p); | |||
1300 | gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT); | |||
1301 | } | |||
1302 | ||||
1303 | /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is | |||
1304 | NULL, a new sequence is allocated. */ | |||
1305 | ||||
1306 | void | |||
1307 | gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src) | |||
1308 | { | |||
1309 | gimple_stmt_iterator si; | |||
1310 | if (src == NULL) | |||
1311 | return; | |||
1312 | ||||
1313 | si = gsi_last (*dst_p); | |||
1314 | gsi_insert_seq_after (&si, src, GSI_NEW_STMT); | |||
1315 | } | |||
1316 | ||||
1317 | /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is | |||
1318 | NULL, a new sequence is allocated. This function is | |||
1319 | similar to gimple_seq_add_seq, but does not scan the operands. */ | |||
1320 | ||||
1321 | void | |||
1322 | gimple_seq_add_seq_without_update (gimple_seq *dst_p, gimple_seq src) | |||
1323 | { | |||
1324 | gimple_stmt_iterator si; | |||
1325 | if (src == NULL) | |||
1326 | return; | |||
1327 | ||||
1328 | si = gsi_last (*dst_p); | |||
1329 | gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT); | |||
1330 | } | |||
1331 | ||||
1332 | /* Determine whether to assign a location to the statement GS. */ | |||
1333 | ||||
1334 | static bool | |||
1335 | should_carry_location_p (gimple *gs) | |||
1336 | { | |||
1337 | /* Don't emit a line note for a label. We particularly don't want to | |||
1338 | emit one for the break label, since it doesn't actually correspond | |||
1339 | to the beginning of the loop/switch. */ | |||
1340 | if (gimple_code (gs) == GIMPLE_LABEL) | |||
1341 | return false; | |||
1342 | ||||
1343 | return true; | |||
1344 | } | |||
1345 | ||||
1346 | /* Set the location for gimple statement GS to LOCATION. */ | |||
1347 | ||||
1348 | static void | |||
1349 | annotate_one_with_location (gimple *gs, location_t location) | |||
1350 | { | |||
1351 | if (!gimple_has_location (gs) | |||
1352 | && !gimple_do_not_emit_location_p (gs) | |||
1353 | && should_carry_location_p (gs)) | |||
1354 | gimple_set_location (gs, location); | |||
1355 | } | |||
1356 | ||||
1357 | /* Set LOCATION for all the statements after iterator GSI in sequence | |||
1358 | SEQ. If GSI is pointing to the end of the sequence, start with the | |||
1359 | first statement in SEQ. */ | |||
1360 | ||||
1361 | void | |||
1362 | annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi, | |||
1363 | location_t location) | |||
1364 | { | |||
1365 | if (gsi_end_p (gsi)) | |||
1366 | gsi = gsi_start (seq); | |||
1367 | else | |||
1368 | gsi_next (&gsi); | |||
1369 | ||||
1370 | for (; !gsi_end_p (gsi); gsi_next (&gsi)) | |||
1371 | annotate_one_with_location (gsi_stmt (gsi), location); | |||
1372 | } | |||
1373 | ||||
1374 | /* Set the location for all the statements in a sequence STMT_P to LOCATION. */ | |||
1375 | ||||
1376 | void | |||
1377 | annotate_all_with_location (gimple_seq stmt_p, location_t location) | |||
1378 | { | |||
1379 | gimple_stmt_iterator i; | |||
1380 | ||||
1381 | if (gimple_seq_empty_p (stmt_p)) | |||
1382 | return; | |||
1383 | ||||
1384 | for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i)) | |||
1385 | { | |||
1386 | gimple *gs = gsi_stmt (i); | |||
1387 | annotate_one_with_location (gs, location); | |||
1388 | } | |||
1389 | } | |||
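/* Usage sketch (editor's illustration, assuming a freshly gimplified
   sequence SEQ and an expression location LOC):

     annotate_all_with_location (seq, loc);

   Statements that already carry a location, or that should not get one
   (see should_carry_location_p), are left untouched.  */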
1390 | ||||
1391 | /* Helper function of empty_body_p. Return true if STMT is an empty | |||
1392 | statement. */ | |||
1393 | ||||
1394 | static bool | |||
1395 | empty_stmt_p (gimple *stmt) | |||
1396 | { | |||
1397 | if (gimple_code (stmt) == GIMPLE_NOP) | |||
1398 | return true; | |||
1399 | if (gbind *bind_stmt = dyn_cast <gbind *> (stmt)) | |||
1400 | return empty_body_p (gimple_bind_body (bind_stmt)); | |||
1401 | return false; | |||
1402 | } | |||
1403 | ||||
1404 | ||||
1405 | /* Return true if BODY contains nothing but empty statements. */ | |||
1406 | ||||
1407 | bool | |||
1408 | empty_body_p (gimple_seq body) | |||
1409 | { | |||
1410 | gimple_stmt_iterator i; | |||
1411 | ||||
1412 | if (gimple_seq_empty_p (body)) | |||
1413 | return true; | |||
1414 | for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i)) | |||
1415 | if (!empty_stmt_p (gsi_stmt (i)) | |||
1416 | && !is_gimple_debug (gsi_stmt (i))) | |||
1417 | return false; | |||
1418 | ||||
1419 | return true; | |||
1420 | } | |||
1421 | ||||
1422 | ||||
1423 | /* Perform a deep copy of sequence SRC and return the result. */ | |||
1424 | ||||
1425 | gimple_seq | |||
1426 | gimple_seq_copy (gimple_seq src) | |||
1427 | { | |||
1428 | gimple_stmt_iterator gsi; | |||
1429 | gimple_seq new_seq = NULL; | |||
1430 | gimple *stmt; | |||
1431 | ||||
1432 | for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi)) | |||
1433 | { | |||
1434 | stmt = gimple_copy (gsi_stmt (gsi)); | |||
1435 | gimple_seq_add_stmt (&new_seq, stmt); | |||
1436 | } | |||
1437 | ||||
1438 | return new_seq; | |||
1439 | } | |||
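/* Usage sketch (editor's illustration): duplicate the body of a
   GIMPLE_BIND so the copy can be modified independently of the original.

     gbind *bind_stmt = ...;            // some existing bind, hypothetical
     gimple_seq dup = gimple_seq_copy (gimple_bind_body (bind_stmt));

   The copied statements are not members of any sequence or basic block
   until they are inserted somewhere.  */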
1440 | ||||
1441 | ||||
1442 | ||||
1443 | /* Return true if calls C1 and C2 are known to go to the same function. */ | |||
1444 | ||||
1445 | bool | |||
1446 | gimple_call_same_target_p (const gimple *c1, const gimple *c2) | |||
1447 | { | |||
1448 | if (gimple_call_internal_p (c1)) | |||
1449 | return (gimple_call_internal_p (c2) | |||
1450 | && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2) | |||
1451 | && (!gimple_call_internal_unique_p (as_a <const gcall *> (c1)) | |||
1452 | || c1 == c2)); | |||
1453 | else | |||
1454 | return (gimple_call_fn (c1) == gimple_call_fn (c2) | |||
1455 | || (gimple_call_fndecl (c1) | |||
1456 | && gimple_call_fndecl (c1) == gimple_call_fndecl (c2))); | |||
1457 | } | |||
1458 | ||||
1459 | /* Detect flags from a GIMPLE_CALL. This is just like | |||
1460 | call_expr_flags, but for gimple tuples. */ | |||
1461 | ||||
1462 | int | |||
1463 | gimple_call_flags (const gimple *stmt) | |||
1464 | { | |||
1465 | int flags = 0; | |||
1466 | ||||
1467 | if (gimple_call_internal_p (stmt)) | |||
1468 | flags = internal_fn_flags (gimple_call_internal_fn (stmt)); | |||
1469 | else | |||
1470 | { | |||
1471 | tree decl = gimple_call_fndecl (stmt); | |||
1472 | if (decl) | |||
1473 | flags = flags_from_decl_or_type (decl); | |||
1474 | flags |= flags_from_decl_or_type (gimple_call_fntype (stmt)); | |||
1475 | } | |||
1476 | ||||
1477 | if (stmt->subcode & GF_CALL_NOTHROW) | |||
1478 | flags |= ECF_NOTHROW; | |||
1479 | ||||
1480 | if (stmt->subcode & GF_CALL_BY_DESCRIPTOR) | |||
1481 | flags |= ECF_BY_DESCRIPTOR; | |||
1482 | ||||
1483 | return flags; | |||
1484 | } | |||
1485 | ||||
1486 | /* Return the "fn spec" string for call STMT. */ | |||
1487 | ||||
1488 | attr_fnspec | |||
1489 | gimple_call_fnspec (const gcall *stmt) | |||
1490 | { | |||
1491 | tree type, attr; | |||
1492 | ||||
1493 | if (gimple_call_internal_p (stmt)) | |||
1494 | { | |||
1495 | const_tree spec = internal_fn_fnspec (gimple_call_internal_fn (stmt)); | |||
1496 | if (spec) | |||
1497 | return spec; | |||
1498 | else | |||
1499 | return ""; | |||
1500 | } | |||
1501 | ||||
1502 | type = gimple_call_fntype (stmt); | |||
1503 | if (type) | |||
1504 | { | |||
1505 | attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type)); | |||
1506 | if (attr) | |||
1507 | return TREE_VALUE (TREE_VALUE (attr)); | |||
1508 | } | |||
1509 | if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)) | |||
1510 | return builtin_fnspec (gimple_call_fndecl (stmt)); | |||
1511 | tree fndecl = gimple_call_fndecl (stmt); | |||
1512 | /* If the call is to a replaceable operator delete and results | |||
1513 | from a delete expression as opposed to a direct call to | |||
1514 | such operator, then we can treat it as free. */ | |||
1515 | if (fndecl | |||
1516 | && DECL_IS_OPERATOR_DELETE_P (fndecl) | |||
1517 | && DECL_IS_REPLACEABLE_OPERATOR (fndecl) | |||
1518 | && gimple_call_from_new_or_delete (stmt)) | |||
1519 | return ".co "; | |||
1520 | /* Similarly operator new can be treated as malloc. */ | |||
1521 | if (fndecl | |||
1522 | && DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fndecl) | |||
1523 | && gimple_call_from_new_or_delete (stmt)) | |||
1524 | return "mC"; | |||
1525 | return ""; | |||
1526 | } | |||
1527 | ||||
1528 | /* Detects argument flags for argument number ARG on call STMT. */ | |||
1529 | ||||
1530 | int | |||
1531 | gimple_call_arg_flags (const gcall *stmt, unsigned arg) | |||
1532 | { | |||
1533 | attr_fnspec fnspec = gimple_call_fnspec (stmt); | |||
1534 | int flags = 0; | |||
1535 | ||||
1536 | if (fnspec.known_p ()) | |||
1537 | { | |||
1538 | if (!fnspec.arg_specified_p (arg)) | |||
1539 | ; | |||
1540 | else if (!fnspec.arg_used_p (arg)) | |||
1541 | flags = EAF_UNUSED; | |||
1542 | else | |||
1543 | { | |||
1544 | if (fnspec.arg_direct_p (arg)) | |||
1545 | flags |= EAF_DIRECT; | |||
1546 | if (fnspec.arg_noescape_p (arg)) | |||
1547 | flags |= EAF_NOESCAPE | EAF_NODIRECTESCAPE; | |||
1548 | if (fnspec.arg_readonly_p (arg)) | |||
1549 | flags |= EAF_NOCLOBBER; | |||
1550 | } | |||
1551 | } | |||
1552 | tree callee = gimple_call_fndecl (stmt); | |||
1553 | if (callee) | |||
1554 | { | |||
1555 | cgraph_node *node = cgraph_node::get (callee); | |||
1556 | modref_summary *summary = node ? get_modref_function_summary (node) | |||
1557 | : NULL; | |||
1558 | ||||
1559 | if (summary && summary->arg_flags.length () > arg) | |||
1560 | { | |||
1561 | int modref_flags = summary->arg_flags[arg]; | |||
1562 | ||||
1563 | /* We have possibly optimized out load. Be conservative here. */ | |||
1564 | if (!node->binds_to_current_def_p ()) | |||
1565 | { | |||
1566 | if ((modref_flags & EAF_UNUSED) && !(flags & EAF_UNUSED)) | |||
1567 | modref_flags &= ~EAF_UNUSED; | |||
1568 | if ((modref_flags & EAF_DIRECT) && !(flags & EAF_DIRECT)) | |||
1569 | modref_flags &= ~EAF_DIRECT; | |||
1570 | } | |||
1571 | flags |= modref_flags; | |||
1572 | } | |||
1573 | } | |||
1574 | return flags; | |||
1575 | } | |||
1576 | ||||
1577 | /* Detects return flags for the call STMT. */ | |||
1578 | ||||
1579 | int | |||
1580 | gimple_call_return_flags (const gcall *stmt) | |||
1581 | { | |||
1582 | if (gimple_call_flags (stmt) & ECF_MALLOC) | |||
1583 | return ERF_NOALIAS; | |||
1584 | ||||
1585 | attr_fnspec fnspec = gimple_call_fnspec (stmt); | |||
1586 | ||||
1587 | unsigned int arg_no; | |||
1588 | if (fnspec.returns_arg (&arg_no)) | |||
1589 | return ERF_RETURNS_ARG | arg_no; | |||
1590 | ||||
1591 | if (fnspec.returns_noalias_p ()) | |||
1592 | return ERF_NOALIAS; | |||
1593 | return 0; | |||
1594 | } | |||
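/* Usage sketch (editor's illustration): decode the result the same way
   gimple_call_nonnull_arg below does.

     int rf = gimple_call_return_flags (call_stmt);
     if (rf & ERF_RETURNS_ARG)
       {
         unsigned argno = rf & ERF_RETURN_ARG_MASK;  // index of returned argument
         ...
       }
     else if (rf & ERF_NOALIAS)
       ...                                           // result is a fresh allocation
*/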
1595 | ||||
1596 | ||||
1597 | /* Return true if call STMT is known to return a non-zero result. */ | |||
1598 | ||||
1599 | bool | |||
1600 | gimple_call_nonnull_result_p (gcall *call) | |||
1601 | { | |||
1602 | tree fndecl = gimple_call_fndecl (call); | |||
1603 | if (!fndecl) | |||
1604 | return false; | |||
1605 | if (flag_delete_null_pointer_checks && !flag_check_new | |||
1606 | && DECL_IS_OPERATOR_NEW_P (fndecl) | |||
1607 | && !TREE_NOTHROW (fndecl)) | |||
1608 | return true; | |||
1609 | ||||
1610 | /* References are always non-NULL. */ | |||
1611 | if (flag_delete_null_pointer_checks | |||
1612 | && TREE_CODE (TREE_TYPE (fndecl)) == REFERENCE_TYPE) | |||
1613 | return true; | |||
1614 | ||||
1615 | if (flag_delete_null_pointer_checks | |||
1616 | && lookup_attribute ("returns_nonnull", | |||
1617 | TYPE_ATTRIBUTES (gimple_call_fntype (call)))) | |||
1618 | return true; | |||
1619 | return gimple_alloca_call_p (call); | |||
1620 | } | |||
1621 | ||||
1622 | ||||
1623 | /* If CALL returns a non-null result in an argument, return that arg. */ | |||
1624 | ||||
1625 | tree | |||
1626 | gimple_call_nonnull_arg (gcall *call) | |||
1627 | { | |||
1628 | tree fndecl = gimple_call_fndecl (call); | |||
1629 | if (!fndecl) | |||
1630 | return NULL_TREE; | |||
1631 | ||||
1632 | unsigned rf = gimple_call_return_flags (call); | |||
1633 | if (rf & ERF_RETURNS_ARG) | |||
1634 | { | |||
1635 | unsigned argnum = rf & ERF_RETURN_ARG_MASK; | |||
1636 | if (argnum < gimple_call_num_args (call)) | |||
1637 | { | |||
1638 | tree arg = gimple_call_arg (call, argnum); | |||
1639 | if (SSA_VAR_P (arg) | |||
1640 | && infer_nonnull_range_by_attribute (call, arg)) | |||
1641 | return arg; | |||
1642 | } | |||
1643 | } | |||
1644 | return NULL_TREE; | |||
1645 | } | |||
1646 | ||||
1647 | ||||
1648 | /* Return true if GS is a copy assignment. */ | |||
1649 | ||||
1650 | bool | |||
1651 | gimple_assign_copy_p (gimple *gs) | |||
1652 | { | |||
1653 | return (gimple_assign_single_p (gs) | |||
1654 | && is_gimple_val (gimple_op (gs, 1))); | |||
1655 | } | |||
1656 | ||||
1657 | ||||
1658 | /* Return true if GS is a SSA_NAME copy assignment. */ | |||
1659 | ||||
1660 | bool | |||
1661 | gimple_assign_ssa_name_copy_p (gimple *gs) | |||
1662 | { | |||
1663 | return (gimple_assign_single_p (gs) | |||
1664 | && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME | |||
1665 | && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME); | |||
1666 | } | |||
1667 | ||||
1668 | ||||
1669 | /* Return true if GS is an assignment with a unary RHS, but the | |||
1670 | operator has no effect on the assigned value. The logic is adapted | |||
1671 | from STRIP_NOPS. This predicate is intended to be used in tuplifying | |||
1672 | instances in which STRIP_NOPS was previously applied to the RHS of | |||
1673 | an assignment. | |||
1674 | ||||
1675 | NOTE: In the use cases that led to the creation of this function | |||
1676 | and of gimple_assign_single_p, it is typical to test for either | |||
1677 | condition and to proceed in the same manner. In each case, the | |||
1678 | assigned value is represented by the single RHS operand of the | |||
1679 | assignment. I suspect there may be cases where gimple_assign_copy_p, | |||
1680 | gimple_assign_single_p, or equivalent logic is used where a similar | |||
1681 | treatment of unary NOPs is appropriate. */ | |||
1682 | ||||
1683 | bool | |||
1684 | gimple_assign_unary_nop_p (gimple *gs) | |||
1685 | { | |||
1686 | return (is_gimple_assign (gs) | |||
1687 | && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs)) | |||
1688 | || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR) | |||
1689 | && gimple_assign_rhs1 (gs) != error_mark_node | |||
1690 | && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs))) | |||
1691 | == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs))))); | |||
1692 | } | |||
1693 | ||||
1694 | /* Set BB to be the basic block holding G. */ | |||
1695 | ||||
1696 | void | |||
1697 | gimple_set_bb (gimple *stmt, basic_block bb) | |||
1698 | { | |||
1699 | stmt->bb = bb; | |||
1700 | ||||
1701 | if (gimple_code (stmt) != GIMPLE_LABEL) | |||
| ||||
1702 | return; | |||
1703 | ||||
1704 | /* If the statement is a label, add the label to block-to-labels map | |||
1705 | so that we can speed up edge creation for GIMPLE_GOTOs. */ | |||
1706 | if (cfun->cfg) | |||
1707 | { | |||
1708 | tree t; | |||
1709 | int uid; | |||
1710 | ||||
1711 | t = gimple_label_label (as_a <glabel *> (stmt)); | |||
1712 | uid = LABEL_DECL_UID (t); | |||
1713 | if (uid == -1) | |||
1714 | { | |||
1715 | unsigned old_len = | |||
1716 | vec_safe_length (label_to_block_map_for_fn (cfun)); | |||
1717 | LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++; | |||
1718 | if (old_len <= (unsigned) uid) | |||
1719 | vec_safe_grow_cleared (label_to_block_map_for_fn (cfun), uid + 1); | |||
1720 | } | |||
1721 | ||||
1722 | (*label_to_block_map_for_fn (cfun))[uid] = bb; | |||
1723 | } | |||
1724 | } | |||
1725 | ||||
1726 | ||||
1727 | /* Modify the RHS of the assignment pointed-to by GSI using the | |||
1728 | operands in the expression tree EXPR. | |||
1729 | ||||
1730 | NOTE: The statement pointed-to by GSI may be reallocated if it | |||
1731 | did not have enough operand slots. | |||
1732 | ||||
1733 | This function is useful to convert an existing tree expression into | |||
1734 | the flat representation used for the RHS of a GIMPLE assignment. | |||
1735 | It will reallocate memory as needed to expand or shrink the number | |||
1736 | of operand slots needed to represent EXPR. | |||
1737 | ||||
1738 | NOTE: If you find yourself building a tree and then calling this | |||
1739 | function, you are most certainly doing it the slow way. It is much | |||
1740 | better to build a new assignment or to use the function | |||
1741 | gimple_assign_set_rhs_with_ops, which does not require an | |||
1742 | expression tree to be built. */ | |||
1743 | ||||
1744 | void | |||
1745 | gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr) | |||
1746 | { | |||
1747 | enum tree_code subcode; | |||
1748 | tree op1, op2, op3; | |||
1749 | ||||
1750 | extract_ops_from_tree (expr, &subcode, &op1, &op2, &op3); | |||
1751 | gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2, op3); | |||
1752 | } | |||
1753 | ||||
1754 | ||||
1755 | /* Set the RHS of assignment statement pointed-to by GSI to CODE with | |||
1756 | operands OP1, OP2 and OP3. | |||
1757 | ||||
1758 | NOTE: The statement pointed-to by GSI may be reallocated if it | |||
1759 | did not have enough operand slots. */ | |||
1760 | ||||
1761 | void | |||
1762 | gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code, | |||
1763 | tree op1, tree op2, tree op3) | |||
1764 | { | |||
1765 | unsigned new_rhs_ops = get_gimple_rhs_num_ops (code); | |||
1766 | gimple *stmt = gsi_stmt (*gsi); | |||
1767 | gimple *old_stmt = stmt; | |||
1768 | ||||
1769 | /* If the new CODE needs more operands, allocate a new statement. */ | |||
1770 | if (gimple_num_ops (stmt) < new_rhs_ops + 1) | |||
1771 | { | |||
1772 | tree lhs = gimple_assign_lhs (old_stmt); | |||
1773 | stmt = gimple_alloc (gimple_code (old_stmt), new_rhs_ops + 1); | |||
1774 | memcpy (stmt, old_stmt, gimple_size (gimple_code (old_stmt))); | |||
1775 | gimple_init_singleton (stmt); | |||
1776 | ||||
1777 | /* The LHS needs to be reset as this also changes the SSA name | |||
1778 | on the LHS. */ | |||
1779 | gimple_assign_set_lhs (stmt, lhs); | |||
1780 | } | |||
1781 | ||||
1782 | gimple_set_num_ops (stmt, new_rhs_ops + 1); | |||
1783 | gimple_set_subcode (stmt, code); | |||
1784 | gimple_assign_set_rhs1 (stmt, op1); | |||
1785 | if (new_rhs_ops > 1) | |||
1786 | gimple_assign_set_rhs2 (stmt, op2); | |||
1787 | if (new_rhs_ops > 2) | |||
1788 | gimple_assign_set_rhs3 (stmt, op3); | |||
1789 | if (stmt != old_stmt) | |||
1790 | gsi_replace (gsi, stmt, false); | |||
1791 | } | |||
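/* Usage sketch (editor's illustration, hypothetical operands OP1/OP2):
   rewrite the assignment at GSI as "lhs = op1 + op2" without first
   building a GENERIC tree.

     gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, op1, op2, NULL_TREE);
     update_stmt (gsi_stmt (gsi));      // refresh the SSA operand caches

   Because the statement object may have been replaced, callers must
   re-fetch it through GSI as above.  */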
1792 | ||||
1793 | ||||
1794 | /* Return the LHS of a statement that performs an assignment, | |||
1795 | either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE | |||
1796 | for a call to a function that returns no value, or for a | |||
1797 | statement other than an assignment or a call. */ | |||
1798 | ||||
1799 | tree | |||
1800 | gimple_get_lhs (const gimple *stmt) | |||
1801 | { | |||
1802 | enum gimple_code code = gimple_code (stmt); | |||
1803 | ||||
1804 | if (code == GIMPLE_ASSIGN) | |||
1805 | return gimple_assign_lhs (stmt); | |||
1806 | else if (code == GIMPLE_CALL) | |||
1807 | return gimple_call_lhs (stmt); | |||
1808 | else if (code == GIMPLE_PHI) | |||
1809 | return gimple_phi_result (stmt); | |||
1810 | else | |||
1811 | return NULL_TREE; | |||
1812 | } | |||
1813 | ||||
1814 | ||||
1815 | /* Set the LHS of a statement that performs an assignment, | |||
1816 | either a GIMPLE_ASSIGN or a GIMPLE_CALL. */ | |||
1817 | ||||
1818 | void | |||
1819 | gimple_set_lhs (gimple *stmt, tree lhs) | |||
1820 | { | |||
1821 | enum gimple_code code = gimple_code (stmt); | |||
1822 | ||||
1823 | if (code == GIMPLE_ASSIGN) | |||
1824 | gimple_assign_set_lhs (stmt, lhs); | |||
1825 | else if (code == GIMPLE_CALL) | |||
1826 | gimple_call_set_lhs (stmt, lhs); | |||
1827 | else | |||
1828 | gcc_unreachable (); | |||
1829 | } | |||
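/* Usage sketch (editor's illustration): the pair gimple_get_lhs /
   gimple_set_lhs lets a pass treat assignments and calls uniformly.

     tree lhs = gimple_get_lhs (stmt);
     if (lhs != NULL_TREE && TREE_CODE (lhs) == SSA_NAME)
       gimple_set_lhs (stmt, new_lhs);  // new_lhs is a hypothetical replacement
*/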
1830 | ||||
1831 | ||||
1832 | /* Return a deep copy of statement STMT. All the operands from STMT | |||
1833 | are reallocated and copied using unshare_expr. The DEF, USE, VDEF | |||
1834 | and VUSE operand arrays are set to empty in the new copy. The new | |||
1835 | copy isn't part of any sequence. */ | |||
1836 | ||||
1837 | gimple * | |||
1838 | gimple_copy (gimple *stmt) | |||
1839 | { | |||
1840 | enum gimple_code code = gimple_code (stmt); | |||
1841 | unsigned num_ops = gimple_num_ops (stmt); | |||
1842 | gimple *copy = gimple_alloc (code, num_ops); | |||
1843 | unsigned i; | |||
1844 | ||||
1845 | /* Shallow copy all the fields from STMT. */ | |||
1846 | memcpy (copy, stmt, gimple_size (code)); | |||
1847 | gimple_init_singleton (copy); | |||
1848 | ||||
1849 | /* If STMT has sub-statements, deep-copy them as well. */ | |||
1850 | if (gimple_has_substatements (stmt)) | |||
1851 | { | |||
1852 | gimple_seq new_seq; | |||
1853 | tree t; | |||
1854 | ||||
1855 | switch (gimple_code (stmt)) | |||
1856 | { | |||
1857 | case GIMPLE_BIND: | |||
1858 | { | |||
1859 | gbind *bind_stmt = as_a <gbind *> (stmt); | |||
1860 | gbind *bind_copy = as_a <gbind *> (copy); | |||
1861 | new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt)); | |||
1862 | gimple_bind_set_body (bind_copy, new_seq); | |||
1863 | gimple_bind_set_vars (bind_copy, | |||
1864 | unshare_expr (gimple_bind_vars (bind_stmt))); | |||
1865 | gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt)); | |||
1866 | } | |||
1867 | break; | |||
1868 | ||||
1869 | case GIMPLE_CATCH: | |||
1870 | { | |||
1871 | gcatch *catch_stmt = as_a <gcatch *> (stmt); | |||
1872 | gcatch *catch_copy = as_a <gcatch *> (copy); | |||
1873 | new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt)); | |||
1874 | gimple_catch_set_handler (catch_copy, new_seq); | |||
1875 | t = unshare_expr (gimple_catch_types (catch_stmt)); | |||
1876 | gimple_catch_set_types (catch_copy, t); | |||
1877 | } | |||
1878 | break; | |||
1879 | ||||
1880 | case GIMPLE_EH_FILTER: | |||
1881 | { | |||
1882 | geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt); | |||
1883 | geh_filter *eh_filter_copy = as_a <geh_filter *> (copy); | |||
1884 | new_seq | |||
1885 | = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt)); | |||
1886 | gimple_eh_filter_set_failure (eh_filter_copy, new_seq); | |||
1887 | t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt)); | |||
1888 | gimple_eh_filter_set_types (eh_filter_copy, t); | |||
1889 | } | |||
1890 | break; | |||
1891 | ||||
1892 | case GIMPLE_EH_ELSE: | |||
1893 | { | |||
1894 | geh_else *eh_else_stmt = as_a <geh_else *> (stmt); | |||
1895 | geh_else *eh_else_copy = as_a <geh_else *> (copy); | |||
1896 | new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt)); | |||
1897 | gimple_eh_else_set_n_body (eh_else_copy, new_seq); | |||
1898 | new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt)); | |||
1899 | gimple_eh_else_set_e_body (eh_else_copy, new_seq); | |||
1900 | } | |||
1901 | break; | |||
1902 | ||||
1903 | case GIMPLE_TRY: | |||
1904 | { | |||
1905 | gtry *try_stmt = as_a <gtry *> (stmt); | |||
1906 | gtry *try_copy = as_a <gtry *> (copy); | |||
1907 | new_seq = gimple_seq_copy (gimple_try_eval (try_stmt)); | |||
1908 | gimple_try_set_eval (try_copy, new_seq); | |||
1909 | new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt)); | |||
1910 | gimple_try_set_cleanup (try_copy, new_seq); | |||
1911 | } | |||
1912 | break; | |||
1913 | ||||
1914 | case GIMPLE_OMP_FOR: | |||
1915 | new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt)); | |||
1916 | gimple_omp_for_set_pre_body (copy, new_seq); | |||
1917 | t = unshare_expr (gimple_omp_for_clauses (stmt)); | |||
1918 | gimple_omp_for_set_clauses (copy, t); | |||
1919 | { | |||
1920 | gomp_for *omp_for_copy = as_a <gomp_for *> (copy); | |||
1921 | omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter> | |||
1922 | ( gimple_omp_for_collapse (stmt)); | |||
1923 | } | |||
1924 | for (i = 0; i < gimple_omp_for_collapse (stmt); i++) | |||
1925 | { | |||
1926 | gimple_omp_for_set_cond (copy, i, | |||
1927 | gimple_omp_for_cond (stmt, i)); | |||
1928 | gimple_omp_for_set_index (copy, i, | |||
1929 | gimple_omp_for_index (stmt, i)); | |||
1930 | t = unshare_expr (gimple_omp_for_initial (stmt, i)); | |||
1931 | gimple_omp_for_set_initial (copy, i, t); | |||
1932 | t = unshare_expr (gimple_omp_for_final (stmt, i)); | |||
1933 | gimple_omp_for_set_final (copy, i, t); | |||
1934 | t = unshare_expr (gimple_omp_for_incr (stmt, i)); | |||
1935 | gimple_omp_for_set_incr (copy, i, t); | |||
1936 | } | |||
1937 | goto copy_omp_body; | |||
1938 | ||||
1939 | case GIMPLE_OMP_PARALLEL: | |||
1940 | { | |||
1941 | gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt); | |||
1942 | gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy); | |||
1943 | t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt)); | |||
1944 | gimple_omp_parallel_set_clauses (omp_par_copy, t); | |||
1945 | t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt)); | |||
1946 | gimple_omp_parallel_set_child_fn (omp_par_copy, t); | |||
1947 | t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt)); | |||
1948 | gimple_omp_parallel_set_data_arg (omp_par_copy, t); | |||
1949 | } | |||
1950 | goto copy_omp_body; | |||
1951 | ||||
1952 | case GIMPLE_OMP_TASK: | |||
1953 | t = unshare_expr (gimple_omp_task_clauses (stmt)); | |||
1954 | gimple_omp_task_set_clauses (copy, t); | |||
1955 | t = unshare_expr (gimple_omp_task_child_fn (stmt)); | |||
1956 | gimple_omp_task_set_child_fn (copy, t); | |||
1957 | t = unshare_expr (gimple_omp_task_data_arg (stmt)); | |||
1958 | gimple_omp_task_set_data_arg (copy, t); | |||
1959 | t = unshare_expr (gimple_omp_task_copy_fn (stmt)); | |||
1960 | gimple_omp_task_set_copy_fn (copy, t); | |||
1961 | t = unshare_expr (gimple_omp_task_arg_size (stmt)); | |||
1962 | gimple_omp_task_set_arg_size (copy, t); | |||
1963 | t = unshare_expr (gimple_omp_task_arg_align (stmt)); | |||
1964 | gimple_omp_task_set_arg_align (copy, t); | |||
1965 | goto copy_omp_body; | |||
1966 | ||||
1967 | case GIMPLE_OMP_CRITICAL: | |||
1968 | t = unshare_expr (gimple_omp_critical_name | |||
1969 | (as_a <gomp_critical *> (stmt))); | |||
1970 | gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t); | |||
1971 | t = unshare_expr (gimple_omp_critical_clauses | |||
1972 | (as_a <gomp_critical *> (stmt))); | |||
1973 | gimple_omp_critical_set_clauses (as_a <gomp_critical *> (copy), t); | |||
1974 | goto copy_omp_body; | |||
1975 | ||||
1976 | case GIMPLE_OMP_ORDERED: | |||
1977 | t = unshare_expr (gimple_omp_ordered_clauses | |||
1978 | (as_a <gomp_ordered *> (stmt))); | |||
1979 | gimple_omp_ordered_set_clauses (as_a <gomp_ordered *> (copy), t); | |||
1980 | goto copy_omp_body; | |||
1981 | ||||
1982 | case GIMPLE_OMP_SCAN: | |||
1983 | t = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)); | |||
1984 | t = unshare_expr (t); | |||
1985 | gimple_omp_scan_set_clauses (as_a <gomp_scan *> (copy), t); | |||
1986 | goto copy_omp_body; | |||
1987 | ||||
1988 | case GIMPLE_OMP_TASKGROUP: | |||
1989 | t = unshare_expr (gimple_omp_taskgroup_clauses (stmt)); | |||
1990 | gimple_omp_taskgroup_set_clauses (copy, t); | |||
1991 | goto copy_omp_body; | |||
1992 | ||||
1993 | case GIMPLE_OMP_SECTIONS: | |||
1994 | t = unshare_expr (gimple_omp_sections_clauses (stmt)); | |||
1995 | gimple_omp_sections_set_clauses (copy, t); | |||
1996 | t = unshare_expr (gimple_omp_sections_control (stmt)); | |||
1997 | gimple_omp_sections_set_control (copy, t); | |||
1998 | goto copy_omp_body; | |||
1999 | ||||
2000 | case GIMPLE_OMP_SINGLE: | |||
2001 | { | |||
2002 | gomp_single *omp_single_copy = as_a <gomp_single *> (copy); | |||
2003 | t = unshare_expr (gimple_omp_single_clauses (stmt)); | |||
2004 | gimple_omp_single_set_clauses (omp_single_copy, t); | |||
2005 | } | |||
2006 | goto copy_omp_body; | |||
2007 | ||||
2008 | case GIMPLE_OMP_TARGET: | |||
2009 | { | |||
2010 | gomp_target *omp_target_stmt = as_a <gomp_target *> (stmt); | |||
2011 | gomp_target *omp_target_copy = as_a <gomp_target *> (copy); | |||
2012 | t = unshare_expr (gimple_omp_target_clauses (omp_target_stmt)); | |||
2013 | gimple_omp_target_set_clauses (omp_target_copy, t); | |||
2014 | t = unshare_expr (gimple_omp_target_data_arg (omp_target_stmt)); | |||
2015 | gimple_omp_target_set_data_arg (omp_target_copy, t); | |||
2016 | } | |||
2017 | goto copy_omp_body; | |||
2018 | ||||
2019 | case GIMPLE_OMP_TEAMS: | |||
2020 | { | |||
2021 | gomp_teams *omp_teams_copy = as_a <gomp_teams *> (copy); | |||
2022 | t = unshare_expr (gimple_omp_teams_clauses (stmt)); | |||
2023 | gimple_omp_teams_set_clauses (omp_teams_copy, t); | |||
2024 | } | |||
2025 | /* FALLTHRU */ | |||
2026 | ||||
2027 | case GIMPLE_OMP_SECTION: | |||
2028 | case GIMPLE_OMP_MASTER: | |||
2029 | copy_omp_body: | |||
2030 | new_seq = gimple_seq_copy (gimple_omp_body (stmt)); | |||
2031 | gimple_omp_set_body (copy, new_seq); | |||
2032 | break; | |||
2033 | ||||
2034 | case GIMPLE_TRANSACTION: | |||
2035 | new_seq = gimple_seq_copy (gimple_transaction_body ( | |||
2036 | as_a <gtransaction *> (stmt))); | |||
2037 | gimple_transaction_set_body (as_a <gtransaction *> (copy), | |||
2038 | new_seq); | |||
2039 | break; | |||
2040 | ||||
2041 | case GIMPLE_WITH_CLEANUP_EXPR: | |||
2042 | new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt)); | |||
2043 | gimple_wce_set_cleanup (copy, new_seq); | |||
2044 | break; | |||
2045 | ||||
2046 | default: | |||
2047 | gcc_unreachable (); | |||
2048 | } | |||
2049 | } | |||
2050 | ||||
2051 | /* Make copy of operands. */ | |||
2052 | for (i = 0; i < num_ops; i++) | |||
2053 | gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i))); | |||
2054 | ||||
2055 | if (gimple_has_mem_ops (stmt)) | |||
2056 | { | |||
2057 | gimple_set_vdef (copy, gimple_vdef (stmt)); | |||
2058 | gimple_set_vuse (copy, gimple_vuse (stmt)); | |||
2059 | } | |||
2060 | ||||
2061 | /* Clear out SSA operand vectors on COPY. */ | |||
2062 | if (gimple_has_ops (stmt)) | |||
2063 | { | |||
2064 | gimple_set_use_ops (copy, NULL); | |||
2065 | ||||
2066 | /* SSA operands need to be updated. */ | |||
2067 | gimple_set_modified (copy, true); | |||
2068 | } | |||
2069 | ||||
2070 | if (gimple_debug_nonbind_marker_p (stmt)) | |||
2071 | cfun->debug_marker_count++; | |||
2072 | ||||
2073 | return copy; | |||
2074 | } | |||
2075 | ||||
2076 | /* Move OLD_STMT's vuse and vdef operands to NEW_STMT, on the assumption | |||
2077 | that OLD_STMT is about to be removed. */ | |||
2078 | ||||
2079 | void | |||
2080 | gimple_move_vops (gimple *new_stmt, gimple *old_stmt) | |||
2081 | { | |||
2082 | tree vdef = gimple_vdef (old_stmt); | |||
2083 | gimple_set_vuse (new_stmt, gimple_vuse (old_stmt)); | |||
2084 | gimple_set_vdef (new_stmt, vdef); | |||
2085 | if (vdef && TREE_CODE (vdef) == SSA_NAME) | |||
2086 | SSA_NAME_DEF_STMT (vdef) = new_stmt; | |||
2087 | } | |||
2088 | ||||
2089 | /* Return true if statement S has side-effects. We consider a | |||
2090 | statement to have side effects if: | |||
2091 | ||||
2092 | - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST. | |||
2093 | - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */ | |||
2094 | ||||
2095 | bool | |||
2096 | gimple_has_side_effects (const gimple *s) | |||
2097 | { | |||
2098 | if (is_gimple_debug (s)) | |||
2099 | return false; | |||
2100 | ||||
2101 | /* We don't have to scan the arguments to check for | |||
2102 | volatile arguments, though, at present, we still | |||
2103 | do a scan to check for TREE_SIDE_EFFECTS. */ | |||
2104 | if (gimple_has_volatile_ops (s)) | |||
2105 | return true; | |||
2106 | ||||
2107 | if (gimple_code (s) == GIMPLE_ASM | |||
2108 | && gimple_asm_volatile_p (as_a <const gasm *> (s))) | |||
2109 | return true; | |||
2110 | ||||
2111 | if (is_gimple_call (s)) | |||
2112 | { | |||
2113 | int flags = gimple_call_flags (s); | |||
2114 | ||||
2115 | /* An infinite loop is considered a side effect. */ | |||
2116 | if (!(flags & (ECF_CONST | ECF_PURE)) | |||
2117 | || (flags & ECF_LOOPING_CONST_OR_PURE)) | |||
2118 | return true; | |||
2119 | ||||
2120 | return false; | |||
2121 | } | |||
2122 | ||||
2123 | return false; | |||
2124 | } | |||
2125 | ||||
2126 | /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p. | |||
2127 | Return true if S can trap. When INCLUDE_MEM is true, check whether | |||
2128 | the memory operations could trap. When INCLUDE_STORES is true and | |||
2129 | S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */ | |||
2130 | ||||
2131 | bool | |||
2132 | gimple_could_trap_p_1 (gimple *s, bool include_mem, bool include_stores) | |||
2133 | { | |||
2134 | tree t, div = NULL_TREE; | |||
2135 | enum tree_code op; | |||
2136 | ||||
2137 | if (include_mem) | |||
2138 | { | |||
2139 | unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0; | |||
2140 | ||||
2141 | for (i = start; i < gimple_num_ops (s); i++) | |||
2142 | if (tree_could_trap_p (gimple_op (s, i))) | |||
2143 | return true; | |||
2144 | } | |||
2145 | ||||
2146 | switch (gimple_code (s)) | |||
2147 | { | |||
2148 | case GIMPLE_ASM: | |||
2149 | return gimple_asm_volatile_p (as_a <gasm *> (s)); | |||
2150 | ||||
2151 | case GIMPLE_CALL: | |||
2152 | t = gimple_call_fndecl (s); | |||
2153 | /* Assume that calls to weak functions may trap. */ | |||
2154 | if (!t || !DECL_P (t) || DECL_WEAK (t)) | |||
2155 | return true; | |||
2156 | return false; | |||
2157 | ||||
2158 | case GIMPLE_ASSIGN: | |||
2159 | op = gimple_assign_rhs_code (s); | |||
2160 | ||||
2161 | /* For COND_EXPR and VEC_COND_EXPR only the condition may trap. */ | |||
2162 | if (op == COND_EXPR || op == VEC_COND_EXPR) | |||
2163 | return tree_could_trap_p (gimple_assign_rhs1 (s)); | |||
2164 | ||||
2165 | /* For comparisons we need to check rhs operand types instead of rhs type | |||
2166 | (which is BOOLEAN_TYPE). */ | |||
2167 | if (TREE_CODE_CLASS (op) == tcc_comparison) | |||
2168 | t = TREE_TYPE (gimple_assign_rhs1 (s)); | |||
2169 | else | |||
2170 | t = gimple_expr_type (s); | |||
2171 | ||||
2172 | if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS) | |||
2173 | div = gimple_assign_rhs2 (s); | |||
2174 | ||||
2175 | return (operation_could_trap_p (op, FLOAT_TYPE_P (t), | |||
2176 | (INTEGRAL_TYPE_P (t) | |||
2177 | && TYPE_OVERFLOW_TRAPS (t)), | |||
2178 | div)); | |||
2179 | ||||
2180 | case GIMPLE_COND: | |||
2181 | t = TREE_TYPE (gimple_cond_lhs (s)); | |||
2182 | return operation_could_trap_p (gimple_cond_code (s), | |||
2183 | FLOAT_TYPE_P (t), false, NULL_TREE); | |||
2184 | ||||
2185 | default: | |||
2186 | break; | |||
2187 | } | |||
2188 | ||||
2189 | return false; | |||
2190 | } | |||
2191 | ||||
2192 | /* Return true if statement S can trap. */ | |||
2193 | ||||
2194 | bool | |||
2195 | gimple_could_trap_p (gimple *s) | |||
2196 | { | |||
2197 | return gimple_could_trap_p_1 (s, true, true); | |||
2198 | } | |||
2199 | ||||
2200 | /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */ | |||
2201 | ||||
2202 | bool | |||
2203 | gimple_assign_rhs_could_trap_p (gimple *s) | |||
2204 | { | |||
2205 | gcc_assert (is_gimple_assign (s)); | |||
2206 | return gimple_could_trap_p_1 (s, true, false); | |||
2207 | } | |||
2208 | ||||
2209 | ||||
2210 | /* Print debugging information for gimple stmts generated. */ | |||
2211 | ||||
2212 | void | |||
2213 | dump_gimple_statistics (void) | |||
2214 | { | |||
2215 | int i; | |||
2216 | uint64_t total_tuples = 0, total_bytes = 0; | |||
2217 | ||||
2218 | if (! GATHER_STATISTICS) | |||
2219 | { | |||
2220 | fprintf (stderr, "No GIMPLE statistics\n"); | |||
2221 | return; | |||
2222 | } | |||
2223 | ||||
2224 | fprintf (stderr, "\nGIMPLE statements\n"); | |||
2225 | fprintf (stderr, "Kind                   Stmts      Bytes\n"); | |||
2226 | fprintf (stderr, "---------------------------------------\n"); | |||
2227 | for (i = 0; i < (int) gimple_alloc_kind_all; ++i) | |||
2228 | { | |||
2229 | fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n", | |||
2230 | gimple_alloc_kind_names[i], | |||
2231 | SIZE_AMOUNT (gimple_alloc_counts[i]), | |||
2232 | SIZE_AMOUNT (gimple_alloc_sizes[i])); | |||
2233 | total_tuples += gimple_alloc_counts[i]; | |||
2234 | total_bytes += gimple_alloc_sizes[i]; | |||
2235 | } | |||
2236 | fprintf (stderr, "---------------------------------------\n"); | |||
2237 | fprintf (stderr, "%-20s %7" PRIu64 "%c %10" PRIu64 "%c\n", "Total", | |||
2238 | SIZE_AMOUNT (total_tuples), SIZE_AMOUNT (total_bytes)); | |||
2239 | fprintf (stderr, "---------------------------------------\n"); | |||
2240 | } | |||
2241 | ||||
2242 | ||||
2243 | /* Return the number of operands needed on the RHS of a GIMPLE | |||
2244 | assignment for an expression with tree code CODE. */ | |||
2245 | ||||
2246 | unsigned | |||
2247 | get_gimple_rhs_num_ops (enum tree_code code) | |||
2248 | { | |||
2249 | switch (get_gimple_rhs_class (code)) | |||
2250 | { | |||
2251 | case GIMPLE_UNARY_RHS: | |||
2252 | case GIMPLE_SINGLE_RHS: | |||
2253 | return 1; | |||
2254 | case GIMPLE_BINARY_RHS: | |||
2255 | return 2; | |||
2256 | case GIMPLE_TERNARY_RHS: | |||
2257 | return 3; | |||
2258 | default: | |||
2259 | gcc_unreachable (); | |||
2260 | } | |||
2261 | } | |||
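/* Worked example (editor's note): PLUS_EXPR is tcc_binary, so the table
   below classifies it as GIMPLE_BINARY_RHS and get_gimple_rhs_num_ops
   returns 2; COND_EXPR is listed explicitly as GIMPLE_TERNARY_RHS and
   yields 3; SSA_NAME is GIMPLE_SINGLE_RHS and yields 1.  */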
2262 | ||||
2263 | #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \ | |||
2264 | (unsigned char) \ | |||
2265 | ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \ | |||
2266 | : ((TYPE) == tcc_binary \ | |||
2267 | || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \ | |||
2268 | : ((TYPE) == tcc_constant \ | |||
2269 | || (TYPE) == tcc_declaration \ | |||
2270 | || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \ | |||
2271 | : ((SYM) == TRUTH_AND_EXPR \ | |||
2272 | || (SYM) == TRUTH_OR_EXPR \ | |||
2273 | || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \ | |||
2274 | : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \ | |||
2275 | : ((SYM) == COND_EXPR \ | |||
2276 | || (SYM) == WIDEN_MULT_PLUS_EXPR \ | |||
2277 | || (SYM) == WIDEN_MULT_MINUS_EXPR \ | |||
2278 | || (SYM) == DOT_PROD_EXPR \ | |||
2279 | || (SYM) == SAD_EXPR \ | |||
2280 | || (SYM) == REALIGN_LOAD_EXPR \ | |||
2281 | || (SYM) == VEC_COND_EXPR \ | |||
2282 | || (SYM) == VEC_PERM_EXPR \ | |||
2283 | || (SYM) == BIT_INSERT_EXPR) ? GIMPLE_TERNARY_RHS \ | |||
2284 | : ((SYM) == CONSTRUCTOR \ | |||
2285 | || (SYM) == OBJ_TYPE_REF \ | |||
2286 | || (SYM) == ASSERT_EXPR \ | |||
2287 | || (SYM) == ADDR_EXPR \ | |||
2288 | || (SYM) == WITH_SIZE_EXPR \ | |||
2289 | || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \ | |||
2290 | : GIMPLE_INVALID_RHS), | |||
2291 | #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS, | |||
2292 | ||||
2293 | const unsigned char gimple_rhs_class_table[] = { | |||
2294 | #include "all-tree.def" | |||
2295 | }; | |||
2296 | ||||
2297 | #undef DEFTREECODE | |||
2298 | #undef END_OF_BASE_TREE_CODES | |||
2299 | ||||
2300 | /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns | |||
2301 | a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if | |||
2302 | we failed to create one. */ | |||
2303 | ||||
2304 | tree | |||
2305 | canonicalize_cond_expr_cond (tree t) | |||
2306 | { | |||
2307 | /* Strip conversions around boolean operations. */ | |||
2308 | if (CONVERT_EXPR_P (t) | |||
2309 | && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))) | |||
2310 | || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) | |||
2311 | == BOOLEAN_TYPE)) | |||
2312 | t = TREE_OPERAND (t, 0); | |||
2313 | ||||
2314 | /* For !x use x == 0. */ | |||
2315 | if (TREE_CODE (t) == TRUTH_NOT_EXPR) | |||
2316 | { | |||
2317 | tree top0 = TREE_OPERAND (t, 0); | |||
2318 | t = build2 (EQ_EXPR, TREE_TYPE (t), | |||
2319 | top0, build_int_cst (TREE_TYPE (top0), 0)); | |||
2320 | } | |||
2321 | /* For cmp ? 1 : 0 use cmp. */ | |||
2322 | else if (TREE_CODE (t) == COND_EXPR | |||
2323 | && COMPARISON_CLASS_P (TREE_OPERAND (t, 0)) | |||
2324 | && integer_onep (TREE_OPERAND (t, 1)) | |||
2325 | && integer_zerop (TREE_OPERAND (t, 2))) | |||
2326 | { | |||
2327 | tree top0 = TREE_OPERAND (t, 0); | |||
2328 | t = build2 (TREE_CODE (top0), TREE_TYPE (t), | |||
2329 | TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1)); | |||
2330 | } | |||
2331 | /* For x ^ y use x != y. */ | |||
2332 | else if (TREE_CODE (t) == BIT_XOR_EXPR) | |||
2333 | t = build2 (NE_EXPR, TREE_TYPE (t), | |||
2334 | TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)); | |||
2335 | ||||
2336 | if (is_gimple_condexpr (t)) | |||
2337 | return t; | |||
2338 | ||||
2339 | return NULL_TREE; | |||
2340 | } | |||
2341 | ||||
2342 | /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in | |||
2343 | the positions marked by the set ARGS_TO_SKIP. */ | |||
2344 | ||||
2345 | gcall * | |||
2346 | gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip) | |||
2347 | { | |||
2348 | int i; | |||
2349 | int nargs = gimple_call_num_args (stmt); | |||
2350 | auto_vec<tree> vargs (nargs); | |||
2351 | gcall *new_stmt; | |||
2352 | ||||
2353 | for (i = 0; i < nargs; i++) | |||
2354 | if (!bitmap_bit_p (args_to_skip, i)) | |||
2355 | vargs.quick_push (gimple_call_arg (stmt, i)); | |||
2356 | ||||
2357 | if (gimple_call_internal_p (stmt)) | |||
2358 | new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt), | |||
2359 | vargs); | |||
2360 | else | |||
2361 | new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs); | |||
2362 | ||||
2363 | if (gimple_call_lhs (stmt)) | |||
2364 | gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt)); | |||
2365 | ||||
2366 | gimple_set_vuse (new_stmt, gimple_vuse (stmt)); | |||
2367 | gimple_set_vdef (new_stmt, gimple_vdef (stmt)); | |||
2368 | ||||
2369 | if (gimple_has_location (stmt)) | |||
2370 | gimple_set_location (new_stmt, gimple_location (stmt)); | |||
2371 | gimple_call_copy_flags (new_stmt, stmt); | |||
2372 | gimple_call_set_chain (new_stmt, gimple_call_chain (stmt)); | |||
2373 | ||||
2374 | gimple_set_modified (new_stmt, true); | |||
2375 | ||||
2376 | return new_stmt; | |||
2377 | } | |||
2378 | ||||
2379 | ||||
2380 | ||||
2381 | /* Return true if the field decls F1 and F2 are at the same offset. | |||
2382 | ||||
2383 | This is intended to be used on GIMPLE types only. */ | |||
2384 | ||||
2385 | bool | |||
2386 | gimple_compare_field_offset (tree f1, tree f2) | |||
2387 | { | |||
2388 | if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2)) | |||
2389 | { | |||
2390 | tree offset1 = DECL_FIELD_OFFSET (f1); | |||
2391 | tree offset2 = DECL_FIELD_OFFSET (f2); | |||
2392 | return ((offset1 == offset2 | |||
2393 | /* Once gimplification is done, self-referential offsets are | |||
2394 | instantiated as operand #2 of the COMPONENT_REF built for | |||
2395 | each access and reset. Therefore, they are not relevant | |||
2396 | anymore and fields are interchangeable provided that they | |||
2397 | represent the same access. */ | |||
2398 | || (TREE_CODE (offset1) == PLACEHOLDER_EXPR | |||
2399 | && TREE_CODE (offset2) == PLACEHOLDER_EXPR | |||
2400 | && (DECL_SIZE (f1) == DECL_SIZE (f2) | |||
2401 | || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR | |||
2402 | && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR) | |||
2403 | || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0)) | |||
2404 | && DECL_ALIGN (f1) == DECL_ALIGN (f2)) | |||
2405 | || operand_equal_p (offset1, offset2, 0)) | |||
2406 | && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1), | |||
2407 | DECL_FIELD_BIT_OFFSET (f2))); | |||
2408 | } | |||
2409 | ||||
2410 | /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN | |||
2411 | should be, so handle differing ones specially by decomposing | |||
2412 | the offset into a byte and bit offset manually. */ | |||
2413 | if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1)((tree_check ((f1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2413, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)) | |||
2414 | && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2)((tree_check ((f2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2414, __FUNCTION__, (FIELD_DECL)))->field_decl.offset))) | |||
2415 | { | |||
2416 | unsigned HOST_WIDE_INTlong byte_offset1, byte_offset2; | |||
2417 | unsigned HOST_WIDE_INTlong bit_offset1, bit_offset2; | |||
2418 | bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1))((unsigned long) (*tree_int_cst_elt_check ((((tree_check ((f1 ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2418, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset )), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2418, __FUNCTION__))); | |||
2419 | byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))((unsigned long) (*tree_int_cst_elt_check ((((tree_check ((f1 ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2419, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2419, __FUNCTION__))) | |||
2420 | + bit_offset1 / BITS_PER_UNIT(8)); | |||
2421 | bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2))((unsigned long) (*tree_int_cst_elt_check ((((tree_check ((f2 ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2421, __FUNCTION__, (FIELD_DECL)))->field_decl.bit_offset )), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2421, __FUNCTION__))); | |||
2422 | byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))((unsigned long) (*tree_int_cst_elt_check ((((tree_check ((f2 ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2422, __FUNCTION__, (FIELD_DECL)))->field_decl.offset)), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2422, __FUNCTION__))) | |||
2423 | + bit_offset2 / BITS_PER_UNIT(8)); | |||
2424 | if (byte_offset1 != byte_offset2) | |||
2425 | return false; | |||
2426 | return bit_offset1 % BITS_PER_UNIT(8) == bit_offset2 % BITS_PER_UNIT(8); | |||
2427 | } | |||
2428 | ||||
2429 | return false; | |||
2430 | } | |||
2431 | ||||
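/* Illustrative sketch, not part of the original gimple.c (plain C++,
   hypothetical helper name): the manual decomposition above folds whole
   storage units of DECL_FIELD_BIT_OFFSET into the byte offset and then
   compares only the within-unit remainder, so e.g. (offset 4, bit offset 18)
   and (offset 6, bit offset 2) name the same position on 8-bit-unit targets.  */
static inline bool
same_field_position_example (unsigned long byte1, unsigned long bit1,
                             unsigned long byte2, unsigned long bit2)
{
  const unsigned long units = 8;        /* assume BITS_PER_UNIT == 8 */
  return (byte1 + bit1 / units == byte2 + bit2 / units
          && bit1 % units == bit2 % units);
}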
2432 | ||||
2433 | /* Return a type the same as TYPE except unsigned or | |||
2434 | signed according to UNSIGNEDP. */ | |||
2435 | ||||
2436 | static tree | |||
2437 | gimple_signed_or_unsigned_type (bool unsignedp, tree type) | |||
2438 | { | |||
2439 | tree type1; | |||
2440 | int i; | |||
2441 | ||||
2442 | type1 = TYPE_MAIN_VARIANT (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2442, __FUNCTION__))->type_common.main_variant); | |||
2443 | if (type1 == signed_char_type_nodeinteger_types[itk_signed_char] | |||
2444 | || type1 == char_type_nodeinteger_types[itk_char] | |||
2445 | || type1 == unsigned_char_type_nodeinteger_types[itk_unsigned_char]) | |||
2446 | return unsignedp ? unsigned_char_type_nodeinteger_types[itk_unsigned_char] : signed_char_type_nodeinteger_types[itk_signed_char]; | |||
2447 | if (type1 == integer_type_nodeinteger_types[itk_int] || type1 == unsigned_type_nodeinteger_types[itk_unsigned_int]) | |||
2448 | return unsignedp ? unsigned_type_nodeinteger_types[itk_unsigned_int] : integer_type_nodeinteger_types[itk_int]; | |||
2449 | if (type1 == short_integer_type_nodeinteger_types[itk_short] || type1 == short_unsigned_type_nodeinteger_types[itk_unsigned_short]) | |||
2450 | return unsignedp ? short_unsigned_type_nodeinteger_types[itk_unsigned_short] : short_integer_type_nodeinteger_types[itk_short]; | |||
2451 | if (type1 == long_integer_type_nodeinteger_types[itk_long] || type1 == long_unsigned_type_nodeinteger_types[itk_unsigned_long]) | |||
2452 | return unsignedp ? long_unsigned_type_nodeinteger_types[itk_unsigned_long] : long_integer_type_nodeinteger_types[itk_long]; | |||
2453 | if (type1 == long_long_integer_type_nodeinteger_types[itk_long_long] | |||
2454 | || type1 == long_long_unsigned_type_nodeinteger_types[itk_unsigned_long_long]) | |||
2455 | return unsignedp | |||
2456 | ? long_long_unsigned_type_nodeinteger_types[itk_unsigned_long_long] | |||
2457 | : long_long_integer_type_nodeinteger_types[itk_long_long]; | |||
2458 | ||||
2459 | for (i = 0; i < NUM_INT_N_ENTS1; i ++) | |||
2460 | if (int_n_enabled_p[i] | |||
2461 | && (type1 == int_n_trees[i].unsigned_type | |||
2462 | || type1 == int_n_trees[i].signed_type)) | |||
2463 | return unsignedp | |||
2464 | ? int_n_trees[i].unsigned_type | |||
2465 | : int_n_trees[i].signed_type; | |||
2466 | ||||
2467 | #if HOST_BITS_PER_WIDE_INT64 >= 64 | |||
2468 | if (type1 == intTI_type_nodeglobal_trees[TI_INTTI_TYPE] || type1 == unsigned_intTI_type_nodeglobal_trees[TI_UINTTI_TYPE]) | |||
2469 | return unsignedp ? unsigned_intTI_type_nodeglobal_trees[TI_UINTTI_TYPE] : intTI_type_nodeglobal_trees[TI_INTTI_TYPE]; | |||
2470 | #endif | |||
2471 | if (type1 == intDI_type_nodeglobal_trees[TI_INTDI_TYPE] || type1 == unsigned_intDI_type_nodeglobal_trees[TI_UINTDI_TYPE]) | |||
2472 | return unsignedp ? unsigned_intDI_type_nodeglobal_trees[TI_UINTDI_TYPE] : intDI_type_nodeglobal_trees[TI_INTDI_TYPE]; | |||
2473 | if (type1 == intSI_type_nodeglobal_trees[TI_INTSI_TYPE] || type1 == unsigned_intSI_type_nodeglobal_trees[TI_UINTSI_TYPE]) | |||
2474 | return unsignedp ? unsigned_intSI_type_nodeglobal_trees[TI_UINTSI_TYPE] : intSI_type_nodeglobal_trees[TI_INTSI_TYPE]; | |||
2475 | if (type1 == intHI_type_nodeglobal_trees[TI_INTHI_TYPE] || type1 == unsigned_intHI_type_nodeglobal_trees[TI_UINTHI_TYPE]) | |||
2476 | return unsignedp ? unsigned_intHI_type_nodeglobal_trees[TI_UINTHI_TYPE] : intHI_type_nodeglobal_trees[TI_INTHI_TYPE]; | |||
2477 | if (type1 == intQI_type_nodeglobal_trees[TI_INTQI_TYPE] || type1 == unsigned_intQI_type_nodeglobal_trees[TI_UINTQI_TYPE]) | |||
2478 | return unsignedp ? unsigned_intQI_type_nodeglobal_trees[TI_UINTQI_TYPE] : intQI_type_nodeglobal_trees[TI_INTQI_TYPE]; | |||
2479 | ||||
2480 | #define GIMPLE_FIXED_TYPES(NAME) \ | |||
2481 | if (type1 == short_ ## NAME ## _type_node \ | |||
2482 | || type1 == unsigned_short_ ## NAME ## _type_node) \ | |||
2483 | return unsignedp ? unsigned_short_ ## NAME ## _type_node \ | |||
2484 | : short_ ## NAME ## _type_node; \ | |||
2485 | if (type1 == NAME ## _type_node \ | |||
2486 | || type1 == unsigned_ ## NAME ## _type_node) \ | |||
2487 | return unsignedp ? unsigned_ ## NAME ## _type_node \ | |||
2488 | : NAME ## _type_node; \ | |||
2489 | if (type1 == long_ ## NAME ## _type_node \ | |||
2490 | || type1 == unsigned_long_ ## NAME ## _type_node) \ | |||
2491 | return unsignedp ? unsigned_long_ ## NAME ## _type_node \ | |||
2492 | : long_ ## NAME ## _type_node; \ | |||
2493 | if (type1 == long_long_ ## NAME ## _type_node \ | |||
2494 | || type1 == unsigned_long_long_ ## NAME ## _type_node) \ | |||
2495 | return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \ | |||
2496 | : long_long_ ## NAME ## _type_node; | |||
2497 | ||||
2498 | #define GIMPLE_FIXED_MODE_TYPES(NAME) \ | |||
2499 | if (type1 == NAME ## _type_node \ | |||
2500 | || type1 == u ## NAME ## _type_node) \ | |||
2501 | return unsignedp ? u ## NAME ## _type_node \ | |||
2502 | : NAME ## _type_node; | |||
2503 | ||||
2504 | #define GIMPLE_FIXED_TYPES_SAT(NAME) \ | |||
2505 | if (type1 == sat_ ## short_ ## NAME ## _type_node \ | |||
2506 | || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \ | |||
2507 | return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \ | |||
2508 | : sat_ ## short_ ## NAME ## _type_node; \ | |||
2509 | if (type1 == sat_ ## NAME ## _type_node \ | |||
2510 | || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \ | |||
2511 | return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \ | |||
2512 | : sat_ ## NAME ## _type_node; \ | |||
2513 | if (type1 == sat_ ## long_ ## NAME ## _type_node \ | |||
2514 | || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \ | |||
2515 | return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \ | |||
2516 | : sat_ ## long_ ## NAME ## _type_node; \ | |||
2517 | if (type1 == sat_ ## long_long_ ## NAME ## _type_node \ | |||
2518 | || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \ | |||
2519 | return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \ | |||
2520 | : sat_ ## long_long_ ## NAME ## _type_node; | |||
2521 | ||||
2522 | #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \ | |||
2523 | if (type1 == sat_ ## NAME ## _type_node \ | |||
2524 | || type1 == sat_ ## u ## NAME ## _type_node) \ | |||
2525 | return unsignedp ? sat_ ## u ## NAME ## _type_node \ | |||
2526 | : sat_ ## NAME ## _type_node; | |||
2527 | ||||
2528 | GIMPLE_FIXED_TYPES (fract); | |||
2529 | GIMPLE_FIXED_TYPES_SAT (fract); | |||
2530 | GIMPLE_FIXED_TYPES (accum); | |||
2531 | GIMPLE_FIXED_TYPES_SAT (accum); | |||
2532 | ||||
2533 | GIMPLE_FIXED_MODE_TYPES (qq); | |||
2534 | GIMPLE_FIXED_MODE_TYPES (hq); | |||
2535 | GIMPLE_FIXED_MODE_TYPES (sq); | |||
2536 | GIMPLE_FIXED_MODE_TYPES (dq); | |||
2537 | GIMPLE_FIXED_MODE_TYPES (tq); | |||
2538 | GIMPLE_FIXED_MODE_TYPES_SAT (qq); | |||
2539 | GIMPLE_FIXED_MODE_TYPES_SAT (hq); | |||
2540 | GIMPLE_FIXED_MODE_TYPES_SAT (sq); | |||
2541 | GIMPLE_FIXED_MODE_TYPES_SAT (dq); | |||
2542 | GIMPLE_FIXED_MODE_TYPES_SAT (tq); | |||
2543 | GIMPLE_FIXED_MODE_TYPES (ha); | |||
2544 | GIMPLE_FIXED_MODE_TYPES (sa); | |||
2545 | GIMPLE_FIXED_MODE_TYPES (da); | |||
2546 | GIMPLE_FIXED_MODE_TYPES (ta); | |||
2547 | GIMPLE_FIXED_MODE_TYPES_SAT (ha); | |||
2548 | GIMPLE_FIXED_MODE_TYPES_SAT (sa); | |||
2549 | GIMPLE_FIXED_MODE_TYPES_SAT (da); | |||
2550 | GIMPLE_FIXED_MODE_TYPES_SAT (ta); | |||
2551 | ||||
2552 | /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not | |||
2553 | the precision; they have precision set to match their range, but | |||
2554 | may use a wider mode to match an ABI. If we change modes, we may | |||
2555 | wind up with bad conversions. For INTEGER_TYPEs in C, must check | |||
2556 | the precision as well, so as to yield correct results for | |||
2557 | bit-field types. C++ does not have these separate bit-field | |||
2558 | types, and producing a signed or unsigned variant of an | |||
2559 | ENUMERAL_TYPE may cause other problems as well. */ | |||
2560 | if (!INTEGRAL_TYPE_P (type)(((enum tree_code) (type)->base.code) == ENUMERAL_TYPE || ( (enum tree_code) (type)->base.code) == BOOLEAN_TYPE || ((enum tree_code) (type)->base.code) == INTEGER_TYPE) | |||
2561 | || TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2561, __FUNCTION__))->base.u.bits.unsigned_flag) == unsignedp) | |||
2562 | return type; | |||
2563 | ||||
2564 | #define TYPE_OK(node) \ | |||
2565 | (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2565, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode) == TYPE_MODE (node)((((enum tree_code) ((tree_class_check ((node), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2565, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (node) : (node)->type_common.mode) \ | |||
2566 | && TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2566, __FUNCTION__))->type_common.precision) == TYPE_PRECISION (node)((tree_class_check ((node), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2566, __FUNCTION__))->type_common.precision)) | |||
2567 | if (TYPE_OK (signed_char_type_nodeinteger_types[itk_signed_char])) | |||
2568 | return unsignedp ? unsigned_char_type_nodeinteger_types[itk_unsigned_char] : signed_char_type_nodeinteger_types[itk_signed_char]; | |||
2569 | if (TYPE_OK (integer_type_nodeinteger_types[itk_int])) | |||
2570 | return unsignedp ? unsigned_type_nodeinteger_types[itk_unsigned_int] : integer_type_nodeinteger_types[itk_int]; | |||
2571 | if (TYPE_OK (short_integer_type_nodeinteger_types[itk_short])) | |||
2572 | return unsignedp ? short_unsigned_type_nodeinteger_types[itk_unsigned_short] : short_integer_type_nodeinteger_types[itk_short]; | |||
2573 | if (TYPE_OK (long_integer_type_nodeinteger_types[itk_long])) | |||
2574 | return unsignedp ? long_unsigned_type_nodeinteger_types[itk_unsigned_long] : long_integer_type_nodeinteger_types[itk_long]; | |||
2575 | if (TYPE_OK (long_long_integer_type_nodeinteger_types[itk_long_long])) | |||
2576 | return (unsignedp | |||
2577 | ? long_long_unsigned_type_nodeinteger_types[itk_unsigned_long_long] | |||
2578 | : long_long_integer_type_nodeinteger_types[itk_long_long]); | |||
2579 | ||||
2580 | for (i = 0; i < NUM_INT_N_ENTS1; i ++) | |||
2581 | if (int_n_enabled_p[i] | |||
2582 | && TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2582, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode (type) : (type)->type_common.mode) == int_n_data[i].m | |||
2583 | && TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2583, __FUNCTION__))->type_common.precision) == int_n_data[i].bitsize) | |||
2584 | return unsignedp | |||
2585 | ? int_n_trees[i].unsigned_type | |||
2586 | : int_n_trees[i].signed_type; | |||
2587 | ||||
2588 | #if HOST_BITS_PER_WIDE_INT64 >= 64 | |||
2589 | if (TYPE_OK (intTI_type_nodeglobal_trees[TI_INTTI_TYPE])) | |||
2590 | return unsignedp ? unsigned_intTI_type_nodeglobal_trees[TI_UINTTI_TYPE] : intTI_type_nodeglobal_trees[TI_INTTI_TYPE]; | |||
2591 | #endif | |||
2592 | if (TYPE_OK (intDI_type_nodeglobal_trees[TI_INTDI_TYPE])) | |||
2593 | return unsignedp ? unsigned_intDI_type_nodeglobal_trees[TI_UINTDI_TYPE] : intDI_type_nodeglobal_trees[TI_INTDI_TYPE]; | |||
2594 | if (TYPE_OK (intSI_type_nodeglobal_trees[TI_INTSI_TYPE])) | |||
2595 | return unsignedp ? unsigned_intSI_type_nodeglobal_trees[TI_UINTSI_TYPE] : intSI_type_nodeglobal_trees[TI_INTSI_TYPE]; | |||
2596 | if (TYPE_OK (intHI_type_nodeglobal_trees[TI_INTHI_TYPE])) | |||
2597 | return unsignedp ? unsigned_intHI_type_nodeglobal_trees[TI_UINTHI_TYPE] : intHI_type_nodeglobal_trees[TI_INTHI_TYPE]; | |||
2598 | if (TYPE_OK (intQI_type_nodeglobal_trees[TI_INTQI_TYPE])) | |||
2599 | return unsignedp ? unsigned_intQI_type_nodeglobal_trees[TI_UINTQI_TYPE] : intQI_type_nodeglobal_trees[TI_INTQI_TYPE]; | |||
2600 | ||||
2601 | #undef GIMPLE_FIXED_TYPES | |||
2602 | #undef GIMPLE_FIXED_MODE_TYPES | |||
2603 | #undef GIMPLE_FIXED_TYPES_SAT | |||
2604 | #undef GIMPLE_FIXED_MODE_TYPES_SAT | |||
2605 | #undef TYPE_OK | |||
2606 | ||||
2607 | return build_nonstandard_integer_type (TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2607, __FUNCTION__))->type_common.precision), unsignedp); | |||
2608 | } | |||
2609 | ||||
2610 | ||||
2611 | /* Return an unsigned type the same as TYPE in other respects. */ | |||
2612 | ||||
2613 | tree | |||
2614 | gimple_unsigned_type (tree type) | |||
2615 | { | |||
2616 | return gimple_signed_or_unsigned_type (true, type); | |||
2617 | } | |||
2618 | ||||
2619 | ||||
2620 | /* Return a signed type the same as TYPE in other respects. */ | |||
2621 | ||||
2622 | tree | |||
2623 | gimple_signed_type (tree type) | |||
2624 | { | |||
2625 | return gimple_signed_or_unsigned_type (false, type); | |||
2626 | } | |||
2627 | ||||
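/* Illustrative usage sketch, not part of the original file (hypothetical
   helper name; assumes only the two wrappers above and GCC's tree API):
   flipping the signedness of an integral type preserves its precision and
   mode, so the round trip below yields a signed type of the same precision.  */
static inline tree
signedness_round_trip_example (tree type)
{
  tree u = gimple_unsigned_type (type);   /* e.g. int -> unsigned int  */
  return gimple_signed_type (u);          /* ... and back to a signed type  */
}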
2628 | ||||
2629 | /* Return the type-based alias set for T, which may be an expression | |||
2630 | or a type. Return -1 if we don't do anything special. */ | |||
2631 | ||||
2632 | alias_set_type | |||
2633 | gimple_get_alias_set (tree t) | |||
2634 | { | |||
2635 | /* That's all the expressions we handle specially. */ | |||
2636 | if (!TYPE_P (t)(tree_code_type[(int) (((enum tree_code) (t)->base.code))] == tcc_type)) | |||
2637 | return -1; | |||
2638 | ||||
2639 | /* For convenience, follow the C standard when dealing with | |||
2640 | character types. Any object may be accessed via an lvalue that | |||
2641 | has character type. */ | |||
2642 | if (t == char_type_nodeinteger_types[itk_char] | |||
2643 | || t == signed_char_type_nodeinteger_types[itk_signed_char] | |||
2644 | || t == unsigned_char_type_nodeinteger_types[itk_unsigned_char]) | |||
2645 | return 0; | |||
2646 | ||||
2647 | /* Allow aliasing between signed and unsigned variants of the same | |||
2648 | type. We treat the signed variant as canonical. */ | |||
2649 | if (TREE_CODE (t)((enum tree_code) (t)->base.code) == INTEGER_TYPE && TYPE_UNSIGNED (t)((tree_class_check ((t), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2649, __FUNCTION__))->base.u.bits.unsigned_flag)) | |||
2650 | { | |||
2651 | tree t1 = gimple_signed_type (t); | |||
2652 | ||||
2653 | /* t1 == t can happen for boolean nodes which are always unsigned. */ | |||
2654 | if (t1 != t) | |||
2655 | return get_alias_set (t1); | |||
2656 | } | |||
2657 | ||||
2658 | /* Allow aliasing between enumeral types and the underlying | |||
2659 | integer type. This is required for C since those are | |||
2660 | compatible types. */ | |||
2661 | else if (TREE_CODE (t)((enum tree_code) (t)->base.code) == ENUMERAL_TYPE) | |||
2662 | { | |||
2663 | tree t1 = lang_hooks.types.type_for_size (tree_to_uhwi (TYPE_SIZE (t)((tree_class_check ((t), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2663, __FUNCTION__))->type_common.size)), | |||
2664 | false /* short-cut above */); | |||
2665 | return get_alias_set (t1); | |||
2666 | } | |||
2667 | ||||
2668 | return -1; | |||
2669 | } | |||
2670 | ||||
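/* Illustrative sketch, not part of the original file (hypothetical helper
   name): per the special cases above, character types land in alias set 0
   and may therefore alias any object, unsigned integer and enumeral types
   are canonicalized first, and everything else answers -1 so the generic
   alias machinery decides.  */
static inline bool
chars_alias_everything_example (void)
{
  return gimple_get_alias_set (char_type_node) == 0;
}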
2671 | ||||
2672 | /* Helper for gimple_ior_addresses_taken_1. */ | |||
2673 | ||||
2674 | static bool | |||
2675 | gimple_ior_addresses_taken_1 (gimple *, tree addr, tree, void *data) | |||
2676 | { | |||
2677 | bitmap addresses_taken = (bitmap)data; | |||
2678 | addr = get_base_address (addr); | |||
2679 | if (addr | |||
2680 | && DECL_P (addr)(tree_code_type[(int) (((enum tree_code) (addr)->base.code ))] == tcc_declaration)) | |||
2681 | { | |||
2682 | bitmap_set_bit (addresses_taken, DECL_UID (addr)((contains_struct_check ((addr), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2682, __FUNCTION__))->decl_minimal.uid)); | |||
2683 | return true; | |||
2684 | } | |||
2685 | return false; | |||
2686 | } | |||
2687 | ||||
2688 | /* Set the bit for the uid of all decls that have their address taken | |||
2689 | in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there | |||
2690 | were any in this stmt. */ | |||
2691 | ||||
2692 | bool | |||
2693 | gimple_ior_addresses_taken (bitmap addresses_taken, gimple *stmt) | |||
2694 | { | |||
2695 | return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULLnullptr, NULLnullptr, | |||
2696 | gimple_ior_addresses_taken_1); | |||
2697 | } | |||
2698 | ||||
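/* Illustrative usage sketch, not part of the original file (hypothetical
   helper name; assumes bitmap.h's BITMAP_ALLOC/BITMAP_FREE): collect the
   DECL_UIDs whose address is taken anywhere in STMT and print them with
   dump_decl_set (defined later in this file).  */
static void
dump_addresses_taken_example (FILE *file, gimple *stmt)
{
  bitmap addresses = BITMAP_ALLOC (NULL);
  if (gimple_ior_addresses_taken (addresses, stmt))
    dump_decl_set (file, addresses);
  BITMAP_FREE (addresses);
}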
2699 | ||||
2700 | /* Return true when STMT's arguments and return value match those of FNDECL, | |||
2701 | a decl of a builtin function. */ | |||
2702 | ||||
2703 | bool | |||
2704 | gimple_builtin_call_types_compatible_p (const gimple *stmt, tree fndecl) | |||
2705 | { | |||
2706 | gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) != NOT_BUILT_IN)((void)(!(((built_in_class) (tree_check ((fndecl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2706, __FUNCTION__, (FUNCTION_DECL)))->function_decl.built_in_class ) != NOT_BUILT_IN) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2706, __FUNCTION__), 0 : 0)); | |||
2707 | ||||
2708 | tree ret = gimple_call_lhs (stmt); | |||
2709 | if (ret | |||
2710 | && !useless_type_conversion_p (TREE_TYPE (ret)((contains_struct_check ((ret), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2710, __FUNCTION__))->typed.type), | |||
2711 | TREE_TYPE (TREE_TYPE (fndecl))((contains_struct_check ((((contains_struct_check ((fndecl), ( TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2711, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2711, __FUNCTION__))->typed.type))) | |||
2712 | return false; | |||
2713 | ||||
2714 | tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl))((tree_check2 ((((contains_struct_check ((fndecl), (TS_TYPED) , "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2714, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2714, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common .values); | |||
2715 | unsigned nargs = gimple_call_num_args (stmt); | |||
2716 | for (unsigned i = 0; i < nargs; ++i) | |||
2717 | { | |||
2718 | /* Variadic args follow. */ | |||
2719 | if (!targs) | |||
2720 | return true; | |||
2721 | tree arg = gimple_call_arg (stmt, i); | |||
2722 | tree type = TREE_VALUE (targs)((tree_check ((targs), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2722, __FUNCTION__, (TREE_LIST)))->list.value); | |||
2723 | if (!useless_type_conversion_p (type, TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2723, __FUNCTION__))->typed.type)) | |||
2724 | /* char/short integral arguments are promoted to int | |||
2725 | by several frontends if targetm.calls.promote_prototypes | |||
2726 | is true. Allow such promotion too. */ | |||
2727 | && !(INTEGRAL_TYPE_P (type)(((enum tree_code) (type)->base.code) == ENUMERAL_TYPE || ( (enum tree_code) (type)->base.code) == BOOLEAN_TYPE || ((enum tree_code) (type)->base.code) == INTEGER_TYPE) | |||
2728 | && TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2728, __FUNCTION__))->type_common.precision) < TYPE_PRECISION (integer_type_node)((tree_class_check ((integer_types[itk_int]), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2728, __FUNCTION__))->type_common.precision) | |||
2729 | && targetm.calls.promote_prototypes (TREE_TYPE (fndecl)((contains_struct_check ((fndecl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2729, __FUNCTION__))->typed.type)) | |||
2730 | && useless_type_conversion_p (integer_type_nodeinteger_types[itk_int], | |||
2731 | TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2731, __FUNCTION__))->typed.type)))) | |||
2732 | return false; | |||
2733 | targs = TREE_CHAIN (targs)((contains_struct_check ((targs), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2733, __FUNCTION__))->common.chain); | |||
2734 | } | |||
2735 | if (targs && !VOID_TYPE_P (TREE_VALUE (targs))(((enum tree_code) (((tree_check ((targs), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2735, __FUNCTION__, (TREE_LIST)))->list.value))->base .code) == VOID_TYPE)) | |||
2736 | return false; | |||
2737 | return true; | |||
2738 | } | |||
2739 | ||||
2740 | /* Return true when STMT is a call to a replaceable operator delete. */ | |||
2741 | ||||
2742 | bool | |||
2743 | gimple_call_operator_delete_p (const gcall *stmt) | |||
2744 | { | |||
2745 | tree fndecl; | |||
2746 | ||||
2747 | if ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE(tree) nullptr) | |||
2748 | return DECL_IS_OPERATOR_DELETE_P (fndecl)((tree_check ((fndecl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2748, __FUNCTION__, (FUNCTION_DECL)))->function_decl.decl_type == OPERATOR_DELETE); | |||
2749 | return false; | |||
2750 | } | |||
2751 | ||||
2752 | /* Return true when STMT is a builtin call. */ | |||
2753 | ||||
2754 | bool | |||
2755 | gimple_call_builtin_p (const gimple *stmt) | |||
2756 | { | |||
2757 | tree fndecl; | |||
2758 | if (is_gimple_call (stmt) | |||
2759 | && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE(tree) nullptr | |||
2760 | && DECL_BUILT_IN_CLASS (fndecl)((built_in_class) (tree_check ((fndecl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2760, __FUNCTION__, (FUNCTION_DECL)))->function_decl.built_in_class ) != NOT_BUILT_IN) | |||
2761 | return gimple_builtin_call_types_compatible_p (stmt, fndecl); | |||
2762 | return false; | |||
2763 | } | |||
2764 | ||||
2765 | /* Return true when STMT is a builtin call of class KLASS. */ | |||
2766 | ||||
2767 | bool | |||
2768 | gimple_call_builtin_p (const gimple *stmt, enum built_in_class klass) | |||
2769 | { | |||
2770 | tree fndecl; | |||
2771 | if (is_gimple_call (stmt) | |||
2772 | && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE(tree) nullptr | |||
2773 | && DECL_BUILT_IN_CLASS (fndecl)((built_in_class) (tree_check ((fndecl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2773, __FUNCTION__, (FUNCTION_DECL)))->function_decl.built_in_class ) == klass) | |||
2774 | return gimple_builtin_call_types_compatible_p (stmt, fndecl); | |||
2775 | return false; | |||
2776 | } | |||
2777 | ||||
2778 | /* Return true when STMT is a builtin call to function CODE. */ | |||
2779 | ||||
2780 | bool | |||
2781 | gimple_call_builtin_p (const gimple *stmt, enum built_in_function code) | |||
2782 | { | |||
2783 | tree fndecl; | |||
2784 | if (is_gimple_call (stmt) | |||
2785 | && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE(tree) nullptr | |||
2786 | && fndecl_built_in_p (fndecl, code)) | |||
2787 | return gimple_builtin_call_types_compatible_p (stmt, fndecl); | |||
2788 | return false; | |||
2789 | } | |||
2790 | ||||
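/* Illustrative usage sketch, not part of the original file (hypothetical
   helper name): the three overloads above check, in increasing specificity,
   "some builtin", "a builtin of class KLASS" and "exactly builtin CODE",
   and each also verifies that the call still matches the builtin's
   prototype via gimple_builtin_call_types_compatible_p.  */
static inline bool
is_well_formed_memcpy_call_example (const gimple *stmt)
{
  return gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY);
}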
2791 | /* If CALL is a call to a combined_fn (i.e. an internal function or | |||
2792 | a normal built-in function), return its code, otherwise return | |||
2793 | CFN_LAST. */ | |||
2794 | ||||
2795 | combined_fn | |||
2796 | gimple_call_combined_fn (const gimple *stmt) | |||
2797 | { | |||
2798 | if (const gcall *call = dyn_cast <const gcall *> (stmt)) | |||
2799 | { | |||
2800 | if (gimple_call_internal_p (call)) | |||
2801 | return as_combined_fn (gimple_call_internal_fn (call)); | |||
2802 | ||||
2803 | tree fndecl = gimple_call_fndecl (stmt); | |||
2804 | if (fndecl | |||
2805 | && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL) | |||
2806 | && gimple_builtin_call_types_compatible_p (stmt, fndecl)) | |||
2807 | return as_combined_fn (DECL_FUNCTION_CODE (fndecl)); | |||
2808 | } | |||
2809 | return CFN_LAST; | |||
2810 | } | |||
2811 | ||||
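/* Illustrative usage sketch, not part of the original file (hypothetical
   helper name): a combined_fn gives callers one code space for internal
   functions and for type-correct calls to normal builtins; CFN_LAST means
   the call is neither.  */
static inline bool
call_has_known_semantics_example (const gimple *stmt)
{
  return gimple_call_combined_fn (stmt) != CFN_LAST;
}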
2812 | /* Return true if STMT clobbers memory. STMT is required to be a | |||
2813 | GIMPLE_ASM. */ | |||
2814 | ||||
2815 | bool | |||
2816 | gimple_asm_clobbers_memory_p (const gasm *stmt) | |||
2817 | { | |||
2818 | unsigned i; | |||
2819 | ||||
2820 | for (i = 0; i < gimple_asm_nclobbers (stmt); i++) | |||
2821 | { | |||
2822 | tree op = gimple_asm_clobber_op (stmt, i); | |||
2823 | if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op))((const char *)((tree_check ((((tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2823, __FUNCTION__, (TREE_LIST)))->list.value)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2823, __FUNCTION__, (STRING_CST)))->string.str)), "memory") == 0) | |||
2824 | return true; | |||
2825 | } | |||
2826 | ||||
2827 | /* Non-empty basic ASM implicitly clobbers memory. */ | |||
2828 | if (gimple_asm_input_p (stmt) && strlen (gimple_asm_string (stmt)) != 0) | |||
2829 | return true; | |||
2830 | ||||
2831 | return false; | |||
2832 | } | |||
2833 | ||||
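/* Illustrative sketch, not part of the original file (plain GNU C++
   statements; "nop" is an x86-style mnemonic): the predicate above answers
   true for both asm forms below, but false for an extended asm that lists
   no "memory" clobber.  */
static void
asm_memory_clobber_examples (void)
{
  asm volatile ("" : : : "memory");     /* explicit "memory" clobber  */
  asm ("nop");                          /* non-empty basic asm  */
}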
2834 | /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */ | |||
2835 | ||||
2836 | void | |||
2837 | dump_decl_set (FILE *file, bitmap set) | |||
2838 | { | |||
2839 | if (set) | |||
2840 | { | |||
2841 | bitmap_iterator bi; | |||
2842 | unsigned i; | |||
2843 | ||||
2844 | fprintf (file, "{ "); | |||
2845 | ||||
2846 | EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)for (bmp_iter_set_init (&(bi), (set), (0), &(i)); bmp_iter_set (&(bi), &(i)); bmp_iter_next (&(bi), &(i))) | |||
2847 | { | |||
2848 | fprintf (file, "D.%u", i); | |||
2849 | fprintf (file, " "); | |||
2850 | } | |||
2851 | ||||
2852 | fprintf (file, "}"); | |||
2853 | } | |||
2854 | else | |||
2855 | fprintf (file, "NIL"); | |||
2856 | } | |||
2857 | ||||
2858 | /* Return true when CALL is a call stmt that definitely doesn't | |||
2859 | free any memory or make it unavailable otherwise. */ | |||
2860 | bool | |||
2861 | nonfreeing_call_p (gimple *call) | |||
2862 | { | |||
2863 | if (gimple_call_builtin_p (call, BUILT_IN_NORMAL) | |||
2864 | && gimple_call_flags (call) & ECF_LEAF(1 << 10)) | |||
2865 | switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call))) | |||
2866 | { | |||
2867 | /* Just in case these become ECF_LEAF in the future. */ | |||
2868 | case BUILT_IN_FREE: | |||
2869 | case BUILT_IN_TM_FREE: | |||
2870 | case BUILT_IN_REALLOC: | |||
2871 | case BUILT_IN_STACK_RESTORE: | |||
2872 | return false; | |||
2873 | default: | |||
2874 | return true; | |||
2875 | } | |||
2876 | else if (gimple_call_internal_p (call)) | |||
2877 | switch (gimple_call_internal_fn (call)) | |||
2878 | { | |||
2879 | case IFN_ABNORMAL_DISPATCHER: | |||
2880 | return true; | |||
2881 | case IFN_ASAN_MARK: | |||
2882 | return tree_to_uhwi (gimple_call_arg (call, 0)) == ASAN_MARK_UNPOISON; | |||
2883 | default: | |||
2884 | if (gimple_call_flags (call) & ECF_LEAF(1 << 10)) | |||
2885 | return true; | |||
2886 | return false; | |||
2887 | } | |||
2888 | ||||
2889 | tree fndecl = gimple_call_fndecl (call); | |||
2890 | if (!fndecl) | |||
2891 | return false; | |||
2892 | struct cgraph_node *n = cgraph_node::get (fndecl); | |||
2893 | if (!n) | |||
2894 | return false; | |||
2895 | enum availability availability; | |||
2896 | n = n->function_symbol (&availability); | |||
2897 | if (!n || availability <= AVAIL_INTERPOSABLE) | |||
2898 | return false; | |||
2899 | return n->nonfreeing_fn; | |||
2900 | } | |||
2901 | ||||
2902 | /* Return true when CALL is a call stmt that definitely need not | |||
2903 | be considered to be a memory barrier. */ | |||
2904 | bool | |||
2905 | nonbarrier_call_p (gimple *call) | |||
2906 | { | |||
2907 | if (gimple_call_flags (call) & (ECF_PURE(1 << 1) | ECF_CONST(1 << 0))) | |||
2908 | return true; | |||
2909 | /* Should extend this to have a nonbarrier_fn flag, just as above in | |||
2910 | the nonfreeing case. */ | |||
2911 | return false; | |||
2912 | } | |||
2913 | ||||
2914 | /* Callback for walk_stmt_load_store_ops. | |||
2915 | ||||
2916 | Return TRUE if OP will dereference the tree stored in DATA, FALSE | |||
2917 | otherwise. | |||
2918 | ||||
2919 | This routine only makes a superficial check for a dereference. Thus | |||
2920 | it must only be used if it is safe to return a false negative. */ | |||
2921 | static bool | |||
2922 | check_loadstore (gimple *, tree op, tree, void *data) | |||
2923 | { | |||
2924 | if (TREE_CODE (op)((enum tree_code) (op)->base.code) == MEM_REF || TREE_CODE (op)((enum tree_code) (op)->base.code) == TARGET_MEM_REF) | |||
2925 | { | |||
2926 | /* Some address spaces may legitimately dereference zero. */ | |||
2927 | addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (op))((tree_class_check ((((contains_struct_check ((op), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2927, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2927, __FUNCTION__))->base.u.bits.address_space); | |||
2928 | if (targetm.addr_space.zero_address_valid (as)) | |||
2929 | return false; | |||
2930 | ||||
2931 | return operand_equal_p (TREE_OPERAND (op, 0)(*((const_cast<tree*> (tree_operand_check ((op), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2931, __FUNCTION__))))), (tree)data, 0); | |||
2932 | } | |||
2933 | return false; | |||
2934 | } | |||
2935 | ||||
2936 | ||||
2937 | /* Return true if OP can be inferred to be non-NULL after STMT executes, | |||
2938 | either by using a pointer dereference or attributes. */ | |||
2939 | bool | |||
2940 | infer_nonnull_range (gimple *stmt, tree op) | |||
2941 | { | |||
2942 | return (infer_nonnull_range_by_dereference (stmt, op) | |||
2943 | || infer_nonnull_range_by_attribute (stmt, op)); | |||
2944 | } | |||
2945 | ||||
2946 | /* Return true if OP can be inferred to be non-NULL after STMT | |||
2947 | executes by using a pointer dereference. */ | |||
2948 | bool | |||
2949 | infer_nonnull_range_by_dereference (gimple *stmt, tree op) | |||
2950 | { | |||
2951 | /* We can only assume that a pointer dereference will yield | |||
2952 | non-NULL if -fdelete-null-pointer-checks is enabled. */ | |||
2953 | if (!flag_delete_null_pointer_checksglobal_options.x_flag_delete_null_pointer_checks | |||
2954 | || !POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED) , "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2954, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE || ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2954, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE ) | |||
2955 | || gimple_code (stmt) == GIMPLE_ASM | |||
2956 | || gimple_clobber_p (stmt)) | |||
2957 | return false; | |||
2958 | ||||
2959 | if (walk_stmt_load_store_ops (stmt, (void *)op, | |||
2960 | check_loadstore, check_loadstore)) | |||
2961 | return true; | |||
2962 | ||||
2963 | return false; | |||
2964 | } | |||
2965 | ||||
2966 | /* Return true if OP can be inferred to be non-NULL after STMT | |||
2967 | executes by using attributes. */ | |||
2968 | bool | |||
2969 | infer_nonnull_range_by_attribute (gimple *stmt, tree op) | |||
2970 | { | |||
2971 | /* We can only assume that a pointer dereference will yield | |||
2972 | non-NULL if -fdelete-null-pointer-checks is enabled. */ | |||
2973 | if (!flag_delete_null_pointer_checksglobal_options.x_flag_delete_null_pointer_checks | |||
2974 | || !POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED) , "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2974, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE || ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED ), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2974, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE ) | |||
2975 | || gimple_code (stmt) == GIMPLE_ASM) | |||
2976 | return false; | |||
2977 | ||||
2978 | if (is_gimple_call (stmt) && !gimple_call_internal_p (stmt)) | |||
2979 | { | |||
2980 | tree fntype = gimple_call_fntype (stmt); | |||
2981 | tree attrs = TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2981, __FUNCTION__))->type_common.attributes); | |||
2982 | for (; attrs; attrs = TREE_CHAIN (attrs)((contains_struct_check ((attrs), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2982, __FUNCTION__))->common.chain)) | |||
2983 | { | |||
2984 | attrs = lookup_attribute ("nonnull", attrs); | |||
2985 | ||||
2986 | /* If "nonnull" wasn't specified, we know nothing about | |||
2987 | the argument. */ | |||
2988 | if (attrs == NULL_TREE(tree) nullptr) | |||
2989 | return false; | |||
2990 | ||||
2991 | /* If "nonnull" applies to all the arguments, then ARG | |||
2992 | is non-null if it's in the argument list. */ | |||
2993 | if (TREE_VALUE (attrs)((tree_check ((attrs), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2993, __FUNCTION__, (TREE_LIST)))->list.value) == NULL_TREE(tree) nullptr) | |||
2994 | { | |||
2995 | for (unsigned int i = 0; i < gimple_call_num_args (stmt); i++) | |||
2996 | { | |||
2997 | if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt, i)))(((enum tree_code) (((contains_struct_check ((gimple_call_arg (stmt, i)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2997, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE || ((enum tree_code) (((contains_struct_check ((gimple_call_arg (stmt, i)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 2997, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE ) | |||
2998 | && operand_equal_p (op, gimple_call_arg (stmt, i), 0)) | |||
2999 | return true; | |||
3000 | } | |||
3001 | return false; | |||
3002 | } | |||
3003 | ||||
3004 | /* Now see if op appears in the nonnull list. */ | |||
3005 | for (tree t = TREE_VALUE (attrs)((tree_check ((attrs), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3005, __FUNCTION__, (TREE_LIST)))->list.value); t; t = TREE_CHAIN (t)((contains_struct_check ((t), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3005, __FUNCTION__))->common.chain)) | |||
3006 | { | |||
3007 | unsigned int idx = TREE_INT_CST_LOW (TREE_VALUE (t))((unsigned long) (*tree_int_cst_elt_check ((((tree_check ((t) , "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3007, __FUNCTION__, (TREE_LIST)))->list.value)), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3007, __FUNCTION__))) - 1; | |||
3008 | if (idx < gimple_call_num_args (stmt)) | |||
3009 | { | |||
3010 | tree arg = gimple_call_arg (stmt, idx); | |||
3011 | if (operand_equal_p (op, arg, 0)) | |||
3012 | return true; | |||
3013 | } | |||
3014 | } | |||
3015 | } | |||
3016 | } | |||
3017 | ||||
3018 | /* If this function is marked as returning non-null, then we can | |||
3019 | infer OP is non-null if it is used in the return statement. */ | |||
3020 | if (greturn *return_stmt = dyn_cast <greturn *> (stmt)) | |||
3021 | if (gimple_return_retval (return_stmt) | |||
3022 | && operand_equal_p (gimple_return_retval (return_stmt), op, 0) | |||
3023 | && lookup_attribute ("returns_nonnull", | |||
3024 | TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))((tree_class_check ((((contains_struct_check ((current_function_decl ), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3024, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3024, __FUNCTION__))->type_common.attributes))) | |||
3025 | return true; | |||
3026 | ||||
3027 | return false; | |||
3028 | } | |||
3029 | ||||
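/* Illustrative sketch, not part of the original file (hypothetical
   declarations, plain GNU C++): attribute argument indices are 1-based,
   hence the "- 1" when the nonnull list is walked above.  With these
   prototypes, arguments 1 and 3 of a call to use_example are inferred
   non-NULL, and inside the body of make_example the value being returned
   is inferred non-NULL as well.  */
extern void use_example (void *a, int b, void *c)
  __attribute__ ((nonnull (1, 3)));
extern void *make_example (void)
  __attribute__ ((returns_nonnull));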
3030 | /* Compare two case labels. Because the front end should already have | |||
3031 | made sure that case ranges do not overlap, it is enough to only compare | |||
3032 | the CASE_LOW values of each case label. */ | |||
3033 | ||||
3034 | static int | |||
3035 | compare_case_labels (const void *p1, const void *p2) | |||
3036 | { | |||
3037 | const_tree const case1 = *(const_tree const*)p1; | |||
3038 | const_tree const case2 = *(const_tree const*)p2; | |||
3039 | ||||
3040 | /* The 'default' case label always goes first. */ | |||
3041 | if (!CASE_LOW (case1)(*((const_cast<tree*> (tree_operand_check (((tree_check ((case1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3041, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3041, __FUNCTION__)))))) | |||
3042 | return -1; | |||
3043 | else if (!CASE_LOW (case2)(*((const_cast<tree*> (tree_operand_check (((tree_check ((case2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3043, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3043, __FUNCTION__)))))) | |||
3044 | return 1; | |||
3045 | else | |||
3046 | return tree_int_cst_compare (CASE_LOW (case1)(*((const_cast<tree*> (tree_operand_check (((tree_check ((case1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3046, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3046, __FUNCTION__))))), CASE_LOW (case2)(*((const_cast<tree*> (tree_operand_check (((tree_check ((case2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3046, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3046, __FUNCTION__)))))); | |||
3047 | } | |||
3048 | ||||
3049 | /* Sort the case labels in LABEL_VEC in place in ascending order. */ | |||
3050 | ||||
3051 | void | |||
3052 | sort_case_labels (vec<tree> label_vec) | |||
3053 | { | |||
3054 | label_vec.qsort (compare_case_labels)qsort (compare_case_labels); | |||
3055 | } | |||
3056 | ||||
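/* Illustrative sketch, not part of the original file (hypothetical helper
   name; build_case_label comes from tree.c): the comparator above sends the
   default label, which has no CASE_LOW, to the front and orders the
   remaining labels by CASE_LOW ascending.  */
static void
sorted_label_order_example (tree label_decl)
{
  auto_vec<tree> labels;
  labels.safe_push (build_case_label (integer_one_node, NULL_TREE, label_decl));
  labels.safe_push (build_case_label (NULL_TREE, NULL_TREE, label_decl));
  labels.safe_push (build_case_label (integer_zero_node, NULL_TREE, label_decl));
  sort_case_labels (labels);
  /* Now labels[0] is the default, labels[1] has CASE_LOW 0, labels[2] has 1.  */
}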
3057 | /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement. | |||
3058 | ||||
3059 | LABELS is a vector that contains all case labels to look at. | |||
3060 | ||||
3061 | INDEX_TYPE is the type of the switch index expression. Case labels | |||
3062 | in LABELS are discarded if their values are not in the value range | |||
3063 | covered by INDEX_TYPE. The remaining case label values are folded | |||
3064 | to INDEX_TYPE. | |||
3065 | ||||
3066 | If a default case exists in LABELS, it is removed from LABELS and | |||
3067 | returned in DEFAULT_CASEP. If no default case exists, but the | |||
3068 | case labels already cover the whole range of INDEX_TYPE, a default | |||
3069 | case is returned pointing to one of the existing case labels. | |||
3070 | Otherwise DEFAULT_CASEP is set to NULL_TREE. | |||
3071 | ||||
3072 | DEFAULT_CASEP may be NULL, in which case the above comment doesn't | |||
3073 | apply and no action is taken regardless of whether a default case is | |||
3074 | found or not. */ | |||
3075 | ||||
3076 | void | |||
3077 | preprocess_case_label_vec_for_gimple (vec<tree> labels, | |||
3078 | tree index_type, | |||
3079 | tree *default_casep) | |||
3080 | { | |||
3081 | tree min_value, max_value; | |||
3082 | tree default_case = NULL_TREE(tree) nullptr; | |||
3083 | size_t i, len; | |||
3084 | ||||
3085 | i = 0; | |||
3086 | min_value = TYPE_MIN_VALUE (index_type)((tree_check5 ((index_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3086, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval ); | |||
3087 | max_value = TYPE_MAX_VALUE (index_type)((tree_check5 ((index_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3087, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval ); | |||
3088 | while (i < labels.length ()) | |||
3089 | { | |||
3090 | tree elt = labels[i]; | |||
3091 | tree low = CASE_LOW (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check ((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3091, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3091, __FUNCTION__))))); | |||
3092 | tree high = CASE_HIGH (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check ((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3092, __FUNCTION__, (CASE_LABEL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3092, __FUNCTION__))))); | |||
3093 | bool remove_element = FALSEfalse; | |||
3094 | ||||
3095 | if (low) | |||
3096 | { | |||
3097 | gcc_checking_assert (TREE_CODE (low) == INTEGER_CST)((void)(!(((enum tree_code) (low)->base.code) == INTEGER_CST ) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3097, __FUNCTION__), 0 : 0)); | |||
3098 | gcc_checking_assert (!high || TREE_CODE (high) == INTEGER_CST)((void)(!(!high || ((enum tree_code) (high)->base.code) == INTEGER_CST) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3098, __FUNCTION__), 0 : 0)); | |||
3099 | ||||
3100 | /* This is a non-default case label, i.e. it has a value. | |||
3101 | ||||
3102 | See if the case label is reachable within the range of | |||
3103 | the index type. Remove out-of-range case values. Turn | |||
3104 | case ranges into a canonical form (high > low strictly) | |||
3105 | and convert the case label values to the index type. | |||
3106 | ||||
3107 | NB: The type of gimple_switch_index() may be the promoted | |||
3108 | type, but the case labels retain the original type. */ | |||
3109 | ||||
3110 | if (high) | |||
3111 | { | |||
3112 | /* This is a case range. Discard empty ranges. | |||
3113 | If the bounds or the range are equal, turn this | |||
3114 | into a simple (one-value) case. */ | |||
3115 | int cmp = tree_int_cst_compare (high, low); | |||
3116 | if (cmp < 0) | |||
3117 | remove_element = TRUEtrue; | |||
3118 | else if (cmp == 0) | |||
3119 | high = NULL_TREE(tree) nullptr; | |||
3120 | } | |||
3121 | ||||
3122 | if (! high) | |||
3123 | { | |||
3124 | /* If the simple case value is unreachable, ignore it. */ | |||
3125 | if ((TREE_CODE (min_value)((enum tree_code) (min_value)->base.code) == INTEGER_CST | |||
3126 | && tree_int_cst_compare (low, min_value) < 0) | |||
3127 | || (TREE_CODE (max_value)((enum tree_code) (max_value)->base.code) == INTEGER_CST | |||
3128 | && tree_int_cst_compare (low, max_value) > 0)) | |||
3129 | remove_element = TRUEtrue; | |||
3130 | else | |||
3131 | low = fold_convert (index_type, low)fold_convert_loc (((location_t) 0), index_type, low); | |||
3132 | } | |||
3133 | else | |||
3134 | { | |||
3135 | /* If the entire case range is unreachable, ignore it. */ | |||
3136 | if ((TREE_CODE (min_value)((enum tree_code) (min_value)->base.code) == INTEGER_CST | |||
3137 | && tree_int_cst_compare (high, min_value) < 0) | |||
3138 | || (TREE_CODE (max_value)((enum tree_code) (max_value)->base.code) == INTEGER_CST | |||
3139 | && tree_int_cst_compare (low, max_value) > 0)) | |||
3140 | remove_element = TRUEtrue; | |||
3141 | else | |||
3142 | { | |||
3143 | /* If the lower bound is less than the index type's | |||
3144 | minimum value, truncate the range bounds. */ | |||
3145 | if (TREE_CODE (min_value)((enum tree_code) (min_value)->base.code) == INTEGER_CST | |||
3146 | && tree_int_cst_compare (low, min_value) < 0) | |||
3147 | low = min_value; | |||
3148 | low = fold_convert (index_type, low)fold_convert_loc (((location_t) 0), index_type, low); | |||
3149 | ||||
3150 | /* If the upper bound is greater than the index type's | |||
3151 | maximum value, truncate the range bounds. */ | |||
3152 | if (TREE_CODE (max_value)((enum tree_code) (max_value)->base.code) == INTEGER_CST | |||
3153 | && tree_int_cst_compare (high, max_value) > 0) | |||
3154 | high = max_value; | |||
3155 | high = fold_convert (index_type, high)fold_convert_loc (((location_t) 0), index_type, high); | |||
3156 | ||||
3157 | /* We may have folded a case range to a one-value case. */ | |||
3158 | if (tree_int_cst_equal (low, high)) | |||
3159 | high = NULL_TREE(tree) nullptr; | |||
3160 | } | |||
3161 | } | |||
3162 | ||||
3163 | CASE_LOW (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check ((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3163, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3163, __FUNCTION__))))) = low; | |||
3164 | CASE_HIGH (elt)(*((const_cast<tree*> (tree_operand_check (((tree_check ((elt), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3164, __FUNCTION__, (CASE_LABEL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3164, __FUNCTION__))))) = high; | |||
3165 | } | |||
3166 | else | |||
3167 | { | |||
3168 | gcc_assert (!default_case)((void)(!(!default_case) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3168, __FUNCTION__), 0 : 0)); | |||
3169 | default_case = elt; | |||
3170 | /* The default case must be passed separately to the | |||
3171 | gimple_build_switch routine. But if DEFAULT_CASEP | |||
3172 | is NULL, we do not remove the default case (it would | |||
3173 | be completely lost). */ | |||
3174 | if (default_casep) | |||
3175 | remove_element = TRUEtrue; | |||
3176 | } | |||
3177 | ||||
3178 | if (remove_element) | |||
3179 | labels.ordered_remove (i); | |||
3180 | else | |||
3181 | i++; | |||
3182 | } | |||
3183 | len = i; | |||
3184 | ||||
3185 | if (!labels.is_empty ()) | |||
3186 | sort_case_labels (labels); | |||
3187 | ||||
3188 | if (default_casep && !default_case) | |||
3189 | { | |||
3190 | /* If the switch has no default label, add one, so that we jump | |||
3191 | around the switch body. If the labels already cover the whole | |||
3192 | range of the switch index_type, add the default label pointing | |||
3193 | to one of the existing labels. */ | |||
3194 | if (len | |||
3195 | && TYPE_MIN_VALUE (index_type)((tree_check5 ((index_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3195, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval ) | |||
3196 | && TYPE_MAX_VALUE (index_type)((tree_check5 ((index_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3196, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval ) | |||
3197 | && tree_int_cst_equal (CASE_LOW (labels[0])(*((const_cast<tree*> (tree_operand_check (((tree_check ((labels[0]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3197, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3197, __FUNCTION__))))), | |||
3198 | TYPE_MIN_VALUE (index_type)((tree_check5 ((index_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3198, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval ))) | |||
3199 | { | |||
3200 | tree low, high = CASE_HIGH (labels[len - 1])(*((const_cast<tree*> (tree_operand_check (((tree_check ((labels[len - 1]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3200, __FUNCTION__, (CASE_LABEL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3200, __FUNCTION__))))); | |||
3201 | if (!high) | |||
3202 | high = CASE_LOW (labels[len - 1])(*((const_cast<tree*> (tree_operand_check (((tree_check ((labels[len - 1]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3202, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3202, __FUNCTION__))))); | |||
3203 | if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)((tree_check5 ((index_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3203, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE ), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval ))) | |||
3204 | { | |||
3205 | tree widest_label = labels[0]; | |||
3206 | for (i = 1; i < len; i++) | |||
3207 | { | |||
3208 | high = CASE_LOW (labels[i])(*((const_cast<tree*> (tree_operand_check (((tree_check ((labels[i]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3208, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3208, __FUNCTION__))))); | |||
3209 | low = CASE_HIGH (labels[i - 1])(*((const_cast<tree*> (tree_operand_check (((tree_check ((labels[i - 1]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3209, __FUNCTION__, (CASE_LABEL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3209, __FUNCTION__))))); | |||
3210 | if (!low) | |||
3211 | low = CASE_LOW (labels[i - 1])(*((const_cast<tree*> (tree_operand_check (((tree_check ((labels[i - 1]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3211, __FUNCTION__, (CASE_LABEL_EXPR)))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/gimple.c" , 3211, __FUNCTION__))))); | |||
3212 | ||||
3213 | if (CASE_HIGH (labels[i]) != NULL_TREE | |||
3214 | && (CASE_HIGH (widest_label) == NULL_TREE | |||
3215 | || (wi::gtu_p | |||
3216 | (wi::to_wide (CASE_HIGH (labels[i])) | |||
3217 | - wi::to_wide (CASE_LOW (labels[i])), | |||
3218 | wi::to_wide (CASE_HIGH (widest_label)) | |||
3219 | - wi::to_wide (CASE_LOW (widest_label)))))) | |||
3220 | widest_label = labels[i]; | |||
3221 | ||||
3222 | if (wi::to_wide (low) + 1 != wi::to_wide (high)) | |||
3223 | break; | |||
3224 | } | |||
3225 | if (i == len) | |||
3226 | { | |||
3227 | /* Designate the label with the widest range to be the | |||
3228 | default label. */ | |||
3229 | tree label = CASE_LABEL (widest_label); | |||
3230 | default_case = build_case_label (NULL_TREE, NULL_TREE, | |||
3231 | label); | |||
3232 | } | |||
3233 | } | |||
3234 | } | |||
3235 | } | |||
3236 | ||||
3237 | if (default_casep) | |||
3238 | *default_casep = default_case; | |||
3239 | } | |||
3240 | ||||
3241 | /* Set the location of all statements in SEQ to LOC. */ | |||
3242 | ||||
3243 | void | |||
3244 | gimple_seq_set_location (gimple_seq seq, location_t loc) | |||
3245 | { | |||
3246 | for (gimple_stmt_iterator i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i)) | |||
3247 | gimple_set_location (gsi_stmt (i), loc); | |||
3248 | } | |||
3249 | ||||
3250 | /* Release SSA_NAMEs in SEQ as well as the GIMPLE statements. */ | |||
3251 | ||||
3252 | void | |||
3253 | gimple_seq_discard (gimple_seq seq) | |||
3254 | { | |||
3255 | gimple_stmt_iterator gsi; | |||
3256 | ||||
3257 | for (gsi = gsi_start (seq); !gsi_end_p (gsi); ) | |||
3258 | { | |||
3259 | gimple *stmt = gsi_stmt (gsi); | |||
3260 | gsi_remove (&gsi, true); | |||
3261 | release_defs (stmt); | |||
3262 | ggc_free (stmt); | |||
3263 | } | |||
3264 | } | |||
3265 | ||||
3266 | /* See if STMT now calls function that takes no parameters and if so, drop | |||
3267 | call arguments. This is used when devirtualization machinery redirects | |||
3268 | to __builtin_unreachable or __cxa_pure_virtual. */ | |||
3269 | ||||
3270 | void | |||
3271 | maybe_remove_unused_call_args (struct function *fn, gimple *stmt) | |||
3272 | { | |||
3273 | tree decl = gimple_call_fndecl (stmt); | |||
3274 | if (TYPE_ARG_TYPES (TREE_TYPE (decl)) | |||
3275 | && TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl))) == void_type_node | |||
3276 | && gimple_call_num_args (stmt)) | |||
3277 | { | |||
3278 | gimple_set_num_ops (stmt, 3); | |||
3279 | update_stmt_fn (fn, stmt); | |||
3280 | } | |||
3281 | } | |||
3282 | ||||
3283 | /* Return false if STMT will likely expand to real function call. */ | |||
3284 | ||||
3285 | bool | |||
3286 | gimple_inexpensive_call_p (gcall *stmt) | |||
3287 | { | |||
3288 | if (gimple_call_internal_p (stmt)) | |||
3289 | return true; | |||
3290 | tree decl = gimple_call_fndecl (stmt); | |||
3291 | if (decl && is_inexpensive_builtin (decl)) | |||
3292 | return true; | |||
3293 | return false; | |||
3294 | } | |||
3295 | ||||
3296 | /* Return a non-artificial location for STMT. If STMT does not have | |||
3297 | location information, get the location from EXPR. */ | |||
3298 | ||||
3299 | location_t | |||
3300 | gimple_or_expr_nonartificial_location (gimple *stmt, tree expr) | |||
3301 | { | |||
3302 | location_t loc = gimple_nonartificial_location (stmt); | |||
3303 | if (loc == UNKNOWN_LOCATION && EXPR_HAS_LOCATION (expr)) | |||
3304 | loc = tree_nonartificial_location (expr); | |||
3305 | return expansion_point_location_if_in_system_header (loc); | |||
3306 | } | |||
3307 | ||||
3308 | ||||
3309 | #if CHECKING_P | |||
3310 | ||||
3311 | namespace selftest { | |||
3312 | ||||
3313 | /* Selftests for core gimple structures. */ | |||
3314 | ||||
3315 | /* Verify that STMT is pretty-printed as EXPECTED. | |||
3316 | Helper function for selftests. */ | |||
3317 | ||||
3318 | static void | |||
3319 | verify_gimple_pp (const char *expected, gimple *stmt) | |||
3320 | { | |||
3321 | pretty_printer pp; | |||
3322 | pp_gimple_stmt_1 (&pp, stmt, 0 /* spc */, TDF_NONE /* flags */); | |||
3323 | ASSERT_STREQ (expected, pp_formatted_text (&pp)); | |||
3324 | } | |||
3325 | ||||
3326 | /* Build a GIMPLE_ASSIGN equivalent to | |||
3327 | tmp = 5; | |||
3328 | and verify various properties of it. */ | |||
3329 | ||||
3330 | static void | |||
3331 | test_assign_single () | |||
3332 | { | |||
3333 | tree type = integer_type_node; | |||
3334 | tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |||
3335 | get_identifier ("tmp"), | |||
3336 | type); | |||
3337 | tree rhs = build_int_cst (type, 5); | |||
3338 | gassign *stmt = gimple_build_assign (lhs, rhs); | |||
3339 | verify_gimple_pp ("tmp = 5;", stmt); | |||
3340 | ||||
3341 | ASSERT_TRUE (is_gimple_assign (stmt)); | |||
3342 | ASSERT_EQ (lhs, gimple_assign_lhs (stmt)); | |||
3343 | ASSERT_EQ (lhs, gimple_get_lhs (stmt)); | |||
3344 | ASSERT_EQ (rhs, gimple_assign_rhs1 (stmt)); | |||
3345 | ASSERT_EQ (NULL, gimple_assign_rhs2 (stmt)); | |||
3346 | ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt)); | |||
3347 | ASSERT_TRUE (gimple_assign_single_p (stmt)); | |||
3348 | ASSERT_EQ (INTEGER_CST, gimple_assign_rhs_code (stmt)); | |||
3349 | } | |||
3350 | ||||
3351 | /* Build a GIMPLE_ASSIGN equivalent to | |||
3352 | tmp = a * b; | |||
3353 | and verify various properties of it. */ | |||
3354 | ||||
3355 | static void | |||
3356 | test_assign_binop () | |||
3357 | { | |||
3358 | tree type = integer_type_node; | |||
3359 | tree lhs = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |||
3360 | get_identifier ("tmp"), | |||
3361 | type); | |||
3362 | tree a = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |||
3363 | get_identifier ("a"), | |||
3364 | type); | |||
3365 | tree b = build_decl (UNKNOWN_LOCATION, VAR_DECL, | |||
3366 | get_identifier ("b"), | |||
3367 | type); | |||
3368 | gassign *stmt = gimple_build_assign (lhs, MULT_EXPR, a, b); | |||
3369 | verify_gimple_pp ("tmp = a * b;", stmt); | |||
3370 | ||||
3371 | ASSERT_TRUE (is_gimple_assign (stmt)); | |||
3372 | ASSERT_EQ (lhs, gimple_assign_lhs (stmt)); | |||
3373 | ASSERT_EQ (lhs, gimple_get_lhs (stmt)); | |||
3374 | ASSERT_EQ (a, gimple_assign_rhs1 (stmt)); | |||
3375 | ASSERT_EQ (b, gimple_assign_rhs2 (stmt)); | |||
3376 | ASSERT_EQ (NULL, gimple_assign_rhs3 (stmt)); | |||
3377 | ASSERT_FALSE (gimple_assign_single_p (stmt)); | |||
3378 | ASSERT_EQ (MULT_EXPR, gimple_assign_rhs_code (stmt)); | |||
3379 | } | |||
3380 | ||||
3381 | /* Build a GIMPLE_NOP and verify various properties of it. */ | |||
3382 | ||||
3383 | static void | |||
3384 | test_nop_stmt () | |||
3385 | { | |||
3386 | gimple *stmt = gimple_build_nop (); | |||
3387 | verify_gimple_pp ("GIMPLE_NOP", stmt); | |||
3388 | ASSERT_EQ (GIMPLE_NOP, gimple_code (stmt)); | |||
3389 | ASSERT_EQ (NULL, gimple_get_lhs (stmt)); | |||
3390 | ASSERT_FALSE (gimple_assign_single_p (stmt)); | |||
3391 | } | |||
3392 | ||||
3393 | /* Build a GIMPLE_RETURN equivalent to | |||
3394 | return 7; | |||
3395 | and verify various properties of it. */ | |||
3396 | ||||
3397 | static void | |||
3398 | test_return_stmt () | |||
3399 | { | |||
3400 | tree type = integer_type_node; | |||
3401 | tree val = build_int_cst (type, 7); | |||
3402 | greturn *stmt = gimple_build_return (val); | |||
3403 | verify_gimple_pp ("return 7;", stmt); | |||
3404 | ||||
3405 | ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt)); | |||
3406 | ASSERT_EQ (NULL, gimple_get_lhs (stmt)); | |||
3407 | ASSERT_EQ (val, gimple_return_retval (stmt)); | |||
3408 | ASSERT_FALSE (gimple_assign_single_p (stmt)); | |||
3409 | } | |||
3410 | ||||
3411 | /* Build a GIMPLE_RETURN equivalent to | |||
3412 | return; | |||
3413 | and verify various properties of it. */ | |||
3414 | ||||
3415 | static void | |||
3416 | test_return_without_value () | |||
3417 | { | |||
3418 | greturn *stmt = gimple_build_return (NULL); | |||
3419 | verify_gimple_pp ("return;", stmt); | |||
3420 | ||||
3421 | ASSERT_EQ (GIMPLE_RETURN, gimple_code (stmt)); | |||
3422 | ASSERT_EQ (NULL, gimple_get_lhs (stmt)); | |||
3423 | ASSERT_EQ (NULL, gimple_return_retval (stmt)); | |||
3424 | ASSERT_FALSE (gimple_assign_single_p (stmt)); | |||
3425 | } | |||
3426 | ||||
3427 | /* Run all of the selftests within this file. */ | |||
3428 | ||||
3429 | void | |||
3430 | gimple_c_tests () | |||
3431 | { | |||
3432 | test_assign_single (); | |||
3433 | test_assign_binop (); | |||
3434 | test_nop_stmt (); | |||
3435 | test_return_stmt (); | |||
3436 | test_return_without_value (); | |||
3437 | } | |||
3438 | ||||
3439 | } // namespace selftest | |||
3440 | ||||
3441 | ||||
3442 | #endif /* CHECKING_P */ |
1 | /* Vector API for GNU compiler. | ||||
2 | Copyright (C) 2004-2021 Free Software Foundation, Inc. | ||||
3 | Contributed by Nathan Sidwell <nathan@codesourcery.com> | ||||
4 | Re-implemented in C++ by Diego Novillo <dnovillo@google.com> | ||||
5 | |||||
6 | This file is part of GCC. | ||||
7 | |||||
8 | GCC is free software; you can redistribute it and/or modify it under | ||||
9 | the terms of the GNU General Public License as published by the Free | ||||
10 | Software Foundation; either version 3, or (at your option) any later | ||||
11 | version. | ||||
12 | |||||
13 | GCC is distributed in the hope that it will be useful, but WITHOUT ANY | ||||
14 | WARRANTY; without even the implied warranty of MERCHANTABILITY or | ||||
15 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | ||||
16 | for more details. | ||||
17 | |||||
18 | You should have received a copy of the GNU General Public License | ||||
19 | along with GCC; see the file COPYING3. If not see | ||||
20 | <http://www.gnu.org/licenses/>. */ | ||||
21 | |||||
22 | #ifndef GCC_VEC_H | ||||
23 | #define GCC_VEC_H | ||||
24 | |||||
25 | /* Some gen* file have no ggc support as the header file gtype-desc.h is | ||||
26 | missing. Provide these definitions in case ggc.h has not been included. | ||||
27 | This is not a problem because any code that runs before gengtype is built | ||||
28 | will never need to use GC vectors.*/ | ||||
29 | |||||
30 | extern void ggc_free (void *); | ||||
31 | extern size_t ggc_round_alloc_size (size_t requested_size); | ||||
32 | extern void *ggc_realloc (void *, size_t MEM_STAT_DECL); | ||||
33 | |||||
34 | /* Templated vector type and associated interfaces. | ||||
35 | |||||
36 | The interface functions are typesafe and use inline functions, | ||||
37 | sometimes backed by out-of-line generic functions. The vectors are | ||||
38 | designed to interoperate with the GTY machinery. | ||||
39 | |||||
40 | There are both 'index' and 'iterate' accessors. The index accessor | ||||
41 | is implemented by operator[]. The iterator returns a boolean | ||||
42 | iteration condition and updates the iteration variable passed by | ||||
43 | reference. Because the iterator will be inlined, the address-of | ||||
44 | can be optimized away. | ||||
45 | |||||
46 | Each operation that increases the number of active elements is | ||||
47 | available in 'quick' and 'safe' variants. The former presumes that | ||||
48 | there is sufficient allocated space for the operation to succeed | ||||
49 | (it dies if there is not). The latter will reallocate the | ||||
50 | vector, if needed. Reallocation causes an exponential increase in | ||||
51 | vector size. If you know you will be adding N elements, it would | ||||
52 | be more efficient to use the reserve operation before adding the | ||||
53 | elements with the 'quick' operation. This will ensure there are at | ||||
54 | least as many elements as you ask for, it will exponentially | ||||
55 | increase if there are too few spare slots. If you want to reserve a | ||||
56 | specific number of slots, but do not want the exponential increase | ||||
57 | (for instance, you know this is the last allocation), use the | ||||
58 | reserve_exact operation. You can also create a vector of a | ||||
59 | specific size from the get go. | ||||
60 | |||||
61 | You should prefer the push and pop operations, as they append and | ||||
62 | remove from the end of the vector. If you need to remove several | ||||
63 | items in one go, use the truncate operation. The insert and remove | ||||
64 | operations allow you to change elements in the middle of the | ||||
65 | vector. There are two remove operations, one which preserves the | ||||
66 | element ordering 'ordered_remove', and one which does not | ||||
67 | 'unordered_remove'. The latter function copies the end element | ||||
68 | into the removed slot, rather than invoke a memmove operation. The | ||||
69 | 'lower_bound' function will determine where to place an item in the | ||||
70 | array using insert that will maintain sorted order. | ||||
71 | |||||
72 | Vectors are template types with three arguments: the type of the | ||||
73 | elements in the vector, the allocation strategy, and the physical | ||||
74 | layout to use | ||||
75 | |||||
76 | Four allocation strategies are supported: | ||||
77 | |||||
78 | - Heap: allocation is done using malloc/free. This is the | ||||
79 | default allocation strategy. | ||||
80 | |||||
81 | - GC: allocation is done using ggc_alloc/ggc_free. | ||||
82 | |||||
83 | - GC atomic: same as GC with the exception that the elements | ||||
84 | themselves are assumed to be of an atomic type that does | ||||
85 | not need to be garbage collected. This means that marking | ||||
86 | routines do not need to traverse the array marking the | ||||
87 | individual elements. This increases the performance of | ||||
88 | GC activities. | ||||
89 | |||||
90 | Two physical layouts are supported: | ||||
91 | |||||
92 | - Embedded: The vector is structured using the trailing array | ||||
93 | idiom. The last member of the structure is an array of size | ||||
94 | 1. When the vector is initially allocated, a single memory | ||||
95 | block is created to hold the vector's control data and the | ||||
96 | array of elements. These vectors cannot grow without | ||||
97 | reallocation (see discussion on embeddable vectors below). | ||||
98 | |||||
99 | - Space efficient: The vector is structured as a pointer to an | ||||
100 | embedded vector. This is the default layout. It means that | ||||
101 | vectors occupy a single word of storage before initial | ||||
102 | allocation. Vectors are allowed to grow (the internal | ||||
103 | pointer is reallocated but the main vector instance does not | ||||
104 | need to relocate). | ||||
105 | |||||
106 | The type, allocation and layout are specified when the vector is | ||||
107 | declared. | ||||
108 | |||||
109 | If you need to directly manipulate a vector, then the 'address' | ||||
110 | accessor will return the address of the start of the vector. Also | ||||
111 | the 'space' predicate will tell you whether there is spare capacity | ||||
112 | in the vector. You will not normally need to use these two functions. | ||||
113 | |||||
114 | Notes on the different layout strategies | ||||
115 | |||||
116 | * Embeddable vectors (vec<T, A, vl_embed>) | ||||
117 | |||||
118 | These vectors are suitable to be embedded in other data | ||||
119 | structures so that they can be pre-allocated in a contiguous | ||||
120 | memory block. | ||||
121 | |||||
122 | Embeddable vectors are implemented using the trailing array | ||||
123 | idiom, thus they are not resizeable without changing the address | ||||
124 | of the vector object itself. This means you cannot have | ||||
125 | variables or fields of embeddable vector type -- always use a | ||||
126 | pointer to a vector. The one exception is the final field of a | ||||
127 | structure, which could be a vector type. | ||||
128 | |||||
129 | You will have to use the embedded_size & embedded_init calls to | ||||
130 | create such objects, and they will not be resizeable (so the | ||||
131 | 'safe' allocation variants are not available). | ||||
132 | |||||
133 | Properties of embeddable vectors: | ||||
134 | |||||
135 | - The whole vector and control data are allocated in a single | ||||
136 | contiguous block. It uses the trailing-vector idiom, so | ||||
137 | allocation must reserve enough space for all the elements | ||||
138 | in the vector plus its control data. | ||||
139 | - The vector cannot be re-allocated. | ||||
140 | - The vector cannot grow nor shrink. | ||||
141 | - No indirections needed for access/manipulation. | ||||
142 | - It requires 2 words of storage (prior to vector allocation). | ||||
143 | |||||
144 | |||||
145 | * Space efficient vector (vec<T, A, vl_ptr>) | ||||
146 | |||||
147 | These vectors can grow dynamically and are allocated together | ||||
148 | with their control data. They are suited to be included in data | ||||
149 | structures. Prior to initial allocation, they only take a single | ||||
150 | word of storage. | ||||
151 | |||||
152 | These vectors are implemented as a pointer to embeddable vectors. | ||||
153 | The semantics allow for this pointer to be NULL to represent | ||||
154 | empty vectors. This way, empty vectors occupy minimal space in | ||||
155 | the structure containing them. | ||||
156 | |||||
157 | Properties: | ||||
158 | |||||
159 | - The whole vector and control data are allocated in a single | ||||
160 | contiguous block. | ||||
161 | - The whole vector may be re-allocated. | ||||
162 | - Vector data may grow and shrink. | ||||
163 | - Access and manipulation requires a pointer test and | ||||
164 | indirection. | ||||
165 | - It requires 1 word of storage (prior to vector allocation). | ||||
166 | |||||
167 | An example of their use would be, | ||||
168 | |||||
169 | struct my_struct { | ||||
170 | // A space-efficient vector of tree pointers in GC memory. | ||||
171 | vec<tree, va_gc, vl_ptr> v; | ||||
172 | }; | ||||
173 | |||||
174 | struct my_struct *s; | ||||
175 | |||||
176 | if (s->v.length ()) { we have some contents } | ||||
177 | s->v.safe_push (decl); // append some decl onto the end | ||||
178 | for (ix = 0; s->v.iterate (ix, &elt); ix++) | ||||
179 | { do something with elt } | ||||
180 | */ | ||||
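As a quick illustration of the conventions described in the comment above (reserve once before a batch of 'quick' pushes, use the 'safe' operations otherwise, and release heap vectors explicitly), here is a minimal sketch. It is not part of vec.h; the function name and values are invented, and it assumes the usual GCC includes are already in place.

static void
example_heap_vec_usage (void)
{
  vec<int> v = vNULL;            /* Empty; no storage allocated yet.  */

  /* Exactly three pushes follow, so reserve once and use the 'quick'
     variant, which assumes space is already available.  */
  v.reserve_exact (3);
  for (int i = 0; i < 3; i++)
    v.quick_push (i);

  /* A 'safe' push reallocates (growing exponentially) if needed.  */
  v.safe_push (42);

  while (!v.is_empty ())
    v.pop ();

  v.release ();                  /* Heap vectors are freed explicitly.  */
}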
181 | |||||
182 | /* Support function for statistics. */ | ||||
183 | extern void dump_vec_loc_statistics (void); | ||||
184 | |||||
185 | /* Hashtable mapping vec addresses to descriptors. */ | ||||
186 | extern htab_t vec_mem_usage_hash; | ||||
187 | |||||
188 | /* Control data for vectors. This contains the number of allocated | ||||
189 | and used slots inside a vector. */ | ||||
190 | |||||
191 | struct vec_prefix | ||||
192 | { | ||||
193 | /* FIXME - These fields should be private, but we need to cater to | ||||
194 | compilers that have stricter notions of PODness for types. */ | ||||
195 | |||||
196 | /* Memory allocation support routines in vec.c. */ | ||||
197 | void register_overhead (void *, size_t, size_t CXX_MEM_STAT_INFO); | ||||
198 | void release_overhead (void *, size_t, size_t, bool CXX_MEM_STAT_INFO); | ||||
199 | static unsigned calculate_allocation (vec_prefix *, unsigned, bool); | ||||
200 | static unsigned calculate_allocation_1 (unsigned, unsigned); | ||||
201 | |||||
202 | /* Note that vec_prefix should be a base class for vec, but we use | ||||
203 | offsetof() on vector fields of tree structures (e.g., | ||||
204 | tree_binfo::base_binfos), and offsetof only supports base types. | ||||
205 | |||||
206 | To compensate, we make vec_prefix a field inside vec and make | ||||
207 | vec a friend class of vec_prefix so it can access its fields. */ | ||||
208 | template <typename, typename, typename> friend struct vec; | ||||
209 | |||||
210 | /* The allocator types also need access to our internals. */ | ||||
211 | friend struct va_gc; | ||||
212 | friend struct va_gc_atomic; | ||||
213 | friend struct va_heap; | ||||
214 | |||||
215 | unsigned m_alloc : 31; | ||||
216 | unsigned m_using_auto_storage : 1; | ||||
217 | unsigned m_num; | ||||
218 | }; | ||||
219 | |||||
220 | /* Calculate the number of slots to reserve a vector, making sure that | ||||
221 | RESERVE slots are free. If EXACT grow exactly, otherwise grow | ||||
222 | exponentially. PFX is the control data for the vector. */ | ||||
223 | |||||
224 | inline unsigned | ||||
225 | vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve, | ||||
226 | bool exact) | ||||
227 | { | ||||
228 | if (exact) | ||||
229 | return (pfx ? pfx->m_num : 0) + reserve; | ||||
230 | else if (!pfx) | ||||
231 | return MAX (4, reserve); | ||||
232 | return calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve); | ||||
233 | } | ||||
234 | |||||
235 | template<typename, typename, typename> struct vec; | ||||
236 | |||||
237 | /* Valid vector layouts | ||||
238 | |||||
239 | vl_embed - Embeddable vector that uses the trailing array idiom. | ||||
240 | vl_ptr - Space efficient vector that uses a pointer to an | ||||
241 | embeddable vector. */ | ||||
242 | struct vl_embed { }; | ||||
243 | struct vl_ptr { }; | ||||
244 | |||||
245 | |||||
246 | /* Types of supported allocations | ||||
247 | |||||
248 | va_heap - Allocation uses malloc/free. | ||||
249 | va_gc - Allocation uses ggc_alloc. | ||||
250 | va_gc_atomic - Same as GC, but individual elements of the array | ||||
251 | do not need to be marked during collection. */ | ||||
252 | |||||
253 | /* Allocator type for heap vectors. */ | ||||
254 | struct va_heap | ||||
255 | { | ||||
256 | /* Heap vectors are frequently regular instances, so use the vl_ptr | ||||
257 | layout for them. */ | ||||
258 | typedef vl_ptr default_layout; | ||||
259 | |||||
260 | template<typename T> | ||||
261 | static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool | ||||
262 | CXX_MEM_STAT_INFO); | ||||
263 | |||||
264 | template<typename T> | ||||
265 | static void release (vec<T, va_heap, vl_embed> *&); | ||||
266 | }; | ||||
267 | |||||
268 | |||||
269 | /* Allocator for heap memory. Ensure there are at least RESERVE free | ||||
270 | slots in V. If EXACT is true, grow exactly, else grow | ||||
271 | exponentially. As a special case, if the vector had not been | ||||
272 | allocated and RESERVE is 0, no vector will be created. */ | ||||
273 | |||||
274 | template<typename T> | ||||
275 | inline void | ||||
276 | va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact | ||||
277 | MEM_STAT_DECL) | ||||
278 | { | ||||
279 | size_t elt_size = sizeof (T); | ||||
280 | unsigned alloc | ||||
281 | = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact); | ||||
282 | gcc_checking_assert (alloc); | ||||
283 | |||||
284 | if (GATHER_STATISTICS && v) | ||||
285 | v->m_vecpfx.release_overhead (v, elt_size * v->allocated (), | ||||
286 | v->allocated (), false); | ||||
287 | |||||
288 | size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc); | ||||
289 | unsigned nelem = v ? v->length () : 0; | ||||
290 | v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size)); | ||||
291 | v->embedded_init (alloc, nelem); | ||||
292 | |||||
293 | if (GATHER_STATISTICS) | ||||
294 | v->m_vecpfx.register_overhead (v, alloc, elt_size PASS_MEM_STAT); | ||||
295 | } | ||||
296 | |||||
297 | |||||
298 | #if GCC_VERSION >= 4007 | ||||
299 | #pragma GCC diagnostic push | ||||
300 | #pragma GCC diagnostic ignored "-Wfree-nonheap-object" | ||||
301 | #endif | ||||
302 | |||||
303 | /* Free the heap space allocated for vector V. */ | ||||
304 | |||||
305 | template<typename T> | ||||
306 | void | ||||
307 | va_heap::release (vec<T, va_heap, vl_embed> *&v) | ||||
308 | { | ||||
309 | size_t elt_size = sizeof (T); | ||||
310 | if (v == NULL) | ||||
311 | return; | ||||
312 | |||||
313 | if (GATHER_STATISTICS) | ||||
314 | v->m_vecpfx.release_overhead (v, elt_size * v->allocated (), | ||||
315 | v->allocated (), true); | ||||
316 | ::free (v); | ||||
317 | v = NULL; | ||||
318 | } | ||||
319 | |||||
320 | #if GCC_VERSION >= 4007 | ||||
321 | #pragma GCC diagnostic pop | ||||
322 | #endif | ||||
323 | |||||
324 | /* Allocator type for GC vectors. Notice that we need the structure | ||||
325 | declaration even if GC is not enabled. */ | ||||
326 | |||||
327 | struct va_gc | ||||
328 | { | ||||
329 | /* Use vl_embed as the default layout for GC vectors. Due to GTY | ||||
330 | limitations, GC vectors must always be pointers, so it is more | ||||
331 | efficient to use a pointer to the vl_embed layout, rather than | ||||
332 | using a pointer to a pointer as would be the case with vl_ptr. */ | ||||
333 | typedef vl_embed default_layout; | ||||
334 | |||||
335 | template<typename T, typename A> | ||||
336 | static void reserve (vec<T, A, vl_embed> *&, unsigned, bool | ||||
337 | CXX_MEM_STAT_INFO); | ||||
338 | |||||
339 | template<typename T, typename A> | ||||
340 | static void release (vec<T, A, vl_embed> *&v); | ||||
341 | }; | ||||
342 | |||||
343 | |||||
344 | /* Free GC memory used by V and reset V to NULL. */ | ||||
345 | |||||
346 | template<typename T, typename A> | ||||
347 | inline void | ||||
348 | va_gc::release (vec<T, A, vl_embed> *&v) | ||||
349 | { | ||||
350 | if (v) | ||||
351 | ::ggc_free (v); | ||||
352 | v = NULL; | ||||
353 | } | ||||
354 | |||||
355 | |||||
356 | /* Allocator for GC memory. Ensure there are at least RESERVE free | ||||
357 | slots in V. If EXACT is true, grow exactly, else grow | ||||
358 | exponentially. As a special case, if the vector had not been | ||||
359 | allocated and RESERVE is 0, no vector will be created. */ | ||||
360 | |||||
361 | template<typename T, typename A> | ||||
362 | void | ||||
363 | va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact | ||||
364 | MEM_STAT_DECL) | ||||
365 | { | ||||
366 | unsigned alloc | ||||
367 | = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact); | ||||
368 | if (!alloc) | ||||
369 | { | ||||
370 | ::ggc_free (v); | ||||
371 | v = NULL; | ||||
372 | return; | ||||
373 | } | ||||
374 | |||||
375 | /* Calculate the amount of space we want. */ | ||||
376 | size_t size = vec<T, A, vl_embed>::embedded_size (alloc); | ||||
377 | |||||
378 | /* Ask the allocator how much space it will really give us. */ | ||||
379 | size = ::ggc_round_alloc_size (size); | ||||
380 | |||||
381 | /* Adjust the number of slots accordingly. */ | ||||
382 | size_t vec_offset = sizeof (vec_prefix); | ||||
383 | size_t elt_size = sizeof (T); | ||||
384 | alloc = (size - vec_offset) / elt_size; | ||||
385 | |||||
386 | /* And finally, recalculate the amount of space we ask for. */ | ||||
387 | size = vec_offset + alloc * elt_size; | ||||
388 | |||||
389 | unsigned nelem = v ? v->length () : 0; | ||||
390 | v = static_cast <vec<T, A, vl_embed> *> (::ggc_realloc (v, size | ||||
391 | PASS_MEM_STAT)); | ||||
392 | v->embedded_init (alloc, nelem); | ||||
393 | } | ||||
394 | |||||
395 | |||||
396 | /* Allocator type for GC vectors. This is for vectors of types | ||||
397 | atomics w.r.t. collection, so allocation and deallocation is | ||||
398 | completely inherited from va_gc. */ | ||||
399 | struct va_gc_atomic : va_gc | ||||
400 | { | ||||
401 | }; | ||||
402 | |||||
403 | |||||
404 | /* Generic vector template. Default values for A and L indicate the | ||||
405 | most commonly used strategies. | ||||
406 | |||||
407 | FIXME - Ideally, they would all be vl_ptr to encourage using regular | ||||
408 | instances for vectors, but the existing GTY machinery is limited | ||||
409 | in that it can only deal with GC objects that are pointers | ||||
410 | themselves. | ||||
411 | |||||
412 | This means that vector operations that need to deal with | ||||
413 | potentially NULL pointers, must be provided as free | ||||
414 | functions (see the vec_safe_* functions above). */ | ||||
415 | template<typename T, | ||||
416 | typename A = va_heap, | ||||
417 | typename L = typename A::default_layout> | ||||
418 | struct GTY((user)) vec | ||||
419 | { | ||||
420 | }; | ||||
421 | |||||
422 | /* Allow C++11 range-based 'for' to work directly on vec<T>*. */ | ||||
423 | template<typename T, typename A, typename L> | ||||
424 | T* begin (vec<T,A,L> *v) { return v ? v->begin () : nullptr; } | ||||
425 | template<typename T, typename A, typename L> | ||||
426 | T* end (vec<T,A,L> *v) { return v ? v->end () : nullptr; } | ||||
427 | template<typename T, typename A, typename L> | ||||
428 | const T* begin (const vec<T,A,L> *v) { return v ? v->begin () : nullptr; } | ||||
429 | template<typename T, typename A, typename L> | ||||
430 | const T* end (const vec<T,A,L> *v) { return v ? v->end () : nullptr; } | ||||
431 | |||||
432 | /* Generic vec<> debug helpers. | ||||
433 | |||||
434 | These need to be instantiated for each vec<TYPE> used throughout | ||||
435 | the compiler like this: | ||||
436 | |||||
437 | DEFINE_DEBUG_VEC (TYPE) | ||||
438 | |||||
439 | The reason we have a debug_helper() is because GDB can't | ||||
440 | disambiguate a plain call to debug(some_vec), and it must be called | ||||
441 | like debug<TYPE>(some_vec). */ | ||||
442 | |||||
443 | template<typename T> | ||||
444 | void | ||||
445 | debug_helper (vec<T> &ref) | ||||
446 | { | ||||
447 | unsigned i; | ||||
448 | for (i = 0; i < ref.length (); ++i) | ||||
449 | { | ||||
450 | fprintf (stderr, "[%d] = ", i); | ||||
451 | debug_slim (ref[i]); | ||||
452 | fputc ('\n', stderr); | ||||
453 | } | ||||
454 | } | ||||
455 | |||||
456 | /* We need a separate va_gc variant here because default template | ||||
457 | argument for functions cannot be used in c++-98. Once this | ||||
458 | restriction is removed, those variant should be folded with the | ||||
459 | above debug_helper. */ | ||||
460 | |||||
461 | template<typename T> | ||||
462 | void | ||||
463 | debug_helper (vec<T, va_gc> &ref) | ||||
464 | { | ||||
465 | unsigned i; | ||||
466 | for (i = 0; i < ref.length (); ++i) | ||||
467 | { | ||||
468 | fprintf (stderr, "[%d] = ", i); | ||||
469 | debug_slim (ref[i]); | ||||
470 | fputc ('\n', stderr); | ||||
471 | } | ||||
472 | } | ||||
473 | |||||
474 | /* Macro to define debug(vec<T>) and debug(vec<T, va_gc>) helper | ||||
475 | functions for a type T. */ | ||||
476 | |||||
477 | #define DEFINE_DEBUG_VEC(T) \ | ||||
478 | template void debug_helper (vec<T> &); \ | ||||
479 | template void debug_helper (vec<T, va_gc> &); \ | ||||
480 | /* Define the vec<T> debug functions. */ \ | ||||
481 | DEBUG_FUNCTION void \ | ||||
482 | debug (vec<T> &ref) \ | ||||
483 | { \ | ||||
484 | debug_helper <T> (ref); \ | ||||
485 | } \ | ||||
486 | DEBUG_FUNCTION void \ | ||||
487 | debug (vec<T> *ptr) \ | ||||
488 | { \ | ||||
489 | if (ptr) \ | ||||
490 | debug (*ptr); \ | ||||
491 | else \ | ||||
492 | fprintf (stderr, "<nil>\n"); \ | ||||
493 | } \ | ||||
494 | /* Define the vec<T, va_gc> debug functions. */ \ | ||||
495 | DEBUG_FUNCTION void \ | ||||
496 | debug (vec<T, va_gc> &ref) \ | ||||
497 | { \ | ||||
498 | debug_helper <T> (ref); \ | ||||
499 | } \ | ||||
500 | DEBUG_FUNCTION void \ | ||||
501 | debug (vec<T, va_gc> *ptr) \ | ||||
502 | { \ | ||||
503 | if (ptr) \ | ||||
504 | debug (*ptr); \ | ||||
505 | else \ | ||||
506 | fprintf (stderr, "<nil>\n"); \ | ||||
507 | } | ||||
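For context, a translation unit would instantiate these helpers roughly as in the sketch below. This is illustrative only: the `my_item` type and its printer are invented for the example, and DEFINE_DEBUG_VEC assumes a `debug_slim` overload for the element type is already available.

struct my_item { int id; };

/* Printer for a single element; the debug_helper instantiations call it.  */
DEBUG_FUNCTION void
debug_slim (my_item &item)
{
  fprintf (stderr, "my_item %d", item.id);
}

/* Emits debug (vec<my_item> &), debug (vec<my_item> *) and the
   corresponding va_gc variants for this element type.  */
DEFINE_DEBUG_VEC (my_item)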
508 | |||||
509 | /* Default-construct N elements in DST. */ | ||||
510 | |||||
511 | template <typename T> | ||||
512 | inline void | ||||
513 | vec_default_construct (T *dst, unsigned n) | ||||
514 | { | ||||
515 | #ifdef BROKEN_VALUE_INITIALIZATION | ||||
516 | /* Versions of GCC before 4.4 sometimes leave certain objects | ||||
517 | uninitialized when value initialized, though if the type has | ||||
518 | user defined default ctor, that ctor is invoked. As a workaround | ||||
519 | perform clearing first and then the value initialization, which | ||||
520 | fixes the case when value initialization doesn't initialize due to | ||||
521 | the bugs and should initialize to all zeros, but still allows | ||||
522 | vectors for types with user defined default ctor that initializes | ||||
523 | some or all elements to non-zero. If T has no user defined | ||||
524 | default ctor and some non-static data members have user defined | ||||
525 | default ctors that initialize to non-zero the workaround will | ||||
526 | still not work properly; in that case we just need to provide | ||||
527 | user defined default ctor. */ | ||||
528 | memset (dst, '\0', sizeof (T) * n); | ||||
529 | #endif | ||||
530 | for ( ; n; ++dst, --n) | ||||
531 | ::new (static_cast<void*>(dst)) T (); | ||||
532 | } | ||||
533 | |||||
534 | /* Copy-construct N elements in DST from *SRC. */ | ||||
535 | |||||
536 | template <typename T> | ||||
537 | inline void | ||||
538 | vec_copy_construct (T *dst, const T *src, unsigned n) | ||||
539 | { | ||||
540 | for ( ; n; ++dst, ++src, --n) | ||||
541 | ::new (static_cast<void*>(dst)) T (*src); | ||||
542 | } | ||||
543 | |||||
544 | /* Type to provide NULL values for vec<T, A, L>. This is used to | ||||
545 | provide nil initializers for vec instances. Since vec must be | ||||
546 | a POD, we cannot have proper ctor/dtor for it. To initialize | ||||
547 | a vec instance, you can assign it the value vNULL. This isn't | ||||
548 | needed for file-scope and function-local static vectors, which | ||||
549 | are zero-initialized by default. */ | ||||
550 | struct vnull | ||||
551 | { | ||||
552 | template <typename T, typename A, typename L> | ||||
553 | CONSTEXPR operator vec<T, A, L> () const { return vec<T, A, L>(); } | ||||
554 | }; | ||||
555 | extern vnull vNULL; | ||||
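A short sketch of the initialization idiom the comment above describes; the structure and field names are invented for illustration. Automatic and heap-allocated vec instances are nil-initialized by assigning vNULL, while file-scope and function-local statics rely on zero initialization.

struct edit_record
{
  vec<int> line_numbers;         /* POD member: no constructor runs.  */
};

static void
init_edit_record (edit_record *er)
{
  er->line_numbers = vNULL;      /* Explicit nil initialization.  */
  er->line_numbers.safe_push (1);
}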
556 | |||||
557 | |||||
558 | /* Embeddable vector. These vectors are suitable to be embedded | ||||
559 | in other data structures so that they can be pre-allocated in a | ||||
560 | contiguous memory block. | ||||
561 | |||||
562 | Embeddable vectors are implemented using the trailing array idiom, | ||||
563 | thus they are not resizeable without changing the address of the | ||||
564 | vector object itself. This means you cannot have variables or | ||||
565 | fields of embeddable vector type -- always use a pointer to a | ||||
566 | vector. The one exception is the final field of a structure, which | ||||
567 | could be a vector type. | ||||
568 | |||||
569 | You will have to use the embedded_size & embedded_init calls to | ||||
570 | create such objects, and they will not be resizeable (so the 'safe' | ||||
571 | allocation variants are not available). | ||||
572 | |||||
573 | Properties: | ||||
574 | |||||
575 | - The whole vector and control data are allocated in a single | ||||
576 | contiguous block. It uses the trailing-vector idiom, so | ||||
577 | allocation must reserve enough space for all the elements | ||||
578 | in the vector plus its control data. | ||||
579 | - The vector cannot be re-allocated. | ||||
580 | - The vector cannot grow nor shrink. | ||||
581 | - No indirections needed for access/manipulation. | ||||
582 | - It requires 2 words of storage (prior to vector allocation). */ | ||||
583 | |||||
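Before the definition itself, a hedged sketch of the embedded_size / embedded_init protocol mentioned above: the caller sizes one contiguous block, initializes the control data in place, and from then on may only use the 'quick' operations. The helper function below is invented for illustration and simply leaks ownership to its caller.

static vec<int, va_heap, vl_embed> *
make_embedded_int_vec (unsigned nslots)
{
  /* One contiguous block: control data plus NSLOTS elements.  */
  size_t bytes = vec<int, va_heap, vl_embed>::embedded_size (nslots);
  void *block = xmalloc (bytes);

  vec<int, va_heap, vl_embed> *ev
    = static_cast<vec<int, va_heap, vl_embed> *> (block);
  ev->embedded_init (nslots);    /* NSLOTS allocated, zero in use.  */

  for (unsigned i = 0; i < nslots; i++)
    ev->quick_push ((int) i);    /* 'quick' only: it can never grow.  */
  return ev;                     /* Caller frees the whole block.  */
}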
584 | template<typename T, typename A> | ||||
585 | struct GTY((user)) vec<T, A, vl_embed> | ||||
586 | { | ||||
587 | public: | ||||
588 | unsigned allocated (void) const { return m_vecpfx.m_alloc; } | ||||
589 | unsigned length (void) const { return m_vecpfx.m_num; } | ||||
590 | bool is_empty (void) const { return m_vecpfx.m_num == 0; } | ||||
591 | T *address (void) { return m_vecdata; } | ||||
592 | const T *address (void) const { return m_vecdata; } | ||||
593 | T *begin () { return address (); } | ||||
594 | const T *begin () const { return address (); } | ||||
595 | T *end () { return address () + length (); } | ||||
596 | const T *end () const { return address () + length (); } | ||||
597 | const T &operator[] (unsigned) const; | ||||
598 | T &operator[] (unsigned); | ||||
599 | T &last (void); | ||||
600 | bool space (unsigned) const; | ||||
601 | bool iterate (unsigned, T *) const; | ||||
602 | bool iterate (unsigned, T **) const; | ||||
603 | vec *copy (ALONE_CXX_MEM_STAT_INFO) const; | ||||
604 | void splice (const vec &); | ||||
605 | void splice (const vec *src); | ||||
606 | T *quick_push (const T &); | ||||
607 | T &pop (void); | ||||
608 | void truncate (unsigned); | ||||
609 | void quick_insert (unsigned, const T &); | ||||
610 | void ordered_remove (unsigned); | ||||
611 | void unordered_remove (unsigned); | ||||
612 | void block_remove (unsigned, unsigned); | ||||
613 | void qsort (int (*) (const void *, const void *)); | ||||
614 | void sort (int (*) (const void *, const void *, void *), void *); | ||||
615 | T *bsearch (const void *key, int (*compar)(const void *, const void *)); | ||||
616 | T *bsearch (const void *key, | ||||
617 | int (*compar)(const void *, const void *, void *), void *); | ||||
618 | unsigned lower_bound (T, bool (*)(const T &, const T &)) const; | ||||
619 | bool contains (const T &search) const; | ||||
620 | static size_t embedded_size (unsigned); | ||||
621 | void embedded_init (unsigned, unsigned = 0, unsigned = 0); | ||||
622 | void quick_grow (unsigned len); | ||||
623 | void quick_grow_cleared (unsigned len); | ||||
624 | |||||
625 | /* vec class can access our internal data and functions. */ | ||||
626 | template <typename, typename, typename> friend struct vec; | ||||
627 | |||||
628 | /* The allocator types also need access to our internals. */ | ||||
629 | friend struct va_gc; | ||||
630 | friend struct va_gc_atomic; | ||||
631 | friend struct va_heap; | ||||
632 | |||||
633 | /* FIXME - These fields should be private, but we need to cater to | ||||
634 | compilers that have stricter notions of PODness for types. */ | ||||
635 | vec_prefix m_vecpfx; | ||||
636 | T m_vecdata[1]; | ||||
637 | }; | ||||
638 | |||||
639 | |||||
640 | /* Convenience wrapper functions to use when dealing with pointers to | ||||
641 | embedded vectors. Some functionality for these vectors must be | ||||
642 | provided via free functions for these reasons: | ||||
643 | |||||
644 | 1- The pointer may be NULL (e.g., before initial allocation). | ||||
645 | |||||
646 | 2- When the vector needs to grow, it must be reallocated, so | ||||
647 | the pointer will change its value. | ||||
648 | |||||
649 | Because of limitations with the current GC machinery, all vectors | ||||
650 | in GC memory *must* be pointers. */ | ||||
651 | |||||
652 | |||||
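To make the point about null pointers concrete, a small sketch using only the wrappers defined below; the function is invented and assumes a heap-allocated embedded vector.

static void
example_vec_safe_wrappers (void)
{
  vec<int, va_heap, vl_embed> *v = NULL;   /* No allocation yet.  */

  gcc_assert (vec_safe_length (v) == 0);   /* NULL acts as an empty vector.  */
  vec_safe_reserve (v, 8);                 /* Allocates; V itself changes.  */
  gcc_assert (vec_safe_space (v, 8));
  v->quick_push (1);                       /* Space was reserved above.  */

  vec_free (v);                            /* Releases and resets V to NULL.  */
}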
653 | /* If V contains no room for NELEMS elements, return false. Otherwise, | ||||
654 | return true. */ | ||||
655 | template<typename T, typename A> | ||||
656 | inline bool | ||||
657 | vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems) | ||||
658 | { | ||||
659 | return v ? v->space (nelems) : nelems == 0; | ||||
660 | } | ||||
661 | |||||
662 | |||||
663 | /* If V is NULL, return 0. Otherwise, return V->length(). */ | ||||
664 | template<typename T, typename A> | ||||
665 | inline unsigned | ||||
666 | vec_safe_length (const vec<T, A, vl_embed> *v) | ||||
667 | { | ||||
668 | return v ? v->length () : 0; | ||||
669 | } | ||||
670 | |||||
671 | |||||
672 | /* If V is NULL, return NULL. Otherwise, return V->address(). */ | ||||
673 | template<typename T, typename A> | ||||
674 | inline T * | ||||
675 | vec_safe_address (vec<T, A, vl_embed> *v) | ||||
676 | { | ||||
677 | return v ? v->address () : NULL;
678 | } | ||||
679 | |||||
680 | |||||
681 | /* If V is NULL, return true. Otherwise, return V->is_empty(). */ | ||||
682 | template<typename T, typename A> | ||||
683 | inline bool | ||||
684 | vec_safe_is_empty (vec<T, A, vl_embed> *v) | ||||
685 | { | ||||
686 | return v ? v->is_empty () : true; | ||||
687 | } | ||||
688 | |||||
689 | /* If V does not have space for NELEMS elements, call | ||||
690 | V->reserve(NELEMS, EXACT). */ | ||||
691 | template<typename T, typename A> | ||||
692 | inline bool | ||||
693 | vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false | ||||
694 | CXX_MEM_STAT_INFO) | ||||
695 | { | ||||
696 | bool extend = nelems ? !vec_safe_space (v, nelems) : false; | ||||
697 | if (extend)
698 | A::reserve (v, nelems, exact PASS_MEM_STAT); | ||||
699 | return extend; | ||||
700 | } | ||||
701 | |||||
702 | template<typename T, typename A> | ||||
703 | inline bool | ||||
704 | vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems | ||||
705 | CXX_MEM_STAT_INFO) | ||||
706 | { | ||||
707 | return vec_safe_reserve (v, nelems, true PASS_MEM_STAT); | ||||
708 | } | ||||
709 | |||||
710 | |||||
711 | /* Allocate GC memory for V with space for NELEMS slots. If NELEMS | ||||
712 | is 0, V is initialized to NULL. */ | ||||
713 | |||||
714 | template<typename T, typename A> | ||||
715 | inline void | ||||
716 | vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems CXX_MEM_STAT_INFO) | ||||
717 | { | ||||
718 | v = NULL;
719 | vec_safe_reserve (v, nelems, false PASS_MEM_STAT); | ||||
720 | } | ||||
721 | |||||
722 | |||||
723 | /* Free the GC memory allocated by vector V and set it to NULL. */ | ||||
724 | |||||
725 | template<typename T, typename A> | ||||
726 | inline void | ||||
727 | vec_free (vec<T, A, vl_embed> *&v) | ||||
728 | { | ||||
729 | A::release (v); | ||||
730 | } | ||||
731 | |||||
732 | |||||
733 | /* Grow V to length LEN. Allocate it, if necessary. */ | ||||
734 | template<typename T, typename A> | ||||
735 | inline void | ||||
736 | vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len, | ||||
737 | bool exact = false CXX_MEM_STAT_INFO) | ||||
738 | { | ||||
739 | unsigned oldlen = vec_safe_length (v); | ||||
740 | gcc_checking_assert (len >= oldlen);
741 | vec_safe_reserve (v, len - oldlen, exact PASS_MEM_STAT); | ||||
742 | v->quick_grow (len); | ||||
743 | } | ||||
744 | |||||
745 | |||||
746 | /* If V is NULL, allocate it. Call V->safe_grow_cleared(LEN). */ | ||||
747 | template<typename T, typename A> | ||||
748 | inline void | ||||
749 | vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len, | ||||
750 | bool exact = false CXX_MEM_STAT_INFO) | ||||
751 | { | ||||
752 | unsigned oldlen = vec_safe_length (v); | ||||
753 | vec_safe_grow (v, len, exact PASS_MEM_STAT); | ||||
754 | vec_default_construct (v->address () + oldlen, len - oldlen); | ||||
755 | } | ||||
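
/* Usage sketch (illustrative only; assumes a GC-allocated vector of ints
   that is rooted elsewhere via the usual GTY machinery):

     vec<int, va_gc> *v = NULL;
     vec_safe_grow_cleared (v, 8);
     (*v)[3] = 42;

   After the call V has been allocated and holds eight zero-initialized
   elements, so the indexed store is in range.  */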
756 | |||||
757 | |||||
758 | /* Assume V is not NULL. */ | ||||
759 | |||||
760 | template<typename T> | ||||
761 | inline void | ||||
762 | vec_safe_grow_cleared (vec<T, va_heap, vl_ptr> *&v, | ||||
763 | unsigned len, bool exact = false CXX_MEM_STAT_INFO) | ||||
764 | { | ||||
765 | v->safe_grow_cleared (len, exact PASS_MEM_STAT); | ||||
766 | } | ||||
767 | |||||
768 | /* If V does not have space for NELEMS elements, call | ||||
769 | V->reserve(NELEMS, EXACT). */ | ||||
770 | |||||
771 | template<typename T> | ||||
772 | inline bool | ||||
773 | vec_safe_reserve (vec<T, va_heap, vl_ptr> *&v, unsigned nelems, bool exact = false | ||||
774 | CXX_MEM_STAT_INFO) | ||||
775 | { | ||||
776 | return v->reserve (nelems, exact); | ||||
777 | } | ||||
778 | |||||
779 | |||||
780 | /* If V is NULL return false, otherwise return V->iterate(IX, PTR). */ | ||||
781 | template<typename T, typename A> | ||||
782 | inline bool | ||||
783 | vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr) | ||||
784 | { | ||||
785 | if (v) | ||||
786 | return v->iterate (ix, ptr); | ||||
787 | else | ||||
788 | { | ||||
789 | *ptr = 0; | ||||
790 | return false; | ||||
791 | } | ||||
792 | } | ||||
793 | |||||
794 | template<typename T, typename A> | ||||
795 | inline bool | ||||
796 | vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr) | ||||
797 | { | ||||
798 | if (v) | ||||
799 | return v->iterate (ix, ptr); | ||||
800 | else | ||||
801 | { | ||||
802 | *ptr = 0; | ||||
803 | return false; | ||||
804 | } | ||||
805 | } | ||||
806 | |||||
807 | |||||
808 | /* If V has no room for one more element, reallocate it. Then call | ||||
809 | V->quick_push(OBJ). */ | ||||
810 | template<typename T, typename A> | ||||
811 | inline T * | ||||
812 | vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj CXX_MEM_STAT_INFO) | ||||
813 | { | ||||
814 | vec_safe_reserve (v, 1, false PASS_MEM_STAT); | ||||
815 | return v->quick_push (obj); | ||||
816 | } | ||||
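
/* Usage sketch (illustrative only; WORKLIST is a hypothetical GC vector
   and integer_zero_node merely a convenient tree to push):

     vec<tree, va_gc> *worklist = NULL;
     vec_safe_push (worklist, integer_zero_node);
     gcc_assert (vec_safe_length (worklist) == 1);

   The first push allocates the vector; later pushes reallocate it only
   when the headroom is exhausted.  */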
817 | |||||
818 | |||||
819 | /* If V has no room for one more element, reallocate it.  Then call
820 | V->quick_insert(IX, OBJ). */ | ||||
821 | template<typename T, typename A> | ||||
822 | inline void | ||||
823 | vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj | ||||
824 | CXX_MEM_STAT_INFO) | ||||
825 | { | ||||
826 | vec_safe_reserve (v, 1, false PASS_MEM_STAT); | ||||
827 | v->quick_insert (ix, obj); | ||||
828 | } | ||||
829 | |||||
830 | |||||
831 | /* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */ | ||||
832 | template<typename T, typename A> | ||||
833 | inline void | ||||
834 | vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size) | ||||
835 | { | ||||
836 | if (v) | ||||
837 | v->truncate (size); | ||||
838 | } | ||||
839 | |||||
840 | |||||
841 | /* If SRC is not NULL, return a pointer to a copy of it. */ | ||||
842 | template<typename T, typename A> | ||||
843 | inline vec<T, A, vl_embed> * | ||||
844 | vec_safe_copy (vec<T, A, vl_embed> *src CXX_MEM_STAT_INFO) | ||||
845 | { | ||||
846 | return src ? src->copy (ALONE_PASS_MEM_STAT) : NULL;
847 | } | ||||
848 | |||||
849 | /* Copy the elements from SRC to the end of DST as if by memcpy. | ||||
850 | Reallocate DST, if necessary. */ | ||||
851 | template<typename T, typename A> | ||||
852 | inline void | ||||
853 | vec_safe_splice (vec<T, A, vl_embed> *&dst, const vec<T, A, vl_embed> *src | ||||
854 | CXX_MEM_STAT_INFO) | ||||
855 | { | ||||
856 | unsigned src_len = vec_safe_length (src); | ||||
857 | if (src_len) | ||||
858 | { | ||||
859 | vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len | ||||
860 | PASS_MEM_STAT); | ||||
861 | dst->splice (*src); | ||||
862 | } | ||||
863 | } | ||||
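
/* Usage sketch (illustrative only):

     vec<int, va_gc> *a = NULL;
     vec<int, va_gc> *b = NULL;
     vec_safe_push (a, 1);
     vec_safe_push (b, 2);
     vec_safe_splice (a, b);

   Afterwards A holds {1, 2}; B itself is left unchanged.  */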
864 | |||||
865 | /* Return true if SEARCH is an element of V. Note that this is O(N) in the | ||||
866 | size of the vector and so should be used with care. */ | ||||
867 | |||||
868 | template<typename T, typename A> | ||||
869 | inline bool | ||||
870 | vec_safe_contains (vec<T, A, vl_embed> *v, const T &search) | ||||
871 | { | ||||
872 | return v ? v->contains (search) : false; | ||||
873 | } | ||||
874 | |||||
875 | /* Index into vector. Return the IX'th element. IX must be in the | ||||
876 | domain of the vector. */ | ||||
877 | |||||
878 | template<typename T, typename A> | ||||
879 | inline const T & | ||||
880 | vec<T, A, vl_embed>::operator[] (unsigned ix) const | ||||
881 | { | ||||
882 | gcc_checking_assert (ix < m_vecpfx.m_num);
883 | return m_vecdata[ix]; | ||||
884 | } | ||||
885 | |||||
886 | template<typename T, typename A> | ||||
887 | inline T & | ||||
888 | vec<T, A, vl_embed>::operator[] (unsigned ix) | ||||
889 | { | ||||
890 | gcc_checking_assert (ix < m_vecpfx.m_num);
891 | return m_vecdata[ix]; | ||||
892 | } | ||||
893 | |||||
894 | |||||
895 | /* Get the final element of the vector, which must not be empty. */ | ||||
896 | |||||
897 | template<typename T, typename A> | ||||
898 | inline T & | ||||
899 | vec<T, A, vl_embed>::last (void) | ||||
900 | { | ||||
901 | gcc_checking_assert (m_vecpfx.m_num > 0);
902 | return (*this)[m_vecpfx.m_num - 1]; | ||||
903 | } | ||||
904 | |||||
905 | |||||
906 | /* If this vector has space for NELEMS additional entries, return | ||||
907 | true. You usually only need to use this if you are doing your | ||||
908 | own vector reallocation, for instance on an embedded vector. This | ||||
909 | returns true in exactly the same circumstances that vec::reserve | ||||
910 | will. */ | ||||
911 | |||||
912 | template<typename T, typename A> | ||||
913 | inline bool | ||||
914 | vec<T, A, vl_embed>::space (unsigned nelems) const | ||||
915 | { | ||||
916 | return m_vecpfx.m_alloc - m_vecpfx.m_num >= nelems; | ||||
917 | } | ||||
918 | |||||
919 | |||||
920 | /* Return iteration condition and update PTR to point to the IX'th | ||||
921 | element of this vector. Use this to iterate over the elements of a | ||||
922 | vector as follows, | ||||
923 | |||||
924 | for (ix = 0; vec<T, A>::iterate (v, ix, &ptr); ix++) | ||||
925 | continue; */ | ||||
926 | |||||
927 | template<typename T, typename A> | ||||
928 | inline bool | ||||
929 | vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const | ||||
930 | { | ||||
931 | if (ix < m_vecpfx.m_num) | ||||
932 | { | ||||
933 | *ptr = m_vecdata[ix]; | ||||
934 | return true; | ||||
935 | } | ||||
936 | else | ||||
937 | { | ||||
938 | *ptr = 0; | ||||
939 | return false; | ||||
940 | } | ||||
941 | } | ||||
942 | |||||
943 | |||||
944 | /* Return iteration condition and update *PTR to point to the | ||||
945 | IX'th element of this vector. Use this to iterate over the | ||||
946 | elements of a vector as follows, | ||||
947 | |||||
948 | for (ix = 0; v->iterate (ix, &ptr); ix++) | ||||
949 | continue; | ||||
950 | |||||
951 | This variant is for vectors of objects. */ | ||||
952 | |||||
953 | template<typename T, typename A> | ||||
954 | inline bool | ||||
955 | vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const | ||||
956 | { | ||||
957 | if (ix < m_vecpfx.m_num) | ||||
958 | { | ||||
959 | *ptr = CONST_CAST (T *, &m_vecdata[ix]);
960 | return true; | ||||
961 | } | ||||
962 | else | ||||
963 | { | ||||
964 | *ptr = 0; | ||||
965 | return false; | ||||
966 | } | ||||
967 | } | ||||
968 | |||||
969 | |||||
970 | /* Return a pointer to a copy of this vector. */ | ||||
971 | |||||
972 | template<typename T, typename A> | ||||
973 | inline vec<T, A, vl_embed> * | ||||
974 | vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECL) const
975 | { | ||||
976 | vec<T, A, vl_embed> *new_vec = NULL;
977 | unsigned len = length (); | ||||
978 | if (len) | ||||
979 | { | ||||
980 | vec_alloc (new_vec, len PASS_MEM_STAT); | ||||
981 | new_vec->embedded_init (len, len); | ||||
982 | vec_copy_construct (new_vec->address (), m_vecdata, len); | ||||
983 | } | ||||
984 | return new_vec; | ||||
985 | } | ||||
986 | |||||
987 | |||||
988 | /* Copy the elements from SRC to the end of this vector as if by memcpy. | ||||
989 | The vector must have sufficient headroom available. */ | ||||
990 | |||||
991 | template<typename T, typename A> | ||||
992 | inline void | ||||
993 | vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> &src) | ||||
994 | { | ||||
995 | unsigned len = src.length (); | ||||
996 | if (len) | ||||
997 | { | ||||
998 | gcc_checking_assert (space (len));
999 | vec_copy_construct (end (), src.address (), len); | ||||
1000 | m_vecpfx.m_num += len; | ||||
1001 | } | ||||
1002 | } | ||||
1003 | |||||
1004 | template<typename T, typename A> | ||||
1005 | inline void | ||||
1006 | vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> *src) | ||||
1007 | { | ||||
1008 | if (src) | ||||
1009 | splice (*src); | ||||
1010 | } | ||||
1011 | |||||
1012 | |||||
1013 | /* Push OBJ (a new element) onto the end of the vector. There must be | ||||
1014 | sufficient space in the vector. Return a pointer to the slot | ||||
1015 | where OBJ was inserted. */ | ||||
1016 | |||||
1017 | template<typename T, typename A> | ||||
1018 | inline T * | ||||
1019 | vec<T, A, vl_embed>::quick_push (const T &obj) | ||||
1020 | { | ||||
1021 | gcc_checking_assert (space (1));
1022 | T *slot = &m_vecdata[m_vecpfx.m_num++]; | ||||
1023 | *slot = obj; | ||||
1024 | return slot; | ||||
1025 | } | ||||
1026 | |||||
1027 | |||||
1028 | /* Pop and return the last element off the end of the vector. */ | ||||
1029 | |||||
1030 | template<typename T, typename A> | ||||
1031 | inline T & | ||||
1032 | vec<T, A, vl_embed>::pop (void) | ||||
1033 | { | ||||
1034 | gcc_checking_assert (length () > 0);
1035 | return m_vecdata[--m_vecpfx.m_num]; | ||||
1036 | } | ||||
1037 | |||||
1038 | |||||
1039 | /* Set the length of the vector to SIZE. The new length must be less | ||||
1040 | than or equal to the current length. This is an O(1) operation. */ | ||||
1041 | |||||
1042 | template<typename T, typename A> | ||||
1043 | inline void | ||||
1044 | vec<T, A, vl_embed>::truncate (unsigned size) | ||||
1045 | { | ||||
1046 | gcc_checking_assert (length () >= size);
1047 | m_vecpfx.m_num = size; | ||||
1048 | } | ||||
1049 | |||||
1050 | |||||
1051 | /* Insert an element, OBJ, at the IXth position of this vector. There | ||||
1052 | must be sufficient space. */ | ||||
1053 | |||||
1054 | template<typename T, typename A> | ||||
1055 | inline void | ||||
1056 | vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj) | ||||
1057 | { | ||||
1058 | gcc_checking_assert (length () < allocated ());
1059 | gcc_checking_assert (ix <= length ());
1060 | T *slot = &m_vecdata[ix]; | ||||
1061 | memmove (slot + 1, slot, (m_vecpfx.m_num++ - ix) * sizeof (T)); | ||||
1062 | *slot = obj; | ||||
1063 | } | ||||
1064 | |||||
1065 | |||||
1066 | /* Remove an element from the IXth position of this vector. Ordering of | ||||
1067 | remaining elements is preserved. This is an O(N) operation due to | ||||
1068 | memmove. */ | ||||
1069 | |||||
1070 | template<typename T, typename A> | ||||
1071 | inline void | ||||
1072 | vec<T, A, vl_embed>::ordered_remove (unsigned ix) | ||||
1073 | { | ||||
1074 | gcc_checking_assert (ix < length ());
1075 | T *slot = &m_vecdata[ix]; | ||||
1076 | memmove (slot, slot + 1, (--m_vecpfx.m_num - ix) * sizeof (T)); | ||||
1077 | } | ||||
1078 | |||||
1079 | |||||
1080 | /* Remove elements in [START, END) from VEC for which COND holds. Ordering of | ||||
1081 | remaining elements is preserved. This is an O(N) operation. */ | ||||
1082 | |||||
1083 | #define VEC_ORDERED_REMOVE_IF_FROM_TO(vec, read_index, write_index, \
1084 | elem_ptr, start, end, cond) \
1085 | { \ | ||||
1086 | gcc_assert ((end) <= (vec).length ()); \
1087 | for (read_index = write_index = (start); read_index < (end); \ | ||||
1088 | ++read_index) \ | ||||
1089 | { \ | ||||
1090 | elem_ptr = &(vec)[read_index]; \ | ||||
1091 | bool remove_p = (cond); \ | ||||
1092 | if (remove_p) \ | ||||
1093 | continue; \ | ||||
1094 | \ | ||||
1095 | if (read_index != write_index) \ | ||||
1096 | (vec)[write_index] = (vec)[read_index]; \ | ||||
1097 | \ | ||||
1098 | write_index++; \ | ||||
1099 | } \ | ||||
1100 | \ | ||||
1101 | if (read_index - write_index > 0) \ | ||||
1102 | (vec).block_remove (write_index, read_index - write_index); \ | ||||
1103 | } | ||||
1104 | |||||
1105 | |||||
1106 | /* Remove elements from VEC for which COND holds. Ordering of remaining | ||||
1107 | elements is preserved. This is an O(N) operation. */ | ||||
1108 | |||||
1109 | #define VEC_ORDERED_REMOVE_IF(vec, read_index, write_index, elem_ptr, \
1110 | cond) \
1111 | VEC_ORDERED_REMOVE_IF_FROM_TO ((vec), read_index, write_index, \
1112 | elem_ptr, 0, (vec).length (), (cond))
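
/* Usage sketch (illustrative only; V is assumed to be an auto_vec<int>):

     unsigned int rd, wr;
     int *elt;
     VEC_ORDERED_REMOVE_IF (v, rd, wr, elt, *elt < 0);

   This drops every negative element while preserving the relative order
   of the elements that remain.  */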
1113 | |||||
1114 | /* Remove an element from the IXth position of this vector. Ordering of | ||||
1115 | remaining elements is destroyed. This is an O(1) operation. */ | ||||
1116 | |||||
1117 | template<typename T, typename A> | ||||
1118 | inline void | ||||
1119 | vec<T, A, vl_embed>::unordered_remove (unsigned ix) | ||||
1120 | { | ||||
1121 | gcc_checking_assert (ix < length ());
1122 | m_vecdata[ix] = m_vecdata[--m_vecpfx.m_num]; | ||||
1123 | } | ||||
1124 | |||||
1125 | |||||
1126 | /* Remove LEN elements starting at the IXth. Ordering is retained. | ||||
1127 | This is an O(N) operation due to memmove. */ | ||||
1128 | |||||
1129 | template<typename T, typename A> | ||||
1130 | inline void | ||||
1131 | vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len) | ||||
1132 | { | ||||
1133 | gcc_checking_assert (ix + len <= length ());
1134 | T *slot = &m_vecdata[ix]; | ||||
1135 | m_vecpfx.m_num -= len; | ||||
1136 | memmove (slot, slot + len, (m_vecpfx.m_num - ix) * sizeof (T)); | ||||
1137 | } | ||||
1138 | |||||
1139 | |||||
1140 | /* Sort the contents of this vector with qsort. CMP is the comparison | ||||
1141 | function to pass to qsort. */ | ||||
1142 | |||||
1143 | template<typename T, typename A> | ||||
1144 | inline void | ||||
1145 | vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *))
1146 | { | ||||
1147 | if (length () > 1) | ||||
1148 | gcc_qsort (address (), length (), sizeof (T), cmp); | ||||
1149 | } | ||||
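
/* Comparator sketch (illustrative only; CMP follows the usual qsort
   contract of returning negative, zero or positive):

     static int
     intcmp (const void *a_, const void *b_)
     {
       int a = *(const int *) a_, b = *(const int *) b_;
       return a < b ? -1 : a > b ? 1 : 0;
     }

     v.qsort (intcmp);  */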
1150 | |||||
1151 | /* Sort the contents of this vector with qsort. CMP is the comparison | ||||
1152 | function to pass to qsort. */ | ||||
1153 | |||||
1154 | template<typename T, typename A> | ||||
1155 | inline void | ||||
1156 | vec<T, A, vl_embed>::sort (int (*cmp) (const void *, const void *, void *), | ||||
1157 | void *data) | ||||
1158 | { | ||||
1159 | if (length () > 1) | ||||
1160 | gcc_sort_r (address (), length (), sizeof (T), cmp, data); | ||||
1161 | } | ||||
1162 | |||||
1163 | |||||
1164 | /* Search the contents of the sorted vector with a binary search. | ||||
1165 | CMP is the comparison function to pass to bsearch. */ | ||||
1166 | |||||
1167 | template<typename T, typename A> | ||||
1168 | inline T * | ||||
1169 | vec<T, A, vl_embed>::bsearch (const void *key, | ||||
1170 | int (*compar) (const void *, const void *)) | ||||
1171 | { | ||||
1172 | const void *base = this->address (); | ||||
1173 | size_t nmemb = this->length (); | ||||
1174 | size_t size = sizeof (T); | ||||
1175 | /* The following is a copy of glibc stdlib-bsearch.h. */ | ||||
1176 | size_t l, u, idx; | ||||
1177 | const void *p; | ||||
1178 | int comparison; | ||||
1179 | |||||
1180 | l = 0; | ||||
1181 | u = nmemb; | ||||
1182 | while (l < u) | ||||
1183 | { | ||||
1184 | idx = (l + u) / 2; | ||||
1185 | p = (const void *) (((const char *) base) + (idx * size)); | ||||
1186 | comparison = (*compar) (key, p); | ||||
1187 | if (comparison < 0) | ||||
1188 | u = idx; | ||||
1189 | else if (comparison > 0) | ||||
1190 | l = idx + 1; | ||||
1191 | else | ||||
1192 | return (T *)const_cast<void *>(p); | ||||
1193 | } | ||||
1194 | |||||
1195 | return NULL;
1196 | } | ||||
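
/* Comparator sketch for the key-based search (illustrative only; the
   comparator receives the search key first and a vector element second,
   and V must already be sorted consistently with it):

     static int
     intkey_cmp (const void *key_, const void *elt_)
     {
       int key = *(const int *) key_, elt = *(const int *) elt_;
       return key < elt ? -1 : key > elt ? 1 : 0;
     }

     int key = 42;
     int *slot = v.bsearch (&key, intkey_cmp);

   SLOT is NULL when the key is not present.  */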
1197 | |||||
1198 | /* Search the contents of the sorted vector with a binary search. | ||||
1199 | CMP is the comparison function to pass to bsearch. */ | ||||
1200 | |||||
1201 | template<typename T, typename A> | ||||
1202 | inline T * | ||||
1203 | vec<T, A, vl_embed>::bsearch (const void *key, | ||||
1204 | int (*compar) (const void *, const void *, | ||||
1205 | void *), void *data) | ||||
1206 | { | ||||
1207 | const void *base = this->address (); | ||||
1208 | size_t nmemb = this->length (); | ||||
1209 | size_t size = sizeof (T); | ||||
1210 | /* The following is a copy of glibc stdlib-bsearch.h. */ | ||||
1211 | size_t l, u, idx; | ||||
1212 | const void *p; | ||||
1213 | int comparison; | ||||
1214 | |||||
1215 | l = 0; | ||||
1216 | u = nmemb; | ||||
1217 | while (l < u) | ||||
1218 | { | ||||
1219 | idx = (l + u) / 2; | ||||
1220 | p = (const void *) (((const char *) base) + (idx * size)); | ||||
1221 | comparison = (*compar) (key, p, data); | ||||
1222 | if (comparison < 0) | ||||
1223 | u = idx; | ||||
1224 | else if (comparison > 0) | ||||
1225 | l = idx + 1; | ||||
1226 | else | ||||
1227 | return (T *)const_cast<void *>(p); | ||||
1228 | } | ||||
1229 | |||||
1230 | return NULL;
1231 | } | ||||
1232 | |||||
1233 | /* Return true if SEARCH is an element of V. Note that this is O(N) in the | ||||
1234 | size of the vector and so should be used with care. */ | ||||
1235 | |||||
1236 | template<typename T, typename A> | ||||
1237 | inline bool | ||||
1238 | vec<T, A, vl_embed>::contains (const T &search) const | ||||
1239 | { | ||||
1240 | unsigned int len = length (); | ||||
1241 | for (unsigned int i = 0; i < len; i++) | ||||
1242 | if ((*this)[i] == search) | ||||
1243 | return true; | ||||
1244 | |||||
1245 | return false; | ||||
1246 | } | ||||
1247 | |||||
1248 | /* Find and return the first position in which OBJ could be inserted | ||||
1249 | without changing the ordering of this vector. LESSTHAN is a | ||||
1250 | function that returns true if the first argument is strictly less | ||||
1251 | than the second. */ | ||||
1252 | |||||
1253 | template<typename T, typename A> | ||||
1254 | unsigned | ||||
1255 | vec<T, A, vl_embed>::lower_bound (T obj, bool (*lessthan)(const T &, const T &)) | ||||
1256 | const | ||||
1257 | { | ||||
1258 | unsigned int len = length (); | ||||
1259 | unsigned int half, middle; | ||||
1260 | unsigned int first = 0; | ||||
1261 | while (len > 0) | ||||
1262 | { | ||||
1263 | half = len / 2; | ||||
1264 | middle = first; | ||||
1265 | middle += half; | ||||
1266 | T middle_elem = (*this)[middle]; | ||||
1267 | if (lessthan (middle_elem, obj)) | ||||
1268 | { | ||||
1269 | first = middle; | ||||
1270 | ++first; | ||||
1271 | len = len - half - 1; | ||||
1272 | } | ||||
1273 | else | ||||
1274 | len = half; | ||||
1275 | } | ||||
1276 | return first; | ||||
1277 | } | ||||
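
/* Usage sketch (illustrative only; V is assumed to be sorted ascending
   by the same predicate):

     static bool
     int_less (const int &a, const int &b)
     {
       return a < b;
     }

     unsigned pos = v.lower_bound (42, int_less);

   POS is the first index at which 42 could be inserted without breaking
   the ordering, e.g. via v.safe_insert (pos, 42).  */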
1278 | |||||
1279 | |||||
1280 | /* Return the number of bytes needed to embed an instance of an | ||||
1281 | embeddable vec inside another data structure. | ||||
1282 | |||||
1283 | Use these methods to determine the required size and initialization | ||||
1284 | of a vector V of type T embedded within another structure (as the | ||||
1285 | final member): | ||||
1286 | |||||
1287 | size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc); | ||||
1288 | void v->embedded_init (unsigned alloc, unsigned num); | ||||
1289 | |||||
1290 | These allow the caller to perform the memory allocation. */ | ||||
1291 | |||||
1292 | template<typename T, typename A> | ||||
1293 | inline size_t | ||||
1294 | vec<T, A, vl_embed>::embedded_size (unsigned alloc) | ||||
1295 | { | ||||
1296 | struct alignas (T) U { char data[sizeof (T)]; }; | ||||
1297 | typedef vec<U, A, vl_embed> vec_embedded; | ||||
1298 | typedef typename std::conditional<std::is_standard_layout<T>::value, | ||||
1299 | vec, vec_embedded>::type vec_stdlayout; | ||||
1300 | static_assert (sizeof (vec_stdlayout) == sizeof (vec), ""); | ||||
1301 | static_assert (alignof (vec_stdlayout) == alignof (vec), ""); | ||||
1302 | return offsetof (vec_stdlayout, m_vecdata) + alloc * sizeof (T);
1303 | } | ||||
1304 | |||||
1305 | |||||
1306 | /* Initialize the vector to contain room for ALLOC elements and | ||||
1307 | NUM active elements. */ | ||||
1308 | |||||
1309 | template<typename T, typename A> | ||||
1310 | inline void | ||||
1311 | vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num, unsigned aut) | ||||
1312 | { | ||||
1313 | m_vecpfx.m_alloc = alloc; | ||||
1314 | m_vecpfx.m_using_auto_storage = aut; | ||||
1315 | m_vecpfx.m_num = num; | ||||
1316 | } | ||||
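
/* Allocation sketch for the embedding pattern described above
   (illustrative only; the structure, the element count of 16 and the
   use of plain xmalloc are all hypothetical):

     struct int_bag
     {
       int id;
       vec<int, va_heap, vl_embed> ints;   // must be the final member
     };

     size_t sz = offsetof (struct int_bag, ints)
                 + vec<int, va_heap, vl_embed>::embedded_size (16);
     struct int_bag *bag = (struct int_bag *) xmalloc (sz);
     bag->ints.embedded_init (16);

   The embedded vector then has room for 16 elements and length zero.  */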
1317 | |||||
1318 | |||||
1319 | /* Grow the vector to a specific length. LEN must be as long or longer than | ||||
1320 | the current length. The new elements are uninitialized. */ | ||||
1321 | |||||
1322 | template<typename T, typename A> | ||||
1323 | inline void | ||||
1324 | vec<T, A, vl_embed>::quick_grow (unsigned len) | ||||
1325 | { | ||||
1326 | gcc_checking_assert (length () <= len && len <= m_vecpfx.m_alloc);
1327 | m_vecpfx.m_num = len; | ||||
1328 | } | ||||
1329 | |||||
1330 | |||||
1331 | /* Grow the vector to a specific length. LEN must be as long or longer than | ||||
1332 | the current length. The new elements are initialized to zero. */ | ||||
1333 | |||||
1334 | template<typename T, typename A> | ||||
1335 | inline void | ||||
1336 | vec<T, A, vl_embed>::quick_grow_cleared (unsigned len) | ||||
1337 | { | ||||
1338 | unsigned oldlen = length (); | ||||
1339 | size_t growby = len - oldlen; | ||||
1340 | quick_grow (len); | ||||
1341 | if (growby != 0) | ||||
1342 | vec_default_construct (address () + oldlen, growby); | ||||
1343 | } | ||||
1344 | |||||
1345 | /* Garbage collection support for vec<T, A, vl_embed>. */ | ||||
1346 | |||||
1347 | template<typename T> | ||||
1348 | void | ||||
1349 | gt_ggc_mx (vec<T, va_gc> *v) | ||||
1350 | { | ||||
1351 | extern void gt_ggc_mx (T &); | ||||
1352 | for (unsigned i = 0; i < v->length (); i++) | ||||
1353 | gt_ggc_mx ((*v)[i]); | ||||
1354 | } | ||||
1355 | |||||
1356 | template<typename T> | ||||
1357 | void | ||||
1358 | gt_ggc_mx (vec<T, va_gc_atomic, vl_embed> *v ATTRIBUTE_UNUSED)
1359 | { | ||||
1360 | /* Nothing to do. Vectors of atomic types wrt GC do not need to | ||||
1361 | be traversed. */ | ||||
1362 | } | ||||
1363 | |||||
1364 | |||||
1365 | /* PCH support for vec<T, A, vl_embed>. */ | ||||
1366 | |||||
1367 | template<typename T, typename A> | ||||
1368 | void | ||||
1369 | gt_pch_nx (vec<T, A, vl_embed> *v) | ||||
1370 | { | ||||
1371 | extern void gt_pch_nx (T &); | ||||
1372 | for (unsigned i = 0; i < v->length (); i++) | ||||
1373 | gt_pch_nx ((*v)[i]); | ||||
1374 | } | ||||
1375 | |||||
1376 | template<typename T, typename A> | ||||
1377 | void | ||||
1378 | gt_pch_nx (vec<T *, A, vl_embed> *v, gt_pointer_operator op, void *cookie) | ||||
1379 | { | ||||
1380 | for (unsigned i = 0; i < v->length (); i++) | ||||
1381 | op (&((*v)[i]), cookie); | ||||
1382 | } | ||||
1383 | |||||
1384 | template<typename T, typename A> | ||||
1385 | void | ||||
1386 | gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie) | ||||
1387 | { | ||||
1388 | extern void gt_pch_nx (T *, gt_pointer_operator, void *); | ||||
1389 | for (unsigned i = 0; i < v->length (); i++) | ||||
1390 | gt_pch_nx (&((*v)[i]), op, cookie); | ||||
1391 | } | ||||
1392 | |||||
1393 | |||||
1394 | /* Space efficient vector. These vectors can grow dynamically and are | ||||
1395 | allocated together with their control data. They are suited to be | ||||
1396 | included in data structures. Prior to initial allocation, they | ||||
1397 | only take a single word of storage. | ||||
1398 | |||||
1399 | These vectors are implemented as a pointer to an embeddable vector. | ||||
1400 | The semantics allow for this pointer to be NULL to represent empty | ||||
1401 | vectors. This way, empty vectors occupy minimal space in the | ||||
1402 | structure containing them. | ||||
1403 | |||||
1404 | Properties: | ||||
1405 | |||||
1406 | - The whole vector and control data are allocated in a single | ||||
1407 | contiguous block. | ||||
1408 | - The whole vector may be re-allocated. | ||||
1409 | - Vector data may grow and shrink. | ||||
1410 | - Access and manipulation requires a pointer test and | ||||
1411 | indirection. | ||||
1412 | - It requires 1 word of storage (prior to vector allocation). | ||||
1413 | |||||
1414 | |||||
1415 | Limitations: | ||||
1416 | |||||
1417 | These vectors must be PODs because they are stored in unions. | ||||
1418 | (http://en.wikipedia.org/wiki/Plain_old_data_structures). | ||||
1419 | As long as we use C++03, we cannot have constructors nor | ||||
1420 | destructors in classes that are stored in unions. */ | ||||
1421 | |||||
1422 | template<typename T> | ||||
1423 | struct vec<T, va_heap, vl_ptr> | ||||
1424 | { | ||||
1425 | public: | ||||
1426 | /* Memory allocation and deallocation for the embedded vector. | ||||
1427 | Needed because we cannot have proper ctors/dtors defined. */ | ||||
1428 | void create (unsigned nelems CXX_MEM_STAT_INFO); | ||||
1429 | void release (void); | ||||
1430 | |||||
1431 | /* Vector operations. */ | ||||
1432 | bool exists (void) const | ||||
1433 | { return m_vec != NULL; }
1434 | |||||
1435 | bool is_empty (void) const | ||||
1436 | { return m_vec ? m_vec->is_empty () : true; } | ||||
1437 | |||||
1438 | unsigned length (void) const | ||||
1439 | { return m_vec ? m_vec->length () : 0; } | ||||
1440 | |||||
1441 | T *address (void) | ||||
1442 | { return m_vec ? m_vec->m_vecdata : NULL; }
1443 | |||||
1444 | const T *address (void) const | ||||
1445 | { return m_vec ? m_vec->m_vecdata : NULL; }
1446 | |||||
1447 | T *begin () { return address (); } | ||||
1448 | const T *begin () const { return address (); } | ||||
1449 | T *end () { return begin () + length (); } | ||||
1450 | const T *end () const { return begin () + length (); } | ||||
1451 | const T &operator[] (unsigned ix) const | ||||
1452 | { return (*m_vec)[ix]; } | ||||
1453 | |||||
1454 | bool operator!=(const vec &other) const | ||||
1455 | { return !(*this == other); } | ||||
1456 | |||||
1457 | bool operator==(const vec &other) const | ||||
1458 | { return address () == other.address (); } | ||||
1459 | |||||
1460 | T &operator[] (unsigned ix) | ||||
1461 | { return (*m_vec)[ix]; } | ||||
1462 | |||||
1463 | T &last (void) | ||||
1464 | { return m_vec->last (); } | ||||
1465 | |||||
1466 | bool space (int nelems) const | ||||
1467 | { return m_vec ? m_vec->space (nelems) : nelems == 0; } | ||||
1468 | |||||
1469 | bool iterate (unsigned ix, T *p) const; | ||||
1470 | bool iterate (unsigned ix, T **p) const; | ||||
1471 | vec copy (ALONE_CXX_MEM_STAT_INFO) const; | ||||
1472 | bool reserve (unsigned, bool = false CXX_MEM_STAT_INFO); | ||||
1473 | bool reserve_exact (unsigned CXX_MEM_STAT_INFO); | ||||
1474 | void splice (const vec &); | ||||
1475 | void safe_splice (const vec & CXX_MEM_STAT_INFO); | ||||
1476 | T *quick_push (const T &); | ||||
1477 | T *safe_push (const T &CXX_MEM_STAT_INFO); | ||||
1478 | T &pop (void); | ||||
1479 | void truncate (unsigned); | ||||
1480 | void safe_grow (unsigned, bool = false CXX_MEM_STAT_INFO); | ||||
1481 | void safe_grow_cleared (unsigned, bool = false CXX_MEM_STAT_INFO); | ||||
1482 | void quick_grow (unsigned); | ||||
1483 | void quick_grow_cleared (unsigned); | ||||
1484 | void quick_insert (unsigned, const T &); | ||||
1485 | void safe_insert (unsigned, const T & CXX_MEM_STAT_INFO); | ||||
1486 | void ordered_remove (unsigned); | ||||
1487 | void unordered_remove (unsigned); | ||||
1488 | void block_remove (unsigned, unsigned); | ||||
1489 | void qsort (int (*) (const void *, const void *));
1490 | void sort (int (*) (const void *, const void *, void *), void *); | ||||
1491 | T *bsearch (const void *key, int (*compar)(const void *, const void *)); | ||||
1492 | T *bsearch (const void *key, | ||||
1493 | int (*compar)(const void *, const void *, void *), void *); | ||||
1494 | unsigned lower_bound (T, bool (*)(const T &, const T &)) const; | ||||
1495 | bool contains (const T &search) const; | ||||
1496 | void reverse (void); | ||||
1497 | |||||
1498 | bool using_auto_storage () const; | ||||
1499 | |||||
1500 | /* FIXME - This field should be private, but we need to cater to | ||||
1501 | compilers that have stricter notions of PODness for types. */ | ||||
1502 | vec<T, va_heap, vl_embed> *m_vec; | ||||
1503 | }; | ||||
1504 | |||||
1505 | |||||
1506 | /* auto_vec is a subclass of vec that automatically manages creating and | ||||
1507 | releasing the internal vector.  If N is non-zero then it has N elements of
1508 | internal storage.  The default is no internal storage, and you probably only
1509 | want internal storage for vectors on the stack, because if the vector grows
1510 | larger than the internal storage, that space is wasted.
1511 | */
1512 | template<typename T, size_t N = 0> | ||||
1513 | class auto_vec : public vec<T, va_heap> | ||||
1514 | { | ||||
1515 | public: | ||||
1516 | auto_vec () | ||||
1517 | { | ||||
1518 | m_auto.embedded_init (MAX (N, 2), 0, 1);
1519 | this->m_vec = &m_auto; | ||||
1520 | } | ||||
1521 | |||||
1522 | auto_vec (size_t s) | ||||
1523 | { | ||||
1524 | if (s > N) | ||||
1525 | { | ||||
1526 | this->create (s); | ||||
1527 | return; | ||||
1528 | } | ||||
1529 | |||||
1530 | m_auto.embedded_init (MAX (N, 2), 0, 1);
1531 | this->m_vec = &m_auto; | ||||
1532 | } | ||||
1533 | |||||
1534 | ~auto_vec () | ||||
1535 | { | ||||
1536 | this->release (); | ||||
1537 | } | ||||
1538 | |||||
1539 | private: | ||||
1540 | vec<T, va_heap, vl_embed> m_auto; | ||||
1541 | T m_data[MAX (N - 1, 1)];
1542 | }; | ||||
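
/* Usage sketch (illustrative only):

     auto_vec<int, 16> stack;    // 16 elements of inline storage
     stack.quick_push (1);       // fits in the inline storage
     stack.safe_push (2);        // spills to the heap only when full

   Everything is released automatically when STACK goes out of scope.  */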
1543 | |||||
1544 | /* auto_vec is a sub class of vec whose storage is released when it is | ||||
1545 | destroyed. */ | ||||
1546 | template<typename T> | ||||
1547 | class auto_vec<T, 0> : public vec<T, va_heap> | ||||
1548 | { | ||||
1549 | public: | ||||
1550 | auto_vec () { this->m_vec = NULL; }
1551 | auto_vec (size_t n) { this->create (n); } | ||||
1552 | ~auto_vec () { this->release (); } | ||||
1553 | |||||
1554 | auto_vec (vec<T, va_heap>&& r) | ||||
1555 | { | ||||
1556 | gcc_assert (!r.using_auto_storage ());
1557 | this->m_vec = r.m_vec; | ||||
1558 | r.m_vec = NULL;
1559 | } | ||||
1560 | auto_vec& operator= (vec<T, va_heap>&& r) | ||||
1561 | { | ||||
1562 | gcc_assert (!r.using_auto_storage ());
1563 | this->release (); | ||||
1564 | this->m_vec = r.m_vec; | ||||
1565 | r.m_vec = NULL;
1566 | return *this; | ||||
1567 | } | ||||
1568 | }; | ||||
1569 | |||||
1570 | |||||
1571 | /* Allocate heap memory for pointer V and create the internal vector | ||||
1572 | with space for NELEMS elements. If NELEMS is 0, the internal | ||||
1573 | vector is initialized to empty. */ | ||||
1574 | |||||
1575 | template<typename T> | ||||
1576 | inline void | ||||
1577 | vec_alloc (vec<T> *&v, unsigned nelems CXX_MEM_STAT_INFO) | ||||
1578 | { | ||||
1579 | v = new vec<T>; | ||||
1580 | v->create (nelems PASS_MEM_STAT); | ||||
1581 | } | ||||
1582 | |||||
1583 | |||||
1584 | /* A subclass of auto_vec <char *> that frees all of its elements on | ||||
1585 | deletion. */ | ||||
1586 | |||||
1587 | class auto_string_vec : public auto_vec <char *> | ||||
1588 | { | ||||
1589 | public: | ||||
1590 | ~auto_string_vec (); | ||||
1591 | }; | ||||
1592 | |||||
1593 | /* A subclass of auto_vec <T *> that deletes all of its elements on | ||||
1594 | destruction. | ||||
1595 | |||||
1596 | This is a crude way for a vec to "own" the objects it points to | ||||
1597 | and clean up automatically. | ||||
1598 | |||||
1599 | For example, no attempt is made to delete elements when an item | ||||
1600 | within the vec is overwritten. | ||||
1601 | |||||
1602 | We can't rely on gnu::unique_ptr within a container, | ||||
1603 | since we can't rely on move semantics in C++98. */ | ||||
1604 | |||||
1605 | template <typename T> | ||||
1606 | class auto_delete_vec : public auto_vec <T *> | ||||
1607 | { | ||||
1608 | public: | ||||
1609 | auto_delete_vec () {} | ||||
1610 | auto_delete_vec (size_t s) : auto_vec <T *> (s) {} | ||||
1611 | |||||
1612 | ~auto_delete_vec (); | ||||
1613 | |||||
1614 | private: | ||||
1615 | DISABLE_COPY_AND_ASSIGN(auto_delete_vec);
1616 | }; | ||||
1617 | |||||
1618 | /* Conditionally allocate heap memory for VEC and its internal vector. */ | ||||
1619 | |||||
1620 | template<typename T> | ||||
1621 | inline void | ||||
1622 | vec_check_alloc (vec<T, va_heap> *&vec, unsigned nelems CXX_MEM_STAT_INFO) | ||||
1623 | { | ||||
1624 | if (!vec) | ||||
1625 | vec_alloc (vec, nelems PASS_MEM_STAT); | ||||
1626 | } | ||||
1627 | |||||
1628 | |||||
1629 | /* Free the heap memory allocated by vector V and set it to NULL. */ | ||||
1630 | |||||
1631 | template<typename T> | ||||
1632 | inline void | ||||
1633 | vec_free (vec<T> *&v) | ||||
1634 | { | ||||
1635 | if (v == NULL)
1636 | return; | ||||
1637 | |||||
1638 | v->release (); | ||||
1639 | delete v; | ||||
1640 | v = NULL;
1641 | } | ||||
1642 | |||||
1643 | |||||
1644 | /* Return iteration condition and update PTR to point to the IX'th | ||||
1645 | element of this vector. Use this to iterate over the elements of a | ||||
1646 | vector as follows, | ||||
1647 | |||||
1648 | for (ix = 0; v.iterate (ix, &ptr); ix++) | ||||
1649 | continue; */ | ||||
1650 | |||||
1651 | template<typename T> | ||||
1652 | inline bool | ||||
1653 | vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T *ptr) const | ||||
1654 | { | ||||
1655 | if (m_vec) | ||||
1656 | return m_vec->iterate (ix, ptr); | ||||
1657 | else | ||||
1658 | { | ||||
1659 | *ptr = 0; | ||||
1660 | return false; | ||||
1661 | } | ||||
1662 | } | ||||
1663 | |||||
1664 | |||||
1665 | /* Return iteration condition and update *PTR to point to the | ||||
1666 | IX'th element of this vector. Use this to iterate over the | ||||
1667 | elements of a vector as follows, | ||||
1668 | |||||
1669 | for (ix = 0; v->iterate (ix, &ptr); ix++) | ||||
1670 | continue; | ||||
1671 | |||||
1672 | This variant is for vectors of objects. */ | ||||
1673 | |||||
1674 | template<typename T> | ||||
1675 | inline bool | ||||
1676 | vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T **ptr) const | ||||
1677 | { | ||||
1678 | if (m_vec) | ||||
1679 | return m_vec->iterate (ix, ptr); | ||||
1680 | else | ||||
1681 | { | ||||
1682 | *ptr = 0; | ||||
1683 | return false; | ||||
1684 | } | ||||
1685 | } | ||||
1686 | |||||
1687 | |||||
1688 | /* Convenience macro for forward iteration. */ | ||||
1689 | #define FOR_EACH_VEC_ELT(V, I, P) \
1690 | for (I = 0; (V).iterate ((I), &(P)); ++(I)) | ||||
1691 | |||||
1692 | #define FOR_EACH_VEC_SAFE_ELT(V, I, P) \
1693 | for (I = 0; vec_safe_iterate ((V), (I), &(P)); ++(I)) | ||||
1694 | |||||
1695 | /* Likewise, but start from FROM rather than 0. */ | ||||
1696 | #define FOR_EACH_VEC_ELT_FROM(V, I, P, FROM) \
1697 | for (I = (FROM); (V).iterate ((I), &(P)); ++(I)) | ||||
1698 | |||||
1699 | /* Convenience macro for reverse iteration. */ | ||||
1700 | #define FOR_EACH_VEC_ELT_REVERSE(V, I, P) \
1701 | for (I = (V).length () - 1; \ | ||||
1702 | (V).iterate ((I), &(P)); \ | ||||
1703 | (I)--) | ||||
1704 | |||||
1705 | #define FOR_EACH_VEC_SAFE_ELT_REVERSE(V, I, P) \
1706 | for (I = vec_safe_length (V) - 1; \ | ||||
1707 | vec_safe_iterate ((V), (I), &(P)); \ | ||||
1708 | (I)--) | ||||
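
/* Usage sketch for the iteration macros (illustrative only):

     auto_vec<int> v;
     v.safe_push (1);
     v.safe_push (2);

     int ix, val, sum = 0;
     FOR_EACH_VEC_ELT (v, ix, val)
       sum += val;

   After the loop SUM is 3.  The _SAFE_ variants take a possibly NULL
   pointer to an embedded vector instead of a vec object.  */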
1709 | |||||
1710 | /* auto_string_vec's dtor, freeing all contained strings, automatically | ||||
1711 | chaining up to ~auto_vec <char *>, which frees the internal buffer. */ | ||||
1712 | |||||
1713 | inline | ||||
1714 | auto_string_vec::~auto_string_vec () | ||||
1715 | { | ||||
1716 | int i; | ||||
1717 | char *str; | ||||
1718 | FOR_EACH_VEC_ELT (*this, i, str)
1719 | free (str); | ||||
1720 | } | ||||
1721 | |||||
1722 | /* auto_delete_vec's dtor, deleting all contained items, automatically | ||||
1723 | chaining up to ~auto_vec <T*>, which frees the internal buffer. */ | ||||
1724 | |||||
1725 | template <typename T> | ||||
1726 | inline | ||||
1727 | auto_delete_vec<T>::~auto_delete_vec () | ||||
1728 | { | ||||
1729 | int i; | ||||
1730 | T *item; | ||||
1731 | FOR_EACH_VEC_ELT (*this, i, item)
1732 | delete item; | ||||
1733 | } | ||||
1734 | |||||
1735 | |||||
1736 | /* Return a copy of this vector. */ | ||||
1737 | |||||
1738 | template<typename T> | ||||
1739 | inline vec<T, va_heap, vl_ptr> | ||||
1740 | vec<T, va_heap, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const
1741 | { | ||||
1742 | vec<T, va_heap, vl_ptr> new_vec = vNULL; | ||||
1743 | if (length ()) | ||||
1744 | new_vec.m_vec = m_vec->copy (ALONE_PASS_MEM_STAT); | ||||
1745 | return new_vec; | ||||
1746 | } | ||||
1747 | |||||
1748 | |||||
1749 | /* Ensure that the vector has at least RESERVE slots available (if | ||||
1750 | EXACT is false), or exactly RESERVE slots available (if EXACT is | ||||
1751 | true). | ||||
1752 | |||||
1753 | This may create additional headroom if EXACT is false. | ||||
1754 | |||||
1755 | Note that this can cause the embedded vector to be reallocated. | ||||
1756 | Returns true iff reallocation actually occurred. */ | ||||
1757 | |||||
1758 | template<typename T> | ||||
1759 | inline bool | ||||
1760 | vec<T, va_heap, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL) | ||||
1761 | { | ||||
1762 | if (space (nelems)) | ||||
1763 | return false; | ||||
1764 | |||||
1765 | /* For now play a game with va_heap::reserve to hide our auto storage if any, | ||||
1766 | this is necessary because it doesn't have enough information to know the | ||||
1767 | embedded vector is in auto storage, and so should not be freed. */ | ||||
1768 | vec<T, va_heap, vl_embed> *oldvec = m_vec; | ||||
1769 | unsigned int oldsize = 0; | ||||
1770 | bool handle_auto_vec = m_vec && using_auto_storage (); | ||||
1771 | if (handle_auto_vec) | ||||
1772 | { | ||||
1773 | m_vec = NULL;
1774 | oldsize = oldvec->length (); | ||||
1775 | nelems += oldsize; | ||||
1776 | } | ||||
1777 | |||||
1778 | va_heap::reserve (m_vec, nelems, exact PASS_MEM_STAT); | ||||
1779 | if (handle_auto_vec) | ||||
1780 | { | ||||
1781 | vec_copy_construct (m_vec->address (), oldvec->address (), oldsize); | ||||
1782 | m_vec->m_vecpfx.m_num = oldsize; | ||||
1783 | } | ||||
1784 | |||||
1785 | return true; | ||||
1786 | } | ||||
1787 | |||||
1788 | |||||
1789 | /* Ensure that this vector has exactly NELEMS slots available. This | ||||
1790 | will not create additional headroom. Note this can cause the | ||||
1791 | embedded vector to be reallocated. Returns true iff reallocation | ||||
1792 | actually occurred. */ | ||||
1793 | |||||
1794 | template<typename T> | ||||
1795 | inline bool | ||||
1796 | vec<T, va_heap, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL) | ||||
1797 | { | ||||
1798 | return reserve (nelems, true PASS_MEM_STAT); | ||||
1799 | } | ||||
1800 | |||||
1801 | |||||
1802 | /* Create the internal vector and reserve NELEMS for it. This is | ||||
1803 | exactly like vec::reserve, but the internal vector is | ||||
1804 | unconditionally allocated from scratch. The old one, if it | ||||
1805 | existed, is lost. */ | ||||
1806 | |||||
1807 | template<typename T> | ||||
1808 | inline void | ||||
1809 | vec<T, va_heap, vl_ptr>::create (unsigned nelems MEM_STAT_DECL) | ||||
1810 | { | ||||
1811 | m_vec = NULL;
1812 | if (nelems > 0) | ||||
1813 | reserve_exact (nelems PASS_MEM_STAT); | ||||
1814 | } | ||||
1815 | |||||
1816 | |||||
1817 | /* Free the memory occupied by the embedded vector. */ | ||||
1818 | |||||
1819 | template<typename T> | ||||
1820 | inline void | ||||
1821 | vec<T, va_heap, vl_ptr>::release (void) | ||||
1822 | { | ||||
1823 | if (!m_vec) | ||||
1824 | return; | ||||
1825 | |||||
1826 | if (using_auto_storage ()) | ||||
1827 | { | ||||
1828 | m_vec->m_vecpfx.m_num = 0; | ||||
1829 | return; | ||||
1830 | } | ||||
1831 | |||||
1832 | va_heap::release (m_vec); | ||||
1833 | } | ||||
1834 | |||||
1835 | /* Copy the elements from SRC to the end of this vector as if by memcpy. | ||||
1836 | SRC and this vector must be allocated with the same memory | ||||
1837 | allocation mechanism. This vector is assumed to have sufficient | ||||
1838 | headroom available. */ | ||||
1839 | |||||
1840 | template<typename T> | ||||
1841 | inline void | ||||
1842 | vec<T, va_heap, vl_ptr>::splice (const vec<T, va_heap, vl_ptr> &src) | ||||
1843 | { | ||||
1844 | if (src.length ()) | ||||
1845 | m_vec->splice (*(src.m_vec)); | ||||
1846 | } | ||||
1847 | |||||
1848 | |||||
1849 | /* Copy the elements in SRC to the end of this vector as if by memcpy. | ||||
1850 | SRC and this vector must be allocated with the same mechanism. | ||||
1851 | If there is not enough headroom in this vector, it will be reallocated | ||||
1852 | as needed. */ | ||||
1853 | |||||
1854 | template<typename T> | ||||
1855 | inline void | ||||
1856 | vec<T, va_heap, vl_ptr>::safe_splice (const vec<T, va_heap, vl_ptr> &src | ||||
1857 | MEM_STAT_DECL) | ||||
1858 | { | ||||
1859 | if (src.length ()) | ||||
1860 | { | ||||
1861 | reserve_exact (src.length ()); | ||||
1862 | splice (src); | ||||
1863 | } | ||||
1864 | } | ||||
1865 | |||||
1866 | |||||
1867 | /* Push OBJ (a new element) onto the end of the vector. There must be | ||||
1868 | sufficient space in the vector. Return a pointer to the slot | ||||
1869 | where OBJ was inserted. */ | ||||
1870 | |||||
1871 | template<typename T> | ||||
1872 | inline T * | ||||
1873 | vec<T, va_heap, vl_ptr>::quick_push (const T &obj) | ||||
1874 | { | ||||
1875 | return m_vec->quick_push (obj); | ||||
1876 | } | ||||
1877 | |||||
1878 | |||||
1879 | /* Push a new element OBJ onto the end of this vector. Reallocates | ||||
1880 | the embedded vector, if needed. Return a pointer to the slot where | ||||
1881 | OBJ was inserted. */ | ||||
1882 | |||||
1883 | template<typename T> | ||||
1884 | inline T * | ||||
1885 | vec<T, va_heap, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL) | ||||
1886 | { | ||||
1887 | reserve (1, false PASS_MEM_STAT); | ||||
1888 | return quick_push (obj); | ||||
1889 | } | ||||
1890 | |||||
1891 | |||||
1892 | /* Pop and return the last element off the end of the vector. */ | ||||
1893 | |||||
1894 | template<typename T> | ||||
1895 | inline T & | ||||
1896 | vec<T, va_heap, vl_ptr>::pop (void) | ||||
1897 | { | ||||
1898 | return m_vec->pop (); | ||||
1899 | } | ||||
1900 | |||||
1901 | |||||
1902 | /* Set the length of the vector to LEN. The new length must be less | ||||
1903 | than or equal to the current length. This is an O(1) operation. */ | ||||
1904 | |||||
1905 | template<typename T> | ||||
1906 | inline void | ||||
1907 | vec<T, va_heap, vl_ptr>::truncate (unsigned size) | ||||
1908 | { | ||||
1909 | if (m_vec) | ||||
1910 | m_vec->truncate (size); | ||||
1911 | else | ||||
1912 | gcc_checking_assert (size == 0);
1913 | } | ||||
1914 | |||||
1915 | |||||
1916 | /* Grow the vector to a specific length. LEN must be as long or | ||||
1917 | longer than the current length. The new elements are | ||||
1918 | uninitialized. Reallocate the internal vector, if needed. */ | ||||
1919 | |||||
1920 | template<typename T> | ||||
1921 | inline void | ||||
1922 | vec<T, va_heap, vl_ptr>::safe_grow (unsigned len, bool exact MEM_STAT_DECL) | ||||
1923 | { | ||||
1924 | unsigned oldlen = length (); | ||||
1925 | gcc_checking_assert (oldlen <= len);
1926 | reserve (len - oldlen, exact PASS_MEM_STAT); | ||||
1927 | if (m_vec) | ||||
1928 | m_vec->quick_grow (len); | ||||
1929 | else | ||||
1930 | gcc_checking_assert (len == 0);
1931 | } | ||||
1932 | |||||
1933 | |||||
1934 | /* Grow the embedded vector to a specific length. LEN must be as | ||||
1935 | long or longer than the current length. The new elements are | ||||
1936 | initialized to zero. Reallocate the internal vector, if needed. */ | ||||
1937 | |||||
1938 | template<typename T> | ||||
1939 | inline void | ||||
1940 | vec<T, va_heap, vl_ptr>::safe_grow_cleared (unsigned len, bool exact | ||||
1941 | MEM_STAT_DECL) | ||||
1942 | { | ||||
1943 | unsigned oldlen = length (); | ||||
1944 | size_t growby = len - oldlen; | ||||
1945 | safe_grow (len, exact PASS_MEM_STAT); | ||||
1946 | if (growby != 0) | ||||
1947 | vec_default_construct (address () + oldlen, growby); | ||||
1948 | } | ||||
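
/* Usage sketch (illustrative only):

     auto_vec<bool> seen;
     seen.safe_grow_cleared (8);
     seen[3] = true;

   The call allocates the vector and zero-initializes all eight elements,
   so the indexed store is in range.  */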
1949 | |||||
1950 | |||||
1951 | /* Same as vec::safe_grow but without reallocation of the internal vector. | ||||
1952 | If the vector cannot be extended, a runtime assertion will be triggered. */ | ||||
1953 | |||||
1954 | template<typename T> | ||||
1955 | inline void | ||||
1956 | vec<T, va_heap, vl_ptr>::quick_grow (unsigned len) | ||||
1957 | { | ||||
1958 | gcc_checking_assert (m_vec);
1959 | m_vec->quick_grow (len); | ||||
1960 | } | ||||
1961 | |||||
1962 | |||||
1963 | /* Same as vec::quick_grow_cleared but without reallocation of the | ||||
1964 | internal vector. If the vector cannot be extended, a runtime | ||||
1965 | assertion will be triggered. */ | ||||
1966 | |||||
1967 | template<typename T> | ||||
1968 | inline void | ||||
1969 | vec<T, va_heap, vl_ptr>::quick_grow_cleared (unsigned len) | ||||
1970 | { | ||||
1971 | gcc_checking_assert (m_vec);
1972 | m_vec->quick_grow_cleared (len); | ||||
1973 | } | ||||
1974 | |||||
1975 | |||||
1976 | /* Insert an element, OBJ, at the IXth position of this vector. There | ||||
1977 | must be sufficient space. */ | ||||
1978 | |||||
1979 | template<typename T> | ||||
1980 | inline void | ||||
1981 | vec<T, va_heap, vl_ptr>::quick_insert (unsigned ix, const T &obj) | ||||
1982 | { | ||||
1983 | m_vec->quick_insert (ix, obj); | ||||
1984 | } | ||||
1985 | |||||
1986 | |||||
1987 | /* Insert an element, OBJ, at the IXth position of the vector. | ||||
1988 | Reallocate the internal vector, if necessary. */ | ||||
1989 | |||||
1990 | template<typename T> | ||||
1991 | inline void | ||||
1992 | vec<T, va_heap, vl_ptr>::safe_insert (unsigned ix, const T &obj MEM_STAT_DECL) | ||||
1993 | { | ||||
1994 | reserve (1, false PASS_MEM_STAT); | ||||
1995 | quick_insert (ix, obj); | ||||
1996 | } | ||||
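
A short sketch of the two insert members, assuming the auto_vec wrapper and the push members defined earlier in this header; insert_example is an invented name:

/* Sketch only: quick_insert needs spare capacity, safe_insert makes it.  */
static void
insert_example (void)
{
  auto_vec<int, 4> v;       /* Embedded storage for 4 elements.  */
  v.quick_push (1);
  v.quick_push (3);
  v.quick_insert (1, 2);    /* v is now {1, 2, 3}; no allocation needed.  */
  v.safe_insert (0, 0);     /* v is now {0, 1, 2, 3}; would reallocate
                               if the embedded storage were full.  */
}
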
1997 | |||||
1998 | |||||
1999 | /* Remove an element from the IXth position of this vector. Ordering of | ||||
2000 | remaining elements is preserved. This is an O(N) operation due to | ||||
2001 | a memmove. */ | ||||
2002 | |||||
2003 | template<typename T> | ||||
2004 | inline void | ||||
2005 | vec<T, va_heap, vl_ptr>::ordered_remove (unsigned ix) | ||||
2006 | { | ||||
2007 | m_vec->ordered_remove (ix); | ||||
2008 | } | ||||
2009 | |||||
2010 | |||||
2011 | /* Remove an element from the IXth position of this vector. Ordering | ||||
2012 | of remaining elements is destroyed. This is an O(1) operation. */ | ||||
2013 | |||||
2014 | template<typename T> | ||||
2015 | inline void | ||||
2016 | vec<T, va_heap, vl_ptr>::unordered_remove (unsigned ix) | ||||
2017 | { | ||||
2018 | m_vec->unordered_remove (ix); | ||||
2019 | } | ||||
2020 | |||||
2021 | |||||
2022 | /* Remove LEN elements starting at the IXth. Ordering is retained. | ||||
2023 | This is an O(N) operation due to memmove. */ | ||||
2024 | |||||
2025 | template<typename T> | ||||
2026 | inline void | ||||
2027 | vec<T, va_heap, vl_ptr>::block_remove (unsigned ix, unsigned len) | ||||
2028 | { | ||||
2029 | m_vec->block_remove (ix, len); | ||||
2030 | } | ||||
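
A sketch of the three removal forms, assuming auto_vec and safe_push from earlier in this header; removal_example is an invented name:

/* Sketch only: the removal forms applied to {10, 20, 30, 40, 50}.  */
static void
removal_example (void)
{
  auto_vec<int> v;
  for (int i = 1; i <= 5; i++)
    v.safe_push (i * 10);     /* {10, 20, 30, 40, 50}  */
  v.ordered_remove (1);       /* {10, 30, 40, 50}: order kept, O(N).  */
  v.unordered_remove (0);     /* {50, 30, 40}: last element fills slot 0.  */
  v.block_remove (1, 2);      /* {50}: drops two elements starting at 1.  */
}
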
2031 | |||||
2032 | |||||
2033 | /* Sort the contents of this vector with qsort. CMP is the comparison | ||||
2034 | function to pass to qsort. */ | ||||
2035 | |||||
2036 | template<typename T> | ||||
2037 | inline void | ||||
2038 | vec<T, va_heap, vl_ptr>::qsort (int (*cmp) (const void *, const void *)) | ||||
2039 | { | ||||
2040 | if (m_vec) | ||||
2041 | m_vec->qsort (cmp); | ||||
2042 | } | ||||
2043 | |||||
2044 | /* Sort the contents of this vector with qsort. CMP is the comparison | ||||
2045 | function to use; it is also passed DATA as its third argument. */ | ||||
2046 | |||||
2047 | template<typename T> | ||||
2048 | inline void | ||||
2049 | vec<T, va_heap, vl_ptr>::sort (int (*cmp) (const void *, const void *, | ||||
2050 | void *), void *data) | ||||
2051 | { | ||||
2052 | if (m_vec) | ||||
2053 | m_vec->sort (cmp, data); | ||||
2054 | } | ||||
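
A sketch of qsort-style comparators and the two sorting entry points above; cmp_int, cmp_int_r and sort_example are invented names:

/* Sketch only: qsort-style comparators for int elements.  */
static int
cmp_int (const void *a_, const void *b_)
{
  int a = *(const int *) a_;
  int b = *(const int *) b_;
  return (a > b) - (a < b);
}

static int
cmp_int_r (const void *a_, const void *b_, void *)
{
  return cmp_int (a_, b_);
}

static void
sort_example (vec<int> &v)
{
  v.qsort (cmp_int);          /* Ascending; a no-op when m_vec is null.  */
  v.sort (cmp_int_r, NULL);   /* Same order via the DATA-taking variant.  */
}
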
2055 | |||||
2056 | |||||
2057 | /* Search the contents of the sorted vector with a binary search. | ||||
2058 | CMP is the comparison function to pass to bsearch. */ | ||||
2059 | |||||
2060 | template<typename T> | ||||
2061 | inline T * | ||||
2062 | vec<T, va_heap, vl_ptr>::bsearch (const void *key, | ||||
2063 | int (*cmp) (const void *, const void *)) | ||||
2064 | { | ||||
2065 | if (m_vec) | ||||
2066 | return m_vec->bsearch (key, cmp); | ||||
2067 | return NULL; | ||||
2068 | } | ||||
2069 | |||||
2070 | /* Search the contents of the sorted vector with a binary search. | ||||
2071 | CMP is the comparison function, which also receives DATA. */ | ||||
2072 | |||||
2073 | template<typename T> | ||||
2074 | inline T * | ||||
2075 | vec<T, va_heap, vl_ptr>::bsearch (const void *key, | ||||
2076 | int (*cmp) (const void *, const void *, | ||||
2077 | void *), void *data) | ||||
2078 | { | ||||
2079 | if (m_vec) | ||||
2080 | return m_vec->bsearch (key, cmp, data); | ||||
2081 | return NULL; | ||||
2082 | } | ||||
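
A sketch of binary search over an already-sorted vector; the comparator follows the C bsearch convention of key first, element second, and cmp_key_to_elt and contains_sorted are invented names:

/* Sketch only: binary search on a vector already sorted ascending.  */
static int
cmp_key_to_elt (const void *key_, const void *elt_)
{
  int key = *(const int *) key_;
  int elt = *(const int *) elt_;
  return (key > elt) - (key < elt);
}

static bool
contains_sorted (vec<int> &v, int key)
{
  return v.bsearch (&key, cmp_key_to_elt) != NULL;
}
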
2083 | |||||
2084 | |||||
2085 | /* Find and return the first position in which OBJ could be inserted | ||||
2086 | without changing the ordering of this vector. LESSTHAN is a | ||||
2087 | function that returns true if the first argument is strictly less | ||||
2088 | than the second. */ | ||||
2089 | |||||
2090 | template<typename T> | ||||
2091 | inline unsigned | ||||
2092 | vec<T, va_heap, vl_ptr>::lower_bound (T obj, | ||||
2093 | bool (*lessthan)(const T &, const T &)) | ||||
2094 | const | ||||
2095 | { | ||||
2096 | return m_vec ? m_vec->lower_bound (obj, lessthan) : 0; | ||||
2097 | } | ||||
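
A sketch that keeps a vector sorted by inserting at the lower bound, using safe_insert from above; int_less and sorted_insert are invented names:

/* Sketch only: LOWER_BOUND gives the insertion point that preserves order.  */
static bool
int_less (const int &a, const int &b)
{
  return a < b;
}

static void
sorted_insert (vec<int> &v, int x)
{
  unsigned pos = v.lower_bound (x, int_less);
  v.safe_insert (pos, x);
}
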
2098 | |||||
2099 | /* Return true if SEARCH is an element of this vector. Note that this is O(N) in the | ||||
2100 | size of the vector and so should be used with care. */ | ||||
2101 | |||||
2102 | template<typename T> | ||||
2103 | inline bool | ||||
2104 | vec<T, va_heap, vl_ptr>::contains (const T &search) const | ||||
2105 | { | ||||
2106 | return m_vec ? m_vec->contains (search) : false; | ||||
2107 | } | ||||
2108 | |||||
2109 | /* Reverse the contents of the vector. */ | ||||
2110 | |||||
2111 | template<typename T> | ||||
2112 | inline void | ||||
2113 | vec<T, va_heap, vl_ptr>::reverse (void) | ||||
2114 | { | ||||
2115 | unsigned l = length (); | ||||
2116 | T *ptr = address (); | ||||
2117 | |||||
2118 | for (unsigned i = 0; i < l / 2; i++) | ||||
2119 | std::swap (ptr[i], ptr[l - i - 1]); | ||||
2120 | } | ||||
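
A sketch combining the membership test and the reversal above; reverse_unless_zero is an invented name:

/* Sketch only: a linear membership test guarding an in-place reversal.  */
static void
reverse_unless_zero (vec<int> &v)
{
  if (!v.contains (0))   /* O(N) scan.  */
    v.reverse ();        /* Swaps elements pairwise around the midpoint.  */
}
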
2121 | |||||
2122 | template<typename T> | ||||
2123 | inline bool | ||||
2124 | vec<T, va_heap, vl_ptr>::using_auto_storage () const | ||||
2125 | { | ||||
2126 | return m_vec->m_vecpfx.m_using_auto_storage; | ||||
2127 | } | ||||
2128 | |||||
2129 | /* Release each element vector of VEC, then release VEC itself. */ | ||||
2130 | |||||
2131 | template<typename T> | ||||
2132 | inline void | ||||
2133 | release_vec_vec (vec<vec<T> > &vec) | ||||
2134 | { | ||||
2135 | for (unsigned i = 0; i < vec.length (); i++) | ||||
2136 | vec[i].release (); | ||||
2137 | |||||
2138 | vec.release (); | ||||
2139 | } | ||||
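
A sketch of the nested release, assuming vNULL and safe_push from earlier in this header; nested_vec_example is an invented name:

/* Sketch only: a vector of vectors torn down with a single call.  */
static void
nested_vec_example (void)
{
  vec<vec<int> > rows = vNULL;
  for (int i = 0; i < 3; i++)
    {
      vec<int> row = vNULL;
      row.safe_push (i);
      rows.safe_push (row);
    }
  release_vec_vec (rows);   /* Releases every row, then ROWS itself.  */
}
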
2140 | |||||
2141 | // Provide a subset of the std::span functionality. (We can't use std::span | ||||
2142 | // itself because it's a C++20 feature.) | ||||
2143 | // | ||||
2144 | // In addition, provide an invalid value that is distinct from all valid | ||||
2145 | // sequences (including the empty sequence). This can be used to return | ||||
2146 | // failure without having to use std::optional. | ||||
2147 | // | ||||
2148 | // There is no operator bool because it would be ambiguous whether it is | ||||
2149 | // testing for a valid value or an empty sequence. | ||||
2150 | template<typename T> | ||||
2151 | class array_slice | ||||
2152 | { | ||||
2153 | template<typename OtherT> friend class array_slice; | ||||
2154 | |||||
2155 | public: | ||||
2156 | using value_type = T; | ||||
2157 | using iterator = T *; | ||||
2158 | using const_iterator = const T *; | ||||
2159 | |||||
2160 | array_slice () : m_base (nullptr), m_size (0) {} | ||||
2161 | |||||
2162 | template<typename OtherT> | ||||
2163 | array_slice (array_slice<OtherT> other) | ||||
2164 | : m_base (other.m_base), m_size (other.m_size) {} | ||||
2165 | |||||
2166 | array_slice (iterator base, unsigned int size) | ||||
2167 | : m_base (base), m_size (size) {} | ||||
2168 | |||||
2169 | template<size_t N> | ||||
2170 | array_slice (T (&array)[N]) : m_base (array), m_size (N) {} | ||||
2171 | |||||
2172 | template<typename OtherT> | ||||
2173 | array_slice (const vec<OtherT> &v) | ||||
2174 | : m_base (v.address ()), m_size (v.length ()) {} | ||||
2175 | |||||
2176 | iterator begin () { return m_base; } | ||||
2177 | iterator end () { return m_base + m_size; } | ||||
2178 | |||||
2179 | const_iterator begin () const { return m_base; } | ||||
2180 | const_iterator end () const { return m_base + m_size; } | ||||
2181 | |||||
2182 | value_type &front (); | ||||
2183 | value_type &back (); | ||||
2184 | value_type &operator[] (unsigned int i); | ||||
2185 | |||||
2186 | const value_type &front () const; | ||||
2187 | const value_type &back () const; | ||||
2188 | const value_type &operator[] (unsigned int i) const; | ||||
2189 | |||||
2190 | size_t size () const { return m_size; } | ||||
2191 | size_t size_bytes () const { return m_size * sizeof (T); } | ||||
2192 | bool empty () const { return m_size == 0; } | ||||
2193 | |||||
2194 | // An invalid array_slice that represents a failed operation. This is | ||||
2195 | // distinct from an empty slice, which is a valid result in some contexts. | ||||
2196 | static array_slice invalid () { return { nullptr, ~0U }; } | ||||
2197 | |||||
2198 | // True if the slice is valid, false if it is an invalid value like the one returned by invalid (). | ||||
2199 | bool is_valid () const { return m_base || m_size == 0; } | ||||
2200 | |||||
2201 | private: | ||||
2202 | iterator m_base; | ||||
2203 | unsigned int m_size; | ||||
2204 | }; | ||||
2205 | |||||
2206 | template<typename T> | ||||
2207 | inline typename array_slice<T>::value_type & | ||||
2208 | array_slice<T>::front () | ||||
2209 | { | ||||
2210 | gcc_checking_assert (m_size); | ||||
2211 | return m_base[0]; | ||||
2212 | } | ||||
2213 | |||||
2214 | template<typename T> | ||||
2215 | inline const typename array_slice<T>::value_type & | ||||
2216 | array_slice<T>::front () const | ||||
2217 | { | ||||
2218 | gcc_checking_assert (m_size); | ||||
2219 | return m_base[0]; | ||||
2220 | } | ||||
2221 | |||||
2222 | template<typename T> | ||||
2223 | inline typename array_slice<T>::value_type & | ||||
2224 | array_slice<T>::back () | ||||
2225 | { | ||||
2226 | gcc_checking_assert (m_size); | ||||
2227 | return m_base[m_size - 1]; | ||||
2228 | } | ||||
2229 | |||||
2230 | template<typename T> | ||||
2231 | inline const typename array_slice<T>::value_type & | ||||
2232 | array_slice<T>::back () const | ||||
2233 | { | ||||
2234 | gcc_checking_assert (m_size); | ||||
2235 | return m_base[m_size - 1]; | ||||
2236 | } | ||||
2237 | |||||
2238 | template<typename T> | ||||
2239 | inline typename array_slice<T>::value_type & | ||||
2240 | array_slice<T>::operator[] (unsigned int i) | ||||
2241 | { | ||||
2242 | gcc_checking_assert (i < m_size); | ||||
2243 | return m_base[i]; | ||||
2244 | } | ||||
2245 | |||||
2246 | template<typename T> | ||||
2247 | inline const typename array_slice<T>::value_type & | ||||
2248 | array_slice<T>::operator[] (unsigned int i) const | ||||
2249 | { | ||||
2250 | gcc_checking_assert (i < m_size); | ||||
2251 | return m_base[i]; | ||||
2252 | } | ||||
2253 | |||||
2254 | template<typename T> | ||||
2255 | array_slice<T> | ||||
2256 | make_array_slice (T *base, unsigned int size) | ||||
2257 | { | ||||
2258 | return array_slice<T> (base, size); | ||||
2259 | } | ||||
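
A sketch of array_slice views over existing storage and of the invalid value as a failure result; first_half, sum_slice and fixed_array_sum are invented names, and address and length come from the vec API earlier in this header:

/* Sketch only: slices never own their storage; the underlying array or
   vector must outlive them.  */
static array_slice<int>
first_half (vec<int> &v)
{
  if (v.length () < 2)
    return array_slice<int>::invalid ();
  return make_array_slice (v.address (), v.length () / 2);
}

static int
sum_slice (array_slice<int> s)
{
  if (!s.is_valid ())
    return -1;
  int sum = 0;
  for (int x : s)       /* begin ()/end () make range-for work.  */
    sum += x;
  return sum;
}

static int
fixed_array_sum (void)
{
  int buf[3] = { 1, 2, 3 };
  return sum_slice (array_slice<int> (buf));   /* Deduces N = 3.  */
}
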
2260 | |||||
2261 | #if (GCC_VERSION >= 3000) | ||||
2262 | # pragma GCC poison m_vec m_vecpfx m_vecdata | ||||
2263 | #endif | ||||
2264 | |||||
2265 | #endif // GCC_VEC_H |