Bug Summary

File:build/gcc/builtins.c
Warning:line 5660, column 4
Called C++ object pointer is null

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name builtins.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. 
-I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-UT4b4o.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c

1/* Expand builtin functions.
2 Copyright (C) 1988-2021 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24#include "config.h"
25#include "system.h"
26#include "coretypes.h"
27#include "backend.h"
28#include "target.h"
29#include "rtl.h"
30#include "tree.h"
31#include "memmodel.h"
32#include "gimple.h"
33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
36#include "tree-vrp.h"
37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
40#include "emit-rtl.h"
41#include "recog.h"
42#include "diagnostic-core.h"
43#include "alias.h"
44#include "fold-const.h"
45#include "fold-const-call.h"
46#include "gimple-ssa-warn-restrict.h"
47#include "stor-layout.h"
48#include "calls.h"
49#include "varasm.h"
50#include "tree-object-size.h"
51#include "tree-ssa-strlen.h"
52#include "realmpfr.h"
53#include "cfgrtl.h"
54#include "except.h"
55#include "dojump.h"
56#include "explow.h"
57#include "stmt.h"
58#include "expr.h"
59#include "libfuncs.h"
60#include "output.h"
61#include "typeclass.h"
62#include "langhooks.h"
63#include "value-prof.h"
64#include "builtins.h"
65#include "stringpool.h"
66#include "attribs.h"
67#include "asan.h"
68#include "internal-fn.h"
69#include "case-cfn-macros.h"
70#include "gimple-fold.h"
71#include "intl.h"
72#include "file-prefix-map.h" /* remap_macro_filename() */
73#include "gomp-constants.h"
74#include "omp-general.h"
75#include "tree-dfa.h"
76#include "gimple-iterator.h"
77#include "gimple-ssa.h"
78#include "tree-ssa-live.h"
79#include "tree-outof-ssa.h"
80#include "attr-fnspec.h"
81#include "demangle.h"
82
83struct target_builtins default_target_builtins;
84#if SWITCHABLE_TARGET1
/* With switchable targets each target keeps its own builtin state;
   otherwise only the single default instance above is used.  */
85struct target_builtins *this_target_builtins = &default_target_builtins;
86#endif
87
88/* Define the names of the builtin function types and codes. */
89const char *const built_in_class_names[BUILT_IN_LAST(BUILT_IN_NORMAL + 1)]
 90 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
 91
/* Expand each DEF_BUILTIN entry in builtins.def to the stringized
   builtin name, producing a table indexed by built_in_function.  */
 92#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
 93const char * built_in_names[(int) END_BUILTINS] =
 94{
 95#include "builtins.def"
 96};
 97
 98/* Setup an array of builtin_info_type, make sure each element decl is
 99 initialized to NULL_TREE. */
 100builtin_info_type builtin_info[(int)END_BUILTINS];
 101
 102/* Non-zero if __builtin_constant_p should be folded right away. */
 103bool force_folding_builtin_constant_p;
104
105static int target_char_cast (tree, char *);
106static rtx get_memory_rtx (tree, tree);
107static int apply_args_size (void);
108static int apply_result_size (void);
109static rtx result_vector (int, rtx);
110static void expand_builtin_prefetch (tree);
111static rtx expand_builtin_apply_args (void);
112static rtx expand_builtin_apply_args_1 (void);
113static rtx expand_builtin_apply (rtx, rtx, rtx);
114static void expand_builtin_return (rtx);
115static enum type_class type_to_class (tree);
116static rtx expand_builtin_classify_type (tree);
117static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
118static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
119static rtx expand_builtin_interclass_mathfn (tree, rtx);
120static rtx expand_builtin_sincos (tree);
121static rtx expand_builtin_cexpi (tree, rtx);
122static rtx expand_builtin_int_roundingfn (tree, rtx);
123static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
124static rtx expand_builtin_next_arg (void);
125static rtx expand_builtin_va_start (tree);
126static rtx expand_builtin_va_end (tree);
127static rtx expand_builtin_va_copy (tree);
128static rtx inline_expand_builtin_bytecmp (tree, rtx);
129static rtx expand_builtin_strcmp (tree, rtx);
130static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
131static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INTlong, scalar_int_mode);
132static rtx expand_builtin_memchr (tree, rtx);
133static rtx expand_builtin_memcpy (tree, rtx);
134static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
135 rtx target, tree exp,
136 memop_ret retmode,
137 bool might_overlap);
138static rtx expand_builtin_memmove (tree, rtx);
139static rtx expand_builtin_mempcpy (tree, rtx);
140static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
141static rtx expand_builtin_strcat (tree);
142static rtx expand_builtin_strcpy (tree, rtx);
143static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
144static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
145static rtx expand_builtin_stpncpy (tree, rtx);
146static rtx expand_builtin_strncat (tree, rtx);
147static rtx expand_builtin_strncpy (tree, rtx);
148static rtx builtin_memset_gen_str (void *, HOST_WIDE_INTlong, scalar_int_mode);
149static rtx expand_builtin_memset (tree, rtx, machine_mode);
150static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
151static rtx expand_builtin_bzero (tree);
152static rtx expand_builtin_strlen (tree, rtx, machine_mode);
153static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
154static rtx expand_builtin_alloca (tree);
155static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
156static rtx expand_builtin_frame_address (tree, tree);
157static tree stabilize_va_list_loc (location_t, tree, int);
158static rtx expand_builtin_expect (tree, rtx);
159static rtx expand_builtin_expect_with_probability (tree, rtx);
160static tree fold_builtin_constant_p (tree);
161static tree fold_builtin_classify_type (tree);
162static tree fold_builtin_strlen (location_t, tree, tree, tree);
163static tree fold_builtin_inf (location_t, tree, int);
164static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
165static bool validate_arg (const_tree, enum tree_code code);
166static rtx expand_builtin_fabs (tree, rtx, rtx);
167static rtx expand_builtin_signbit (tree, rtx);
168static tree fold_builtin_memcmp (location_t, tree, tree, tree);
169static tree fold_builtin_isascii (location_t, tree);
170static tree fold_builtin_toascii (location_t, tree);
171static tree fold_builtin_isdigit (location_t, tree);
172static tree fold_builtin_fabs (location_t, tree, tree);
173static tree fold_builtin_abs (location_t, tree, tree);
174static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
175 enum tree_code);
176static tree fold_builtin_varargs (location_t, tree, tree*, int);
177
178static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
179static tree fold_builtin_strspn (location_t, tree, tree, tree);
180static tree fold_builtin_strcspn (location_t, tree, tree, tree);
181
182static rtx expand_builtin_object_size (tree);
183static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
184 enum built_in_function);
185static void maybe_emit_chk_warning (tree, enum built_in_function);
186static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
187static tree fold_builtin_object_size (tree, tree);
188static bool check_read_access (tree, tree, tree = NULL_TREE(tree) __null, int = 1);
189static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
190 pointer_query *);
191
192unsigned HOST_WIDE_INTlong target_newline;
193unsigned HOST_WIDE_INTlong target_percent;
194static unsigned HOST_WIDE_INTlong target_c;
195static unsigned HOST_WIDE_INTlong target_s;
196char target_percent_c[3];
197char target_percent_s[3];
198char target_percent_s_newline[4];
199static tree do_mpfr_remquo (tree, tree, tree);
200static tree do_mpfr_lgamma_r (tree, tree, tree);
201static void expand_builtin_sync_synchronize (void);
202
/* Construct an access_ref.  BOUND, when nonnull, is an expression from
   which the access bounds BNDRNG are derived; MINACCESS, when true,
   makes the minimum access at least one byte.  The offset range starts
   out valid (zero) and the size range invalid (-1).  */
203access_ref::access_ref (tree bound /* = NULL_TREE */,
 204 bool minaccess /* = false */)
205: ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
 206 base0 (true), parmarray ()
207{
 208 /* Set to valid. */
 209 offrng[0] = offrng[1] = 0;
 210 /* Invalidate. */
 211 sizrng[0] = sizrng[1] = -1;
 212
 213 /* Set the default bounds of the access and adjust below. */
 214 bndrng[0] = minaccess ? 1 : 0;
 215 bndrng[1] = HOST_WIDE_INT_M1U-1UL;
 216
 217 /* When BOUND is nonnull and a range can be extracted from it,
 218 set the bounds of the access to reflect both it and MINACCESS.
 219 BNDRNG[0] is the size of the minimum access. */
 220 tree rng[2];
 221 if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
 222 {
 223 bndrng[0] = wi::to_offset (rng[0]);
 224 bndrng[1] = wi::to_offset (rng[1]);
 /* Reduce the lower bound back to the MINACCESS minimum when the
 extracted range permits a positive access.  */
 225 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
 226 }
227}
228
229/* Return the PHI node REF refers to or null if it doesn't. */
230
231gphi *
232access_ref::phi () const
233{
 /* Only an SSA_NAME can be defined by a PHI.  */
 234 if (!ref || TREE_CODE (ref)((enum tree_code) (ref)->base.code) != SSA_NAME)
 235 return NULL__null;
 236
 237 gimple *def_stmt = SSA_NAME_DEF_STMT (ref)(tree_check ((ref), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 237, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
 /* REF is an SSA_NAME but defined by some other statement.  */
 238 if (gimple_code (def_stmt) != GIMPLE_PHI)
 239 return NULL__null;
 240
 241 return as_a <gphi *> (def_stmt);
242}
243
244/* Determine and return the largest object to which *THIS refers.  If *THIS
 245 refers to a PHI and PREF is nonnull, fill *PREF with the details
 246 of the object determined by compute_objsize(ARG, OSTYPE) for each
 247 PHI argument ARG. */
248
249tree
250access_ref::get_ref (vec<access_ref> *all_refs,
 251 access_ref *pref /* = NULL */,
 252 int ostype /* = 1 */,
 253 ssa_name_limit_t *psnlim /* = NULL */,
 254 pointer_query *qry /* = NULL */) const
255{
 /* Only a PHI result needs merging; anything else is its own answer.  */
 256 gphi *phi_stmt = this->phi ();
 257 if (!phi_stmt)
 258 return ref;
 259
 260 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
 261 cause unbounded recursion. */
 262 ssa_name_limit_t snlim_buf;
 263 if (!psnlim)
 264 psnlim = &snlim_buf;
 265
 /* Fail if this PHI has already been visited (a cycle).  */
 266 if (!psnlim->visit_phi (ref))
 267 return NULL_TREE(tree) __null;
 268
 269 /* Reflects the range of offsets of all PHI arguments that refer to
 270 the same object (i.e., have the same REF). */
 271 access_ref same_ref;
 272 /* The conservative result of the PHI reflecting the offset and size
 273 of the largest PHI argument, regardless of whether or not they all
 274 refer to the same object. */
 275 pointer_query empty_qry;
 276 if (!qry)
 277 qry = &empty_qry;
 278
 279 access_ref phi_ref;
 280 if (pref)
 281 {
 282 phi_ref = *pref;
 283 same_ref = *pref;
 284 }
 285
 286 /* Set if any argument is a function array (or VLA) parameter not
 287 declared [static]. */
 288 bool parmarray = false;
 289 /* The size of the smallest object referenced by the PHI arguments. */
 290 offset_int minsize = 0;
 291 const offset_int maxobjsize = wi::to_offset (max_object_size ());
 292 /* The offset of the PHI, not reflecting those of its arguments. */
 293 const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };
 294
 /* Merge the access_ref computed for each PHI argument into PHI_REF,
 tracking in SAME_REF whether they all refer to one object.  */
 295 const unsigned nargs = gimple_phi_num_args (phi_stmt);
 296 for (unsigned i = 0; i < nargs; ++i)
 297 {
 298 access_ref phi_arg_ref;
 299 tree arg = gimple_phi_arg_def (phi_stmt, i);
 300 if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
 301 || phi_arg_ref.sizrng[0] < 0)
 302 /* A PHI with all null pointer arguments. */
 303 return NULL_TREE(tree) __null;
 304
 305 /* Add PREF's offset to that of the argument. */
 306 phi_arg_ref.add_offset (orng[0], orng[1]);
 307 if (TREE_CODE (arg)((enum tree_code) (arg)->base.code) == SSA_NAME)
 308 qry->put_ref (arg, phi_arg_ref);
 309
 310 if (all_refs)
 311 all_refs->safe_push (phi_arg_ref);
 312
 313 const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
 314 || phi_arg_ref.sizrng[1] != maxobjsize);
 315
 316 parmarray |= phi_arg_ref.parmarray;
 317
 /* A null argument is disregarded below unless it's the PHI's only
 argument.  */
 318 const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);
 319
 320 if (phi_ref.sizrng[0] < 0)
 321 {
 /* First argument with a valid size: it becomes the initial
 result.  */
 322 if (!nullp)
 323 same_ref = phi_arg_ref;
 324 phi_ref = phi_arg_ref;
 325 if (arg_known_size)
 326 minsize = phi_arg_ref.sizrng[0];
 327 continue;
 328 }
 329
 330 const bool phi_known_size = (phi_ref.sizrng[0] != 0
 331 || phi_ref.sizrng[1] != maxobjsize);
 332
 333 if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
 334 minsize = phi_arg_ref.sizrng[0];
 335
 336 /* Disregard null pointers in PHIs with two or more arguments.
 337 TODO: Handle this better! */
 338 if (nullp)
 339 continue;
 340
 341 /* Determine the amount of remaining space in the argument. */
 342 offset_int argrem[2];
 343 argrem[1] = phi_arg_ref.size_remaining (argrem);
 344
 345 /* Determine the amount of remaining space computed so far and
 346 if the remaining space in the argument is more use it instead. */
 347 offset_int phirem[2];
 348 phirem[1] = phi_ref.size_remaining (phirem);
 349
 350 if (phi_arg_ref.ref != same_ref.ref)
 351 same_ref.ref = NULL_TREE(tree) __null;
 352
 353 if (phirem[1] < argrem[1]
 354 || (phirem[1] == argrem[1]
 355 && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
 356 /* Use the argument with the most space remaining as the result,
 357 or the larger one if the space is equal. */
 358 phi_ref = phi_arg_ref;
 359
 360 /* Set SAME_REF.OFFRNG to the maximum range of all arguments. */
 361 if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
 362 same_ref.offrng[0] = phi_arg_ref.offrng[0];
 363 if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
 364 same_ref.offrng[1] = phi_arg_ref.offrng[1];
 365 }
 366
 367 if (phi_ref.sizrng[0] < 0)
 368 {
 369 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
 370 (perhaps because they have all been already visited by prior
 371 recursive calls). */
 372 psnlim->leave_phi (ref);
 373 return NULL_TREE(tree) __null;
 374 }
 375
 376 if (!same_ref.ref && same_ref.offrng[0] != 0)
 377 /* Clear BASE0 if not all the arguments refer to the same object and
 378 if not all their offsets are zero-based. This allows the final
 379 PHI offset to be out of bounds for some arguments but not for others
 380 (or negative even if all the arguments are BASE0), which is overly
 381 permissive. */
 382 phi_ref.base0 = false;
 383
 384 if (same_ref.ref)
 385 phi_ref = same_ref;
 386 else
 387 {
 388 /* Replace the lower bound of the largest argument with the size
 389 of the smallest argument, and set PARMARRAY if any argument
 390 was one. */
 391 phi_ref.sizrng[0] = minsize;
 392 phi_ref.parmarray = parmarray;
 393 }
 394
 395 /* Avoid changing *THIS. */
 396 if (pref && pref != this)
 397 *pref = phi_ref;
 398
 /* Allow the PHI to be revisited along other paths.  */
 399 psnlim->leave_phi (ref);
 400
 401 return phi_ref.ref;
402}
403
404/* Return the maximum amount of space remaining and if non-null, set
405 argument to the minimum. */
406
407offset_int
408access_ref::size_remaining (offset_int *pmin /* = NULL */) const
409{
 /* With a null PMIN store the minimum into a scratch buffer so the
 code below can assign unconditionally.  */
 410 offset_int minbuf;
 411 if (!pmin)
 412 pmin = &minbuf;
 413
 414 /* add_offset() ensures the offset range isn't inverted. */
 415 gcc_checking_assert (offrng[0] <= offrng[1])((void)(!(offrng[0] <= offrng[1]) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 415, __FUNCTION__), 0 : 0))
;
 416
 417 if (base0)
 418 {
 419 /* The offset into referenced object is zero-based (i.e., it's
 420 not referenced by a pointer into middle of some unknown object). */
 421 if (offrng[0] < 0 && offrng[1] < 0)
 422 {
 423 /* If the offset is negative the remaining size is zero. */
 424 *pmin = 0;
 425 return 0;
 426 }
 427
 428 if (sizrng[1] <= offrng[0])
 429 {
 430 /* If the starting offset is greater than or equal to the upper
 431 bound on the size of the object, the space remaining is zero.
 432 As a special case, if it's equal, set *PMIN to -1 to let
 433 the caller know the offset is valid and just past the end. */
 434 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
 435 return 0;
 436 }
 437
 438 /* Otherwise return the size minus the lower bound of the offset. */
 439 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
 440
 441 *pmin = sizrng[0] - or0;
 442 return sizrng[1] - or0;
 443 }
 444
 445 /* The offset to the referenced object isn't zero-based (i.e., it may
 446 refer to a byte other than the first. The size of such an object
 447 is constrained only by the size of the address space (the result
 448 of max_object_size()). */
 449 if (sizrng[1] <= offrng[0])
 450 {
 451 *pmin = 0;
 452 return 0;
 453 }
 454
 455 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
 456
 457 *pmin = sizrng[0] - or0;
 458 return sizrng[1] - or0;
459}
460
461/* Add the range [MIN, MAX] to the offset range. For known objects (with
462 zero-based offsets) at least one of whose offset's bounds is in range,
463 constrain the other (or both) to the bounds of the object (i.e., zero
464 and the upper bound of its size). This improves the quality of
465 diagnostics. */
466
467void access_ref::add_offset (const offset_int &min, const offset_int &max)
468{
 469 if (min <= max)
 470 {
 471 /* To add an ordinary range just add it to the bounds. */
 472 offrng[0] += min;
 473 offrng[1] += max;
 474 }
 475 else if (!base0)
 476 {
 477 /* To add an inverted range to an offset to an unknown object
 478 expand it to the maximum. */
 479 add_max_offset ();
 480 return;
 481 }
 482 else
 483 {
 484 /* To add an inverted range to an offset to an known object set
 485 the upper bound to the maximum representable offset value
 486 (which may be greater than MAX_OBJECT_SIZE).
 487 The lower bound is either the sum of the current offset and
 488 MIN when abs(MAX) is greater than the former, or zero otherwise.
 489 Zero because then the inverted range includes the negative of
 490 the lower bound. */
 491 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node)((tree_check5 ((global_trees[TI_PTRDIFF_TYPE]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 491, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval
)
);
 492 offrng[1] = maxoff;
 493
 494 if (max >= 0)
 495 {
 496 offrng[0] = 0;
 497 return;
 498 }
 499
 500 offset_int absmax = wi::abs (max);
 501 if (offrng[0] < absmax)
 502 {
 503 offrng[0] += min;
 504 /* Cap the lower bound at the upper (set to MAXOFF above)
 505 to avoid inadvertently recreating an inverted range. */
 506 if (offrng[1] < offrng[0])
 507 offrng[0] = offrng[1];
 508 }
 509 else
 510 offrng[0] = 0;
 511 }
 512
 /* Nothing more to constrain for an unknown (non-zero-based) object.  */
 513 if (!base0)
 514 return;
 515
 516 /* When referencing a known object check to see if the offset computed
 517 so far is in bounds... */
 518 offset_int remrng[2];
 519 remrng[1] = size_remaining (remrng);
 520 if (remrng[1] > 0 || remrng[0] < 0)
 521 {
 522 /* ...if so, constrain it so that neither bound exceeds the size of
 523 the object. Out of bounds offsets are left unchanged, and, for
 524 better or worse, become in bounds later. They should be detected
 525 and diagnosed at the point they first become invalid by
 526 -Warray-bounds. */
 527 if (offrng[0] < 0)
 528 offrng[0] = 0;
 529 if (offrng[1] > sizrng[1])
 530 offrng[1] = sizrng[1];
 531 }
532}
533
534/* Set a bit for the PHI in VISITED and return true if it wasn't
535 already set. */
536
537bool
538ssa_name_limit_t::visit_phi (tree ssa_name)
539{
 /* Allocate the bitmap of visited PHIs lazily on first use.  */
 540 if (!visited)
 541 visited = BITMAP_ALLOCbitmap_alloc (NULL__null);
 542
 543 /* Return false if SSA_NAME has already been visited. */
 544 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name)(tree_check ((ssa_name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 544, __FUNCTION__, (SSA_NAME)))->base.u.version
);
545}
546
547/* Clear a bit for the PHI in VISITED. */
548
549void
550ssa_name_limit_t::leave_phi (tree ssa_name)
551{
 552 /* Clear SSA_NAME's bit in VISITED, undoing visit_phi.  (The prior
 comment here was copy-pasted from visit_phi; this function returns
 nothing.) */
 553 bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name)(tree_check ((ssa_name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 553, __FUNCTION__, (SSA_NAME)))->base.u.version
);
554}
555
556/* Return false if the SSA_NAME chain length counter has reached
 557 the limit, otherwise decrement the counter and return true. */
558
559bool
560ssa_name_limit_t::next ()
561{
562 /* Return a negative value to let caller avoid recursing beyond
563 the specified limit. */
564 if (ssa_def_max == 0)
27
Assuming field 'ssa_def_max' is not equal to 0
28
Taking false branch
565 return false;
566
567 --ssa_def_max;
568 return true;
29
Returning the value 1, which participates in a condition later
569}
570
571/* If the SSA_NAME has already been "seen" return a positive value.
572 Otherwise add it to VISITED. If the SSA_NAME limit has been
573 reached, return a negative value. Otherwise return zero. */
574
575int
576ssa_name_limit_t::next_phi (tree ssa_name)
577{
 578 {
 /* Scope block: DEF_STMT is needed only for the PHI check.  */
 579 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name)(tree_check ((ssa_name), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 579, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
 580 /* Return a positive value if the PHI has already been visited. */
 581 if (gimple_code (def_stmt) == GIMPLE_PHI
 582 && !visit_phi (ssa_name))
 583 return 1;
 584 }
 585
 586 /* Return a negative value to let caller avoid recursing beyond
 587 the specified limit. */
 588 if (ssa_def_max == 0)
 589 return -1;
 590
 /* Consume one step of the SSA_NAME chain budget.  */
 591 --ssa_def_max;
 592
 593 return 0;
594}
595
/* Free the bitmap of visited PHIs if one was allocated (BITMAP_FREE
   also nulls VISITED).  */
596ssa_name_limit_t::~ssa_name_limit_t ()
597{
 598 if (visited)
 599 BITMAP_FREE (visited)((void) (bitmap_obstack_free ((bitmap) visited), (visited) = (
bitmap) __null))
;
600}
601
602/* Default ctor. Initialize object with pointers to the range_query
603 and cache_type instances to use or null. */
604
605pointer_query::pointer_query (range_query *qry /* = NULL */,
 606 cache_type *cache /* = NULL */)
607: rvals (qry), var_cache (cache), hits (), misses (),
 608 failures (), depth (), max_depth ()
609{
 610 /* No op. All the statistics counters are zeroed in the
 member-initializer list above. */
611}
612
613/* Return a pointer to the cached access_ref instance for the SSA_NAME
614 PTR if it's there or null otherwise. */
615
616const access_ref *
617pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
618{
 /* With no cache every lookup counts as a miss.  */
 619 if (!var_cache)
 620 {
 621 ++misses;
 622 return NULL__null;
 623 }
 624
 /* The low bit of IDX encodes the object-size type, the remaining
 bits the SSA_NAME version.  */
 625 unsigned version = SSA_NAME_VERSION (ptr)(tree_check ((ptr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 625, __FUNCTION__, (SSA_NAME)))->base.u.version
;
 626 unsigned idx = version << 1 | (ostype & 1);
 627 if (var_cache->indices.length () <= idx)
 628 {
 629 ++misses;
 630 return NULL__null;
 631 }
 632
 /* NOTE(review): put_ref stores INDICES[IDX] = position + 1 and the
 other get_ref overload reads INDICES[IDX] - 1, but this overload
 uses INDICES[IDX] directly -- verify the off-by-one is intended.  */
 633 unsigned cache_idx = var_cache->indices[idx];
 634 if (var_cache->access_refs.length () <= cache_idx)
 635 {
 636 ++misses;
 637 return NULL__null;
 638 }
 639
 /* An entry is only valid once its REF member has been set.  */
 640 access_ref &cache_ref = var_cache->access_refs[cache_idx];
 641 if (cache_ref.ref)
 642 {
 643 ++hits;
 644 return &cache_ref;
 645 }
 646
 647 ++misses;
 648 return NULL__null;
649}
650
651/* Retrieve the access_ref instance for a variable from the cache if it's
 652 there or compute it and insert it into the cache if it's nonnull. */
653
654bool
655pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
656{
 /* Version 0 stands for "not an SSA_NAME" and bypasses the cache.  */
 657 const unsigned version
 658 = TREE_CODE (ptr)((enum tree_code) (ptr)->base.code) == SSA_NAME ? SSA_NAME_VERSION (ptr)(tree_check ((ptr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 658, __FUNCTION__, (SSA_NAME)))->base.u.version
 : 0;
 659
 660 if (var_cache && version)
 661 {
 /* INDICES[IDX] holds the position in ACCESS_REFS plus one;
 zero means no entry.  */
 662 unsigned idx = version << 1 | (ostype & 1);
 663 if (idx < var_cache->indices.length ())
 664 {
 665 unsigned cache_idx = var_cache->indices[idx] - 1;
 666 if (cache_idx < var_cache->access_refs.length ()
 667 && var_cache->access_refs[cache_idx].ref)
 668 {
 669 ++hits;
 670 *pref = var_cache->access_refs[cache_idx];
 671 return true;
 672 }
 673 }
 674
 675 ++misses;
 676 }
 677
 /* Not cached: compute the result, counting a failure when it
 cannot be determined.  */
 678 if (!compute_objsize (ptr, ostype, pref, this))
 679 {
 680 ++failures;
 681 return false;
 682 }
 683
 684 return true;
685}
686
687/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
688 nonnull. */
689
690void
691pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
692{
693 /* Only add populated/valid entries. */
694 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
695 return;
696
697 /* Add REF to the two-level cache. */
698 unsigned version = SSA_NAME_VERSION (ptr)(tree_check ((ptr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 698, __FUNCTION__, (SSA_NAME)))->base.u.version
;
699 unsigned idx = version << 1 | (ostype & 1);
700
701 /* Grow INDICES if necessary. An index is valid if it's nonzero.
702 Its value minus one is the index into ACCESS_REFS. Not all
703 entries are valid. */
704 if (var_cache->indices.length () <= idx)
705 var_cache->indices.safe_grow_cleared (idx + 1);
706
707 if (!var_cache->indices[idx])
708 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
709
710 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
711 REF member is nonnull. All entries except for the last two
712 are valid. Once nonnull, the REF value must stay unchanged. */
713 unsigned cache_idx = var_cache->indices[idx];
714 if (var_cache->access_refs.length () <= cache_idx)
715 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
716
717 access_ref cache_ref = var_cache->access_refs[cache_idx - 1];
718 if (cache_ref.ref)
719 {
720 gcc_checking_assert (cache_ref.ref == ref.ref)((void)(!(cache_ref.ref == ref.ref) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 720, __FUNCTION__), 0 : 0))
;
721 return;
722 }
723
724 cache_ref = ref;
725}
726
727/* Flush the cache if it's nonnull. */
728
729void
730pointer_query::flush_cache ()
731{
732 if (!var_cache)
733 return;
734 var_cache->indices.release ();
735 var_cache->access_refs.release ();
736}
737
738/* Return true if NAME starts with __builtin_ or __sync_. */
739
static bool
is_builtin_name (const char *name)
{
  /* The reserved prefixes recognized as built-in entry points.  */
  static const struct
  {
    const char *prefix;
    unsigned length;
  } reserved[] = {
    { "__builtin_", 10 },
    { "__sync_", 7 },
    { "__atomic_", 9 },
  };

  for (unsigned i = 0; i < sizeof reserved / sizeof reserved[0]; ++i)
    if (strncmp (name, reserved[i].prefix, reserved[i].length) == 0)
      return true;

  return false;
}
751
752/* Return true if NODE should be considered for inline expansion regardless
753 of the optimization level. This means whenever a function is invoked with
754 its "internal" name, which normally contains the prefix "__builtin". */
755
756bool
757called_as_built_in (tree node)
758{
 759 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
 760 we want the name used to call the function, not the name it
 761 will have. */
 762 const char *name = IDENTIFIER_POINTER (DECL_NAME (node))((const char *) (tree_check ((((contains_struct_check ((node)
, (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 762, __FUNCTION__))->decl_minimal.name)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 762, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
;
 /* Match the __builtin_/__sync_/__atomic_ prefixes.  */
 763 return is_builtin_name (name);
764}
765
766/* Compute values M and N such that M divides (address of EXP - N) and such
767 that N < M. If these numbers can be determined, store M in alignp and N in
768 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
769 *alignp and any bit-offset to *bitposp.
770
771 Note that the address (and thus the alignment) computed here is based
772 on the address to which a symbol resolves, whereas DECL_ALIGN is based
773 on the address at which an object is actually located. These two
774 addresses are not always the same. For example, on ARM targets,
775 the address &foo of a Thumb function foo() has the lowest bit set,
776 whereas foo() itself starts on an even address.
777
778 If ADDR_P is true we are taking the address of the memory reference EXP
779 and thus cannot rely on the access taking place. */
780
781static bool
782get_object_alignment_2 (tree exp, unsigned int *alignp,
783 unsigned HOST_WIDE_INTlong *bitposp, bool addr_p)
784{
785 poly_int64 bitsize, bitpos;
786 tree offset;
787 machine_mode mode;
788 int unsignedp, reversep, volatilep;
789 unsigned int align = BITS_PER_UNIT(8);
790 bool known_alignment = false;
791
792 /* Get the innermost object and the constant (bitpos) and possibly
793 variable (offset) offset of the access. */
794 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
795 &unsignedp, &reversep, &volatilep);
796
797 /* Extract alignment information from the innermost object and
798 possibly adjust bitpos and offset. */
799 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == FUNCTION_DECL)
800 {
801 /* Function addresses can encode extra information besides their
802 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
803 allows the low bit to be used as a virtual bit, we know
804 that the address itself must be at least 2-byte aligned. */
805 if (TARGET_PTRMEMFUNC_VBIT_LOCATIONptrmemfunc_vbit_in_pfn == ptrmemfunc_vbit_in_pfn)
806 align = 2 * BITS_PER_UNIT(8);
807 }
808 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == LABEL_DECL)
809 ;
810 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == CONST_DECL)
811 {
812 /* The alignment of a CONST_DECL is determined by its initializer. */
813 exp = DECL_INITIAL (exp)((contains_struct_check ((exp), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 813, __FUNCTION__))->decl_common.initial)
;
814 align = TYPE_ALIGN (TREE_TYPE (exp))((tree_class_check ((((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 814, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 814, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 814, __FUNCTION__))->typed.type))->type_common.align -
1) : 0)
;
815 if (CONSTANT_CLASS_P (exp)(tree_code_type[(int) (((enum tree_code) (exp)->base.code)
)] == tcc_constant)
)
816 align = targetm.constant_alignment (exp, align);
817
818 known_alignment = true;
819 }
820 else if (DECL_P (exp)(tree_code_type[(int) (((enum tree_code) (exp)->base.code)
)] == tcc_declaration)
)
821 {
822 align = DECL_ALIGN (exp)(((contains_struct_check ((exp), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 822, __FUNCTION__))->decl_common.align) ? ((unsigned)1) <<
(((contains_struct_check ((exp), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 822, __FUNCTION__))->decl_common.align) - 1) : 0)
;
823 known_alignment = true;
824 }
825 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == INDIRECT_REF
826 || TREE_CODE (exp)((enum tree_code) (exp)->base.code) == MEM_REF
827 || TREE_CODE (exp)((enum tree_code) (exp)->base.code) == TARGET_MEM_REF)
828 {
829 tree addr = TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 829, __FUNCTION__)))))
;
830 unsigned ptr_align;
831 unsigned HOST_WIDE_INTlong ptr_bitpos;
832 unsigned HOST_WIDE_INTlong ptr_bitmask = ~0;
833
834 /* If the address is explicitely aligned, handle that. */
835 if (TREE_CODE (addr)((enum tree_code) (addr)->base.code) == BIT_AND_EXPR
836 && TREE_CODE (TREE_OPERAND (addr, 1))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((addr), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 836, __FUNCTION__))))))->base.code)
== INTEGER_CST)
837 {
838 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))((unsigned long) (*tree_int_cst_elt_check (((*((const_cast<
tree*> (tree_operand_check ((addr), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 838, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 838, __FUNCTION__)))
;
839 ptr_bitmask *= BITS_PER_UNIT(8);
840 align = least_bit_hwi (ptr_bitmask);
841 addr = TREE_OPERAND (addr, 0)(*((const_cast<tree*> (tree_operand_check ((addr), (0),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 841, __FUNCTION__)))))
;
842 }
843
844 known_alignment
845 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
846 align = MAX (ptr_align, align)((ptr_align) > (align) ? (ptr_align) : (align));
847
848 /* Re-apply explicit alignment to the bitpos. */
849 ptr_bitpos &= ptr_bitmask;
850
851 /* The alignment of the pointer operand in a TARGET_MEM_REF
852 has to take the variable offset parts into account. */
853 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == TARGET_MEM_REF)
854 {
855 if (TMR_INDEX (exp)((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 855, __FUNCTION__, (TARGET_MEM_REF)))), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 855, __FUNCTION__))))))
)
856 {
857 unsigned HOST_WIDE_INTlong step = 1;
858 if (TMR_STEP (exp)((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 858, __FUNCTION__, (TARGET_MEM_REF)))), (3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 858, __FUNCTION__))))))
)
859 step = TREE_INT_CST_LOW (TMR_STEP (exp))((unsigned long) (*tree_int_cst_elt_check ((((*((const_cast<
tree*> (tree_operand_check (((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 859, __FUNCTION__, (TARGET_MEM_REF)))), (3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 859, __FUNCTION__))))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 859, __FUNCTION__)))
;
860 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT)((align) < (least_bit_hwi (step) * (8)) ? (align) : (least_bit_hwi
(step) * (8)))
;
861 }
862 if (TMR_INDEX2 (exp)((*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 862, __FUNCTION__, (TARGET_MEM_REF)))), (4), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 862, __FUNCTION__))))))
)
863 align = BITS_PER_UNIT(8);
864 known_alignment = false;
865 }
866
867 /* When EXP is an actual memory reference then we can use
868 TYPE_ALIGN of a pointer indirection to derive alignment.
869 Do so only if get_pointer_alignment_1 did not reveal absolute
870 alignment knowledge and if using that alignment would
871 improve the situation. */
872 unsigned int talign;
873 if (!addr_p && !known_alignment
874 && (talign = min_align_of_type (TREE_TYPE (exp)((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 874, __FUNCTION__))->typed.type)
) * BITS_PER_UNIT(8))
875 && talign > align)
876 align = talign;
877 else
878 {
879 /* Else adjust bitpos accordingly. */
880 bitpos += ptr_bitpos;
881 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == MEM_REF
882 || TREE_CODE (exp)((enum tree_code) (exp)->base.code) == TARGET_MEM_REF)
883 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT(8);
884 }
885 }
886 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == STRING_CST)
887 {
888 /* STRING_CST are the only constant objects we allow to be not
889 wrapped inside a CONST_DECL. */
890 align = TYPE_ALIGN (TREE_TYPE (exp))((tree_class_check ((((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 890, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 890, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 890, __FUNCTION__))->typed.type))->type_common.align -
1) : 0)
;
891 if (CONSTANT_CLASS_P (exp)(tree_code_type[(int) (((enum tree_code) (exp)->base.code)
)] == tcc_constant)
)
892 align = targetm.constant_alignment (exp, align);
893
894 known_alignment = true;
895 }
896
897 /* If there is a non-constant offset part extract the maximum
898 alignment that can prevail. */
899 if (offset)
900 {
901 unsigned int trailing_zeros = tree_ctz (offset);
902 if (trailing_zeros < HOST_BITS_PER_INT(8 * 4))
903 {
904 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT(8);
905 if (inner)
906 align = MIN (align, inner)((align) < (inner) ? (align) : (inner));
907 }
908 }
909
910 /* Account for the alignment of runtime coefficients, so that the constant
911 bitpos is guaranteed to be accurate. */
912 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
913 if (alt_align != 0 && alt_align < align)
914 {
915 align = alt_align;
916 known_alignment = false;
917 }
918
919 *alignp = align;
920 *bitposp = bitpos.coeffs[0] & (align - 1);
921 return known_alignment;
922}
923
924/* For a memory reference expression EXP compute values M and N such that M
925 divides (&EXP - N) and such that N < M. If these numbers can be determined,
926 store M in alignp and N in *BITPOSP and return true. Otherwise return false
927 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
928
929bool
930get_object_alignment_1 (tree exp, unsigned int *alignp,
931 unsigned HOST_WIDE_INTlong *bitposp)
932{
933 return get_object_alignment_2 (exp, alignp, bitposp, false);
934}
935
936/* Return the alignment in bits of EXP, an object. */
937
938unsigned int
939get_object_alignment (tree exp)
940{
941 unsigned HOST_WIDE_INTlong bitpos = 0;
942 unsigned int align;
943
944 get_object_alignment_1 (exp, &align, &bitpos);
945
946 /* align and bitpos now specify known low bits of the pointer.
947 ptr & (align - 1) == bitpos. */
948
949 if (bitpos != 0)
950 align = least_bit_hwi (bitpos);
951 return align;
952}
953
954/* For a pointer valued expression EXP compute values M and N such that M
955 divides (EXP - N) and such that N < M. If these numbers can be determined,
956 store M in alignp and N in *BITPOSP and return true. Return false if
957 the results are just a conservative approximation.
958
959 If EXP is not a pointer, false is returned too. */
960
961bool
962get_pointer_alignment_1 (tree exp, unsigned int *alignp,
963 unsigned HOST_WIDE_INTlong *bitposp)
964{
965 STRIP_NOPS (exp)(exp) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((exp)))))
;
966
967 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == ADDR_EXPR)
968 return get_object_alignment_2 (TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 968, __FUNCTION__)))))
,
969 alignp, bitposp, true);
970 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == POINTER_PLUS_EXPR)
971 {
972 unsigned int align;
973 unsigned HOST_WIDE_INTlong bitpos;
974 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 974, __FUNCTION__)))))
,
975 &align, &bitpos);
976 if (TREE_CODE (TREE_OPERAND (exp, 1))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((exp), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 976, __FUNCTION__))))))->base.code)
== INTEGER_CST)
977 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))((unsigned long) (*tree_int_cst_elt_check (((*((const_cast<
tree*> (tree_operand_check ((exp), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 977, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 977, __FUNCTION__)))
* BITS_PER_UNIT(8);
978 else
979 {
980 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1)(*((const_cast<tree*> (tree_operand_check ((exp), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 980, __FUNCTION__)))))
);
981 if (trailing_zeros < HOST_BITS_PER_INT(8 * 4))
982 {
983 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT(8);
984 if (inner)
985 align = MIN (align, inner)((align) < (inner) ? (align) : (inner));
986 }
987 }
988 *alignp = align;
989 *bitposp = bitpos & (align - 1);
990 return res;
991 }
992 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == SSA_NAME
993 && POINTER_TYPE_P (TREE_TYPE (exp))(((enum tree_code) (((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 993, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((exp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 993, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
994 {
995 unsigned int ptr_align, ptr_misalign;
996 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp)(tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 996, __FUNCTION__, (SSA_NAME)))->ssa_name.info.ptr_info
;
997
998 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
999 {
1000 *bitposp = ptr_misalign * BITS_PER_UNIT(8);
1001 *alignp = ptr_align * BITS_PER_UNIT(8);
1002 /* Make sure to return a sensible alignment when the multiplication
1003 by BITS_PER_UNIT overflowed. */
1004 if (*alignp == 0)
1005 *alignp = 1u << (HOST_BITS_PER_INT(8 * 4) - 1);
1006 /* We cannot really tell whether this result is an approximation. */
1007 return false;
1008 }
1009 else
1010 {
1011 *bitposp = 0;
1012 *alignp = BITS_PER_UNIT(8);
1013 return false;
1014 }
1015 }
1016 else if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == INTEGER_CST)
1017 {
1018 *alignp = BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
;
1019 *bitposp = ((TREE_INT_CST_LOW (exp)((unsigned long) (*tree_int_cst_elt_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1019, __FUNCTION__)))
* BITS_PER_UNIT(8))
1020 & (BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
- 1));
1021 return true;
1022 }
1023
1024 *bitposp = 0;
1025 *alignp = BITS_PER_UNIT(8);
1026 return false;
1027}
1028
1029/* Return the alignment in bits of EXP, a pointer valued expression.
1030 The alignment returned is, by default, the alignment of the thing that
1031 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
1032
1033 Otherwise, look at the expression to see if we can do better, i.e., if the
1034 expression is actually pointing at an object whose alignment is tighter. */
1035
1036unsigned int
1037get_pointer_alignment (tree exp)
1038{
1039 unsigned HOST_WIDE_INTlong bitpos = 0;
1040 unsigned int align;
1041
1042 get_pointer_alignment_1 (exp, &align, &bitpos);
1043
1044 /* align and bitpos now specify known low bits of the pointer.
1045 ptr & (align - 1) == bitpos. */
1046
1047 if (bitpos != 0)
1048 align = least_bit_hwi (bitpos);
1049
1050 return align;
1051}
1052
1053/* Return the number of leading non-zero elements in the sequence
1054 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
1055 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
1056
1057unsigned
1058string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
1059{
1060 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4)((void)(!(eltsize == 1 || eltsize == 2 || eltsize == 4) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1060, __FUNCTION__), 0 : 0))
;
1061
1062 unsigned n;
1063
1064 if (eltsize == 1)
1065 {
1066 /* Optimize the common case of plain char. */
1067 for (n = 0; n < maxelts; n++)
1068 {
1069 const char *elt = (const char*) ptr + n;
1070 if (!*elt)
1071 break;
1072 }
1073 }
1074 else
1075 {
1076 for (n = 0; n < maxelts; n++)
1077 {
1078 const char *elt = (const char*) ptr + n * eltsize;
1079 if (!memcmp (elt, "\0\0\0\0", eltsize))
1080 break;
1081 }
1082 }
1083 return n;
1084}
1085
 1086/* For a call EXPR at LOC to a function FNAME that expects a string
 1087   in the argument ARG, issue a diagnostic due to it being called
 1088   with an argument that is a character array with no terminating
 1089   NUL.  SIZE is the EXACT size of the array, and BNDRNG the number
 1090   of characters in which the NUL is expected.  Either EXPR or FNAME
 1091   may be null but not both.  SIZE may be null when BNDRNG is null.  */
1092
1093void
1094warn_string_no_nul (location_t loc, tree expr, const char *fname,
1095 tree arg, tree decl, tree size /* = NULL_TREE */,
1096 bool exact /* = false */,
1097 const wide_int bndrng[2] /* = NULL */)
1098{
1099 if ((expr && TREE_NO_WARNING (expr)((expr)->base.nowarning_flag)) || TREE_NO_WARNING (arg)((arg)->base.nowarning_flag))
1100 return;
1101
1102 loc = expansion_point_location_if_in_system_header (loc);
1103 bool warned;
1104
1105 /* Format the bound range as a string to keep the nuber of messages
1106 from exploding. */
1107 char bndstr[80];
1108 *bndstr = 0;
1109 if (bndrng)
1110 {
1111 if (bndrng[0] == bndrng[1])
1112 sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
1113 else
1114 sprintf (bndstr, "[%llu, %llu]",
1115 (unsigned long long) bndrng[0].to_uhwi (),
1116 (unsigned long long) bndrng[1].to_uhwi ());
1117 }
1118
1119 const tree maxobjsize = max_object_size ();
1120 const wide_int maxsiz = wi::to_wide (maxobjsize);
1121 if (expr)
1122 {
1123 tree func = get_callee_fndecl (expr);
1124 if (bndrng)
1125 {
1126 if (wi::ltu_p (maxsiz, bndrng[0]))
1127 warned = warning_at (loc, OPT_Wstringop_overread,
1128 "%K%qD specified bound %s exceeds "
1129 "maximum object size %E",
1130 expr, func, bndstr, maxobjsize);
1131 else
1132 {
1133 bool maybe = wi::to_wide (size) == bndrng[0];
1134 warned = warning_at (loc, OPT_Wstringop_overread,
1135 exact
1136 ? G_("%K%qD specified bound %s exceeds ""%K%qD specified bound %s exceeds " "the size %E of unterminated array"
1137 "the size %E of unterminated array")"%K%qD specified bound %s exceeds " "the size %E of unterminated array"
1138 : (maybe
1139 ? G_("%K%qD specified bound %s may ""%K%qD specified bound %s may " "exceed the size of at most %E "
"of unterminated array"
1140 "exceed the size of at most %E ""%K%qD specified bound %s may " "exceed the size of at most %E "
"of unterminated array"
1141 "of unterminated array")"%K%qD specified bound %s may " "exceed the size of at most %E "
"of unterminated array"
1142 : G_("%K%qD specified bound %s exceeds ""%K%qD specified bound %s exceeds " "the size of at most %E "
"of unterminated array"
1143 "the size of at most %E ""%K%qD specified bound %s exceeds " "the size of at most %E "
"of unterminated array"
1144 "of unterminated array")"%K%qD specified bound %s exceeds " "the size of at most %E "
"of unterminated array"
),
1145 expr, func, bndstr, size);
1146 }
1147 }
1148 else
1149 warned = warning_at (loc, OPT_Wstringop_overread,
1150 "%K%qD argument missing terminating nul",
1151 expr, func);
1152 }
1153 else
1154 {
1155 if (bndrng)
1156 {
1157 if (wi::ltu_p (maxsiz, bndrng[0]))
1158 warned = warning_at (loc, OPT_Wstringop_overread,
1159 "%qs specified bound %s exceeds "
1160 "maximum object size %E",
1161 fname, bndstr, maxobjsize);
1162 else
1163 {
1164 bool maybe = wi::to_wide (size) == bndrng[0];
1165 warned = warning_at (loc, OPT_Wstringop_overread,
1166 exact
1167 ? G_("%qs specified bound %s exceeds ""%qs specified bound %s exceeds " "the size %E of unterminated array"
1168 "the size %E of unterminated array")"%qs specified bound %s exceeds " "the size %E of unterminated array"
1169 : (maybe
1170 ? G_("%qs specified bound %s may ""%qs specified bound %s may " "exceed the size of at most %E "
"of unterminated array"
1171 "exceed the size of at most %E ""%qs specified bound %s may " "exceed the size of at most %E "
"of unterminated array"
1172 "of unterminated array")"%qs specified bound %s may " "exceed the size of at most %E "
"of unterminated array"
1173 : G_("%qs specified bound %s exceeds ""%qs specified bound %s exceeds " "the size of at most %E " "of unterminated array"
1174 "the size of at most %E ""%qs specified bound %s exceeds " "the size of at most %E " "of unterminated array"
1175 "of unterminated array")"%qs specified bound %s exceeds " "the size of at most %E " "of unterminated array"),
1176 fname, bndstr, size);
1177 }
1178 }
1179 else
1180 warned = warning_at (loc, OPT_Wstringop_overread,
1181 "%qsargument missing terminating nul",
1182 fname);
1183 }
1184
1185 if (warned)
1186 {
1187 inform (DECL_SOURCE_LOCATION (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1187, __FUNCTION__))->decl_minimal.locus)
,
1188 "referenced argument declared here");
1189 TREE_NO_WARNING (arg)((arg)->base.nowarning_flag) = 1;
1190 if (expr)
1191 TREE_NO_WARNING (expr)((expr)->base.nowarning_flag) = 1;
1192 }
1193}
1194
 1195/* For a call EXPR (which may be null) that expects a string argument
 1196   SRC, returns false if SRC is a character array with no terminating
 1197   NUL.  When nonnull, BOUND is the number of characters in which to
 1198   expect the terminating NUL.  When EXPR is nonnull, also issues
 1199   a warning.  */
 1200
1202bool
1203check_nul_terminated_array (tree expr, tree src,
1204 tree bound /* = NULL_TREE */)
1205{
1206 /* The constant size of the array SRC points to. The actual size
1207 may be less of EXACT is true, but not more. */
1208 tree size;
1209 /* True if SRC involves a non-constant offset into the array. */
1210 bool exact;
1211 /* The unterminated constant array SRC points to. */
1212 tree nonstr = unterminated_array (src, &size, &exact);
1213 if (!nonstr)
1214 return true;
1215
1216 /* NONSTR refers to the non-nul terminated constant array and SIZE
1217 is the constant size of the array in bytes. EXACT is true when
1218 SIZE is exact. */
1219
1220 wide_int bndrng[2];
1221 if (bound)
1222 {
1223 if (TREE_CODE (bound)((enum tree_code) (bound)->base.code) == INTEGER_CST)
1224 bndrng[0] = bndrng[1] = wi::to_wide (bound);
1225 else
1226 {
1227 value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
1228 if (rng != VR_RANGE)
1229 return true;
1230 }
1231
1232 if (exact)
1233 {
1234 if (wi::leu_p (bndrng[0], wi::to_wide (size)))
1235 return true;
1236 }
1237 else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
1238 return true;
1239 }
1240
1241 if (expr)
1242 warn_string_no_nul (EXPR_LOCATION (expr)((((expr)) && ((tree_code_type[(int) (((enum tree_code
) ((expr))->base.code))]) >= tcc_reference && (
tree_code_type[(int) (((enum tree_code) ((expr))->base.code
))]) <= tcc_expression)) ? (expr)->exp.locus : ((location_t
) 0))
, expr, NULL__null, src, nonstr,
1243 size, exact, bound ? bndrng : NULL__null);
1244
1245 return false;
1246}
1247
1248/* If EXP refers to an unterminated constant character array return
1249 the declaration of the object of which the array is a member or
1250 element and if SIZE is not null, set *SIZE to the size of
1251 the unterminated array and set *EXACT if the size is exact or
1252 clear it otherwise. Otherwise return null. */
1253
1254tree
1255unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
1256{
1257 /* C_STRLEN will return NULL and set DECL in the info
1258 structure if EXP references a unterminated array. */
1259 c_strlen_data lendata = { };
1260 tree len = c_strlen (exp, 1, &lendata);
1261 if (len == NULL_TREE(tree) __null && lendata.minlen && lendata.decl)
1262 {
1263 if (size)
1264 {
1265 len = lendata.minlen;
1266 if (lendata.off)
1267 {
1268 /* Constant offsets are already accounted for in LENDATA.MINLEN,
1269 but not in a SSA_NAME + CST expression. */
1270 if (TREE_CODE (lendata.off)((enum tree_code) (lendata.off)->base.code) == INTEGER_CST)
1271 *exact = true;
1272 else if (TREE_CODE (lendata.off)((enum tree_code) (lendata.off)->base.code) == PLUS_EXPR
1273 && TREE_CODE (TREE_OPERAND (lendata.off, 1))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((lendata.off), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1273, __FUNCTION__))))))->base.code)
== INTEGER_CST)
1274 {
1275 /* Subtract the offset from the size of the array. */
1276 *exact = false;
1277 tree temp = TREE_OPERAND (lendata.off, 1)(*((const_cast<tree*> (tree_operand_check ((lendata.off
), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1277, __FUNCTION__)))))
;
1278 temp = fold_convert (ssizetype, temp)fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_ssizetype
], temp)
;
1279 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp)fold_build2_loc (((location_t) 0), MINUS_EXPR, sizetype_tab[(
int) stk_ssizetype], len, temp )
;
1280 }
1281 else
1282 *exact = false;
1283 }
1284 else
1285 *exact = true;
1286
1287 *size = len;
1288 }
1289 return lendata.decl;
1290 }
1291
1292 return NULL_TREE(tree) __null;
1293}
1294
1295/* Compute the length of a null-terminated character string or wide
1296 character string handling character sizes of 1, 2, and 4 bytes.
1297 TREE_STRING_LENGTH is not the right way because it evaluates to
1298 the size of the character array in bytes (as opposed to characters)
1299 and because it can contain a zero byte in the middle.
1300
1301 ONLY_VALUE should be nonzero if the result is not going to be emitted
1302 into the instruction stream and zero if it is going to be expanded.
1303 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
1304 is returned, otherwise NULL, since
1305 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
1306 evaluate the side-effects.
1307
1308 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
1309 accesses. Note that this implies the result is not going to be emitted
1310 into the instruction stream.
1311
1312 Additional information about the string accessed may be recorded
1313 in DATA. For example, if ARG references an unterminated string,
1314 then the declaration will be stored in the DECL field. If the
1315 length of the unterminated string can be determined, it'll be
1316 stored in the LEN field. Note this length could well be different
1317 than what a C strlen call would return.
1318
1319 ELTSIZE is 1 for normal single byte character strings, and 2 or
1320 4 for wide characer strings. ELTSIZE is by default 1.
1321
1322 The value returned is of type `ssizetype'. */
1323
1324tree
1325c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
1326{
1327 /* If we were not passed a DATA pointer, then get one to a local
1328 structure. That avoids having to check DATA for NULL before
1329 each time we want to use it. */
1330 c_strlen_data local_strlen_data = { };
1331 if (!data)
1332 data = &local_strlen_data;
1333
1334 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4)((void)(!(eltsize == 1 || eltsize == 2 || eltsize == 4) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1334, __FUNCTION__), 0 : 0))
;
1335
1336 tree src = STRIP_NOPS (arg)(arg) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((arg)))))
;
1337 if (TREE_CODE (src)((enum tree_code) (src)->base.code) == COND_EXPR
1338 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))((non_type_check (((*((const_cast<tree*> (tree_operand_check
((src), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1338, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1338, __FUNCTION__))->base.side_effects_flag)
))
1339 {
1340 tree len1, len2;
1341
1342 len1 = c_strlen (TREE_OPERAND (src, 1)(*((const_cast<tree*> (tree_operand_check ((src), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1342, __FUNCTION__)))))
, only_value, data, eltsize);
1343 len2 = c_strlen (TREE_OPERAND (src, 2)(*((const_cast<tree*> (tree_operand_check ((src), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1343, __FUNCTION__)))))
, only_value, data, eltsize);
1344 if (tree_int_cst_equal (len1, len2))
1345 return len1;
1346 }
1347
1348 if (TREE_CODE (src)((enum tree_code) (src)->base.code) == COMPOUND_EXPR
1349 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))((non_type_check (((*((const_cast<tree*> (tree_operand_check
((src), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1349, __FUNCTION__)))))), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1349, __FUNCTION__))->base.side_effects_flag)
))
1350 return c_strlen (TREE_OPERAND (src, 1)(*((const_cast<tree*> (tree_operand_check ((src), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1350, __FUNCTION__)))))
, only_value, data, eltsize);
1351
1352 location_t loc = EXPR_LOC_OR_LOC (src, input_location)((((IS_ADHOC_LOC (((((src)) && ((tree_code_type[(int)
(((enum tree_code) ((src))->base.code))]) >= tcc_reference
&& (tree_code_type[(int) (((enum tree_code) ((src))->
base.code))]) <= tcc_expression)) ? (src)->exp.locus : (
(location_t) 0)))) ? get_location_from_adhoc_loc (line_table,
((((src)) && ((tree_code_type[(int) (((enum tree_code
) ((src))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((src))->base.code))]) <= tcc_expression
)) ? (src)->exp.locus : ((location_t) 0))) : (((((src)) &&
((tree_code_type[(int) (((enum tree_code) ((src))->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) ((src))->base.code))]) <= tcc_expression
)) ? (src)->exp.locus : ((location_t) 0)))) != ((location_t
) 0)) ? (src)->exp.locus : (input_location))
;
1353
1354 /* Offset from the beginning of the string in bytes. */
1355 tree byteoff;
1356 tree memsize;
1357 tree decl;
1358 src = string_constant (src, &byteoff, &memsize, &decl);
1359 if (src == 0)
1360 return NULL_TREE(tree) __null;
1361
1362 /* Determine the size of the string element. */
1363 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))((tree_class_check ((((contains_struct_check ((((contains_struct_check
((src), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1363, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1363, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1363, __FUNCTION__))->type_common.size_unit)
))
1364 return NULL_TREE(tree) __null;
1365
1366 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
1367 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
1368 in case the latter is less than the size of the array, such as when
1369 SRC refers to a short string literal used to initialize a large array.
1370 In that case, the elements of the array after the terminating NUL are
1371 all NUL. */
1372 HOST_WIDE_INTlong strelts = TREE_STRING_LENGTH (src)((tree_check ((src), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1372, __FUNCTION__, (STRING_CST)))->string.length)
;
1373 strelts = strelts / eltsize;
1374
1375 if (!tree_fits_uhwi_p (memsize))
1376 return NULL_TREE(tree) __null;
1377
1378 HOST_WIDE_INTlong maxelts = tree_to_uhwi (memsize) / eltsize;
1379
1380 /* PTR can point to the byte representation of any string type, including
1381 char* and wchar_t*. */
1382 const char *ptr = TREE_STRING_POINTER (src)((const char *)((tree_check ((src), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1382, __FUNCTION__, (STRING_CST)))->string.str))
;
1383
1384 if (byteoff && TREE_CODE (byteoff)((enum tree_code) (byteoff)->base.code) != INTEGER_CST)
1385 {
1386 /* The code below works only for single byte character types. */
1387 if (eltsize != 1)
1388 return NULL_TREE(tree) __null;
1389
1390 /* If the string has an internal NUL character followed by any
1391 non-NUL characters (e.g., "foo\0bar"), we can't compute
1392 the offset to the following NUL if we don't know where to
1393 start searching for it. */
1394 unsigned len = string_length (ptr, eltsize, strelts);
1395
1396 /* Return when an embedded null character is found or none at all.
1397 In the latter case, set the DECL/LEN field in the DATA structure
1398 so that callers may examine them. */
1399 if (len + 1 < strelts)
1400 return NULL_TREE(tree) __null;
1401 else if (len >= maxelts)
1402 {
1403 data->decl = decl;
1404 data->off = byteoff;
1405 data->minlen = ssize_int (len)size_int_kind (len, stk_ssizetype);
1406 return NULL_TREE(tree) __null;
1407 }
1408
1409 /* For empty strings the result should be zero. */
1410 if (len == 0)
1411 return ssize_int (0)size_int_kind (0, stk_ssizetype);
1412
1413 /* We don't know the starting offset, but we do know that the string
1414 has no internal zero bytes. If the offset falls within the bounds
1415 of the string subtract the offset from the length of the string,
1416 and return that. Otherwise the length is zero. Take care to
1417 use SAVE_EXPR in case the OFFSET has side-effects. */
1418 tree offsave = TREE_SIDE_EFFECTS (byteoff)((non_type_check ((byteoff), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1418, __FUNCTION__))->base.side_effects_flag)
? save_expr (byteoff)
1419 : byteoff;
1420 offsave = fold_convert_loc (loc, sizetypesizetype_tab[(int) stk_sizetype], offsave);
1421 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE], offsave,
1422 size_int (len)size_int_kind (len, stk_sizetype));
1423 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetypesizetype_tab[(int) stk_sizetype], size_int (len)size_int_kind (len, stk_sizetype),
1424 offsave);
1425 lenexp = fold_convert_loc (loc, ssizetypesizetype_tab[(int) stk_ssizetype], lenexp);
1426 return fold_build3_loc (loc, COND_EXPR, ssizetypesizetype_tab[(int) stk_ssizetype], condexp, lenexp,
1427 build_zero_cst (ssizetypesizetype_tab[(int) stk_ssizetype]));
1428 }
1429
1430 /* Offset from the beginning of the string in elements. */
1431 HOST_WIDE_INTlong eltoff;
1432
1433 /* We have a known offset into the string. Start searching there for
1434 a null character if we can represent it as a single HOST_WIDE_INT. */
1435 if (byteoff == 0)
1436 eltoff = 0;
1437 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1438 eltoff = -1;
1439 else
1440 eltoff = tree_to_uhwi (byteoff) / eltsize;
1441
1442 /* If the offset is known to be out of bounds, warn, and call strlen at
1443 runtime. */
1444 if (eltoff < 0 || eltoff >= maxelts)
1445 {
1446 /* Suppress multiple warnings for propagated constant strings. */
1447 if (only_value != 2
1448 && !TREE_NO_WARNING (arg)((arg)->base.nowarning_flag)
1449 && warning_at (loc, OPT_Warray_bounds,
1450 "offset %qwi outside bounds of constant string",
1451 eltoff))
1452 {
1453 if (decl)
1454 inform (DECL_SOURCE_LOCATION (decl)((contains_struct_check ((decl), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1454, __FUNCTION__))->decl_minimal.locus)
, "%qE declared here", decl);
1455 TREE_NO_WARNING (arg)((arg)->base.nowarning_flag) = 1;
1456 }
1457 return NULL_TREE(tree) __null;
1458 }
1459
1460 /* If eltoff is larger than strelts but less than maxelts the
1461 string length is zero, since the excess memory will be zero. */
1462 if (eltoff > strelts)
1463 return ssize_int (0)size_int_kind (0, stk_ssizetype);
1464
1465 /* Use strlen to search for the first zero byte. Since any strings
1466 constructed with build_string will have nulls appended, we win even
1467 if we get handed something like (char[4])"abcd".
1468
1469 Since ELTOFF is our starting index into the string, no further
1470 calculation is needed. */
1471 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
1472 strelts - eltoff);
1473
1474 /* Don't know what to return if there was no zero termination.
1475 Ideally this would turn into a gcc_checking_assert over time.
1476 Set DECL/LEN so callers can examine them. */
1477 if (len >= maxelts - eltoff)
1478 {
1479 data->decl = decl;
1480 data->off = byteoff;
1481 data->minlen = ssize_int (len)size_int_kind (len, stk_ssizetype);
1482 return NULL_TREE(tree) __null;
1483 }
1484
1485 return ssize_int (len)size_int_kind (len, stk_ssizetype);
1486}
1487
1488/* Return a constant integer corresponding to target reading
1489 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
1490 NULL_TERMINATED_P, reading stops after '\0' character, all further ones
1491 are assumed to be zero, otherwise it reads as many characters
1492 as needed. */
1493
1494rtx
1495c_readstr (const char *str, scalar_int_mode mode,
1496 bool null_terminated_p/*=true*/)
1497{
1498 HOST_WIDE_INTlong ch;
1499 unsigned int i, j;
1500 HOST_WIDE_INTlong tmp[MAX_BITSIZE_MODE_ANY_INT160 / HOST_BITS_PER_WIDE_INT64];
1501
1502 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT)((void)(!(((enum mode_class) mode_class[mode]) == MODE_INT) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1502, __FUNCTION__), 0 : 0))
;
1503 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT64 - 1)
1504 / HOST_BITS_PER_WIDE_INT64;
1505
1506 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT)((void)(!(len <= 160 / 64) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1506, __FUNCTION__), 0 : 0))
;
1507 for (i = 0; i < len; i++)
1508 tmp[i] = 0;
1509
1510 ch = 1;
1511 for (i = 0; i < GET_MODE_SIZE (mode); i++)
1512 {
1513 j = i;
1514 if (WORDS_BIG_ENDIAN0)
1515 j = GET_MODE_SIZE (mode) - i - 1;
1516 if (BYTES_BIG_ENDIAN0 != WORDS_BIG_ENDIAN0
1517 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
)
1518 j = j + UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
- 2 * (j % UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
) - 1;
1519 j *= BITS_PER_UNIT(8);
1520
1521 if (ch || !null_terminated_p)
1522 ch = (unsigned char) str[i];
1523 tmp[j / HOST_BITS_PER_WIDE_INT64] |= ch << (j % HOST_BITS_PER_WIDE_INT64);
1524 }
1525
1526 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
1527 return immed_wide_int_const (c, mode);
1528}
1529
1530/* Cast a target constant CST to target CHAR and if that value fits into
1531 host char type, return zero and put that value into variable pointed to by
1532 P. */
1533
1534static int
1535target_char_cast (tree cst, char *p)
1536{
1537 unsigned HOST_WIDE_INTlong val, hostval;
1538
1539 if (TREE_CODE (cst)((enum tree_code) (cst)->base.code) != INTEGER_CST
1540 || CHAR_TYPE_SIZE(8) > HOST_BITS_PER_WIDE_INT64)
1541 return 1;
1542
1543 /* Do not care if it fits or not right here. */
1544 val = TREE_INT_CST_LOW (cst)((unsigned long) (*tree_int_cst_elt_check ((cst), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1544, __FUNCTION__)))
;
1545
1546 if (CHAR_TYPE_SIZE(8) < HOST_BITS_PER_WIDE_INT64)
1547 val &= (HOST_WIDE_INT_1U1UL << CHAR_TYPE_SIZE(8)) - 1;
1548
1549 hostval = val;
1550 if (HOST_BITS_PER_CHAR8 < HOST_BITS_PER_WIDE_INT64)
1551 hostval &= (HOST_WIDE_INT_1U1UL << HOST_BITS_PER_CHAR8) - 1;
1552
1553 if (val != hostval)
1554 return 1;
1555
1556 *p = hostval;
1557 return 0;
1558}
1559
1560/* Similar to save_expr, but assumes that arbitrary code is not executed
1561 in between the multiple evaluations. In particular, we assume that a
1562 non-addressable local variable will not be modified. */
1563
1564static tree
1565builtin_save_expr (tree exp)
1566{
1567 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == SSA_NAME
1568 || (TREE_ADDRESSABLE (exp)((exp)->base.addressable_flag) == 0
1569 && (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == PARM_DECL
1570 || (VAR_P (exp)(((enum tree_code) (exp)->base.code) == VAR_DECL) && !TREE_STATIC (exp)((exp)->base.static_flag)))))
1571 return exp;
1572
1573 return save_expr (exp);
1574}
1575
1576/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
1577 times to get the address of either a higher stack frame, or a return
1578 address located within it (depending on FNDECL_CODE). */
1579
1580static rtx
1581expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
1582{
1583 int i;
1584 rtx tem = INITIAL_FRAME_ADDRESS_RTX__null;
1585 if (tem == NULL_RTX(rtx) 0)
1586 {
1587 /* For a zero count with __builtin_return_address, we don't care what
1588 frame address we return, because target-specific definitions will
1589 override us. Therefore frame pointer elimination is OK, and using
1590 the soft frame pointer is OK.
1591
1592 For a nonzero count, or a zero count with __builtin_frame_address,
1593 we require a stable offset from the current frame pointer to the
1594 previous one, so we must use the hard frame pointer, and
1595 we must disable frame pointer elimination. */
1596 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1597 tem = frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]);
1598 else
1599 {
1600 tem = hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]);
1601
1602 /* Tell reload not to eliminate the frame pointer. */
1603 crtl(&x_rtl)->accesses_prior_frames = 1;
1604 }
1605 }
1606
1607 if (count > 0)
1608 SETUP_FRAME_ADDRESSES ()ix86_setup_frame_addresses ();
1609
1610 /* On the SPARC, the return address is not in the frame, it is in a
1611 register. There is no way to access it off of the current frame
1612 pointer, but it can be accessed off the previous frame pointer by
1613 reading the value from the register window save area. */
1614 if (RETURN_ADDR_IN_PREVIOUS_FRAME0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
1615 count--;
1616
1617 /* Scan back COUNT frames to the specified frame. */
1618 for (i = 0; i < count; i++)
1619 {
1620 /* Assume the dynamic chain pointer is in the word that the
1621 frame address points to, unless otherwise specified. */
1622 tem = DYNAMIC_CHAIN_ADDRESS (tem)(tem);
1623 tem = memory_address (Pmode, tem)memory_address_addr_space (((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)))), (
tem), 0)
;
1624 tem = gen_frame_mem (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, tem);
1625 tem = copy_to_reg (tem);
1626 }
1627
1628 /* For __builtin_frame_address, return what we've got. But, on
1629 the SPARC for example, we may have to add a bias. */
1630 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
1631 return FRAME_ADDR_RTX (tem)(tem);
1632
1633 /* For __builtin_return_address, get the return address from that frame. */
1634#ifdef RETURN_ADDR_RTX
1635 tem = RETURN_ADDR_RTX (count, tem)((count) == 0 ? gen_rtx_MEM ((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), plus_constant
((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode
((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode (
(scalar_int_mode::from_int) E_SImode))), ((this_target_rtl->
x_global_rtl)[GR_ARG_POINTER]), -(((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4))) : gen_rtx_MEM ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), plus_constant ((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
), (tem), (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))))
;
1636#else
1637 tem = memory_address (Pmode,memory_address_addr_space (((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)))), (
plus_constant ((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode
((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode (
(scalar_int_mode::from_int) E_SImode))), tem, GET_MODE_SIZE (
(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))))), 0)
1638 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)))memory_address_addr_space (((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)))), (
plus_constant ((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode
((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode (
(scalar_int_mode::from_int) E_SImode))), tem, GET_MODE_SIZE (
(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))))), 0)
;
1639 tem = gen_frame_mem (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, tem);
1640#endif
1641 return tem;
1642}
1643
1644/* Alias set used for setjmp buffer. */
1645static alias_set_type setjmp_alias_set = -1;
1646
1647/* Construct the leading half of a __builtin_setjmp call. Control will
1648 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1649 exception handling code. */
1650
1651void
1652expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
1653{
1654 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL)((SAVE_NONLOCAL) == SAVE_NONLOCAL ? (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode
::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_DImode))) : (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))
;
1655 rtx stack_save;
1656 rtx mem;
1657
1658 if (setjmp_alias_set == -1)
1659 setjmp_alias_set = new_alias_set ();
1660
1661 buf_addr = convert_memory_address (Pmode, buf_addr)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (buf_addr), 0)
;
1662
1663 buf_addr = force_reg (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, force_operand (buf_addr, NULL_RTX(rtx) 0));
1664
1665 /* We store the frame pointer and the address of receiver_label in
1666 the buffer and use the rest of it for the stack save area, which
1667 is machine-dependent. */
1668
1669 mem = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr);
1670 set_mem_alias_set (mem, setjmp_alias_set);
1671 emit_move_insn (mem, hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1672
1673 mem = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr,
1674 GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
))),
1675 set_mem_alias_set (mem, setjmp_alias_set);
1676
1677 emit_move_insn (validize_mem (mem),
1678 force_reg (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, gen_rtx_LABEL_REF (Pmode, receiver_label)gen_rtx_fmt_u_stat ((LABEL_REF), (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((receiver_label)) )
));
1679
1680 stack_save = gen_rtx_MEM (sa_mode,
1681 plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr,
1682 2 * GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)));
1683 set_mem_alias_set (stack_save, setjmp_alias_set);
1684 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1685
1686 /* If there is further processing to do, do it. */
1687 if (targetm.have_builtin_setjmp_setup ())
1688 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1689
1690 /* We have a nonlocal label. */
1691 cfun(cfun + 0)->has_nonlocal_label = 1;
1692}
1693
1694/* Construct the trailing part of a __builtin_setjmp call. This is
1695 also called directly by the SJLJ exception handling code.
1696 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
1697
1698void
1699expand_builtin_setjmp_receiver (rtx receiver_label)
1700{
1701 rtx chain;
1702
1703 /* Mark the FP as used when we get here, so we have to make sure it's
1704 marked as used by this function. */
1705 emit_use (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1706
1707 /* Mark the static chain as clobbered here so life information
1708 doesn't get messed up for it. */
1709 chain = rtx_for_static_chain (current_function_decl, true);
1710 if (chain && REG_P (chain)(((enum rtx_code) (chain)->code) == REG))
1711 emit_clobber (chain);
1712
1713 if (!HARD_FRAME_POINTER_IS_ARG_POINTER(6 == 16) && fixed_regs(this_target_hard_regs->x_fixed_regs)[ARG_POINTER_REGNUM16])
1714 {
1715 /* If the argument pointer can be eliminated in favor of the
1716 frame pointer, we don't need to restore it. We assume here
1717 that if such an elimination is present, it can always be used.
1718 This is the case on all known machines; if we don't make this
1719 assumption, we do unnecessary saving on many machines. */
1720 size_t i;
1721 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS{{ 16, 7}, { 16, 6}, { 19, 7}, { 19, 6}};
1722
1723 for (i = 0; i < ARRAY_SIZE (elim_regs)(sizeof (elim_regs) / sizeof ((elim_regs)[0])); i++)
1724 if (elim_regs[i].from == ARG_POINTER_REGNUM16
1725 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM6)
1726 break;
1727
1728 if (i == ARRAY_SIZE (elim_regs)(sizeof (elim_regs) / sizeof ((elim_regs)[0])))
1729 {
1730 /* Now restore our arg pointer from the address at which it
1731 was saved in our stack frame. */
1732 emit_move_insn (crtl(&x_rtl)->args.internal_arg_pointer,
1733 copy_to_reg (get_arg_pointer_save_area ()));
1734 }
1735 }
1736
1737 if (receiver_label != NULL__null && targetm.have_builtin_setjmp_receiver ())
1738 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1739 else if (targetm.have_nonlocal_goto_receiver ())
1740 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1741 else
1742 { /* Nothing */ }
1743
1744 /* We must not allow the code we just generated to be reordered by
1745 scheduling. Specifically, the update of the frame pointer must
1746 happen immediately, not later. */
1747 emit_insn (gen_blockage ());
1748}
1749
1750/* __builtin_longjmp is passed a pointer to an array of five words (not
1751 all will be used on all machines). It operates similarly to the C
1752 library function of the same name, but is more efficient. Much of
1753 the code below is copied from the handling of non-local gotos. */
1754
1755static void
1756expand_builtin_longjmp (rtx buf_addr, rtx value)
1757{
1758 rtx fp, lab, stack;
1759 rtx_insn *insn, *last;
1760 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL)((SAVE_NONLOCAL) == SAVE_NONLOCAL ? (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode
::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_DImode))) : (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))
;
1761
1762 /* DRAP is needed for stack realign if longjmp is expanded to current
1763 function */
1764 if (SUPPORTS_STACK_ALIGNMENT((((unsigned int) 1 << 28) * 8) > ((((global_options
.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi
() == MS_ABI) ? 128 : ((8) * (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4))))
)
1765 crtl(&x_rtl)->need_drap = true;
1766
1767 if (setjmp_alias_set == -1)
1768 setjmp_alias_set = new_alias_set ();
1769
1770 buf_addr = convert_memory_address (Pmode, buf_addr)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (buf_addr), 0)
;
1771
1772 buf_addr = force_reg (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr);
1773
1774 /* We require that the user must pass a second argument of 1, because
1775 that is what builtin_setjmp will return. */
1776 gcc_assert (value == const1_rtx)((void)(!(value == (const_int_rtx[64 +1])) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1776, __FUNCTION__), 0 : 0))
;
1777
1778 last = get_last_insn ();
1779 if (targetm.have_builtin_longjmp ())
1780 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1781 else
1782 {
1783 fp = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr);
1784 lab = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr,
1785 GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)));
1786
1787 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, buf_addr,
1788 2 * GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)));
1789 set_mem_alias_set (fp, setjmp_alias_set);
1790 set_mem_alias_set (lab, setjmp_alias_set);
1791 set_mem_alias_set (stack, setjmp_alias_set);
1792
1793 /* Pick up FP, label, and SP from the block and jump. This code is
1794 from expand_goto in stmt.c; see there for detailed comments. */
1795 if (targetm.have_nonlocal_goto ())
1796 /* We have to pass a value to the nonlocal_goto pattern that will
1797 get copied into the static_chain pointer, but it does not matter
1798 what that value is, because builtin_setjmp does not use it. */
1799 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1800 else
1801 {
1802 emit_clobber (gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), gen_rtx_SCRATCH (VOIDmode)gen_rtx_fmt__stat ((SCRATCH), ((((void) 0, E_VOIDmode))) )));
1803 emit_clobber (gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER])));
1804
1805 lab = copy_to_reg (lab);
1806
1807 /* Restore the frame pointer and stack pointer. We must use a
1808 temporary since the setjmp buffer may be a local. */
1809 fp = copy_to_reg (fp);
1810 emit_stack_restore (SAVE_NONLOCAL, stack);
1811
1812 /* Ensure the frame pointer move is not optimized. */
1813 emit_insn (gen_blockage ());
1814 emit_clobber (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1815 emit_clobber (frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]));
1816 emit_move_insn (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]), fp);
1817
1818 emit_use (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1819 emit_use (stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER]));
1820 emit_indirect_jump (lab);
1821 }
1822 }
1823
1824 /* Search backwards and mark the jump insn as a non-local goto.
1825 Note that this precludes the use of __builtin_longjmp to a
1826 __builtin_setjmp target in the same function. However, we've
1827 already cautioned the user that these functions are for
1828 internal exception handling use only. */
1829 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1830 {
1831 gcc_assert (insn != last)((void)(!(insn != last) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1831, __FUNCTION__), 0 : 0))
;
1832
1833 if (JUMP_P (insn)(((enum rtx_code) (insn)->code) == JUMP_INSN))
1834 {
1835 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx(const_int_rtx[64]));
1836 break;
1837 }
1838 else if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN))
1839 break;
1840 }
1841}
1842
1843static inline bool
1844more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1845{
1846 return (iter->i < iter->n);
1847}
1848
1849/* This function validates the types of a function call argument list
1850 against a specified list of tree_codes. If the last specifier is a 0,
1851 that represents an ellipsis, otherwise the last specifier must be a
1852 VOID_TYPE. */
1853
1854static bool
1855validate_arglist (const_tree callexpr, ...)
1856{
1857 enum tree_code code;
1858 bool res = 0;
1859 va_list ap;
1860 const_call_expr_arg_iterator iter;
1861 const_tree arg;
1862
1863 va_start (ap, callexpr)__builtin_va_start(ap, callexpr);
1864 init_const_call_expr_arg_iterator (callexpr, &iter);
1865
1866 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1867 tree fn = CALL_EXPR_FN (callexpr)(*((const_cast<tree*> (tree_operand_check (((tree_check
((callexpr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1867, __FUNCTION__, (CALL_EXPR)))), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1867, __FUNCTION__)))))
;
1868 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn))((contains_struct_check ((((contains_struct_check ((fn), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1868, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1868, __FUNCTION__))->typed.type)
);
1869
1870 for (unsigned argno = 1; ; ++argno)
1871 {
1872 code = (enum tree_code) va_arg (ap, int)__builtin_va_arg(ap, int);
1873
1874 switch (code)
1875 {
1876 case 0:
1877 /* This signifies an ellipses, any further arguments are all ok. */
1878 res = true;
1879 goto end;
1880 case VOID_TYPE:
1881 /* This signifies an endlink, if no arguments remain, return
1882 true, otherwise return false. */
1883 res = !more_const_call_expr_args_p (&iter);
1884 goto end;
1885 case POINTER_TYPE:
1886 /* The actual argument must be nonnull when either the whole
1887 called function has been declared nonnull, or when the formal
1888 argument corresponding to the actual argument has been. */
1889 if (argmap
1890 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1891 {
1892 arg = next_const_call_expr_arg (&iter);
1893 if (!validate_arg (arg, code) || integer_zerop (arg))
1894 goto end;
1895 break;
1896 }
1897 /* FALLTHRU */
1898 default:
1899 /* If no parameters remain or the parameter's code does not
1900 match the specified code, return false. Otherwise continue
1901 checking any remaining arguments. */
1902 arg = next_const_call_expr_arg (&iter);
1903 if (!validate_arg (arg, code))
1904 goto end;
1905 break;
1906 }
1907 }
1908
1909 /* We need gotos here since we can only have one VA_CLOSE in a
1910 function. */
1911 end: ;
1912 va_end (ap)__builtin_va_end(ap);
1913
1914 BITMAP_FREE (argmap)((void) (bitmap_obstack_free ((bitmap) argmap), (argmap) = (bitmap
) __null))
;
1915
1916 return res;
1917}
1918
1919/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1920 and the address of the save area. */
1921
/* NOTE(review): this span is a static-analyzer dump — each line carries the
   original builtins.c line number fused to the code, with macro expansions
   spliced inline; edit the real source, not this dump.

   Expand a call to __builtin_nonlocal_goto (EXP).  Validates that EXP has
   exactly (pointer, pointer) arguments, expands the target label and the
   setjmp-style save area, then either emits the target's nonlocal_goto
   pattern or hand-emits the sequence: clobber memory/frame pointer, restore
   the stack pointer (emit_stack_restore), restore the hard frame pointer
   via a temporary, and do an indirect jump to the label.  Finally walks
   backwards to the emitted jump insn and attaches a REG_NON_LOCAL_GOTO note
   (stopping early at a CALL insn).  Returns const0_rtx on success, NULL_RTX
   if the argument list does not validate.  */
static rtx
1923expand_builtin_nonlocal_goto (tree exp)
1924{
1925 tree t_label, t_save_area;
1926 rtx r_label, r_save_area, r_fp, r_sp;
1927 rtx_insn *insn;
1928
1929 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1930 return NULL_RTX(rtx) 0;
1931
1932 t_label = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1932, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1932, __FUNCTION__)))))
;
1933 t_save_area = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1933, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 1933, __FUNCTION__)))))
;
1934
1935 r_label = expand_normal (t_label);
1936 r_label = convert_memory_address (Pmode, r_label)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (r_label), 0)
;
1937 r_save_area = expand_normal (t_save_area);
1938 r_save_area = convert_memory_address (Pmode, r_save_area)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (r_save_area), 0)
;
1939 /* Copy the address of the save location to a register just in case it was
1940 based on the frame pointer. */
1941 r_save_area = copy_to_reg (r_save_area);
/* The saved frame pointer lives at offset 0 of the save area; the saved
   stack pointer follows at offset GET_MODE_SIZE (Pmode).  */
1942 r_fp = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, r_save_area);
1943 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL)((SAVE_NONLOCAL) == SAVE_NONLOCAL ? (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode
::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_DImode))) : (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))
,
1944 plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, r_save_area,
1945 GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)));
1946
1947 crtl(&x_rtl)->has_nonlocal_goto = 1;
1948
1949 /* ??? We no longer need to pass the static chain value, afaik. */
1950 if (targetm.have_nonlocal_goto ())
1951 emit_insn (targetm.gen_nonlocal_goto (const0_rtx(const_int_rtx[64]), r_label, r_sp, r_fp));
1952 else
1953 {
1954 emit_clobber (gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), gen_rtx_SCRATCH (VOIDmode)gen_rtx_fmt__stat ((SCRATCH), ((((void) 0, E_VOIDmode))) )));
1955 emit_clobber (gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER])));
1956
1957 r_label = copy_to_reg (r_label);
1958
1959 /* Restore the frame pointer and stack pointer. We must use a
1960 temporary since the setjmp buffer may be a local. */
1961 r_fp = copy_to_reg (r_fp);
1962 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1963
1964 /* Ensure the frame pointer move is not optimized. */
1965 emit_insn (gen_blockage ());
1966 emit_clobber (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1967 emit_clobber (frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]));
1968 emit_move_insn (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]), r_fp);
1969
1970 /* USE of hard_frame_pointer_rtx added for consistency;
1971 not clear if really needed. */
1972 emit_use (hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER]));
1973 emit_use (stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER]));
1974
1975 /* If the architecture is using a GP register, we must
1976 conservatively assume that the target function makes use of it.
1977 The prologue of functions with nonlocal gotos must therefore
1978 initialize the GP register to the appropriate value, and we
1979 must then make sure that this value is live at the point
1980 of the jump. (Note that this doesn't necessarily apply
1981 to targets with a nonlocal_goto pattern; they are free
1982 to implement it in their own way. Note also that this is
1983 a no-op if the GP register is a global invariant.) */
1984 unsigned regnum = PIC_OFFSET_TABLE_REGNUM(ix86_use_pseudo_pic_reg () ? ((this_target_rtl->x_pic_offset_table_rtx
) ? (~(unsigned int) 0) : (((global_options.x_ix86_isa_flags &
(1UL << 1)) != 0) ? 43 : 3)) : (~(unsigned int) 0))
;
1985 if (regnum != INVALID_REGNUM(~(unsigned int) 0) && fixed_regs(this_target_hard_regs->x_fixed_regs)[regnum])
1986 emit_use (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx));
1987
1988 emit_indirect_jump (r_label);
1989 }
1990
1991 /* Search backwards to the jump insn and mark it as a
1992 non-local goto. */
1993 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1994 {
1995 if (JUMP_P (insn)(((enum rtx_code) (insn)->code) == JUMP_INSN))
1996 {
1997 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx(const_int_rtx[64]));
1998 break;
1999 }
2000 else if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN))
2001 break;
2002 }
2003
2004 return const0_rtx(const_int_rtx[64]);
2005}
2006
2007/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
2008 (not all will be used on all machines) that was passed to __builtin_setjmp.
2009 It updates the stack pointer in that block to the current value. This is
2010 also called directly by the SJLJ exception handling code. */
2011
/* NOTE(review): static-analyzer dump; line numbers and macro expansions are
   fused into the code text below.

   Update the stack-pointer slot of a __builtin_setjmp buffer at BUF_ADDR to
   the current stack pointer.  The slot written is at byte offset
   2 * GET_MODE_SIZE (Pmode) within the buffer (slots 0 and 1 hold the frame
   pointer and resume label per the setjmp buffer layout — assumed from the
   offset arithmetic here; confirm against expand_builtin_setjmp_setup).
   Emits an emit_stack_save (SAVE_NONLOCAL) into that slot.  No return
   value; also called directly by the SJLJ exception-handling code.  */
void
2013expand_builtin_update_setjmp_buf (rtx buf_addr)
2014{
2015 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL)((SAVE_NONLOCAL) == SAVE_NONLOCAL ? (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? (scalar_int_mode ((scalar_int_mode
::from_int) E_TImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_DImode))) : (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))))
;
2016 buf_addr = convert_memory_address (Pmode, buf_addr)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (buf_addr), 0)
;
2017 rtx stack_save
2018 = gen_rtx_MEM (sa_mode,
2019 memory_addressmemory_address_addr_space ((sa_mode), (plus_constant ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), buf_addr, 2 * GET_MODE_SIZE ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode)))))), 0)
2020 (sa_mode,memory_address_addr_space ((sa_mode), (plus_constant ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), buf_addr, 2 * GET_MODE_SIZE ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode)))))), 0)
2021 plus_constant (Pmode, buf_addr,memory_address_addr_space ((sa_mode), (plus_constant ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), buf_addr, 2 * GET_MODE_SIZE ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode)))))), 0)
2022 2 * GET_MODE_SIZE (Pmode)))memory_address_addr_space ((sa_mode), (plus_constant ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode))), buf_addr, 2 * GET_MODE_SIZE ((global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode)))))), 0)
);
2023
2024 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2025}
2026
2027/* Expand a call to __builtin_prefetch. For a target that does not support
2028 data prefetch, evaluate the memory address argument in case it has side
2029 effects. */
2030
/* NOTE(review): static-analyzer dump; line numbers and macro expansions are
   fused into the code text below.

   Expand __builtin_prefetch (EXP).  Argument 0 is the address; optional
   argument 1 (read/write) must be a constant 0 or 1, optional argument 2
   (locality) must be a constant 0..3 — invalid constants are diagnosed with
   error()/warning() and replaced with 0.  If the target has a prefetch
   pattern, emits it via maybe_expand_insn; otherwise, for an address with
   side effects that is not a direct MEM, the address expression is still
   emitted so its side effects happen.  Returns nothing.  */
static void
2032expand_builtin_prefetch (tree exp)
2033{
2034 tree arg0, arg1, arg2;
2035 int nargs;
2036 rtx op0, op1, op2;
2037
2038 if (!validate_arglist (exp, POINTER_TYPE, 0))
2039 return;
2040
2041 arg0 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2041, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2041, __FUNCTION__)))))
;
2042
2043 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
2044 zero (read) and argument 2 (locality) defaults to 3 (high degree of
2045 locality). */
2046 nargs = call_expr_nargs (exp)(((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2046, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2046, __FUNCTION__)))) - 3)
;
2047 if (nargs > 1)
2048 arg1 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2048, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2048, __FUNCTION__)))))
;
2049 else
2050 arg1 = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
2051 if (nargs > 2)
2052 arg2 = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2052, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2052, __FUNCTION__)))))
;
2053 else
2054 arg2 = integer_three_nodeglobal_trees[TI_INTEGER_THREE];
2055
2056 /* Argument 0 is an address. */
2057 op0 = expand_expr (arg0, NULL_RTX(rtx) 0, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, EXPAND_NORMAL);
2058
2059 /* Argument 1 (read/write flag) must be a compile-time constant int. */
2060 if (TREE_CODE (arg1)((enum tree_code) (arg1)->base.code) != INTEGER_CST)
2061 {
2062 error ("second argument to %<__builtin_prefetch%> must be a constant");
2063 arg1 = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
2064 }
2065 op1 = expand_normal (arg1);
2066 /* Argument 1 must be either zero or one. */
2067 if (INTVAL (op1)((op1)->u.hwint[0]) != 0 && INTVAL (op1)((op1)->u.hwint[0]) != 1)
2068 {
2069 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
2070 " using zero");
2071 op1 = const0_rtx(const_int_rtx[64]);
2072 }
2073
2074 /* Argument 2 (locality) must be a compile-time constant int. */
2075 if (TREE_CODE (arg2)((enum tree_code) (arg2)->base.code) != INTEGER_CST)
2076 {
2077 error ("third argument to %<__builtin_prefetch%> must be a constant");
2078 arg2 = integer_zero_nodeglobal_trees[TI_INTEGER_ZERO];
2079 }
2080 op2 = expand_normal (arg2);
2081 /* Argument 2 must be 0, 1, 2, or 3. */
2082 if (INTVAL (op2)((op2)->u.hwint[0]) < 0 || INTVAL (op2)((op2)->u.hwint[0]) > 3)
2083 {
2084 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
2085 op2 = const0_rtx(const_int_rtx[64]);
2086 }
2087
2088 if (targetm.have_prefetch ())
2089 {
2090 class expand_operand ops[3];
2091
2092 create_address_operand (&ops[0], op0);
2093 create_integer_operand (&ops[1], INTVAL (op1)((op1)->u.hwint[0]));
2094 create_integer_operand (&ops[2], INTVAL (op2)((op2)->u.hwint[0]));
2095 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
2096 return;
2097 }
2098
2099 /* Don't do anything with direct references to volatile memory, but
2100 generate code to handle other side effects. */
2101 if (!MEM_P (op0)(((enum rtx_code) (op0)->code) == MEM) && side_effects_p (op0))
2102 emit_insn (op0);
2103}
2104
2105/* Get a MEM rtx for expression EXP which is the address of an operand
2106 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
2107 the maximum length of the block of memory that might be accessed or
2108 NULL if unknown. */
2109
/* NOTE(review): static-analyzer dump; line numbers and macro expansions are
   fused into the code text below.

   Build a BLKmode MEM rtx for the address expression EXP, an operand of a
   string instruction (cmpstrsi, cpymemsi, ...).  LEN is the maximum number
   of bytes accessed, or NULL if unknown.  Strips an unresolved SAVE_EXPR
   and pointer-to-pointer conversions, then wraps EXP in a MEM_REF over a
   char array of range [1, LEN] so the access aliases everything
   byte-wise; if that MEM_REF's address is not gimple-valid, falls back to
   an unknown-sized (range [0, NULL]) MEM_REF on the base object of the
   address.  Attributes are set from the MEM_REF and the alias set is
   forced to 0 (alias-everything).  Returns the MEM.  */
static rtx
2111get_memory_rtx (tree exp, tree len)
2112{
2113 tree orig_exp = exp;
2114 rtx addr, mem;
2115
2116 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
2117 from its expression, for expr->a.b only <variable>.a.b is recorded. */
2118 if (TREE_CODE (exp)((enum tree_code) (exp)->base.code) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp)((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2118, __FUNCTION__, (SAVE_EXPR)))->base.public_flag)
)
2119 exp = TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2119, __FUNCTION__)))))
;
2120
2121 addr = expand_expr (orig_exp, NULL_RTX(rtx) 0, ptr_mode, EXPAND_NORMAL);
2122 mem = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), memory_address (BLKmode, addr)memory_address_addr_space ((((void) 0, E_BLKmode)), (addr), 0
)
);
2123
2124 /* Get an expression we can use to find the attributes to assign to MEM.
2125 First remove any nops. */
2126 while (CONVERT_EXPR_P (exp)((((enum tree_code) (exp)->base.code)) == NOP_EXPR || (((enum
tree_code) (exp)->base.code)) == CONVERT_EXPR)
2127 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0)))(((enum tree_code) (((contains_struct_check (((*((const_cast<
tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2127, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2127, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check (((*((const_cast
<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2127, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2127, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
2128 exp = TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2128, __FUNCTION__)))))
;
2129
2130 /* Build a MEM_REF representing the whole accessed area as a byte blob,
2131 (as builtin stringops may alias with anything). */
2132 exp = fold_build2 (MEM_REF,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
2133 build_array_type (char_type_node,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
2134 build_range_type (sizetype,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
2135 size_one_node, len)),fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
2136 exp, build_int_cst (ptr_type_node, 0))fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ONE], len)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
;
2137
2138 /* If the MEM_REF has no acceptable address, try to get the base object
2139 from the original address we got, and build an all-aliasing
2140 unknown-sized access to that one. */
2141 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)(*((const_cast<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2141, __FUNCTION__)))))
))
2142 set_mem_attributes (mem, exp, 0);
2143 else if (TREE_CODE (TREE_OPERAND (exp, 0))((enum tree_code) ((*((const_cast<tree*> (tree_operand_check
((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2143, __FUNCTION__))))))->base.code)
== ADDR_EXPR
2144 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),(*((const_cast<tree*> (tree_operand_check (((*((const_cast
<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2144, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2145, __FUNCTION__)))))
2145 0)(*((const_cast<tree*> (tree_operand_check (((*((const_cast
<tree*> (tree_operand_check ((exp), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2144, __FUNCTION__)))))), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2145, __FUNCTION__)))))
)))
2146 {
2147 exp = build_fold_addr_expr (exp)build_fold_addr_expr_loc (((location_t) 0), (exp));
2148 exp = fold_build2 (MEM_REF,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
2149 build_array_type (char_type_node,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
2150 build_range_type (sizetype,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
2151 size_zero_node,fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
2152 NULL)),fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
2153 exp, build_int_cst (ptr_type_node, 0))fold_build2_loc (((location_t) 0), MEM_REF, build_array_type (
integer_types[itk_char], build_range_type (sizetype_tab[(int)
stk_sizetype], global_trees[TI_SIZE_ZERO], __null)), exp, build_int_cst
(global_trees[TI_PTR_TYPE], 0) )
;
2154 set_mem_attributes (mem, exp, 0);
2155 }
2156 set_mem_alias_set (mem, 0);
2157 return mem;
2158}
2159
2160/* Built-in functions to perform an untyped call and return. */
2161
2162#define apply_args_mode(this_target_builtins->x_apply_args_mode) \
2163 (this_target_builtins->x_apply_args_mode)
2164#define apply_result_mode(this_target_builtins->x_apply_result_mode) \
2165 (this_target_builtins->x_apply_result_mode)
2166
2167/* Return the size required for the block returned by __builtin_apply_args,
2168 and initialize apply_args_mode. */
2169
/* NOTE(review): static-analyzer dump; line numbers and macro expansions are
   fused into the code text below.

   Compute (once, memoized in a function-local static) the size in bytes of
   the block returned by __builtin_apply_args, and fill in apply_args_mode:
   one slot for the incoming arg pointer, one for the structure-value
   address if the target passes it out of band, then each argument-passing
   hard register's raw mode, each aligned to its mode alignment.  Registers
   not used for argument passing get VOIDmode.  NOTE(review): the memoized
   static makes this non-reentrant across targets within one process —
   presumably acceptable because the per-target values never change, as the
   comment below states.  Returns the total size.  */
static int
2171apply_args_size (void)
2172{
2173 static int size = -1;
2174 int align;
2175 unsigned int regno;
2176
2177 /* The values computed by this function never change. */
2178 if (size < 0)
2179 {
2180 /* The first value is the incoming arg-pointer. */
2181 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2182
2183 /* The second value is the structure value address unless this is
2184 passed as an "invisible" first argument. */
2185 if (targetm.calls.struct_value_rtx (cfun(cfun + 0) ? TREE_TYPE (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2185, __FUNCTION__))->typed.type)
: 0, 0))
2186 size += GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2187
2188 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
2189 if (FUNCTION_ARG_REGNO_P (regno)ix86_function_arg_regno_p (regno))
2190 {
2191 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
2192
2193 gcc_assert (mode != VOIDmode)((void)(!(mode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2193, __FUNCTION__), 0 : 0))
;
2194
2195 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
2196 if (size % align != 0)
2197 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
2198 size += GET_MODE_SIZE (mode);
2199 apply_args_mode(this_target_builtins->x_apply_args_mode)[regno] = mode;
2200 }
2201 else
2202 {
2203 apply_args_mode(this_target_builtins->x_apply_args_mode)[regno] = as_a <fixed_size_mode> (VOIDmode((void) 0, E_VOIDmode));
2204 }
2205 }
2206 return size;
2207}
2208
2209/* Return the size required for the block returned by __builtin_apply,
2210 and initialize apply_result_mode. */
2211
/* NOTE(review): static-analyzer dump; line numbers and macro expansions are
   fused into the code text below.

   Compute (once, memoized in a function-local static) the size in bytes of
   the block returned by __builtin_apply, and fill in apply_result_mode:
   for each hard register that can hold a function return value, its raw
   result mode aligned to the mode alignment; other registers get VOIDmode.
   Targets with untyped_call/untyped_return may override the final size via
   APPLY_RESULT_SIZE (done here: 8+108 on this x86 configuration).  Returns
   the total size.  */
static int
2213apply_result_size (void)
2214{
2215 static int size = -1;
2216 int align, regno;
2217
2218 /* The values computed by this function never change. */
2219 if (size < 0)
2220 {
2221 size = 0;
2222
2223 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
2224 if (targetm.calls.function_value_regno_p (regno))
2225 {
2226 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
2227
2228 gcc_assert (mode != VOIDmode)((void)(!(mode != ((void) 0, E_VOIDmode)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2228, __FUNCTION__), 0 : 0))
;
2229
2230 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
2231 if (size % align != 0)
2232 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
2233 size += GET_MODE_SIZE (mode);
2234 apply_result_mode(this_target_builtins->x_apply_result_mode)[regno] = mode;
2235 }
2236 else
2237 apply_result_mode(this_target_builtins->x_apply_result_mode)[regno] = as_a <fixed_size_mode> (VOIDmode((void) 0, E_VOIDmode));
2238
2239 /* Allow targets that use untyped_call and untyped_return to override
2240 the size so that machine-specific information can be stored here. */
2241#ifdef APPLY_RESULT_SIZE(8+108)
2242 size = APPLY_RESULT_SIZE(8+108);
2243#endif
2244 }
2245 return size;
2246}
2247
2248/* Create a vector describing the result block RESULT. If SAVEP is true,
2249 the result block is used to save the values; otherwise it is used to
2250 restore the values. */
2251
/* NOTE(review): static-analyzer dump; line numbers and macro expansions are
   fused into the code text below.

   Build a PARALLEL of SETs describing the result block RESULT (a BLKmode
   MEM laid out per apply_result_mode).  If SAVEP is nonzero the SETs store
   each value register into its slot of RESULT (save direction); otherwise
   they load each register from its slot (restore direction, using
   INCOMING_REGNO — an identity mapping on this target).  Offsets are
   accumulated with the same alignment rule as apply_result_size, so the
   two must stay in sync.  Returns the PARALLEL rtx.  */
static rtx
2253result_vector (int savep, rtx result)
2254{
2255 int regno, size, align, nelts;
2256 fixed_size_mode mode;
2257 rtx reg, mem;
2258 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER)((rtx *) __builtin_alloca(sizeof (rtx) * (76)));
2259
2260 size = nelts = 0;
2261 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
2262 if ((mode = apply_result_mode(this_target_builtins->x_apply_result_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
2263 {
2264 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
2265 if (size % align != 0)
2266 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
2267 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno)(regno));
2268 mem = adjust_address (result, mode, size)adjust_address_1 (result, mode, size, 1, 1, 0, 0);
2269 savevec[nelts++] = (savep
2270 ? gen_rtx_SET (mem, reg)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((mem))
, ((reg)) )
2271 : gen_rtx_SET (reg, mem)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg))
, ((mem)) )
);
2272 size += GET_MODE_SIZE (mode);
2273 }
2274 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec))gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec_v (nelts, savevec))) )
;
2275}
2276
2277/* Save the state required to perform an untyped call with the same
2278 arguments as were passed to the current function. */
2279
/* NOTE(review): static-analyzer dump; line numbers and macro expansions are
   fused into the code text below.

   Emit the code that actually saves the state needed for an untyped call
   with the current function's arguments: allocates a stack block of
   apply_args_size () bytes, stores every argument-passing register (per
   apply_args_mode) into it past the two pointer-sized header slots, then
   writes the incoming arg pointer (adjusted by pretend_args_size when the
   stack grows downward) into slot 0 and, if the target returns aggregates
   via an out-of-band structure-value register, that register into slot 1.
   Returns the block's address copied into a fresh pseudo register.  */
static rtx
2281expand_builtin_apply_args_1 (void)
2282{
2283 rtx registers, tem;
2284 int size, align, regno;
2285 fixed_size_mode mode;
2286 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun(cfun + 0) ? TREE_TYPE (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2286, __FUNCTION__))->typed.type)
: 0, 1);
2287
2288 /* Create a block where the arg-pointer, structure value address,
2289 and argument registers can be saved. */
2290 registers = assign_stack_local (BLKmode((void) 0, E_BLKmode), apply_args_size (), -1);
2291
2292 /* Walk past the arg-pointer and structure value address. */
2293 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2294 if (targetm.calls.struct_value_rtx (cfun(cfun + 0) ? TREE_TYPE (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2294, __FUNCTION__))->typed.type)
: 0, 0))
2295 size += GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2296
2297 /* Save each register used in calling a function to the block. */
2298 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
2299 if ((mode = apply_args_mode(this_target_builtins->x_apply_args_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
2300 {
2301 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
2302 if (size % align != 0)
2303 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
2304
2305 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno)(regno));
2306
2307 emit_move_insn (adjust_address (registers, mode, size)adjust_address_1 (registers, mode, size, 1, 1, 0, 0), tem);
2308 size += GET_MODE_SIZE (mode);
2309 }
2310
2311 /* Save the arg pointer to the block. */
2312 tem = copy_to_reg (crtl(&x_rtl)->args.internal_arg_pointer);
2313 /* We need the pointer as the caller actually passed them to us, not
2314 as we might have pretended they were passed. Make sure it's a valid
2315 operand, as emit_move_insn isn't expected to handle a PLUS. */
2316 if (STACK_GROWS_DOWNWARD1)
2317 tem
2318 = force_operand (plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, tem,
2319 crtl(&x_rtl)->args.pretend_args_size),
2320 NULL_RTX(rtx) 0);
2321 emit_move_insn (adjust_address (registers, Pmode, 0)adjust_address_1 (registers, (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), 0
, 1, 1, 0, 0)
, tem);
2322
2323 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2324
2325 /* Save the structure value address unless this is passed as an
2326 "invisible" first argument. */
2327 if (struct_incoming_value)
2328 emit_move_insn (adjust_address (registers, Pmode, size)adjust_address_1 (registers, (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), size
, 1, 1, 0, 0)
,
2329 copy_to_reg (struct_incoming_value));
2330
2331 /* Return the address of the block. */
2332 return copy_addr_to_reg (XEXP (registers, 0)(((registers)->u.fld[0]).rt_rtx));
2333}
2334
2335/* __builtin_apply_args returns block of memory allocated on
2336 the stack into which is stored the arg pointer, structure
2337 value address, static chain, and all the registers that might
2338 possibly be used in performing a function call. The code is
2339 moved to the start of the function so the incoming values are
2340 saved. */
2341
/* NOTE(review): static-analyzer dump; line numbers and macro expansions are
   fused into the code text below.

   Expand __builtin_apply_args.  The heavy lifting is done once per
   function by expand_builtin_apply_args_1; its result rtx is cached in
   crtl->expr.x_apply_args_value so repeated calls reuse the same block.
   The generated save sequence is emitted in a separate insn sequence and
   then migrated to the start of the function (after parm_birth_insn when
   internal_arg_pointer is a non-virtual pseudo, otherwise right after the
   function-entry note) so the incoming register values are captured before
   anything clobbers them.  Returns the cached block-address rtx.  */
static rtx
2343expand_builtin_apply_args (void)
2344{
2345 /* Don't do __builtin_apply_args more than once in a function.
2346 Save the result of the first call and reuse it. */
2347 if (apply_args_value((&x_rtl)->expr.x_apply_args_value) != 0)
2348 return apply_args_value((&x_rtl)->expr.x_apply_args_value);
2349 {
2350 /* When this function is called, it means that registers must be
2351 saved on entry to this function. So we migrate the
2352 call to the first insn of this function. */
2353 rtx temp;
2354
2355 start_sequence ();
2356 temp = expand_builtin_apply_args_1 ();
2357 rtx_insn *seq = get_insns ();
2358 end_sequence ();
2359
2360 apply_args_value((&x_rtl)->expr.x_apply_args_value) = temp;
2361
2362 /* Put the insns after the NOTE that starts the function.
2363 If this is inside a start_sequence, make the outer-level insn
2364 chain current, so the code is placed at the start of the
2365 function. If internal_arg_pointer is a non-virtual pseudo,
2366 it needs to be placed after the function that initializes
2367 that pseudo. */
2368 push_topmost_sequence ();
2369 if (REG_P (crtl->args.internal_arg_pointer)(((enum rtx_code) ((&x_rtl)->args.internal_arg_pointer
)->code) == REG)
2370 && REGNO (crtl->args.internal_arg_pointer)(rhs_regno((&x_rtl)->args.internal_arg_pointer)) > LAST_VIRTUAL_REGISTER(((76)) + 5))
2371 emit_insn_before (seq, parm_birth_insn((&x_rtl)->x_parm_birth_insn));
2372 else
2373 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
2374 pop_topmost_sequence ();
2375 return temp;
2376 }
2377}
2378
2379/* Perform an untyped call and save the state required to perform an
2380 untyped return of whatever value was returned by the given function. */
2381
2382static rtx
2383expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
2384{
2385 int size, align, regno;
2386 fixed_size_mode mode;
2387 rtx incoming_args, result, reg, dest, src;
2388 rtx_call_insn *call_insn;
2389 rtx old_stack_level = 0;
2390 rtx call_fusage = 0;
2391 rtx struct_value = targetm.calls.struct_value_rtx (cfun(cfun + 0) ? TREE_TYPE (cfun->decl)((contains_struct_check (((cfun + 0)->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2391, __FUNCTION__))->typed.type)
: 0, 0);
2392
2393 arguments = convert_memory_address (Pmode, arguments)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (arguments), 0)
;
2394
2395 /* Create a block where the return registers can be saved. */
2396 result = assign_stack_local (BLKmode((void) 0, E_BLKmode), apply_result_size (), -1);
2397
2398 /* Fetch the arg pointer from the ARGUMENTS block. */
2399 incoming_args = gen_reg_rtx (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2400 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, arguments));
2401 if (!STACK_GROWS_DOWNWARD1)
2402 incoming_args = expand_simple_binop (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, MINUS, incoming_args, argsize,
2403 incoming_args, 0, OPTAB_LIB_WIDEN);
2404
2405 /* Push a new argument block and copy the arguments. Do not allow
2406 the (potential) memcpy call below to interfere with our stack
2407 manipulations. */
2408 do_pending_stack_adjust ();
2409 NO_DEFER_POP(((&x_rtl)->expr.x_inhibit_defer_pop) += 1);
2410
2411 /* Save the stack with nonlocal if available. */
2412 if (targetm.have_save_stack_nonlocal ())
2413 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
2414 else
2415 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2416
2417 /* Allocate a block of memory onto the stack and copy the memory
2418 arguments to the outgoing arguments address. We can pass TRUE
2419 as the 4th argument because we just saved the stack pointer
2420 and will restore it right after the call. */
2421 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
, -1, true);
2422
2423 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2424 may have already set current_function_calls_alloca to true.
2425 current_function_calls_alloca won't be set if argsize is zero,
2426 so we have to guarantee need_drap is true here. */
2427 if (SUPPORTS_STACK_ALIGNMENT((((unsigned int) 1 << 28) * 8) > ((((global_options
.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi
() == MS_ABI) ? 128 : ((8) * (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4))))
)
2428 crtl(&x_rtl)->need_drap = true;
2429
2430 dest = virtual_outgoing_args_rtx((this_target_rtl->x_global_rtl)[GR_VIRTUAL_OUTGOING_ARGS]
)
;
2431 if (!STACK_GROWS_DOWNWARD1)
2432 {
2433 if (CONST_INT_P (argsize)(((enum rtx_code) (argsize)->code) == CONST_INT))
2434 dest = plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, dest, -INTVAL (argsize)((argsize)->u.hwint[0]));
2435 else
2436 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize))gen_rtx_fmt_ee_stat ((PLUS), (((global_options.x_ix86_pmode ==
PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
))), ((dest)), ((negate_rtx ((global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), argsize
))) )
;
2437 }
2438 dest = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), dest);
2439 set_mem_align (dest, PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
);
2440 src = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), incoming_args);
2441 set_mem_align (src, PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
);
2442 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
2443
2444 /* Refer to the argument block. */
2445 apply_args_size ();
2446 arguments = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), arguments);
2447 set_mem_align (arguments, PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
);
2448
2449 /* Walk past the arg-pointer and structure value address. */
2450 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2451 if (struct_value)
2452 size += GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2453
2454 /* Restore each of the registers previously saved. Make USE insns
2455 for each of these registers for use in making the call. */
2456 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
2457 if ((mode = apply_args_mode(this_target_builtins->x_apply_args_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
2458 {
2459 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
2460 if (size % align != 0)
2461 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
2462 reg = gen_rtx_REG (mode, regno);
2463 emit_move_insn (reg, adjust_address (arguments, mode, size)adjust_address_1 (arguments, mode, size, 1, 1, 0, 0));
2464 use_reg (&call_fusage, reg);
2465 size += GET_MODE_SIZE (mode);
2466 }
2467
2468 /* Restore the structure value address unless this is passed as an
2469 "invisible" first argument. */
2470 size = GET_MODE_SIZE (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2471 if (struct_value)
2472 {
2473 rtx value = gen_reg_rtx (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
2474 emit_move_insn (value, adjust_address (arguments, Pmode, size)adjust_address_1 (arguments, (global_options.x_ix86_pmode == PMODE_DI
? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) :
(scalar_int_mode ((scalar_int_mode::from_int) E_SImode))), size
, 1, 1, 0, 0)
);
2475 emit_move_insn (struct_value, value);
2476 if (REG_P (struct_value)(((enum rtx_code) (struct_value)->code) == REG))
2477 use_reg (&call_fusage, struct_value);
2478 }
2479
2480 /* All arguments and registers used for the call are set up by now! */
2481 function = prepare_call_address (NULL__null, function, NULL__null, &call_fusage, 0, 0);
2482
2483 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
2484 and we don't want to load it into a register as an optimization,
2485 because prepare_call_address already did it if it should be done. */
2486 if (GET_CODE (function)((enum rtx_code) (function)->code) != SYMBOL_REF)
2487 function = memory_address (FUNCTION_MODE, function)memory_address_addr_space (((scalar_int_mode ((scalar_int_mode
::from_int) E_QImode))), (function), 0)
;
2488
2489 /* Generate the actual call instruction and save the return value. */
2490 if (targetm.have_untyped_call ())
2491 {
2492 rtx mem = gen_rtx_MEM (FUNCTION_MODE(scalar_int_mode ((scalar_int_mode::from_int) E_QImode)), function);
2493 emit_call_insn (targetm.gen_untyped_call (mem, result,
2494 result_vector (1, result)));
2495 }
2496 else if (targetm.have_call_value ())
2497 {
2498 rtx valreg = 0;
2499
2500 /* Locate the unique return register. It is not possible to
2501 express a call that sets more than one return register using
2502 call_value; use untyped_call for that. In fact, untyped_call
2503 only needs to save the return registers in the given block. */
2504 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
2505 if ((mode = apply_result_mode(this_target_builtins->x_apply_result_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
2506 {
2507 gcc_assert (!valreg)((void)(!(!valreg) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2507, __FUNCTION__), 0 : 0))
; /* have_untyped_call required. */
2508
2509 valreg = gen_rtx_REG (mode, regno);
2510 }
2511
2512 emit_insn (targetm.gen_call_value (valreg,
2513 gen_rtx_MEM (FUNCTION_MODE(scalar_int_mode ((scalar_int_mode::from_int) E_QImode)), function),
2514 const0_rtx(const_int_rtx[64]), NULL_RTX(rtx) 0, const0_rtx(const_int_rtx[64])));
2515
2516 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0)adjust_address_1 (result, ((machine_mode) (valreg)->mode),
0, 1, 1, 0, 0)
, valreg);
2517 }
2518 else
2519 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2519, __FUNCTION__))
;
2520
2521 /* Find the CALL insn we just emitted, and attach the register usage
2522 information. */
2523 call_insn = last_call_insn ();
2524 add_function_usage_to (call_insn, call_fusage);
2525
2526 /* Restore the stack. */
2527 if (targetm.have_save_stack_nonlocal ())
2528 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2529 else
2530 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2531 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2532
2533 OK_DEFER_POP(((&x_rtl)->expr.x_inhibit_defer_pop) -= 1);
2534
2535 /* Return the address of the result block. */
2536 result = copy_addr_to_reg (XEXP (result, 0)(((result)->u.fld[0]).rt_rtx));
2537 return convert_memory_address (ptr_mode, result)convert_memory_address_addr_space ((ptr_mode), (result), 0);
2538}
2539
2540/* Perform an untyped return. */
2541
2542static void
2543expand_builtin_return (rtx result)
2544{
2545 int size, align, regno;
2546 fixed_size_mode mode;
2547 rtx reg;
2548 rtx_insn *call_fusage = 0;
2549
2550 result = convert_memory_address (Pmode, result)convert_memory_address_addr_space (((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)), (result), 0)
;
2551
2552 apply_result_size ();
2553 result = gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), result);
2554
2555 if (targetm.have_untyped_return ())
2556 {
2557 rtx vector = result_vector (0, result);
2558 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2559 emit_barrier ();
2560 return;
2561 }
2562
2563 /* Restore the return value and note that each value is used. */
2564 size = 0;
2565 for (regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
2566 if ((mode = apply_result_mode(this_target_builtins->x_apply_result_mode)[regno]) != VOIDmode((void) 0, E_VOIDmode))
2567 {
2568 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode) / BITS_PER_UNIT(8);
2569 if (size % align != 0)
2570 size = CEIL (size, align)(((size) + (align) - 1) / (align)) * align;
2571 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno)(regno));
2572 emit_move_insn (reg, adjust_address (result, mode, size)adjust_address_1 (result, mode, size, 1, 1, 0, 0));
2573
2574 push_to_sequence (call_fusage);
2575 emit_use (reg);
2576 call_fusage = get_insns ();
2577 end_sequence ();
2578 size += GET_MODE_SIZE (mode);
2579 }
2580
2581 /* Put the USE insns before the return. */
2582 emit_insn (call_fusage);
2583
2584 /* Return whatever values was restored by jumping directly to the end
2585 of the function. */
2586 expand_naked_return ();
2587}
2588
2589/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2590
2591static enum type_class
2592type_to_class (tree type)
2593{
2594 switch (TREE_CODE (type)((enum tree_code) (type)->base.code))
2595 {
2596 case VOID_TYPE: return void_type_class;
2597 case INTEGER_TYPE: return integer_type_class;
2598 case ENUMERAL_TYPE: return enumeral_type_class;
2599 case BOOLEAN_TYPE: return boolean_type_class;
2600 case POINTER_TYPE: return pointer_type_class;
2601 case REFERENCE_TYPE: return reference_type_class;
2602 case OFFSET_TYPE: return offset_type_class;
2603 case REAL_TYPE: return real_type_class;
2604 case COMPLEX_TYPE: return complex_type_class;
2605 case FUNCTION_TYPE: return function_type_class;
2606 case METHOD_TYPE: return method_type_class;
2607 case RECORD_TYPE: return record_type_class;
2608 case UNION_TYPE:
2609 case QUAL_UNION_TYPE: return union_type_class;
2610 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)((tree_check2 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2610, __FUNCTION__, (ARRAY_TYPE), (INTEGER_TYPE)))->type_common
.string_flag)
2611 ? string_type_class : array_type_class);
2612 case LANG_TYPE: return lang_type_class;
2613 case OPAQUE_TYPE: return opaque_type_class;
2614 default: return no_type_class;
2615 }
2616}
2617
2618/* Expand a call EXP to __builtin_classify_type. */
2619
2620static rtx
2621expand_builtin_classify_type (tree exp)
2622{
2623 if (call_expr_nargs (exp)(((int)((unsigned long) (*tree_int_cst_elt_check (((tree_class_check
((exp), (tcc_vl_exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2623, __FUNCTION__))->exp.operands[0]), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2623, __FUNCTION__)))) - 3)
)
2624 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))))gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (type_to_class (((
contains_struct_check (((*((const_cast<tree*> (tree_operand_check
(((tree_check ((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2624, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2624, __FUNCTION__)))))), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2624, __FUNCTION__))->typed.type))))
;
2625 return GEN_INT (no_type_class)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (no_type_class));
2626}
2627
2628/* This helper macro, meant to be used in mathfn_built_in below, determines
2629 which among a set of builtin math functions is appropriate for a given type
2630 mode. The `F' (float) and `L' (long double) are automatically generated
2631 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
2632 types, there are additional types that are considered with 'F32', 'F64',
2633 'F128', etc. suffixes. */
2634#define CASE_MATHFN(MATHFN) \
2635 CASE_CFN_##MATHFN: \
2636 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2637 fcodel = BUILT_IN_##MATHFN##L ; break;
2638/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2639 types. */
2640#define CASE_MATHFN_FLOATN(MATHFN) \
2641 CASE_CFN_##MATHFN: \
2642 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2643 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2644 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2645 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2646 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2647 break;
2648/* Similar to above, but appends _R after any F/L suffix. */
2649#define CASE_MATHFN_REENT(MATHFN) \
2650 case CFN_BUILT_IN_##MATHFN##_R: \
2651 case CFN_BUILT_IN_##MATHFN##F_R: \
2652 case CFN_BUILT_IN_##MATHFN##L_R: \
2653 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2654 fcodel = BUILT_IN_##MATHFN##L_R ; break;
2655
2656/* Return a function equivalent to FN but operating on floating-point
2657 values of type TYPE, or END_BUILTINS if no such function exists.
2658 This is purely an operation on function codes; it does not guarantee
2659 that the target actually has an implementation of the function. */
2660
2661static built_in_function
2662mathfn_built_in_2 (tree type, combined_fn fn)
2663{
2664 tree mtype;
2665 built_in_function fcode, fcodef, fcodel;
2666 built_in_function fcodef16 = END_BUILTINS;
2667 built_in_function fcodef32 = END_BUILTINS;
2668 built_in_function fcodef64 = END_BUILTINS;
2669 built_in_function fcodef128 = END_BUILTINS;
2670 built_in_function fcodef32x = END_BUILTINS;
2671 built_in_function fcodef64x = END_BUILTINS;
2672 built_in_function fcodef128x = END_BUILTINS;
2673
2674 switch (fn)
2675 {
2676#define SEQ_OF_CASE_MATHFN \
2677 CASE_MATHFN (ACOS) \
2678 CASE_MATHFN (ACOSH) \
2679 CASE_MATHFN (ASIN) \
2680 CASE_MATHFN (ASINH) \
2681 CASE_MATHFN (ATAN) \
2682 CASE_MATHFN (ATAN2) \
2683 CASE_MATHFN (ATANH) \
2684 CASE_MATHFN (CBRT) \
2685 CASE_MATHFN_FLOATN (CEIL) \
2686 CASE_MATHFN (CEXPI) \
2687 CASE_MATHFN_FLOATN (COPYSIGN) \
2688 CASE_MATHFN (COS) \
2689 CASE_MATHFN (COSH) \
2690 CASE_MATHFN (DREM) \
2691 CASE_MATHFN (ERF) \
2692 CASE_MATHFN (ERFC) \
2693 CASE_MATHFN (EXP) \
2694 CASE_MATHFN (EXP10) \
2695 CASE_MATHFN (EXP2) \
2696 CASE_MATHFN (EXPM1) \
2697 CASE_MATHFN (FABS) \
2698 CASE_MATHFN (FDIM) \
2699 CASE_MATHFN_FLOATN (FLOOR) \
2700 CASE_MATHFN_FLOATN (FMA) \
2701 CASE_MATHFN_FLOATN (FMAX) \
2702 CASE_MATHFN_FLOATN (FMIN) \
2703 CASE_MATHFN (FMOD) \
2704 CASE_MATHFN (FREXP) \
2705 CASE_MATHFN (GAMMA) \
2706 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2707 CASE_MATHFN (HUGE_VAL) \
2708 CASE_MATHFN (HYPOT) \
2709 CASE_MATHFN (ILOGB) \
2710 CASE_MATHFN (ICEIL) \
2711 CASE_MATHFN (IFLOOR) \
2712 CASE_MATHFN (INF) \
2713 CASE_MATHFN (IRINT) \
2714 CASE_MATHFN (IROUND) \
2715 CASE_MATHFN (ISINF) \
2716 CASE_MATHFN (J0) \
2717 CASE_MATHFN (J1) \
2718 CASE_MATHFN (JN) \
2719 CASE_MATHFN (LCEIL) \
2720 CASE_MATHFN (LDEXP) \
2721 CASE_MATHFN (LFLOOR) \
2722 CASE_MATHFN (LGAMMA) \
2723 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2724 CASE_MATHFN (LLCEIL) \
2725 CASE_MATHFN (LLFLOOR) \
2726 CASE_MATHFN (LLRINT) \
2727 CASE_MATHFN (LLROUND) \
2728 CASE_MATHFN (LOG) \
2729 CASE_MATHFN (LOG10) \
2730 CASE_MATHFN (LOG1P) \
2731 CASE_MATHFN (LOG2) \
2732 CASE_MATHFN (LOGB) \
2733 CASE_MATHFN (LRINT) \
2734 CASE_MATHFN (LROUND) \
2735 CASE_MATHFN (MODF) \
2736 CASE_MATHFN (NAN) \
2737 CASE_MATHFN (NANS) \
2738 CASE_MATHFN_FLOATN (NEARBYINT) \
2739 CASE_MATHFN (NEXTAFTER) \
2740 CASE_MATHFN (NEXTTOWARD) \
2741 CASE_MATHFN (POW) \
2742 CASE_MATHFN (POWI) \
2743 CASE_MATHFN (POW10) \
2744 CASE_MATHFN (REMAINDER) \
2745 CASE_MATHFN (REMQUO) \
2746 CASE_MATHFN_FLOATN (RINT) \
2747 CASE_MATHFN_FLOATN (ROUND) \
2748 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2749 CASE_MATHFN (SCALB) \
2750 CASE_MATHFN (SCALBLN) \
2751 CASE_MATHFN (SCALBN) \
2752 CASE_MATHFN (SIGNBIT) \
2753 CASE_MATHFN (SIGNIFICAND) \
2754 CASE_MATHFN (SIN) \
2755 CASE_MATHFN (SINCOS) \
2756 CASE_MATHFN (SINH) \
2757 CASE_MATHFN_FLOATN (SQRT) \
2758 CASE_MATHFN (TAN) \
2759 CASE_MATHFN (TANH) \
2760 CASE_MATHFN (TGAMMA) \
2761 CASE_MATHFN_FLOATN (TRUNC) \
2762 CASE_MATHFN (Y0) \
2763 CASE_MATHFN (Y1) \
2764 CASE_MATHFN (YN)
2765
2766 SEQ_OF_CASE_MATHFN
2767
2768 default:
2769 return END_BUILTINS;
2770 }
2771
2772 mtype = TYPE_MAIN_VARIANT (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2772, __FUNCTION__))->type_common.main_variant)
;
2773 if (mtype == double_type_nodeglobal_trees[TI_DOUBLE_TYPE])
2774 return fcode;
2775 else if (mtype == float_type_nodeglobal_trees[TI_FLOAT_TYPE])
2776 return fcodef;
2777 else if (mtype == long_double_type_nodeglobal_trees[TI_LONG_DOUBLE_TYPE])
2778 return fcodel;
2779 else if (mtype == float16_type_nodeglobal_trees[TI_FLOAT16_TYPE])
2780 return fcodef16;
2781 else if (mtype == float32_type_nodeglobal_trees[TI_FLOAT32_TYPE])
2782 return fcodef32;
2783 else if (mtype == float64_type_nodeglobal_trees[TI_FLOAT64_TYPE])
2784 return fcodef64;
2785 else if (mtype == float128_type_nodeglobal_trees[TI_FLOAT128_TYPE])
2786 return fcodef128;
2787 else if (mtype == float32x_type_nodeglobal_trees[TI_FLOAT32X_TYPE])
2788 return fcodef32x;
2789 else if (mtype == float64x_type_nodeglobal_trees[TI_FLOAT64X_TYPE])
2790 return fcodef64x;
2791 else if (mtype == float128x_type_nodeglobal_trees[TI_FLOAT128X_TYPE])
2792 return fcodef128x;
2793 else
2794 return END_BUILTINS;
2795}
2796
2797#undef CASE_MATHFN
2798#undef CASE_MATHFN_FLOATN
2799#undef CASE_MATHFN_REENT
2800
2801/* Return mathematic function equivalent to FN but operating directly on TYPE,
2802 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2803 otherwise use the explicit declaration. If we can't do the conversion,
2804 return null. */
2805
2806static tree
2807mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2808{
2809 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2810 if (fcode2 == END_BUILTINS)
2811 return NULL_TREE(tree) __null;
2812
2813 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2814 return NULL_TREE(tree) __null;
2815
2816 return builtin_decl_explicit (fcode2);
2817}
2818
2819/* Like mathfn_built_in_1, but always use the implicit array. */
2820
2821tree
2822mathfn_built_in (tree type, combined_fn fn)
2823{
2824 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2825}
2826
2827/* Like mathfn_built_in_1, but take a built_in_function and
2828 always use the implicit array. */
2829
2830tree
2831mathfn_built_in (tree type, enum built_in_function fn)
2832{
2833 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2834}
2835
2836/* Return the type associated with a built in function, i.e., the one
2837 to be passed to mathfn_built_in to get the type-specific
2838 function. */
2839
2840tree
2841mathfn_built_in_type (combined_fn fn)
2842{
2843#define CASE_MATHFN(MATHFN) \
2844 case CFN_BUILT_IN_##MATHFN: \
2845 return double_type_nodeglobal_trees[TI_DOUBLE_TYPE]; \
2846 case CFN_BUILT_IN_##MATHFN##F: \
2847 return float_type_nodeglobal_trees[TI_FLOAT_TYPE]; \
2848 case CFN_BUILT_IN_##MATHFN##L: \
2849 return long_double_type_nodeglobal_trees[TI_LONG_DOUBLE_TYPE];
2850
2851#define CASE_MATHFN_FLOATN(MATHFN) \
2852 CASE_MATHFN(MATHFN) \
2853 case CFN_BUILT_IN_##MATHFN##F16: \
2854 return float16_type_nodeglobal_trees[TI_FLOAT16_TYPE]; \
2855 case CFN_BUILT_IN_##MATHFN##F32: \
2856 return float32_type_nodeglobal_trees[TI_FLOAT32_TYPE]; \
2857 case CFN_BUILT_IN_##MATHFN##F64: \
2858 return float64_type_nodeglobal_trees[TI_FLOAT64_TYPE]; \
2859 case CFN_BUILT_IN_##MATHFN##F128: \
2860 return float128_type_nodeglobal_trees[TI_FLOAT128_TYPE]; \
2861 case CFN_BUILT_IN_##MATHFN##F32X: \
2862 return float32x_type_nodeglobal_trees[TI_FLOAT32X_TYPE]; \
2863 case CFN_BUILT_IN_##MATHFN##F64X: \
2864 return float64x_type_nodeglobal_trees[TI_FLOAT64X_TYPE]; \
2865 case CFN_BUILT_IN_##MATHFN##F128X: \
2866 return float128x_type_nodeglobal_trees[TI_FLOAT128X_TYPE];
2867
2868/* Similar to above, but appends _R after any F/L suffix. */
2869#define CASE_MATHFN_REENT(MATHFN) \
2870 case CFN_BUILT_IN_##MATHFN##_R: \
2871 return double_type_nodeglobal_trees[TI_DOUBLE_TYPE]; \
2872 case CFN_BUILT_IN_##MATHFN##F_R: \
2873 return float_type_nodeglobal_trees[TI_FLOAT_TYPE]; \
2874 case CFN_BUILT_IN_##MATHFN##L_R: \
2875 return long_double_type_nodeglobal_trees[TI_LONG_DOUBLE_TYPE];
2876
2877 switch (fn)
2878 {
2879 SEQ_OF_CASE_MATHFN
2880
2881 default:
2882 return NULL_TREE(tree) __null;
2883 }
2884
2885#undef CASE_MATHFN
2886#undef CASE_MATHFN_FLOATN
2887#undef CASE_MATHFN_REENT
2888#undef SEQ_OF_CASE_MATHFN
2889}
2890
2891/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2892 return its code, otherwise return IFN_LAST. Note that this function
2893 only tests whether the function is defined in internals.def, not whether
2894 it is actually available on the target. */
2895
2896internal_fn
2897associated_internal_fn (tree fndecl)
2898{
2899 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)((void)(!(((built_in_class) (tree_check ((fndecl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2899, __FUNCTION__, (FUNCTION_DECL)))->function_decl.built_in_class
) == BUILT_IN_NORMAL) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2899, __FUNCTION__), 0 : 0))
;
2900 tree return_type = TREE_TYPE (TREE_TYPE (fndecl))((contains_struct_check ((((contains_struct_check ((fndecl), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2900, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2900, __FUNCTION__))->typed.type)
;
2901 switch (DECL_FUNCTION_CODE (fndecl))
2902 {
2903#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2904 CASE_FLT_FN (BUILT_IN_##NAME)case BUILT_IN_##NAME: case BUILT_IN_##NAMEF: case BUILT_IN_##
NAMEL
: return IFN_##NAME;
2905#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2906 CASE_FLT_FN (BUILT_IN_##NAME)case BUILT_IN_##NAME: case BUILT_IN_##NAMEF: case BUILT_IN_##
NAMEL
: return IFN_##NAME; \
2907 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME)case BUILT_IN_##NAMEF16: case BUILT_IN_##NAMEF32: case BUILT_IN_
##NAMEF64: case BUILT_IN_##NAMEF128: case BUILT_IN_##NAMEF32X
: case BUILT_IN_##NAMEF64X: case BUILT_IN_##NAMEF128X
: return IFN_##NAME;
2908#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2909 CASE_INT_FN (BUILT_IN_##NAME)case BUILT_IN_##NAME: case BUILT_IN_##NAMEL: case BUILT_IN_##
NAMELL: case BUILT_IN_##NAMEIMAX
: return IFN_##NAME;
2910#include "internal-fn.def"
2911
2912 CASE_FLT_FN (BUILT_IN_POW10)case BUILT_IN_POW10: case BUILT_IN_POW10F: case BUILT_IN_POW10L:
2913 return IFN_EXP10;
2914
2915 CASE_FLT_FN (BUILT_IN_DREM)case BUILT_IN_DREM: case BUILT_IN_DREMF: case BUILT_IN_DREML:
2916 return IFN_REMAINDER;
2917
2918 CASE_FLT_FN (BUILT_IN_SCALBN)case BUILT_IN_SCALBN: case BUILT_IN_SCALBNF: case BUILT_IN_SCALBNL:
2919 CASE_FLT_FN (BUILT_IN_SCALBLN)case BUILT_IN_SCALBLN: case BUILT_IN_SCALBLNF: case BUILT_IN_SCALBLNL:
2920 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))(real_format_for_mode[(((enum mode_class) mode_class[((((enum
tree_code) ((tree_class_check ((return_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2920, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(return_type) : (return_type)->type_common.mode)]) == MODE_DECIMAL_FLOAT
) ? (((((((enum tree_code) ((tree_class_check ((return_type),
(tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2920, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(return_type) : (return_type)->type_common.mode)) - MIN_MODE_DECIMAL_FLOAT
) + (MAX_MODE_FLOAT - MIN_MODE_FLOAT + 1)) : ((enum mode_class
) mode_class[((((enum tree_code) ((tree_class_check ((return_type
), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2920, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(return_type) : (return_type)->type_common.mode)]) == MODE_FLOAT
? ((((((enum tree_code) ((tree_class_check ((return_type), (
tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2920, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(return_type) : (return_type)->type_common.mode)) - MIN_MODE_FLOAT
) : ((fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2920, __FUNCTION__)), 0)])
->b == 2)
2921 return IFN_LDEXP;
2922 return IFN_LAST;
2923
2924 default:
2925 return IFN_LAST;
2926 }
2927}
2928
2929/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2930 on the current target by a call to an internal function, return the
2931 code of that internal function, otherwise return IFN_LAST. The caller
2932 is responsible for ensuring that any side-effects of the built-in
2933 call are dealt with correctly. E.g. if CALL sets errno, the caller
2934 must decide that the errno result isn't needed or make it available
2935 in some other way. */
2936
2937internal_fn
2938replacement_internal_fn (gcall *call)
2939{
2940 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2941 {
2942 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2943 if (ifn != IFN_LAST)
2944 {
2945 tree_pair types = direct_internal_fn_types (ifn, call);
2946 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2947 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2948 return ifn;
2949 }
2950 }
2951 return IFN_LAST;
2952}
2953
2954/* Expand a call to the builtin trinary math functions (fma).
2955 Return NULL_RTX if a normal call should be emitted rather than expanding the
2956 function in-line. EXP is the expression that is a call to the builtin
2957 function; if convenient, the result should be placed in TARGET.
2958 SUBTARGET may be used as the target for computing one of EXP's
2959 operands. */
2960
2961static rtx
2962expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2963{
2964 optab builtin_optab;
2965 rtx op0, op1, op2, result;
2966 rtx_insn *insns;
2967 tree fndecl = get_callee_fndecl (exp);
2968 tree arg0, arg1, arg2;
2969 machine_mode mode;
2970
2971 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2972 return NULL_RTX(rtx) 0;
2973
2974 arg0 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2974, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2974, __FUNCTION__)))))
;
2975 arg1 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2975, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2975, __FUNCTION__)))))
;
2976 arg2 = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2976, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2976, __FUNCTION__)))))
;
2977
2978 switch (DECL_FUNCTION_CODE (fndecl))
2979 {
2980 CASE_FLT_FN (BUILT_IN_FMA)case BUILT_IN_FMA: case BUILT_IN_FMAF: case BUILT_IN_FMAL:
2981 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA)case BUILT_IN_FMAF16: case BUILT_IN_FMAF32: case BUILT_IN_FMAF64
: case BUILT_IN_FMAF128: case BUILT_IN_FMAF32X: case BUILT_IN_FMAF64X
: case BUILT_IN_FMAF128X
:
2982 builtin_optab = fma_optab; break;
2983 default:
2984 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2984, __FUNCTION__))
;
2985 }
2986
2987 /* Make a suitable register to place result in. */
2988 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2988, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2988, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2988, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2988, __FUNCTION__))->typed.type))->type_common.mode)
;
2989
2990 /* Before working hard, check whether the instruction is available. */
2991 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2992 return NULL_RTX(rtx) 0;
2993
2994 result = gen_reg_rtx (mode);
2995
2996 /* Always stabilize the argument list. */
2997 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2997, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2997, __FUNCTION__)))))
= arg0 = builtin_save_expr (arg0);
2998 CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2998, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2998, __FUNCTION__)))))
= arg1 = builtin_save_expr (arg1);
2999 CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2999, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 2999, __FUNCTION__)))))
= arg2 = builtin_save_expr (arg2);
3000
3001 op0 = expand_expr (arg0, subtarget, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
3002 op1 = expand_normal (arg1);
3003 op2 = expand_normal (arg2);
3004
3005 start_sequence ();
3006
3007 /* Compute into RESULT.
3008 Set RESULT to wherever the result comes back. */
3009 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
3010 result, 0);
3011
3012 /* If we were unable to expand via the builtin, stop the sequence
3013 (without outputting the insns) and call to the library function
3014 with the stabilized argument list. */
3015 if (result == 0)
3016 {
3017 end_sequence ();
3018 return expand_call (exp, target, target == const0_rtx(const_int_rtx[64]));
3019 }
3020
3021 /* Output the entire sequence. */
3022 insns = get_insns ();
3023 end_sequence ();
3024 emit_insn (insns);
3025
3026 return result;
3027}
3028
3029/* Expand a call to the builtin sin and cos math functions.
3030 Return NULL_RTX if a normal call should be emitted rather than expanding the
3031 function in-line. EXP is the expression that is a call to the builtin
3032 function; if convenient, the result should be placed in TARGET.
3033 SUBTARGET may be used as the target for computing one of EXP's
3034 operands. */
3035
3036static rtx
3037expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
3038{
3039 optab builtin_optab;
3040 rtx op0;
3041 rtx_insn *insns;
3042 tree fndecl = get_callee_fndecl (exp);
3043 machine_mode mode;
3044 tree arg;
3045
3046 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3047 return NULL_RTX(rtx) 0;
3048
3049 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3049, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3049, __FUNCTION__)))))
;
3050
3051 switch (DECL_FUNCTION_CODE (fndecl))
3052 {
3053 CASE_FLT_FN (BUILT_IN_SIN)case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
3054 CASE_FLT_FN (BUILT_IN_COS)case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
3055 builtin_optab = sincos_optab; break;
3056 default:
3057 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3057, __FUNCTION__))
;
3058 }
3059
3060 /* Make a suitable register to place result in. */
3061 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3061, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3061, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3061, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3061, __FUNCTION__))->typed.type))->type_common.mode)
;
3062
3063 /* Check if sincos insn is available, otherwise fallback
3064 to sin or cos insn. */
3065 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
3066 switch (DECL_FUNCTION_CODE (fndecl))
3067 {
3068 CASE_FLT_FN (BUILT_IN_SIN)case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
3069 builtin_optab = sin_optab; break;
3070 CASE_FLT_FN (BUILT_IN_COS)case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
3071 builtin_optab = cos_optab; break;
3072 default:
3073 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3073, __FUNCTION__))
;
3074 }
3075
3076 /* Before working hard, check whether the instruction is available. */
3077 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
3078 {
3079 rtx result = gen_reg_rtx (mode);
3080
3081 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3082 need to expand the argument again. This way, we will not perform
3083 side-effects more the once. */
3084 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3084, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3084, __FUNCTION__)))))
= arg = builtin_save_expr (arg);
3085
3086 op0 = expand_expr (arg, subtarget, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
3087
3088 start_sequence ();
3089
3090 /* Compute into RESULT.
3091 Set RESULT to wherever the result comes back. */
3092 if (builtin_optab == sincos_optab)
3093 {
3094 int ok;
3095
3096 switch (DECL_FUNCTION_CODE (fndecl))
3097 {
3098 CASE_FLT_FN (BUILT_IN_SIN)case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
3099 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
3100 break;
3101 CASE_FLT_FN (BUILT_IN_COS)case BUILT_IN_COS: case BUILT_IN_COSF: case BUILT_IN_COSL:
3102 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
3103 break;
3104 default:
3105 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3105, __FUNCTION__))
;
3106 }
3107 gcc_assert (ok)((void)(!(ok) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3107, __FUNCTION__), 0 : 0))
;
3108 }
3109 else
3110 result = expand_unop (mode, builtin_optab, op0, result, 0);
3111
3112 if (result != 0)
3113 {
3114 /* Output the entire sequence. */
3115 insns = get_insns ();
3116 end_sequence ();
3117 emit_insn (insns);
3118 return result;
3119 }
3120
3121 /* If we were unable to expand via the builtin, stop the sequence
3122 (without outputting the insns) and call to the library function
3123 with the stabilized argument list. */
3124 end_sequence ();
3125 }
3126
3127 return expand_call (exp, target, target == const0_rtx(const_int_rtx[64]));
3128}
3129
3130/* Given an interclass math builtin decl FNDECL and it's argument ARG
3131 return an RTL instruction code that implements the functionality.
3132 If that isn't possible or available return CODE_FOR_nothing. */
3133
3134static enum insn_code
3135interclass_mathfn_icode (tree arg, tree fndecl)
3136{
3137 bool errno_set = false;
3138 optab builtin_optab = unknown_optab;
3139 machine_mode mode;
3140
3141 switch (DECL_FUNCTION_CODE (fndecl))
3142 {
3143 CASE_FLT_FN (BUILT_IN_ILOGB)case BUILT_IN_ILOGB: case BUILT_IN_ILOGBF: case BUILT_IN_ILOGBL:
3144 errno_set = true; builtin_optab = ilogb_optab; break;
3145 CASE_FLT_FN (BUILT_IN_ISINF)case BUILT_IN_ISINF: case BUILT_IN_ISINFF: case BUILT_IN_ISINFL:
3146 builtin_optab = isinf_optab; break;
3147 case BUILT_IN_ISNORMAL:
3148 case BUILT_IN_ISFINITE:
3149 CASE_FLT_FN (BUILT_IN_FINITE)case BUILT_IN_FINITE: case BUILT_IN_FINITEF: case BUILT_IN_FINITEL:
3150 case BUILT_IN_FINITED32:
3151 case BUILT_IN_FINITED64:
3152 case BUILT_IN_FINITED128:
3153 case BUILT_IN_ISINFD32:
3154 case BUILT_IN_ISINFD64:
3155 case BUILT_IN_ISINFD128:
3156 /* These builtins have no optabs (yet). */
3157 break;
3158 default:
3159 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3159, __FUNCTION__))
;
3160 }
3161
3162 /* There's no easy way to detect the case we need to set EDOM. */
3163 if (flag_errno_mathglobal_options.x_flag_errno_math && errno_set)
3164 return CODE_FOR_nothing;
3165
3166 /* Optab mode depends on the mode of the input argument. */
3167 mode = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3167, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3167, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3167, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3167, __FUNCTION__))->typed.type))->type_common.mode)
;
3168
3169 if (builtin_optab)
3170 return optab_handler (builtin_optab, mode);
3171 return CODE_FOR_nothing;
3172}
3173
3174/* Expand a call to one of the builtin math functions that operate on
3175 floating point argument and output an integer result (ilogb, isinf,
3176 isnan, etc).
3177 Return 0 if a normal call should be emitted rather than expanding the
3178 function in-line. EXP is the expression that is a call to the builtin
3179 function; if convenient, the result should be placed in TARGET. */
3180
3181static rtx
3182expand_builtin_interclass_mathfn (tree exp, rtx target)
3183{
3184 enum insn_code icode = CODE_FOR_nothing;
3185 rtx op0;
3186 tree fndecl = get_callee_fndecl (exp);
3187 machine_mode mode;
3188 tree arg;
3189
3190 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3191 return NULL_RTX(rtx) 0;
3192
3193 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3193, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3193, __FUNCTION__)))))
;
3194 icode = interclass_mathfn_icode (arg, fndecl);
3195 mode = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3195, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3195, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3195, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3195, __FUNCTION__))->typed.type))->type_common.mode)
;
3196
3197 if (icode != CODE_FOR_nothing)
3198 {
3199 class expand_operand ops[1];
3200 rtx_insn *last = get_last_insn ();
3201 tree orig_arg = arg;
3202
3203 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3204 need to expand the argument again. This way, we will not perform
3205 side-effects more the once. */
3206 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3206, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3206, __FUNCTION__)))))
= arg = builtin_save_expr (arg);
3207
3208 op0 = expand_expr (arg, NULL_RTX(rtx) 0, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
3209
3210 if (mode != GET_MODE (op0)((machine_mode) (op0)->mode))
3211 op0 = convert_to_mode (mode, op0, 0);
3212
3213 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3213, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3213, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3213, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3213, __FUNCTION__))->typed.type))->type_common.mode)
);
3214 if (maybe_legitimize_operands (icode, 0, 1, ops)
3215 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
3216 return ops[0].value;
3217
3218 delete_insns_since (last);
3219 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3219, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3219, __FUNCTION__)))))
= orig_arg;
3220 }
3221
3222 return NULL_RTX(rtx) 0;
3223}
3224
3225/* Expand a call to the builtin sincos math function.
3226 Return NULL_RTX if a normal call should be emitted rather than expanding the
3227 function in-line. EXP is the expression that is a call to the builtin
3228 function. */
3229
3230static rtx
3231expand_builtin_sincos (tree exp)
3232{
3233 rtx op0, op1, op2, target1, target2;
3234 machine_mode mode;
3235 tree arg, sinp, cosp;
3236 int result;
3237 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
3238 tree alias_type, alias_off;
3239
3240 if (!validate_arglist (exp, REAL_TYPE,
3241 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3242 return NULL_RTX(rtx) 0;
3243
3244 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3244, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3244, __FUNCTION__)))))
;
3245 sinp = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3245, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3245, __FUNCTION__)))))
;
3246 cosp = CALL_EXPR_ARG (exp, 2)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3246, __FUNCTION__, (CALL_EXPR)))), ((2) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3246, __FUNCTION__)))))
;
3247
3248 /* Make a suitable register to place result in. */
3249 mode = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3249, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3249, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3249, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3249, __FUNCTION__))->typed.type))->type_common.mode)
;
3250
3251 /* Check if sincos insn is available, otherwise emit the call. */
3252 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
3253 return NULL_RTX(rtx) 0;
3254
3255 target1 = gen_reg_rtx (mode);
3256 target2 = gen_reg_rtx (mode);
3257
3258 op0 = expand_normal (arg);
3259 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3259, __FUNCTION__))->typed.type)
, ptr_mode, true);
3260 alias_off = build_int_cst (alias_type, 0);
3261 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3261, __FUNCTION__))->typed.type)
,
3262 sinp, alias_off));
3263 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3263, __FUNCTION__))->typed.type)
,
3264 cosp, alias_off));
3265
3266 /* Compute into target1 and target2.
3267 Set TARGET to wherever the result comes back. */
3268 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
3269 gcc_assert (result)((void)(!(result) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3269, __FUNCTION__), 0 : 0))
;
3270
3271 /* Move target1 and target2 to the memory locations indicated
3272 by op1 and op2. */
3273 emit_move_insn (op1, target1);
3274 emit_move_insn (op2, target2);
3275
3276 return const0_rtx(const_int_rtx[64]);
3277}
3278
3279/* Expand a call to the internal cexpi builtin to the sincos math function.
3280 EXP is the expression that is a call to the builtin function; if convenient,
3281 the result should be placed in TARGET. */
3282
3283static rtx
3284expand_builtin_cexpi (tree exp, rtx target)
3285{
3286 tree fndecl = get_callee_fndecl (exp);
3287 tree arg, type;
3288 machine_mode mode;
3289 rtx op0, op1, op2;
3290 location_t loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
3291
3292 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3293 return NULL_RTX(rtx) 0;
3294
3295 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3295, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3295, __FUNCTION__)))))
;
3296 type = TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3296, __FUNCTION__))->typed.type)
;
3297 mode = TYPE_MODE (TREE_TYPE (arg))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3297, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3297, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3297, __FUNCTION__))->typed.type)) : (((contains_struct_check
((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3297, __FUNCTION__))->typed.type))->type_common.mode)
;
3298
3299 /* Try expanding via a sincos optab, fall back to emitting a libcall
3300 to sincos or cexp. We are sure we have sincos or cexp because cexpi
3301 is only generated from sincos, cexp or if we have either of them. */
3302 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
3303 {
3304 op1 = gen_reg_rtx (mode);
3305 op2 = gen_reg_rtx (mode);
3306
3307 op0 = expand_expr (arg, NULL_RTX(rtx) 0, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
3308
3309 /* Compute into op1 and op2. */
3310 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
3311 }
3312 else if (targetm.libc_has_function (function_sincos, type))
3313 {
3314 tree call, fn = NULL_TREE(tree) __null;
3315 tree top1, top2;
3316 rtx op1a, op2a;
3317
3318 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3319 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
3320 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3321 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
3322 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3323 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
3324 else
3325 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3325, __FUNCTION__))
;
3326
3327 op1 = assign_temp (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3327, __FUNCTION__))->typed.type)
, 1, 1);
3328 op2 = assign_temp (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3328, __FUNCTION__))->typed.type)
, 1, 1);
3329 op1a = copy_addr_to_reg (XEXP (op1, 0)(((op1)->u.fld[0]).rt_rtx));
3330 op2a = copy_addr_to_reg (XEXP (op2, 0)(((op2)->u.fld[0]).rt_rtx));
3331 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3331, __FUNCTION__))->typed.type)
), op1a);
3332 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3332, __FUNCTION__))->typed.type)
), op2a);
3333
3334 /* Make sure not to fold the sincos call again. */
3335 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)((contains_struct_check ((fn), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3335, __FUNCTION__))->typed.type)
), fn);
3336 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn))((contains_struct_check ((((contains_struct_check ((fn), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3336, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3336, __FUNCTION__))->typed.type)
,
3337 call, 3, arg, top1, top2));
3338 }
3339 else
3340 {
3341 tree call, fn = NULL_TREE(tree) __null, narg;
3342 tree ctype = build_complex_type (type);
3343
3344 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3345 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
3346 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3347 fn = builtin_decl_explicit (BUILT_IN_CEXP);
3348 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3349 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
3350 else
3351 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3351, __FUNCTION__))
;
3352
3353 /* If we don't have a decl for cexp create one. This is the
3354 friendliest fallback if the user calls __builtin_cexpi
3355 without full target C99 function support. */
3356 if (fn == NULL_TREE(tree) __null)
3357 {
3358 tree fntype;
3359 const char *name = NULL__null;
3360
3361 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3362 name = "cexpf";
3363 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3364 name = "cexp";
3365 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3366 name = "cexpl";
3367
3368 fntype = build_function_type_list (ctype, ctype, NULL_TREE(tree) __null);
3369 fn = build_fn_decl (name, fntype);
3370 }
3371
3372 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
3373 build_real (type, dconst0), arg);
3374
3375 /* Make sure not to fold the cexp call again. */
3376 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)((contains_struct_check ((fn), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3376, __FUNCTION__))->typed.type)
), fn);
3377 return expand_expr (build_call_nary (ctype, call, 1, narg),
3378 target, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
3379 }
3380
3381 /* Now build the proper return type. */
3382 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
3383 make_tree (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3383, __FUNCTION__))->typed.type)
, op2),
3384 make_tree (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3384, __FUNCTION__))->typed.type)
, op1)),
3385 target, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
3386}
3387
3388/* Conveniently construct a function call expression. FNDECL names the
3389 function to be called, N is the number of arguments, and the "..."
3390 parameters are the argument expressions. Unlike build_call_exr
3391 this doesn't fold the call, hence it will always return a CALL_EXPR. */
3392
3393static tree
3394build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3395{
3396 va_list ap;
3397 tree fntype = TREE_TYPE (fndecl)((contains_struct_check ((fndecl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3397, __FUNCTION__))->typed.type)
;
3398 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3399
3400 va_start (ap, n)__builtin_va_start(ap, n);
3401 fn = build_call_valist (TREE_TYPE (fntype)((contains_struct_check ((fntype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3401, __FUNCTION__))->typed.type)
, fn, n, ap);
3402 va_end (ap)__builtin_va_end(ap);
3403 SET_EXPR_LOCATION (fn, loc)(expr_check (((fn)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3403, __FUNCTION__))->exp.locus = (loc)
;
3404 return fn;
3405}
3406
3407/* Expand a call to one of the builtin rounding functions gcc defines
3408 as an extension (lfloor and lceil). As these are gcc extensions we
3409 do not need to worry about setting errno to EDOM.
3410 If expanding via optab fails, lower expression to (int)(floor(x)).
3411 EXP is the expression that is a call to the builtin function;
3412 if convenient, the result should be placed in TARGET. */
3413
3414static rtx
3415expand_builtin_int_roundingfn (tree exp, rtx target)
3416{
3417 convert_optab builtin_optab;
3418 rtx op0, tmp;
3419 rtx_insn *insns;
3420 tree fndecl = get_callee_fndecl (exp);
3421 enum built_in_function fallback_fn;
3422 tree fallback_fndecl;
3423 machine_mode mode;
3424 tree arg;
3425
3426 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3427 return NULL_RTX(rtx) 0;
3428
3429 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3429, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3429, __FUNCTION__)))))
;
3430
3431 switch (DECL_FUNCTION_CODE (fndecl))
3432 {
3433 CASE_FLT_FN (BUILT_IN_ICEIL)case BUILT_IN_ICEIL: case BUILT_IN_ICEILF: case BUILT_IN_ICEILL:
3434 CASE_FLT_FN (BUILT_IN_LCEIL)case BUILT_IN_LCEIL: case BUILT_IN_LCEILF: case BUILT_IN_LCEILL:
3435 CASE_FLT_FN (BUILT_IN_LLCEIL)case BUILT_IN_LLCEIL: case BUILT_IN_LLCEILF: case BUILT_IN_LLCEILL:
3436 builtin_optab = lceil_optab;
3437 fallback_fn = BUILT_IN_CEIL;
3438 break;
3439
3440 CASE_FLT_FN (BUILT_IN_IFLOOR)case BUILT_IN_IFLOOR: case BUILT_IN_IFLOORF: case BUILT_IN_IFLOORL:
3441 CASE_FLT_FN (BUILT_IN_LFLOOR)case BUILT_IN_LFLOOR: case BUILT_IN_LFLOORF: case BUILT_IN_LFLOORL:
3442 CASE_FLT_FN (BUILT_IN_LLFLOOR)case BUILT_IN_LLFLOOR: case BUILT_IN_LLFLOORF: case BUILT_IN_LLFLOORL:
3443 builtin_optab = lfloor_optab;
3444 fallback_fn = BUILT_IN_FLOOR;
3445 break;
3446
3447 default:
3448 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3448, __FUNCTION__))
;
3449 }
3450
3451 /* Make a suitable register to place result in. */
3452 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3452, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3452, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3452, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3452, __FUNCTION__))->typed.type))->type_common.mode)
;
3453
3454 target = gen_reg_rtx (mode);
3455
3456 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3457 need to expand the argument again. This way, we will not perform
3458 side-effects more the once. */
3459 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3459, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3459, __FUNCTION__)))))
= arg = builtin_save_expr (arg);
3460
3461 op0 = expand_expr (arg, NULL__null, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
3462
3463 start_sequence ();
3464
3465 /* Compute into TARGET. */
3466 if (expand_sfix_optab (target, op0, builtin_optab))
3467 {
3468 /* Output the entire sequence. */
3469 insns = get_insns ();
3470 end_sequence ();
3471 emit_insn (insns);
3472 return target;
3473 }
3474
3475 /* If we were unable to expand via the builtin, stop the sequence
3476 (without outputting the insns). */
3477 end_sequence ();
3478
3479 /* Fall back to floating point rounding optab. */
3480 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3480, __FUNCTION__))->typed.type)
, fallback_fn);
3481
3482 /* For non-C99 targets we may end up without a fallback fndecl here
3483 if the user called __builtin_lfloor directly. In this case emit
3484 a call to the floor/ceil variants nevertheless. This should result
3485 in the best user experience for not full C99 targets. */
3486 if (fallback_fndecl == NULL_TREE(tree) __null)
3487 {
3488 tree fntype;
3489 const char *name = NULL__null;
3490
3491 switch (DECL_FUNCTION_CODE (fndecl))
3492 {
3493 case BUILT_IN_ICEIL:
3494 case BUILT_IN_LCEIL:
3495 case BUILT_IN_LLCEIL:
3496 name = "ceil";
3497 break;
3498 case BUILT_IN_ICEILF:
3499 case BUILT_IN_LCEILF:
3500 case BUILT_IN_LLCEILF:
3501 name = "ceilf";
3502 break;
3503 case BUILT_IN_ICEILL:
3504 case BUILT_IN_LCEILL:
3505 case BUILT_IN_LLCEILL:
3506 name = "ceill";
3507 break;
3508 case BUILT_IN_IFLOOR:
3509 case BUILT_IN_LFLOOR:
3510 case BUILT_IN_LLFLOOR:
3511 name = "floor";
3512 break;
3513 case BUILT_IN_IFLOORF:
3514 case BUILT_IN_LFLOORF:
3515 case BUILT_IN_LLFLOORF:
3516 name = "floorf";
3517 break;
3518 case BUILT_IN_IFLOORL:
3519 case BUILT_IN_LFLOORL:
3520 case BUILT_IN_LLFLOORL:
3521 name = "floorl";
3522 break;
3523 default:
3524 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3524, __FUNCTION__))
;
3525 }
3526
3527 fntype = build_function_type_list (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3527, __FUNCTION__))->typed.type)
,
3528 TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3528, __FUNCTION__))->typed.type)
, NULL_TREE(tree) __null);
3529 fallback_fndecl = build_fn_decl (name, fntype);
3530 }
3531
3532 exp = build_call_nofold_loc (EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
, fallback_fndecl, 1, arg);
3533
3534 tmp = expand_normal (exp);
3535 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp)((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3535, __FUNCTION__))->typed.type)
);
3536
3537 /* Truncate the result of floating point optab to integer
3538 via expand_fix (). */
3539 target = gen_reg_rtx (mode);
3540 expand_fix (target, tmp, 0);
3541
3542 return target;
3543}
3544
3545/* Expand a call to one of the builtin math functions doing integer
3546 conversion (lrint).
3547 Return 0 if a normal call should be emitted rather than expanding the
3548 function in-line. EXP is the expression that is a call to the builtin
3549 function; if convenient, the result should be placed in TARGET. */
3550
3551static rtx
3552expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3553{
3554 convert_optab builtin_optab;
3555 rtx op0;
3556 rtx_insn *insns;
3557 tree fndecl = get_callee_fndecl (exp);
3558 tree arg;
3559 machine_mode mode;
3560 enum built_in_function fallback_fn = BUILT_IN_NONE;
3561
3562 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3563 return NULL_RTX(rtx) 0;
3564
3565 arg = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3565, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3565, __FUNCTION__)))))
;
3566
3567 switch (DECL_FUNCTION_CODE (fndecl))
3568 {
3569 CASE_FLT_FN (BUILT_IN_IRINT)case BUILT_IN_IRINT: case BUILT_IN_IRINTF: case BUILT_IN_IRINTL:
3570 fallback_fn = BUILT_IN_LRINT;
3571 gcc_fallthrough ();
3572 CASE_FLT_FN (BUILT_IN_LRINT)case BUILT_IN_LRINT: case BUILT_IN_LRINTF: case BUILT_IN_LRINTL:
3573 CASE_FLT_FN (BUILT_IN_LLRINT)case BUILT_IN_LLRINT: case BUILT_IN_LLRINTF: case BUILT_IN_LLRINTL:
3574 builtin_optab = lrint_optab;
3575 break;
3576
3577 CASE_FLT_FN (BUILT_IN_IROUND)case BUILT_IN_IROUND: case BUILT_IN_IROUNDF: case BUILT_IN_IROUNDL:
3578 fallback_fn = BUILT_IN_LROUND;
3579 gcc_fallthrough ();
3580 CASE_FLT_FN (BUILT_IN_LROUND)case BUILT_IN_LROUND: case BUILT_IN_LROUNDF: case BUILT_IN_LROUNDL:
3581 CASE_FLT_FN (BUILT_IN_LLROUND)case BUILT_IN_LLROUND: case BUILT_IN_LLROUNDF: case BUILT_IN_LLROUNDL:
3582 builtin_optab = lround_optab;
3583 break;
3584
3585 default:
3586 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3586, __FUNCTION__))
;
3587 }
3588
3589 /* There's no easy way to detect the case we need to set EDOM. */
3590 if (flag_errno_mathglobal_options.x_flag_errno_math && fallback_fn == BUILT_IN_NONE)
3591 return NULL_RTX(rtx) 0;
3592
3593 /* Make a suitable register to place result in. */
3594 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3594, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3594, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3594, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3594, __FUNCTION__))->typed.type))->type_common.mode)
;
3595
3596 /* There's no easy way to detect the case we need to set EDOM. */
3597 if (!flag_errno_mathglobal_options.x_flag_errno_math)
3598 {
3599 rtx result = gen_reg_rtx (mode);
3600
3601 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3602 need to expand the argument again. This way, we will not perform
3603 side-effects more the once. */
3604 CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3604, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3604, __FUNCTION__)))))
= arg = builtin_save_expr (arg);
3605
3606 op0 = expand_expr (arg, NULL__null, VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
3607
3608 start_sequence ();
3609
3610 if (expand_sfix_optab (result, op0, builtin_optab))
3611 {
3612 /* Output the entire sequence. */
3613 insns = get_insns ();
3614 end_sequence ();
3615 emit_insn (insns);
3616 return result;
3617 }
3618
3619 /* If we were unable to expand via the builtin, stop the sequence
3620 (without outputting the insns) and call to the library function
3621 with the stabilized argument list. */
3622 end_sequence ();
3623 }
3624
3625 if (fallback_fn != BUILT_IN_NONE)
3626 {
3627 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3628 targets, (int) round (x) should never be transformed into
3629 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3630 a call to lround in the hope that the target provides at least some
3631 C99 functions. This should result in the best user experience for
3632 not full C99 targets. */
3633 tree fallback_fndecl = mathfn_built_in_1
3634 (TREE_TYPE (arg)((contains_struct_check ((arg), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3634, __FUNCTION__))->typed.type)
, as_combined_fn (fallback_fn), 0);
3635
3636 exp = build_call_nofold_loc (EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
,
3637 fallback_fndecl, 1, arg);
3638
3639 target = expand_call (exp, NULL_RTX(rtx) 0, target == const0_rtx(const_int_rtx[64]));
3640 target = maybe_emit_group_store (target, TREE_TYPE (exp)((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3640, __FUNCTION__))->typed.type)
);
3641 return convert_to_mode (mode, target, 0);
3642 }
3643
3644 return expand_call (exp, target, target == const0_rtx(const_int_rtx[64]));
3645}
3646
3647/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3648 a normal call should be emitted rather than expanding the function
3649 in-line. EXP is the expression that is a call to the builtin
3650 function; if convenient, the result should be placed in TARGET. */
3651
3652static rtx
3653expand_builtin_powi (tree exp, rtx target)
3654{
3655 tree arg0, arg1;
3656 rtx op0, op1;
3657 machine_mode mode;
3658 machine_mode mode2;
3659
3660 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3661 return NULL_RTX(rtx) 0;
3662
3663 arg0 = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3663, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3663, __FUNCTION__)))))
;
3664 arg1 = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3664, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3664, __FUNCTION__)))))
;
3665 mode = TYPE_MODE (TREE_TYPE (exp))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3665, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3665, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3665, __FUNCTION__))->typed.type)) : (((contains_struct_check
((exp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3665, __FUNCTION__))->typed.type))->type_common.mode)
;
3666
3667 /* Emit a libcall to libgcc. */
3668
3669 /* Mode of the 2nd argument must match that of an int. */
3670 mode2 = int_mode_for_size (INT_TYPE_SIZE32, 0).require ();
3671
3672 if (target == NULL_RTX(rtx) 0)
3673 target = gen_reg_rtx (mode);
3674
3675 op0 = expand_expr (arg0, NULL_RTX(rtx) 0, mode, EXPAND_NORMAL);
3676 if (GET_MODE (op0)((machine_mode) (op0)->mode) != mode)
3677 op0 = convert_to_mode (mode, op0, 0);
3678 op1 = expand_expr (arg1, NULL_RTX(rtx) 0, mode2, EXPAND_NORMAL);
3679 if (GET_MODE (op1)((machine_mode) (op1)->mode) != mode2)
3680 op1 = convert_to_mode (mode2, op1, 0);
3681
3682 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3683 target, LCT_CONST, mode,
3684 op0, mode, op1, mode2);
3685
3686 return target;
3687}
3688
3689/* Expand expression EXP which is a call to the strlen builtin. Return
3690 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3691 try to get the result in TARGET, if convenient. */
3692
3693static rtx
3694expand_builtin_strlen (tree exp, rtx target,
3695 machine_mode target_mode)
3696{
3697 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3698 return NULL_RTX(rtx) 0;
3699
3700 tree src = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3700, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3700, __FUNCTION__)))))
;
3701 if (!check_read_access (exp, src))
3702 return NULL_RTX(rtx) 0;
3703
3704 /* If the length can be computed at compile-time, return it. */
3705 if (tree len = c_strlen (src, 0))
3706 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3707
3708 /* If the length can be computed at compile-time and is constant
3709 integer, but there are side-effects in src, evaluate
3710 src for side-effects, then return len.
3711 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3712 can be optimized into: i++; x = 3; */
3713 tree len = c_strlen (src, 1);
3714 if (len && TREE_CODE (len)((enum tree_code) (len)->base.code) == INTEGER_CST)
3715 {
3716 expand_expr (src, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
3717 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3718 }
3719
3720 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT(8);
3721
3722 /* If SRC is not a pointer type, don't do this operation inline. */
3723 if (align == 0)
3724 return NULL_RTX(rtx) 0;
3725
3726 /* Bail out if we can't compute strlen in the right mode. */
3727 machine_mode insn_mode;
3728 enum insn_code icode = CODE_FOR_nothing;
3729 FOR_EACH_MODE_FROM (insn_mode, target_mode)for ((insn_mode) = (target_mode); mode_iterator::iterate_p (&
(insn_mode)); mode_iterator::get_wider (&(insn_mode)))
3730 {
3731 icode = optab_handler (strlen_optab, insn_mode);
3732 if (icode != CODE_FOR_nothing)
3733 break;
3734 }
3735 if (insn_mode == VOIDmode((void) 0, E_VOIDmode))
3736 return NULL_RTX(rtx) 0;
3737
3738 /* Make a place to hold the source address. We will not expand
3739 the actual source until we are sure that the expansion will
3740 not fail -- there are trees that cannot be expanded twice. */
3741 rtx src_reg = gen_reg_rtx (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
3742
3743 /* Mark the beginning of the strlen sequence so we can emit the
3744 source operand later. */
3745 rtx_insn *before_strlen = get_last_insn ();
3746
3747 class expand_operand ops[4];
3748 create_output_operand (&ops[0], target, insn_mode);
3749 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode((void) 0, E_BLKmode), src_reg));
3750 create_integer_operand (&ops[2], 0);
3751 create_integer_operand (&ops[3], align);
3752 if (!maybe_expand_insn (icode, 4, ops))
3753 return NULL_RTX(rtx) 0;
3754
3755 /* Check to see if the argument was declared attribute nonstring
3756 and if so, issue a warning since at this point it's not known
3757 to be nul-terminated. */
3758 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3759
3760 /* Now that we are assured of success, expand the source. */
3761 start_sequence ();
3762 rtx pat = expand_expr (src, src_reg, Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, EXPAND_NORMAL);
3763 if (pat != src_reg)
3764 {
3765#ifdef POINTERS_EXTEND_UNSIGNED1
3766 if (GET_MODE (pat)((machine_mode) (pat)->mode) != Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
)
3767 pat = convert_to_mode (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, pat,
3768 POINTERS_EXTEND_UNSIGNED1);
3769#endif
3770 emit_move_insn (src_reg, pat);
3771 }
3772 pat = get_insns ();
3773 end_sequence ();
3774
3775 if (before_strlen)
3776 emit_insn_after (pat, before_strlen);
3777 else
3778 emit_insn_before (pat, get_insns ());
3779
3780 /* Return the value in the proper mode for this function. */
3781 if (GET_MODE (ops[0].value)((machine_mode) (ops[0].value)->mode) == target_mode)
3782 target = ops[0].value;
3783 else if (target != 0)
3784 convert_move (target, ops[0].value, 0);
3785 else
3786 target = convert_to_mode (target_mode, ops[0].value, 0);
3787
3788 return target;
3789}
3790
3791/* Expand call EXP to the strnlen built-in, returning the result
3792 and setting it in TARGET. Otherwise return NULL_RTX on failure. */
3793
3794static rtx
3795expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3796{
3797 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3798 return NULL_RTX(rtx) 0;
3799
3800 tree src = CALL_EXPR_ARG (exp, 0)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3800, __FUNCTION__, (CALL_EXPR)))), ((0) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3800, __FUNCTION__)))))
;
3801 tree bound = CALL_EXPR_ARG (exp, 1)(*((const_cast<tree*> (tree_operand_check (((tree_check
((exp), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3801, __FUNCTION__, (CALL_EXPR)))), ((1) + 3), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3801, __FUNCTION__)))))
;
3802
3803 if (!bound)
3804 return NULL_RTX(rtx) 0;
3805
3806 check_read_access (exp, src, bound);
3807
3808 location_t loc = UNKNOWN_LOCATION((location_t) 0);
3809 if (EXPR_HAS_LOCATION (exp)(((IS_ADHOC_LOC (((((exp)) && ((tree_code_type[(int) (
((enum tree_code) ((exp))->base.code))]) >= tcc_reference
&& (tree_code_type[(int) (((enum tree_code) ((exp))->
base.code))]) <= tcc_expression)) ? (exp)->exp.locus : (
(location_t) 0)))) ? get_location_from_adhoc_loc (line_table,
((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))) : (((((exp)) &&
((tree_code_type[(int) (((enum tree_code) ((exp))->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0)))) != ((location_t
) 0))
)
3810 loc = EXPR_LOCATION (exp)((((exp)) && ((tree_code_type[(int) (((enum tree_code
) ((exp))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((exp))->base.code))]) <= tcc_expression
)) ? (exp)->exp.locus : ((location_t) 0))
;
3811
3812 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3813 so these conversions aren't necessary. */
3814 c_strlen_data lendata = { };
3815 tree len = c_strlen (src, 0, &lendata, 1);
3816 if (len)
3817 len = fold_convert_loc (loc, TREE_TYPE (bound)((contains_struct_check ((bound), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3817, __FUNCTION__))->typed.type)
, len);
3818
3819 if (TREE_CODE (bound)((enum tree_code) (bound)->base.code) == INTEGER_CST)
3820 {
3821 if (!len)
3822 return NULL_RTX(rtx) 0;
3823
3824 len = fold_build2_loc (loc, MIN_EXPR, size_type_nodeglobal_trees[TI_SIZE_TYPE], len, bound);
3825 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3826 }
3827
3828 if (TREE_CODE (bound)((enum tree_code) (bound)->base.code) != SSA_NAME)
3829 return NULL_RTX(rtx) 0;
3830
3831 wide_int min, max;
3832 enum value_range_kind rng = get_range_info (bound, &min, &max);
3833 if (rng != VR_RANGE)
3834 return NULL_RTX(rtx) 0;
3835
3836 if (!len || TREE_CODE (len)((enum tree_code) (len)->base.code) != INTEGER_CST)
3837 {
3838 bool exact;
3839 lendata.decl = unterminated_array (src, &len, &exact);
3840 if (!lendata.decl)
3841 return NULL_RTX(rtx) 0;
3842 }
3843
3844 if (lendata.decl)
3845 return NULL_RTX(rtx) 0;
3846
3847 if (wi::gtu_p (min, wi::to_wide (len)))
3848 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3849
3850 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len)((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3850, __FUNCTION__))->typed.type)
, len, bound);
3851 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3852}
3853
3854/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3855 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3856 a target constant. */
3857
3858static rtx
3859builtin_memcpy_read_str (void *data, HOST_WIDE_INTlong offset,
3860 scalar_int_mode mode)
3861{
3862 /* The REPresentation pointed to by DATA need not be a nul-terminated
3863 string but the caller guarantees it's large enough for MODE. */
3864 const char *rep = (const char *) data;
3865
3866 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3867}
3868
3869/* LEN specify length of the block of memcpy/memset operation.
3870 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3871 In some cases we can make very likely guess on max size, then we
3872 set it into PROBABLE_MAX_SIZE. */
3873
3874static void
3875determine_block_size (tree len, rtx len_rtx,
3876 unsigned HOST_WIDE_INTlong *min_size,
3877 unsigned HOST_WIDE_INTlong *max_size,
3878 unsigned HOST_WIDE_INTlong *probable_max_size)
3879{
3880 if (CONST_INT_P (len_rtx)(((enum rtx_code) (len_rtx)->code) == CONST_INT))
3881 {
3882 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx)((unsigned long) ((len_rtx)->u.hwint[0]));
3883 return;
3884 }
3885 else
3886 {
3887 wide_int min, max;
3888 enum value_range_kind range_type = VR_UNDEFINED;
3889
3890 /* Determine bounds from the type. */
3891 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))((tree_check5 ((((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3891, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3891, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
))
3892 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len))((tree_check5 ((((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3892, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3892, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
);
3893 else
3894 *min_size = 0;
3895 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))((tree_check5 ((((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3895, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3895, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval
)
))
3896 *probable_max_size = *max_size
3897 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len))((tree_check5 ((((contains_struct_check ((len), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3897, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3897, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval
)
);
3898 else
3899 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx))mode_mask_array[((machine_mode) (len_rtx)->mode)];
3900
3901 if (TREE_CODE (len)((enum tree_code) (len)->base.code) == SSA_NAME)
3902 range_type = get_range_info (len, &min, &max);
3903 if (range_type == VR_RANGE)
3904 {
3905 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3906 *min_size = min.to_uhwi ();
3907 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3908 *probable_max_size = *max_size = max.to_uhwi ();
3909 }
3910 else if (range_type == VR_ANTI_RANGE)
3911 {
3912 /* Code like
3913
3914 int n;
3915 if (n < 100)
3916 memcpy (a, b, n)
3917
3918 Produce anti range allowing negative values of N. We still
3919 can use the information and make a guess that N is not negative.
3920 */
3921 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3922 *probable_max_size = min.to_uhwi () - 1;
3923 }
3924 }
3925 gcc_checking_assert (*max_size <=((void)(!(*max_size <= (unsigned long) mode_mask_array[((machine_mode
) (len_rtx)->mode)]) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3927, __FUNCTION__), 0 : 0))
3926 (unsigned HOST_WIDE_INT)((void)(!(*max_size <= (unsigned long) mode_mask_array[((machine_mode
) (len_rtx)->mode)]) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3927, __FUNCTION__), 0 : 0))
3927 GET_MODE_MASK (GET_MODE (len_rtx)))((void)(!(*max_size <= (unsigned long) mode_mask_array[((machine_mode
) (len_rtx)->mode)]) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 3927, __FUNCTION__), 0 : 0))
;
3928}
3929
3930/* Issue a warning OPT for a bounded call EXP with a bound in RANGE
3931 accessing an object with SIZE. */
3932
3933static bool
3934maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3935 tree bndrng[2], tree size, const access_data *pad = NULL__null)
3936{
3937 if (!bndrng[0] || TREE_NO_WARNING (exp)((exp)->base.nowarning_flag))
3938 return false;
3939
3940 tree maxobjsize = max_object_size ();
3941
3942 bool warned = false;
3943
3944 if (opt == OPT_Wstringop_overread)
3945 {
3946 bool maybe = pad && pad->src.phi ();
3947
3948 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3949 {
3950 if (bndrng[0] == bndrng[1])
3951 warned = (func
3952 ? warning_at (loc, opt,
3953 (maybe
3954 ? G_("%K%qD specified bound %E may ""%K%qD specified bound %E may " "exceed maximum object size %E"
3955 "exceed maximum object size %E")"%K%qD specified bound %E may " "exceed maximum object size %E"
3956 : G_("%K%qD specified bound %E ""%K%qD specified bound %E " "exceeds maximum object size %E"
3957 "exceeds maximum object size %E")"%K%qD specified bound %E " "exceeds maximum object size %E"),
3958 exp, func, bndrng[0], maxobjsize)
3959 : warning_at (loc, opt,
3960 (maybe
3961 ? G_("%Kspecified bound %E may ""%Kspecified bound %E may " "exceed maximum object size %E"
3962 "exceed maximum object size %E")"%Kspecified bound %E may " "exceed maximum object size %E"
3963 : G_("%Kspecified bound %E ""%Kspecified bound %E " "exceeds maximum object size %E"
3964 "exceeds maximum object size %E")"%Kspecified bound %E " "exceeds maximum object size %E"),
3965 exp, bndrng[0], maxobjsize));
3966 else
3967 warned = (func
3968 ? warning_at (loc, opt,
3969 (maybe
3970 ? G_("%K%qD specified bound [%E, %E] may ""%K%qD specified bound [%E, %E] may " "exceed maximum object size %E"
3971 "exceed maximum object size %E")"%K%qD specified bound [%E, %E] may " "exceed maximum object size %E"
3972 : G_("%K%qD specified bound [%E, %E] ""%K%qD specified bound [%E, %E] " "exceeds maximum object size %E"
3973 "exceeds maximum object size %E")"%K%qD specified bound [%E, %E] " "exceeds maximum object size %E"),
3974 exp, func,
3975 bndrng[0], bndrng[1], maxobjsize)
3976 : warning_at (loc, opt,
3977 (maybe
3978 ? G_("%Kspecified bound [%E, %E] may ""%Kspecified bound [%E, %E] may " "exceed maximum object size %E"
3979 "exceed maximum object size %E")"%Kspecified bound [%E, %E] may " "exceed maximum object size %E"
3980 : G_("%Kspecified bound [%E, %E] ""%Kspecified bound [%E, %E] " "exceeds maximum object size %E"
3981 "exceeds maximum object size %E")"%Kspecified bound [%E, %E] " "exceeds maximum object size %E"),
3982 exp, bndrng[0], bndrng[1], maxobjsize));
3983 }
3984 else if (!size || tree_int_cst_le (bndrng[0], size))
3985 return false;
3986 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3987 warned = (func
3988 ? warning_at (loc, opt,
3989 (maybe
3990 ? G_("%K%qD specified bound %E may exceed ""%K%qD specified bound %E may exceed " "source size %E"
3991 "source size %E")"%K%qD specified bound %E may exceed " "source size %E"
3992 : G_("%K%qD specified bound %E exceeds ""%K%qD specified bound %E exceeds " "source size %E"
3993 "source size %E")"%K%qD specified bound %E exceeds " "source size %E"),
3994 exp, func, bndrng[0], size)
3995 : warning_at (loc, opt,
3996 (maybe
3997 ? G_("%Kspecified bound %E may exceed ""%Kspecified bound %E may exceed " "source size %E"
3998 "source size %E")"%Kspecified bound %E may exceed " "source size %E"
3999 : G_("%Kspecified bound %E exceeds ""%Kspecified bound %E exceeds " "source size %E"
4000 "source size %E")"%Kspecified bound %E exceeds " "source size %E"),
4001 exp, bndrng[0], size));
4002 else
4003 warned = (func
4004 ? warning_at (loc, opt,
4005 (maybe
4006 ? G_("%K%qD specified bound [%E, %E] may ""%K%qD specified bound [%E, %E] may " "exceed source size %E"
4007 "exceed source size %E")"%K%qD specified bound [%E, %E] may " "exceed source size %E"
4008 : G_("%K%qD specified bound [%E, %E] exceeds ""%K%qD specified bound [%E, %E] exceeds " "source size %E"
4009 "source size %E")"%K%qD specified bound [%E, %E] exceeds " "source size %E"),
4010 exp, func, bndrng[0], bndrng[1], size)
4011 : warning_at (loc, opt,
4012 (maybe
4013 ? G_("%Kspecified bound [%E, %E] may exceed ""%Kspecified bound [%E, %E] may exceed " "source size %E"
4014 "source size %E")"%Kspecified bound [%E, %E] may exceed " "source size %E"
4015 : G_("%Kspecified bound [%E, %E] exceeds ""%Kspecified bound [%E, %E] exceeds " "source size %E"
4016 "source size %E")"%Kspecified bound [%E, %E] exceeds " "source size %E"),
4017 exp, bndrng[0], bndrng[1], size));
4018 if (warned)
4019 {
4020 if (pad && pad->src.ref)
4021 {
4022 if (DECL_P (pad->src.ref)(tree_code_type[(int) (((enum tree_code) (pad->src.ref)->
base.code))] == tcc_declaration)
)
4023 inform (DECL_SOURCE_LOCATION (pad->src.ref)((contains_struct_check ((pad->src.ref), (TS_DECL_MINIMAL)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4023, __FUNCTION__))->decl_minimal.locus)
,
4024 "source object declared here");
4025 else if (EXPR_HAS_LOCATION (pad->src.ref)(((IS_ADHOC_LOC (((((pad->src.ref)) && ((tree_code_type
[(int) (((enum tree_code) ((pad->src.ref))->base.code))
]) >= tcc_reference && (tree_code_type[(int) (((enum
tree_code) ((pad->src.ref))->base.code))]) <= tcc_expression
)) ? (pad->src.ref)->exp.locus : ((location_t) 0)))) ? get_location_from_adhoc_loc
(line_table, ((((pad->src.ref)) && ((tree_code_type
[(int) (((enum tree_code) ((pad->src.ref))->base.code))
]) >= tcc_reference && (tree_code_type[(int) (((enum
tree_code) ((pad->src.ref))->base.code))]) <= tcc_expression
)) ? (pad->src.ref)->exp.locus : ((location_t) 0))) : (
((((pad->src.ref)) && ((tree_code_type[(int) (((enum
tree_code) ((pad->src.ref))->base.code))]) >= tcc_reference
&& (tree_code_type[(int) (((enum tree_code) ((pad->
src.ref))->base.code))]) <= tcc_expression)) ? (pad->
src.ref)->exp.locus : ((location_t) 0)))) != ((location_t)
0))
)
4026 inform (EXPR_LOCATION (pad->src.ref)((((pad->src.ref)) && ((tree_code_type[(int) (((enum
tree_code) ((pad->src.ref))->base.code))]) >= tcc_reference
&& (tree_code_type[(int) (((enum tree_code) ((pad->
src.ref))->base.code))]) <= tcc_expression)) ? (pad->
src.ref)->exp.locus : ((location_t) 0))
,
4027 "source object allocated here");
4028 }
4029 TREE_NO_WARNING (exp)((exp)->base.nowarning_flag) = true;
4030 }
4031
4032 return warned;
4033 }
4034
4035 bool maybe = pad && pad->dst.phi ();
4036 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
4037 {
4038 if (bndrng[0] == bndrng[1])
4039 warned = (func
4040 ? warning_at (loc, opt,
4041 (maybe
4042 ? G_("%K%qD specified size %E may ""%K%qD specified size %E may " "exceed maximum object size %E"
4043 "exceed maximum object size %E")"%K%qD specified size %E may " "exceed maximum object size %E"
4044 : G_("%K%qD specified size %E ""%K%qD specified size %E " "exceeds maximum object size %E"
4045 "exceeds maximum object size %E")"%K%qD specified size %E " "exceeds maximum object size %E"),
4046 exp, func, bndrng[0], maxobjsize)
4047 : warning_at (loc, opt,
4048 (maybe
4049 ? G_("%Kspecified size %E may exceed ""%Kspecified size %E may exceed " "maximum object size %E"
4050 "maximum object size %E")"%Kspecified size %E may exceed " "maximum object size %E"
4051 : G_("%Kspecified size %E exceeds ""%Kspecified size %E exceeds " "maximum object size %E"
4052 "maximum object size %E")"%Kspecified size %E exceeds " "maximum object size %E"),
4053 exp, bndrng[0], maxobjsize));
4054 else
4055 warned = (func
4056 ? warning_at (loc, opt,
4057 (maybe
4058 ? G_("%K%qD specified size between %E and %E ""%K%qD specified size between %E and %E " "may exceed maximum object size %E"
4059 "may exceed maximum object size %E")"%K%qD specified size between %E and %E " "may exceed maximum object size %E"
4060 : G_("%K%qD specified size between %E and %E ""%K%qD specified size between %E and %E " "exceeds maximum object size %E"
4061 "exceeds maximum object size %E")"%K%qD specified size between %E and %E " "exceeds maximum object size %E"),
4062 exp, func,
4063 bndrng[0], bndrng[1], maxobjsize)
4064 : warning_at (loc, opt,
4065 (maybe
4066 ? G_("%Kspecified size between %E and %E ""%Kspecified size between %E and %E " "may exceed maximum object size %E"
4067 "may exceed maximum object size %E")"%Kspecified size between %E and %E " "may exceed maximum object size %E"
4068 : G_("%Kspecified size between %E and %E ""%Kspecified size between %E and %E " "exceeds maximum object size %E"
4069 "exceeds maximum object size %E")"%Kspecified size between %E and %E " "exceeds maximum object size %E"),
4070 exp, bndrng[0], bndrng[1], maxobjsize));
4071 }
4072 else if (!size || tree_int_cst_le (bndrng[0], size))
4073 return false;
4074 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
4075 warned = (func
4076 ? warning_at (loc, OPT_Wstringop_overflow_,
4077 (maybe
4078 ? G_("%K%qD specified bound %E may exceed ""%K%qD specified bound %E may exceed " "destination size %E"
4079 "destination size %E")"%K%qD specified bound %E may exceed " "destination size %E"
4080 : G_("%K%qD specified bound %E exceeds ""%K%qD specified bound %E exceeds " "destination size %E"
4081 "destination size %E")"%K%qD specified bound %E exceeds " "destination size %E"),
4082 exp, func, bndrng[0], size)
4083 : warning_at (loc, OPT_Wstringop_overflow_,
4084 (maybe
4085 ? G_("%Kspecified bound %E may exceed ""%Kspecified bound %E may exceed " "destination size %E"
4086 "destination size %E")"%Kspecified bound %E may exceed " "destination size %E"
4087 : G_("%Kspecified bound %E exceeds ""%Kspecified bound %E exceeds " "destination size %E"
4088 "destination size %E")"%Kspecified bound %E exceeds " "destination size %E"),
4089 exp, bndrng[0], size));
4090 else
4091 warned = (func
4092 ? warning_at (loc, OPT_Wstringop_overflow_,
4093 (maybe
4094 ? G_("%K%qD specified bound [%E, %E] may exceed ""%K%qD specified bound [%E, %E] may exceed " "destination size %E"
4095 "destination size %E")"%K%qD specified bound [%E, %E] may exceed " "destination size %E"
4096 : G_("%K%qD specified bound [%E, %E] exceeds ""%K%qD specified bound [%E, %E] exceeds " "destination size %E"
4097 "destination size %E")"%K%qD specified bound [%E, %E] exceeds " "destination size %E"),
4098 exp, func, bndrng[0], bndrng[1], size)
4099 : warning_at (loc, OPT_Wstringop_overflow_,
4100 (maybe
4101 ? G_("%Kspecified bound [%E, %E] exceeds ""%Kspecified bound [%E, %E] exceeds " "destination size %E"
4102 "destination size %E")"%Kspecified bound [%E, %E] exceeds " "destination size %E"
4103 : G_("%Kspecified bound [%E, %E] exceeds ""%Kspecified bound [%E, %E] exceeds " "destination size %E"
4104 "destination size %E")"%Kspecified bound [%E, %E] exceeds " "destination size %E"),
4105 exp, bndrng[0], bndrng[1], size));
4106
4107 if (warned)
4108 {
4109 if (pad && pad->dst.ref)
4110 {
4111 if (DECL_P (pad->dst.ref)(tree_code_type[(int) (((enum tree_code) (pad->dst.ref)->
base.code))] == tcc_declaration)
)
4112 inform (DECL_SOURCE_LOCATION (pad->dst.ref)((contains_struct_check ((pad->dst.ref), (TS_DECL_MINIMAL)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4112, __FUNCTION__))->decl_minimal.locus)
,
4113 "destination object declared here");
4114 else if (EXPR_HAS_LOCATION (pad->dst.ref)(((IS_ADHOC_LOC (((((pad->dst.ref)) && ((tree_code_type
[(int) (((enum tree_code) ((pad->dst.ref))->base.code))
]) >= tcc_reference && (tree_code_type[(int) (((enum
tree_code) ((pad->dst.ref))->base.code))]) <= tcc_expression
)) ? (pad->dst.ref)->exp.locus : ((location_t) 0)))) ? get_location_from_adhoc_loc
(line_table, ((((pad->dst.ref)) && ((tree_code_type
[(int) (((enum tree_code) ((pad->dst.ref))->base.code))
]) >= tcc_reference && (tree_code_type[(int) (((enum
tree_code) ((pad->dst.ref))->base.code))]) <= tcc_expression
)) ? (pad->dst.ref)->exp.locus : ((location_t) 0))) : (
((((pad->dst.ref)) && ((tree_code_type[(int) (((enum
tree_code) ((pad->dst.ref))->base.code))]) >= tcc_reference
&& (tree_code_type[(int) (((enum tree_code) ((pad->
dst.ref))->base.code))]) <= tcc_expression)) ? (pad->
dst.ref)->exp.locus : ((location_t) 0)))) != ((location_t)
0))
)
4115 inform (EXPR_LOCATION (pad->dst.ref)((((pad->dst.ref)) && ((tree_code_type[(int) (((enum
tree_code) ((pad->dst.ref))->base.code))]) >= tcc_reference
&& (tree_code_type[(int) (((enum tree_code) ((pad->
dst.ref))->base.code))]) <= tcc_expression)) ? (pad->
dst.ref)->exp.locus : ((location_t) 0))
,
4116 "destination object allocated here");
4117 }
4118 TREE_NO_WARNING (exp)((exp)->base.nowarning_flag) = true;
4119 }
4120
4121 return warned;
4122}
4123
4124/* For an expression EXP issue an access warning controlled by option OPT
4125 with access to a region SIZE bytes in size in the RANGE of sizes.
4126 WRITE is true for a write access, READ for a read access, neither for
4127 call that may or may not perform an access but for which the range
4128 is expected to valid.
4129 Returns true when a warning has been issued. */
4130
4131static bool
4132warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
4133 tree size, bool write, bool read, bool maybe)
4134{
4135 bool warned = false;
4136
4137 if (write && read)
4138 {
4139 if (tree_int_cst_equal (range[0], range[1]))
4140 warned = (func
4141 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4142 (maybe
4143 ? G_("%K%qD may access %E byte in a region ""%K%qD may access %E byte in a region " "of size %E"
4144 "of size %E")"%K%qD may access %E byte in a region " "of size %E"
4145 : G_("%K%qD accessing %E byte in a region ""%K%qD accessing %E byte in a region " "of size %E"
4146 "of size %E")"%K%qD accessing %E byte in a region " "of size %E"),
4147 (maybe
4148 ? G_ ("%K%qD may access %E bytes in a region ""%K%qD may access %E bytes in a region " "of size %E"
4149 "of size %E")"%K%qD may access %E bytes in a region " "of size %E"
4150 : G_ ("%K%qD accessing %E bytes in a region ""%K%qD accessing %E bytes in a region " "of size %E"
4151 "of size %E")"%K%qD accessing %E bytes in a region " "of size %E"),
4152 exp, func, range[0], size)
4153 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4154 (maybe
4155 ? G_("%Kmay access %E byte in a region ""%Kmay access %E byte in a region " "of size %E"
4156 "of size %E")"%Kmay access %E byte in a region " "of size %E"
4157 : G_("%Kaccessing %E byte in a region ""%Kaccessing %E byte in a region " "of size %E"
4158 "of size %E")"%Kaccessing %E byte in a region " "of size %E"),
4159 (maybe
4160 ? G_("%Kmay access %E bytes in a region ""%Kmay access %E bytes in a region " "of size %E"
4161 "of size %E")"%Kmay access %E bytes in a region " "of size %E"
4162 : G_("%Kaccessing %E bytes in a region ""%Kaccessing %E bytes in a region " "of size %E"
4163 "of size %E")"%Kaccessing %E bytes in a region " "of size %E"),
4164 exp, range[0], size));
4165 else if (tree_int_cst_sign_bit (range[1]))
4166 {
4167 /* Avoid printing the upper bound if it's invalid. */
4168 warned = (func
4169 ? warning_at (loc, opt,
4170 (maybe
4171 ? G_("%K%qD may access %E or more bytes ""%K%qD may access %E or more bytes " "in a region of size %E"
4172 "in a region of size %E")"%K%qD may access %E or more bytes " "in a region of size %E"
4173 : G_("%K%qD accessing %E or more bytes ""%K%qD accessing %E or more bytes " "in a region of size %E"
4174 "in a region of size %E")"%K%qD accessing %E or more bytes " "in a region of size %E"),
4175 exp, func, range[0], size)
4176 : warning_at (loc, opt,
4177 (maybe
4178 ? G_("%Kmay access %E or more bytes ""%Kmay access %E or more bytes " "in a region of size %E"
4179 "in a region of size %E")"%Kmay access %E or more bytes " "in a region of size %E"
4180 : G_("%Kaccessing %E or more bytes ""%Kaccessing %E or more bytes " "in a region of size %E"
4181 "in a region of size %E")"%Kaccessing %E or more bytes " "in a region of size %E"),
4182 exp, range[0], size));
4183 }
4184 else
4185 warned = (func
4186 ? warning_at (loc, opt,
4187 (maybe
4188 ? G_("%K%qD may access between %E and %E ""%K%qD may access between %E and %E " "bytes in a region of size %E"
4189 "bytes in a region of size %E")"%K%qD may access between %E and %E " "bytes in a region of size %E"
4190 : G_("%K%qD accessing between %E and %E ""%K%qD accessing between %E and %E " "bytes in a region of size %E"
4191 "bytes in a region of size %E")"%K%qD accessing between %E and %E " "bytes in a region of size %E"),
4192 exp, func, range[0], range[1],
4193 size)
4194 : warning_at (loc, opt,
4195 (maybe
4196 ? G_("%Kmay access between %E and %E bytes ""%Kmay access between %E and %E bytes " "in a region of size %E"
4197 "in a region of size %E")"%Kmay access between %E and %E bytes " "in a region of size %E"
4198 : G_("%Kaccessing between %E and %E bytes ""%Kaccessing between %E and %E bytes " "in a region of size %E"
4199 "in a region of size %E")"%Kaccessing between %E and %E bytes " "in a region of size %E"),
4200 exp, range[0], range[1],
4201 size));
4202 return warned;
4203 }
4204
4205 if (write)
4206 {
4207 if (tree_int_cst_equal (range[0], range[1]))
4208 warned = (func
4209 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4210 (maybe
4211 ? G_("%K%qD may write %E byte into a region ""%K%qD may write %E byte into a region " "of size %E"
4212 "of size %E")"%K%qD may write %E byte into a region " "of size %E"
4213 : G_("%K%qD writing %E byte into a region ""%K%qD writing %E byte into a region " "of size %E overflows the destination"
4214 "of size %E overflows the destination")"%K%qD writing %E byte into a region " "of size %E overflows the destination"),
4215 (maybe
4216 ? G_("%K%qD may write %E bytes into a region ""%K%qD may write %E bytes into a region " "of size %E"
4217 "of size %E")"%K%qD may write %E bytes into a region " "of size %E"
4218 : G_("%K%qD writing %E bytes into a region ""%K%qD writing %E bytes into a region " "of size %E overflows the destination"
4219 "of size %E overflows the destination")"%K%qD writing %E bytes into a region " "of size %E overflows the destination"),
4220 exp, func, range[0], size)
4221 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4222 (maybe
4223 ? G_("%Kmay write %E byte into a region ""%Kmay write %E byte into a region " "of size %E"
4224 "of size %E")"%Kmay write %E byte into a region " "of size %E"
4225 : G_("%Kwriting %E byte into a region ""%Kwriting %E byte into a region " "of size %E overflows the destination"
4226 "of size %E overflows the destination")"%Kwriting %E byte into a region " "of size %E overflows the destination"),
4227 (maybe
4228 ? G_("%Kmay write %E bytes into a region ""%Kmay write %E bytes into a region " "of size %E"
4229 "of size %E")"%Kmay write %E bytes into a region " "of size %E"
4230 : G_("%Kwriting %E bytes into a region ""%Kwriting %E bytes into a region " "of size %E overflows the destination"
4231 "of size %E overflows the destination")"%Kwriting %E bytes into a region " "of size %E overflows the destination"),
4232 exp, range[0], size));
4233 else if (tree_int_cst_sign_bit (range[1]))
4234 {
4235 /* Avoid printing the upper bound if it's invalid. */
4236 warned = (func
4237 ? warning_at (loc, opt,
4238 (maybe
4239 ? G_("%K%qD may write %E or more bytes ""%K%qD may write %E or more bytes " "into a region of size %E "
"the destination"
4240 "into a region of size %E ""%K%qD may write %E or more bytes " "into a region of size %E "
"the destination"
4241 "the destination")"%K%qD may write %E or more bytes " "into a region of size %E "
"the destination"
4242 : G_("%K%qD writing %E or more bytes ""%K%qD writing %E or more bytes " "into a region of size %E overflows "
"the destination"
4243 "into a region of size %E overflows ""%K%qD writing %E or more bytes " "into a region of size %E overflows "
"the destination"
4244 "the destination")"%K%qD writing %E or more bytes " "into a region of size %E overflows "
"the destination"
),
4245 exp, func, range[0], size)
4246 : warning_at (loc, opt,
4247 (maybe
4248 ? G_("%Kmay write %E or more bytes into ""%Kmay write %E or more bytes into " "a region of size %E"
4249 "a region of size %E")"%Kmay write %E or more bytes into " "a region of size %E"
4250 : G_("%Kwriting %E or more bytes into ""%Kwriting %E or more bytes into " "a region of size %E overflows "
"the destination"
4251 "a region of size %E overflows ""%Kwriting %E or more bytes into " "a region of size %E overflows "
"the destination"
4252 "the destination")"%Kwriting %E or more bytes into " "a region of size %E overflows "
"the destination"
),
4253 exp, range[0], size));
4254 }
4255 else
4256 warned = (func
4257 ? warning_at (loc, opt,
4258 (maybe
4259 ? G_("%K%qD may write between %E and %E bytes ""%K%qD may write between %E and %E bytes " "into a region of size %E"
4260 "into a region of size %E")"%K%qD may write between %E and %E bytes " "into a region of size %E"
4261 : G_("%K%qD writing between %E and %E bytes ""%K%qD writing between %E and %E bytes " "into a region of size %E overflows "
"the destination"
4262 "into a region of size %E overflows ""%K%qD writing between %E and %E bytes " "into a region of size %E overflows "
"the destination"
4263 "the destination")"%K%qD writing between %E and %E bytes " "into a region of size %E overflows "
"the destination"
),
4264 exp, func, range[0], range[1],
4265 size)
4266 : warning_at (loc, opt,
4267 (maybe
4268 ? G_("%Kmay write between %E and %E bytes ""%Kmay write between %E and %E bytes " "into a region of size %E"
4269 "into a region of size %E")"%Kmay write between %E and %E bytes " "into a region of size %E"
4270 : G_("%Kwriting between %E and %E bytes ""%Kwriting between %E and %E bytes " "into a region of size %E overflows "
"the destination"
4271 "into a region of size %E overflows ""%Kwriting between %E and %E bytes " "into a region of size %E overflows "
"the destination"
4272 "the destination")"%Kwriting between %E and %E bytes " "into a region of size %E overflows "
"the destination"
),
4273 exp, range[0], range[1],
4274 size));
4275 return warned;
4276 }
4277
4278 if (read)
4279 {
4280 if (tree_int_cst_equal (range[0], range[1]))
4281 warned = (func
4282 ? warning_n (loc, OPT_Wstringop_overread,
4283 tree_to_uhwi (range[0]),
4284 (maybe
4285 ? G_("%K%qD may reade %E byte from a region ""%K%qD may reade %E byte from a region " "of size %E"
4286 "of size %E")"%K%qD may reade %E byte from a region " "of size %E"
4287 : G_("%K%qD reading %E byte from a region ""%K%qD reading %E byte from a region " "of size %E"
4288 "of size %E")"%K%qD reading %E byte from a region " "of size %E"),
4289 (maybe
4290 ? G_("%K%qD may read %E bytes from a region ""%K%qD may read %E bytes from a region " "of size %E"
4291 "of size %E")"%K%qD may read %E bytes from a region " "of size %E"
4292 : G_("%K%qD reading %E bytes from a region ""%K%qD reading %E bytes from a region " "of size %E"
4293 "of size %E")"%K%qD reading %E bytes from a region " "of size %E"),
4294 exp, func, range[0], size)
4295 : warning_n (loc, OPT_Wstringop_overread,
4296 tree_to_uhwi (range[0]),
4297 (maybe
4298 ? G_("%Kmay read %E byte from a region ""%Kmay read %E byte from a region " "of size %E"
4299 "of size %E")"%Kmay read %E byte from a region " "of size %E"
4300 : G_("%Kreading %E byte from a region ""%Kreading %E byte from a region " "of size %E"
4301 "of size %E")"%Kreading %E byte from a region " "of size %E"),
4302 (maybe
4303 ? G_("%Kmay read %E bytes from a region ""%Kmay read %E bytes from a region " "of size %E"
4304 "of size %E")"%Kmay read %E bytes from a region " "of size %E"
4305 : G_("%Kreading %E bytes from a region ""%Kreading %E bytes from a region " "of size %E"
4306 "of size %E")"%Kreading %E bytes from a region " "of size %E"),
4307 exp, range[0], size));
4308 else if (tree_int_cst_sign_bit (range[1]))
4309 {
4310 /* Avoid printing the upper bound if it's invalid. */
4311 warned = (func
4312 ? warning_at (loc, OPT_Wstringop_overread,
4313 (maybe
4314 ? G_("%K%qD may read %E or more bytes ""%K%qD may read %E or more bytes " "from a region of size %E"
4315 "from a region of size %E")"%K%qD may read %E or more bytes " "from a region of size %E"
4316 : G_("%K%qD reading %E or more bytes ""%K%qD reading %E or more bytes " "from a region of size %E"
4317 "from a region of size %E")"%K%qD reading %E or more bytes " "from a region of size %E"),
4318 exp, func, range[0], size)
4319 : warning_at (loc, OPT_Wstringop_overread,
4320 (maybe
4321 ? G_("%Kmay read %E or more bytes ""%Kmay read %E or more bytes " "from a region of size %E"
4322 "from a region of size %E")"%Kmay read %E or more bytes " "from a region of size %E"
4323 : G_("%Kreading %E or more bytes ""%Kreading %E or more bytes " "from a region of size %E"
4324 "from a region of size %E")"%Kreading %E or more bytes " "from a region of size %E"),
4325 exp, range[0], size));
4326 }
4327 else
4328 warned = (func
4329 ? warning_at (loc, OPT_Wstringop_overread,
4330 (maybe
4331 ? G_("%K%qD may read between %E and %E bytes ""%K%qD may read between %E and %E bytes " "from a region of size %E"
4332 "from a region of size %E")"%K%qD may read between %E and %E bytes " "from a region of size %E"
4333 : G_("%K%qD reading between %E and %E bytes ""%K%qD reading between %E and %E bytes " "from a region of size %E"
4334 "from a region of size %E")"%K%qD reading between %E and %E bytes " "from a region of size %E"),
4335 exp, func, range[0], range[1], size)
4336 : warning_at (loc, opt,
4337 (maybe
4338 ? G_("%Kmay read between %E and %E bytes ""%Kmay read between %E and %E bytes " "from a region of size %E"
4339 "from a region of size %E")"%Kmay read between %E and %E bytes " "from a region of size %E"
4340 : G_("%Kreading between %E and %E bytes ""%Kreading between %E and %E bytes " "from a region of size %E"
4341 "from a region of size %E")"%Kreading between %E and %E bytes " "from a region of size %E"),
4342 exp, range[0], range[1], size));
4343
4344 if (warned)
4345 TREE_NO_WARNING (exp)((exp)->base.nowarning_flag) = true;
4346
4347 return warned;
4348 }
4349
4350 if (tree_int_cst_equal (range[0], range[1])
4351 || tree_int_cst_sign_bit (range[1]))
4352 warned = (func
4353 ? warning_n (loc, OPT_Wstringop_overread,
4354 tree_to_uhwi (range[0]),
4355 "%K%qD epecting %E byte in a region of size %E",
4356 "%K%qD expecting %E bytes in a region of size %E",
4357 exp, func, range[0], size)
4358 : warning_n (loc, OPT_Wstringop_overread,
4359 tree_to_uhwi (range[0]),
4360 "%Kexpecting %E byte in a region of size %E",
4361 "%Kexpecting %E bytes in a region of size %E",
4362 exp, range[0], size));
4363 else if (tree_int_cst_sign_bit (range[1]))
4364 {
4365 /* Avoid printing the upper bound if it's invalid. */
4366 warned = (func
4367 ? warning_at (loc, OPT_Wstringop_overread,
4368 "%K%qD expecting %E or more bytes in a region "
4369 "of size %E",
4370 exp, func, range[0], size)
4371 : warning_at (loc, OPT_Wstringop_overread,
4372 "%Kexpecting %E or more bytes in a region "
4373 "of size %E",
4374 exp, range[0], size));
4375 }
4376 else
4377 warned = (func
4378 ? warning_at (loc, OPT_Wstringop_overread,
4379 "%K%qD expecting between %E and %E bytes in "
4380 "a region of size %E",
4381 exp, func, range[0], range[1], size)
4382 : warning_at (loc, OPT_Wstringop_overread,
4383 "%Kexpectting between %E and %E bytes in "
4384 "a region of size %E",
4385 exp, range[0], range[1], size));
4386
4387 if (warned)
4388 TREE_NO_WARNING (exp)((exp)->base.nowarning_flag) = true;
4389
4390 return warned;
4391}
4392
4393/* Issue one inform message describing each target of an access REF.
4394 WRITE is set for a write access and clear for a read access. */
4395
4396void
4397access_ref::inform_access (access_mode mode) const
4398{
4399 const access_ref &aref = *this;
4400 if (!aref.ref)
4401 return;
4402
4403 if (aref.phi ())
4404 {
4405 /* Set MAXREF to refer to the largest object and fill ALL_REFS
4406 with data for all objects referenced by the PHI arguments. */
4407 access_ref maxref;
4408 auto_vec<access_ref> all_refs;
4409 if (!get_ref (&all_refs, &maxref))
4410 return;
4411
4412 /* Except for MAXREF, the rest of the arguments' offsets need not
4413 reflect one added to the PHI itself. Determine the latter from
4414 MAXREF on which the result is based. */
4415 const offset_int orng[] =
4416 {
4417 offrng[0] - maxref.offrng[0],
4418 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
4419 };
4420
4421 /* Add the final PHI's offset to that of each of the arguments
4422 and recurse to issue an inform message for it. */
4423 for (unsigned i = 0; i != all_refs.length (); ++i)
4424 {
4425 /* Skip any PHIs; those could lead to infinite recursion. */
4426 if (all_refs[i].phi ())
4427 continue;
4428
4429 all_refs[i].add_offset (orng[0], orng[1]);
4430 all_refs[i].inform_access (mode);
4431 }
4432 return;
4433 }
4434
4435 /* Convert offset range and avoid including a zero range since it
4436 isn't necessarily meaningful. */
4437 HOST_WIDE_INTlong diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node)((tree_check5 ((global_trees[TI_PTRDIFF_TYPE]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4437, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
);
4438 HOST_WIDE_INTlong diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node)((tree_check5 ((global_trees[TI_PTRDIFF_TYPE]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4438, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval
)
);
4439 HOST_WIDE_INTlong minoff;
4440 HOST_WIDE_INTlong maxoff = diff_max;
4441 if (wi::fits_shwi_p (aref.offrng[0]))
4442 minoff = aref.offrng[0].to_shwi ();
4443 else
4444 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
4445
4446 if (wi::fits_shwi_p (aref.offrng[1]))
4447 maxoff = aref.offrng[1].to_shwi ();
4448
4449 if (maxoff <= diff_min || maxoff >= diff_max)
4450 /* Avoid mentioning an upper bound that's equal to or in excess
4451 of the maximum of ptrdiff_t. */
4452 maxoff = minoff;
4453
4454 /* Convert size range and always include it since all sizes are
4455 meaningful. */
4456 unsigned long long minsize = 0, maxsize = 0;
4457 if (wi::fits_shwi_p (aref.sizrng[0])
4458 && wi::fits_shwi_p (aref.sizrng[1]))
4459 {
4460 minsize = aref.sizrng[0].to_shwi ();
4461 maxsize = aref.sizrng[1].to_shwi ();
4462 }
4463
4464 /* SIZRNG doesn't necessarily have the same range as the allocation
4465 size determined by gimple_call_alloc_size (). */
4466 char sizestr[80];
4467 if (minsize == maxsize)
4468 sprintf (sizestr, "%llu", minsize);
4469 else
4470 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
4471
4472 char offstr[80];
4473 if (minoff == 0
4474 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
4475 offstr[0] = '\0';
4476 else if (minoff == maxoff)
4477 sprintf (offstr, "%lli", (long long) minoff);
4478 else
4479 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
4480
4481 location_t loc = UNKNOWN_LOCATION((location_t) 0);
4482
4483 tree ref = this->ref;
4484 tree allocfn = NULL_TREE(tree) __null;
4485 if (TREE_CODE (ref)((enum tree_code) (ref)->base.code) == SSA_NAME)
4486 {
4487 gimple *stmt = SSA_NAME_DEF_STMT (ref)(tree_check ((ref), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4487, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
4488 if (is_gimple_call (stmt))
4489 {
4490 loc = gimple_location (stmt);
4491 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4492 {
4493 /* Strip the SSA_NAME suffix from the variable name and
4494 recreate an identifier with the VLA's original name. */
4495 ref = gimple_call_lhs (stmt);
4496 ref = SSA_NAME_IDENTIFIER (ref)((tree_check ((ref), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4496, __FUNCTION__, (SSA_NAME)))->ssa_name.var != (tree)
__null ? (((enum tree_code) ((ref)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (ref)->ssa_name.var : ((contains_struct_check
(((ref)->ssa_name.var), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4496, __FUNCTION__))->decl_minimal.name)) : (tree) __null
)
;
4497 const char *id = IDENTIFIER_POINTER (ref)((const char *) (tree_check ((ref), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4497, __FUNCTION__, (IDENTIFIER_NODE)))->identifier.id.str
)
;
4498 size_t len = strcspn (id, ".$");
4499 if (!len)
4500 len = strlen (id);
4501 ref = get_identifier_with_length (id, len);
4502 }
4503 else
4504 {
4505 /* Except for VLAs, retrieve the allocation function. */
4506 allocfn = gimple_call_fndecl (stmt);
4507 if (!allocfn)
4508 allocfn = gimple_call_fn (stmt);
4509 if (TREE_CODE (allocfn)((enum tree_code) (allocfn)->base.code) == SSA_NAME)
4510 {
4511 /* For an ALLOC_CALL via a function pointer make a small
4512 effort to determine the destination of the pointer. */
4513 gimple *def = SSA_NAME_DEF_STMT (allocfn)(tree_check ((allocfn), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4513, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
4514 if (gimple_assign_single_p (def))
4515 {
4516 tree rhs = gimple_assign_rhs1 (def);
4517 if (DECL_P (rhs)(tree_code_type[(int) (((enum tree_code) (rhs)->base.code)
)] == tcc_declaration)
)
4518 allocfn = rhs;
4519 else if (TREE_CODE (rhs)((enum tree_code) (rhs)->base.code) == COMPONENT_REF)
4520 allocfn = TREE_OPERAND (rhs, 1)(*((const_cast<tree*> (tree_operand_check ((rhs), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4520, __FUNCTION__)))))
;
4521 }
4522 }
4523 }
4524 }
4525 else if (gimple_nop_p (stmt))
4526 /* Handle DECL_PARM below. */
4527 ref = SSA_NAME_VAR (ref)((tree_check ((ref), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4527, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
__null || ((enum tree_code) ((ref)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) __null : (ref)->ssa_name
.var)
;
4528 }
4529
4530 if (DECL_P (ref)(tree_code_type[(int) (((enum tree_code) (ref)->base.code)
)] == tcc_declaration)
)
4531 loc = DECL_SOURCE_LOCATION (ref)((contains_struct_check ((ref), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4531, __FUNCTION__))->decl_minimal.locus)
;
4532 else if (EXPR_P (ref)((tree_code_type[(int) (((enum tree_code) (ref)->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) (ref)->base.code))]) <= tcc_expression)
&& EXPR_HAS_LOCATION (ref)(((IS_ADHOC_LOC (((((ref)) && ((tree_code_type[(int) (
((enum tree_code) ((ref))->base.code))]) >= tcc_reference
&& (tree_code_type[(int) (((enum tree_code) ((ref))->
base.code))]) <= tcc_expression)) ? (ref)->exp.locus : (
(location_t) 0)))) ? get_location_from_adhoc_loc (line_table,
((((ref)) && ((tree_code_type[(int) (((enum tree_code
) ((ref))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((ref))->base.code))]) <= tcc_expression
)) ? (ref)->exp.locus : ((location_t) 0))) : (((((ref)) &&
((tree_code_type[(int) (((enum tree_code) ((ref))->base.code
))]) >= tcc_reference && (tree_code_type[(int) (((
enum tree_code) ((ref))->base.code))]) <= tcc_expression
)) ? (ref)->exp.locus : ((location_t) 0)))) != ((location_t
) 0))
)
4533 loc = EXPR_LOCATION (ref)((((ref)) && ((tree_code_type[(int) (((enum tree_code
) ((ref))->base.code))]) >= tcc_reference && (tree_code_type
[(int) (((enum tree_code) ((ref))->base.code))]) <= tcc_expression
)) ? (ref)->exp.locus : ((location_t) 0))
;
4534 else if (TREE_CODE (ref)((enum tree_code) (ref)->base.code) != IDENTIFIER_NODE
4535 && TREE_CODE (ref)((enum tree_code) (ref)->base.code) != SSA_NAME)
4536 return;
4537
4538 if (mode == access_read_write || mode == access_write_only)
4539 {
4540 if (allocfn == NULL_TREE(tree) __null)
4541 {
4542 if (*offstr)
4543 inform (loc, "at offset %s into destination object %qE of size %s",
4544 offstr, ref, sizestr);
4545 else
4546 inform (loc, "destination object %qE of size %s", ref, sizestr);
4547 return;
4548 }
4549
4550 if (*offstr)
4551 inform (loc,
4552 "at offset %s into destination object of size %s "
4553 "allocated by %qE", offstr, sizestr, allocfn);
4554 else
4555 inform (loc, "destination object of size %s allocated by %qE",
4556 sizestr, allocfn);
4557 return;
4558 }
4559
4560 if (DECL_P (ref)(tree_code_type[(int) (((enum tree_code) (ref)->base.code)
)] == tcc_declaration)
)
4561 {
4562 if (*offstr)
4563 inform (loc, "at offset %s into source object %qD of size %s",
4564 offstr, ref, sizestr);
4565 else
4566 inform (loc, "source object %qD of size %s", ref, sizestr);
4567
4568 return;
4569 }
4570
4571 if (*offstr)
4572 inform (loc,
4573 "at offset %s into source object of size %s allocated by %qE",
4574 offstr, sizestr, allocfn);
4575 else
4576 inform (loc, "source object of size %s allocated by %qE",
4577 sizestr, allocfn);
4578}
4579
4580/* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
4581 by BNDRNG if nonnull and valid. */
4582
4583static void
4584get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4585{
  /* First let the plain get_size_range () overload compute the range
     of BOUND into RANGE[] (skipped when BOUND is null).  */
4586 if (bound)
4587 get_size_range (bound, range);
4588
  /* Nothing further to do without BNDRNG, or when it is the maximally
     permissive [0, SIZE_MAX] and so adds no information.  */
4589 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U-1UL))
4590 return;
4591
  /* Tighten each end of a known constant range by the corresponding
     BNDRNG bound.  */
4592 if (range[0] && TREE_CODE (range[0])((enum tree_code) (range[0])->base.code) == INTEGER_CST)
4593 {
4594 offset_int r[] =
4595 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
4596 if (r[0] < bndrng[0])
4597 range[0] = wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], bndrng[0]);
4598 if (bndrng[1] < r[1])
4599 range[1] = wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], bndrng[1]);
4600 }
4601 else
4602 {
  /* Otherwise no constant range for BOUND is known: use BNDRNG
     itself as the result.  */
4603 range[0] = wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], bndrng[0]);
4604 range[1] = wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], bndrng[1]);
4605 }
4606}
4607
4608/* Try to verify that the sizes and lengths of the arguments to a string
4609 manipulation function given by EXP are within valid bounds and that
4610 the operation does not lead to buffer overflow or read past the end.
4611 Arguments other than EXP may be null. When non-null, the arguments
4612 have the following meaning:
4613 DST is the destination of a copy call or NULL otherwise.
4614 SRC is the source of a copy call or NULL otherwise.
4615 DSTWRITE is the number of bytes written into the destination obtained
4616 from the user-supplied size argument to the function (such as in
4617 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
4618 MAXREAD is the user-supplied bound on the length of the source sequence
4619 (such as in strncat(d, s, N)). It specifies the upper limit on the number
4620 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
4621 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
4622 expression EXP is a string function call (as opposed to a memory call
4623 like memcpy). As an exception, SRCSTR can also be an integer denoting
4624 the precomputed size of the source string or object (for functions like
4625 memcpy).
4626 DSTSIZE is the size of the destination object.
4627
4628 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
4629 SIZE_MAX.
4630
4631 WRITE is true for write accesses, READ is true for reads. Both are
4632 false for simple size checks in calls to functions that neither read
4633 from nor write to the region.
4634
4635 When nonnull, PAD points to a more detailed description of the access.
4636
4637 If the call is successfully verified as safe return true, otherwise
4638 return false. */
4639
4640bool
4641check_access (tree exp, tree dstwrite,
4642 tree maxread, tree srcstr, tree dstsize,
4643 access_mode mode, const access_data *pad /* = NULL */)
4644{
4645 /* The size of the largest object is half the address space, or
4646 PTRDIFF_MAX. (This is way too permissive.) */
4647 tree maxobjsize = max_object_size ();
4648
4649 /* Either an approximate/minimum of the length of the source string for
4650 string functions or the size of the source object for raw memory
4651 functions. */
4652 tree slen = NULL_TREE(tree) __null;
4653
4654 /* The range of the access in bytes; first set to the write access
4655 for functions that write and then read for those that also (or
4656 just) read. */
4657 tree range[2] = { NULL_TREE(tree) __null, NULL_TREE(tree) __null };
4658
4659 /* Set to true when the exact number of bytes written by a string
4660 function like strcpy is not known and the only thing that is
4661 known is that it must be at least one (for the terminating nul). */
4662 bool at_least_one = false;
4663 if (srcstr)
4664 {
4665 /* SRCSTR is normally a pointer to string but as a special case
4666 it can be an integer denoting the length of a string. */
4667 if (POINTER_TYPE_P (TREE_TYPE (srcstr))(((enum tree_code) (((contains_struct_check ((srcstr), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4667, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((srcstr), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4667, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
4668 {
4669 if (!check_nul_terminated_array (exp, srcstr, maxread))
4670 return false;
4671 /* Try to determine the range of lengths the source string
4672 refers to. If it can be determined and is less than
4673 the upper bound given by MAXREAD add one to it for
4674 the terminating nul. Otherwise, set it to one for
4675 the same reason, or to MAXREAD as appropriate. */
4676 c_strlen_data lendata = { };
4677 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
4678 range[0] = lendata.minlen;
4679 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
4680 if (range[0]
4681 && TREE_CODE (range[0])((enum tree_code) (range[0])->base.code) == INTEGER_CST
4682 && TREE_CODE (range[1])((enum tree_code) (range[1])->base.code) == INTEGER_CST
4683 && (!maxread || TREE_CODE (maxread)((enum tree_code) (maxread)->base.code) == INTEGER_CST))
4684 {
4685 if (maxread && tree_int_cst_le (maxread, range[0]))
4686 range[0] = range[1] = maxread;
4687 else
4688 range[0] = fold_build2 (PLUS_EXPR, size_type_node,fold_build2_loc (((location_t) 0), PLUS_EXPR, global_trees[TI_SIZE_TYPE
], range[0], global_trees[TI_SIZE_ONE] )
4689 range[0], size_one_node)fold_build2_loc (((location_t) 0), PLUS_EXPR, global_trees[TI_SIZE_TYPE
], range[0], global_trees[TI_SIZE_ONE] )
;
4690
4691 if (maxread && tree_int_cst_le (maxread, range[1]))
4692 range[1] = maxread;
4693 else if (!integer_all_onesp (range[1]))
4694 range[1] = fold_build2 (PLUS_EXPR, size_type_node,fold_build2_loc (((location_t) 0), PLUS_EXPR, global_trees[TI_SIZE_TYPE
], range[1], global_trees[TI_SIZE_ONE] )
4695 range[1], size_one_node)fold_build2_loc (((location_t) 0), PLUS_EXPR, global_trees[TI_SIZE_TYPE
], range[1], global_trees[TI_SIZE_ONE] )
;
4696
4697 slen = range[0];
4698 }
4699 else
4700 {
4701 at_least_one = true;
4702 slen = size_one_nodeglobal_trees[TI_SIZE_ONE];
4703 }
4704 }
4705 else
 /* SRCSTR is an integer: it already denotes the source length.  */
4706 slen = srcstr;
4707 }
4708
4709 if (!dstwrite && !maxread)
4710 {
4711 /* When the only available piece of data is the object size
4712 there is nothing to do. */
4713 if (!slen)
4714 return true;
4715
4716 /* Otherwise, when the length of the source sequence is known
4717 (as with strlen), set DSTWRITE to it. */
4718 if (!range[0])
4719 dstwrite = slen;
4720 }
4721
4722 if (!dstsize)
4723 dstsize = maxobjsize;
4724
4725 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4726 if valid. */
4727 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL__null);
4728
4729 tree func = get_callee_fndecl (exp);
4730 /* Read vs write access by built-ins can be determined from the const
4731 qualifiers on the pointer argument. In the absence of attribute
4732 access, non-const qualified pointer arguments to user-defined
4733 functions are assumed to both read and write the objects. */
4734 const bool builtin = func ? fndecl_built_in_p (func) : false;
4735
4736 /* First check the number of bytes to be written against the maximum
4737 object size. */
4738 if (range[0]
4739 && TREE_CODE (range[0])((enum tree_code) (range[0])->base.code) == INTEGER_CST
4740 && tree_int_cst_lt (maxobjsize, range[0]))
4741 {
4742 location_t loc = tree_inlined_location (exp);
4743 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4744 NULL_TREE(tree) __null, pad);
4745 return false;
4746 }
4747
4748 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4749 constant, and in range of unsigned HOST_WIDE_INT. */
4750 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4751
4752 /* Next check the number of bytes to be written against the destination
4753 object size. */
4754 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4755 {
4756 if (range[0]
4757 && TREE_CODE (range[0])((enum tree_code) (range[0])->base.code) == INTEGER_CST
4758 && ((tree_fits_uhwi_p (dstsize)
4759 && tree_int_cst_lt (dstsize, range[0]))
4760 || (dstwrite
4761 && tree_fits_uhwi_p (dstwrite)
4762 && tree_int_cst_lt (dstwrite, range[0]))))
4763 {
 /* Suppress the warning when it has already been issued for this
    statement or the referenced destination object.  */
4764 if (TREE_NO_WARNING (exp)((exp)->base.nowarning_flag)
4765 || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)((pad->dst.ref)->base.nowarning_flag)))
4766 return false;
4767
4768 location_t loc = tree_inlined_location (exp);
4769 bool warned = false;
4770 if (dstwrite == slen && at_least_one)
4771 {
4772 /* This is a call to strcpy with a destination of 0 size
4773 and a source of unknown length. The call will write
4774 at least one byte past the end of the destination. */
4775 warned = (func
4776 ? warning_at (loc, OPT_Wstringop_overflow_,
4777 "%K%qD writing %E or more bytes into "
4778 "a region of size %E overflows "
4779 "the destination",
4780 exp, func, range[0], dstsize)
4781 : warning_at (loc, OPT_Wstringop_overflow_,
4782 "%Kwriting %E or more bytes into "
4783 "a region of size %E overflows "
4784 "the destination",
4785 exp, range[0], dstsize));
4786 }
4787 else
4788 {
4789 const bool read
4790 = mode == access_read_only || mode == access_read_write;
4791 const bool write
4792 = mode == access_write_only || mode == access_read_write;
4793 const bool maybe = pad && pad->dst.parmarray;
4794 warned = warn_for_access (loc, func, exp,
4795 OPT_Wstringop_overflow_,
4796 range, dstsize,
4797 write, read && !builtin, maybe);
4798 }
4799
4800 if (warned)
4801 {
4802 TREE_NO_WARNING (exp)((exp)->base.nowarning_flag) = true;
4803 if (pad)
4804 pad->dst.inform_access (pad->mode);
4805 }
4806
4807 /* Return error when an overflow has been detected. */
4808 return false;
4809 }
4810 }
4811
4812 /* Check the maximum length of the source sequence against the size
4813 of the destination object if known, or against the maximum size
4814 of an object. */
4815 if (maxread)
4816 {
4817 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4818 PAD is nonnull and BNDRNG is valid. */
4819 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL__null);
4820
4821 location_t loc = tree_inlined_location (exp);
4822 tree size = dstsize;
4823 if (pad && pad->mode == access_read_only)
4824 size = wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], pad->src.sizrng[1]);
4825
 /* Note: MAXREAD is necessarily nonnull in this block so the test
    below is redundant but harmless.  */
4826 if (range[0] && maxread && tree_fits_uhwi_p (size))
4827 {
4828 if (tree_int_cst_lt (maxobjsize, range[0]))
4829 {
4830 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4831 range, size, pad);
4832 return false;
4833 }
4834
4835 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4836 {
4837 int opt = (dstwrite || mode != access_read_only
4838 ? OPT_Wstringop_overflow_
4839 : OPT_Wstringop_overread);
4840 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4841 return false;
4842 }
4843 }
4844
4845 maybe_warn_nonstring_arg (func, exp);
4846 }
4847
4848 /* Check for reading past the end of SRC. */
4849 bool overread = (slen
4850 && slen == srcstr
4851 && dstwrite
4852 && range[0]
4853 && TREE_CODE (slen)((enum tree_code) (slen)->base.code) == INTEGER_CST
4854 && tree_int_cst_lt (slen, range[0]));
4855 /* If none is determined try to get a better answer based on the details
4856 in PAD. */
4857 if (!overread
4858 && pad
4859 && pad->src.sizrng[1] >= 0
4860 && pad->src.offrng[0] >= 0
4861 && (pad->src.offrng[1] < 0
4862 || pad->src.offrng[0] <= pad->src.offrng[1]))
4863 {
4864 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4865 PAD is nonnull and BNDRNG is valid. */
4866 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL__null);
4867 /* Set OVERREAD for reads starting just past the end of an object. */
4868 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4869 range[0] = wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], pad->src.bndrng[0]);
4870 slen = size_zero_nodeglobal_trees[TI_SIZE_ZERO];
4871 }
4872
4873 if (overread)
4874 {
 /* As for the overflow warning above, honor warning suppression on
    the statement, the source argument, and the referenced object.  */
4875 if (TREE_NO_WARNING (exp)((exp)->base.nowarning_flag)
4876 || (srcstr && TREE_NO_WARNING (srcstr)((srcstr)->base.nowarning_flag))
4877 || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)((pad->src.ref)->base.nowarning_flag)))
4878 return false;
4879
4880 location_t loc = tree_inlined_location (exp);
4881 const bool read
4882 = mode == access_read_only || mode == access_read_write;
4883 const bool maybe = pad && pad->dst.parmarray;
4884 if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
4885 slen, false, read, maybe))
4886 {
4887 TREE_NO_WARNING (exp)((exp)->base.nowarning_flag) = true;
4888 if (pad)
4889 pad->src.inform_access (access_read_only);
4890 }
4891 return false;
4892 }
4893
 /* No overflow or overread has been detected.  */
4894 return true;
4895}
4896
4897/* A convenience wrapper for check_access above to check access
4898 by a read-only function like puts. */
4899
4900static bool
4901check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4902 int ost /* = 1 */)
4903{
 /* Nothing to check when -Wstringop-overread is disabled.  */
4904 if (!warn_stringop_overreadglobal_options.x_warn_stringop_overread)
4905 return true;
4906
 /* Describe the access as a read of SRC (bounded by BOUND if given),
    determine the size of the referenced object with compute_objsize
    at object size type OST, and delegate the checking to check_access.  */
4907 access_data data (exp, access_read_only, NULL_TREE(tree) __null, false, bound, true);
4908 compute_objsize (src, ost, &data.src);
4909 return check_access (exp, /*dstwrite=*/ NULL_TREE(tree) __null, /*maxread=*/ bound,
4910 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE(tree) __null, data.mode,
4911 &data);
4912}
4913
4914/* If STMT is a call to an allocation function, returns the constant
4915 maximum size of the object allocated by the call represented as
4916 sizetype. If nonnull, sets RNG1[] to the range of the size.
4917 When nonnull, uses RVALS for range information, otherwise calls
4918 get_range_info to get it.
4919 Returns null when STMT is not a call to a valid allocation function. */
4920
4921tree
4922gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4923 range_query * /* = NULL */)
4924{
4925 if (!stmt)
4926 return NULL_TREE(tree) __null;
4927
 /* Prefer the type of the called declaration when available; fall
    back on the type of the call expression for indirect calls.  */
4928 tree allocfntype;
4929 if (tree fndecl = gimple_call_fndecl (stmt))
4930 allocfntype = TREE_TYPE (fndecl)((contains_struct_check ((fndecl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4930, __FUNCTION__))->typed.type)
;
4931 else
4932 allocfntype = gimple_call_fntype (stmt);
4933
4934 if (!allocfntype)
4935 return NULL_TREE(tree) __null;
4936
 /* Zero-based indexes of the size and (optional) count arguments;
    UINT_MAX means "not determined yet" / "no second argument".  */
4937 unsigned argidx1 = UINT_MAX(2147483647 *2U +1U), argidx2 = UINT_MAX(2147483647 *2U +1U);
4938 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype)((tree_class_check ((allocfntype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4938, __FUNCTION__))->type_common.attributes)
);
4939 if (!at)
4940 {
 /* Without attribute alloc_size only __builtin_alloca_with_align
    is recognized; its size is argument 0.  */
4941 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4942 return NULL_TREE(tree) __null;
4943
4944 argidx1 = 0;
4945 }
4946
4947 unsigned nargs = gimple_call_num_args (stmt);
4948
4949 if (argidx1 == UINT_MAX(2147483647 *2U +1U))
4950 {
4951 tree atval = TREE_VALUE (at)((tree_check ((at), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4951, __FUNCTION__, (TREE_LIST)))->list.value)
;
4952 if (!atval)
4953 return NULL_TREE(tree) __null;
4954
 /* The attribute operands are 1-based; convert to 0-based indexes
    and validate them against the actual number of arguments.  */
4955 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval))((unsigned long) (*tree_int_cst_elt_check ((((tree_check ((atval
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4955, __FUNCTION__, (TREE_LIST)))->list.value)), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4955, __FUNCTION__)))
- 1;
4956 if (nargs <= argidx1)
4957 return NULL_TREE(tree) __null;
4958
4959 atval = TREE_CHAIN (atval)((contains_struct_check ((atval), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4959, __FUNCTION__))->common.chain)
;
4960 if (atval)
4961 {
4962 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval))((unsigned long) (*tree_int_cst_elt_check ((((tree_check ((atval
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4962, __FUNCTION__, (TREE_LIST)))->list.value)), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 4962, __FUNCTION__)))
- 1;
4963 if (nargs <= argidx2)
4964 return NULL_TREE(tree) __null;
4965 }
4966 }
4967
4968 tree size = gimple_call_arg (stmt, argidx1);
4969
4970 wide_int rng1_buf[2];
4971 /* If RNG1 is not set, use the buffer. */
4972 if (!rng1)
4973 rng1 = rng1_buf;
4974
4975 /* Use maximum precision to avoid overflow below. */
4976 const int prec = ADDR_MAX_PRECISION((64 + 4 + 64 - 1) & ~(64 - 1));
4977
4978 {
4979 tree r[2];
4980 /* Determine the largest valid range size, including zero. */
4981 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4982 return NULL_TREE(tree) __null;
4983 rng1[0] = wi::to_wide (r[0], prec);
4984 rng1[1] = wi::to_wide (r[1], prec);
4985 }
4986
 /* With a single constant size argument there is no product to
    compute: return the size converted to sizetype.  */
4987 if (argidx2 > nargs && TREE_CODE (size)((enum tree_code) (size)->base.code) == INTEGER_CST)
4988 return fold_convert (sizetype, size)fold_convert_loc (((location_t) 0), sizetype_tab[(int) stk_sizetype
], size)
;
4989
4990 /* To handle ranges do the math in wide_int and return the product
4991 of the upper bounds as a constant. Ignore anti-ranges. */
4992 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_nodeglobal_trees[TI_INTEGER_ONE];
4993 wide_int rng2[2];
4994 {
4995 tree r[2];
4996 /* As above, use the full non-negative range on failure. */
4997 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4998 return NULL_TREE(tree) __null;
4999 rng2[0] = wi::to_wide (r[0], prec);
5000 rng2[1] = wi::to_wide (r[1], prec);
5001 }
5002
5003 /* Compute products of both bounds for the caller but return the lesser
5004 of SIZE_MAX and the product of the upper bounds as a constant. */
5005 rng1[0] = rng1[0] * rng2[0];
5006 rng1[1] = rng1[1] * rng2[1];
5007
5008 const tree size_max = TYPE_MAX_VALUE (sizetype)((tree_check5 ((sizetype_tab[(int) stk_sizetype]), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5008, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval
)
;
5009 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
5010 {
 /* Clamp the upper bound of the product at SIZE_MAX.  */
5011 rng1[1] = wi::to_wide (size_max, prec);
5012 return size_max;
5013 }
5014
5015 return wide_int_to_tree (sizetypesizetype_tab[(int) stk_sizetype], rng1[1]);
5016}
5017
5018/* For an access to an object referenced by the function parameter PTR
5019 of pointer type, set RNG[] to the range of sizes of the object
5020 obtained from the attribute access specification for the current function.
5021 Set STATIC_ARRAY if the array parameter has been declared [static].
5022 Return the function parameter on success and null otherwise. */
5023
5024tree
5025gimple_parm_array_size (tree ptr, wide_int rng[2],
5026 bool *static_array /* = NULL */)
5027{
5028 /* For a function argument try to determine the byte size of the array
5029 from the current function declaration (e.g., attribute access or
5030 related). */
5031 tree var = SSA_NAME_VAR (ptr)((tree_check ((ptr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5031, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
__null || ((enum tree_code) ((ptr)->ssa_name.var)->base
.code) == IDENTIFIER_NODE ? (tree) __null : (ptr)->ssa_name
.var)
;
44
Assuming the condition is false
45
Assuming field 'code' is not equal to IDENTIFIER_NODE
46
'?' condition is false
 /* SSA_NAME_VAR yields null for anonymous SSA names (its own expansion
    above shows the null arm); guard VAR before dereferencing it in
    TREE_CODE below.  */
5032 if (!var || TREE_CODE (var)((enum tree_code) (var)->base.code) != PARM_DECL)
47
Assuming field 'code' is not equal to PARM_DECL
48
Taking true branch
5033 return NULL_TREE(tree) __null;
49
Returning null pointer, which participates in a condition later
5034
5035 const unsigned prec = TYPE_PRECISION (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5035, __FUNCTION__))->type_common.precision)
;
5036
5037 rdwr_map rdwr_idx;
5038 attr_access *access = get_parm_access (rdwr_idx, var);
5039 if (!access)
5040 return NULL_TREE(tree) __null;
5041
5042 if (access->sizarg != UINT_MAX(2147483647 *2U +1U))
5043 {
5044 /* TODO: Try to extract the range from the argument based on
5045 those of subsequent assertions or based on known calls to
5046 the current function. */
5047 return NULL_TREE(tree) __null;
5048 }
5049
5050 if (!access->minsize)
5051 return NULL_TREE(tree) __null;
5052
5053 /* Only consider ordinary array bound at level 2 (or above if it's
5054 ever added). */
5055 if (warn_array_parameterglobal_options.x_warn_array_parameter < 2 && !access->static_p)
5056 return NULL_TREE(tree) __null;
5057
5058 if (static_array)
5059 *static_array = access->static_p;
5060
5061 rng[0] = wi::zero (prec);
5062 rng[1] = wi::uhwi (access->minsize, prec);
5063 /* Multiply the array bound encoded in the attribute by the size
5064 of what the pointer argument to which it decays points to. */
5065 tree eltype = TREE_TYPE (TREE_TYPE (ptr))((contains_struct_check ((((contains_struct_check ((ptr), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5065, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5065, __FUNCTION__))->typed.type)
;
5066 tree size = TYPE_SIZE_UNIT (eltype)((tree_class_check ((eltype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5066, __FUNCTION__))->type_common.size_unit)
;
 /* Fail for incomplete or variable-size element types.  */
5067 if (!size || TREE_CODE (size)((enum tree_code) (size)->base.code) != INTEGER_CST)
5068 return NULL_TREE(tree) __null;
5069
5070 rng[1] *= wi::to_wide (size, prec);
5071 return var;
5072}
5073
5074/* Wrapper around the wide_int overload of get_range that accepts
5075 offset_int instead. For middle end expressions returns the same
5076 result. For a subset of nonconstant expressions emitted by the front
5077 end determines a more precise range than would be possible otherwise. */
5078
5079static bool
5080get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
5081{
5082 offset_int add = 0;
5083 if (TREE_CODE (x)((enum tree_code) (x)->base.code) == PLUS_EXPR)
5084 {
5085 /* Handle constant offsets in pointer addition expressions seen
5086 in the front end IL. */
5087 tree op = TREE_OPERAND (x, 1)(*((const_cast<tree*> (tree_operand_check ((x), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5087, __FUNCTION__)))))
;
5088 if (TREE_CODE (op)((enum tree_code) (op)->base.code) == INTEGER_CST)
5089 {
5090 op = fold_convert (signed_type_for (TREE_TYPE (op)), op)fold_convert_loc (((location_t) 0), signed_type_for (((contains_struct_check
((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5090, __FUNCTION__))->typed.type)), op)
;
5091 add = wi::to_offset (op);
5092 x = TREE_OPERAND (x, 0)(*((const_cast<tree*> (tree_operand_check ((x), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5092, __FUNCTION__)))))
;
5093 }
5094 }
5095
5096 if (TREE_CODE (x)((enum tree_code) (x)->base.code) == NOP_EXPR)
5097 /* Also handle conversions to sizetype seen in the front end IL. */
5098 x = TREE_OPERAND (x, 0)(*((const_cast<tree*> (tree_operand_check ((x), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5098, __FUNCTION__)))))
;
5099
5100 tree type = TREE_TYPE (x)((contains_struct_check ((x), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5100, __FUNCTION__))->typed.type)
;
5101 if (!INTEGRAL_TYPE_P (type)(((enum tree_code) (type)->base.code) == ENUMERAL_TYPE || (
(enum tree_code) (type)->base.code) == BOOLEAN_TYPE || ((enum
tree_code) (type)->base.code) == INTEGER_TYPE)
&& !POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
)
5102 return false;
5103
5104 if (TREE_CODE (x)((enum tree_code) (x)->base.code) != INTEGER_CST
5105 && TREE_CODE (x)((enum tree_code) (x)->base.code) != SSA_NAME)
5106 {
 /* For other expression kinds fall back on the full range of the
    expression's (signed) type, shifted by the constant ADD.  */
5107 if (TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5107, __FUNCTION__))->base.u.bits.unsigned_flag)
5108 && TYPE_PRECISION (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5108, __FUNCTION__))->type_common.precision)
== TYPE_PRECISION (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5108, __FUNCTION__))->type_common.precision)
)
5109 type = signed_type_for (type);
5110
5111 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)((tree_check5 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5111, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.minval
)
) + add;
5112 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)((tree_check5 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5112, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval
)
) + add;
 /* X is known to be nonnull here; return true explicitly instead of
    the previous "return x;" that relied on an implicit tree-to-bool
    pointer conversion in this bool-returning function.  */
5113 return true;
5114 }
5115
5116 wide_int wr[2];
5117 if (!get_range (x, stmt, wr, rvals))
5118 return false;
5119
5120 signop sgn = SIGNED;
5121 /* Only convert signed integers or unsigned sizetype to a signed
5122 offset and avoid converting large positive values in narrower
5123 types to negative offsets. */
5124 if (TYPE_UNSIGNED (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5124, __FUNCTION__))->base.u.bits.unsigned_flag)
5125 && wr[0].get_precision () < TYPE_PRECISION (sizetype)((tree_class_check ((sizetype_tab[(int) stk_sizetype]), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5125, __FUNCTION__))->type_common.precision)
)
5126 sgn = UNSIGNED;
5127
5128 r[0] = offset_int::from (wr[0], sgn);
5129 r[1] = offset_int::from (wr[1], sgn);
5130 return true;
5131}
5132
5133/* Return the argument that the call STMT to a built-in function returns
5134 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
5135 from the argument reflected in the value returned by the built-in if it
5136 can be determined, otherwise to 0 and HWI_M1U respectively. */
5137
5138static tree
5139gimple_call_return_array (gimple *stmt, offset_int offrng[2],
5140 range_query *rvals)
5141{
5142 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5143 || gimple_call_num_args (stmt) < 1)
5144 return NULL_TREE(tree) __null;
5145
5146 tree fn = gimple_call_fndecl (stmt);
5147 switch (DECL_FUNCTION_CODE (fn))
5148 {
 /* These built-ins return their first (destination) argument
    unchanged: the offset into it is exactly zero.  */
5149 case BUILT_IN_MEMCPY:
5150 case BUILT_IN_MEMCPY_CHK:
5151 case BUILT_IN_MEMMOVE:
5152 case BUILT_IN_MEMMOVE_CHK:
5153 case BUILT_IN_MEMSET:
5154 case BUILT_IN_STPCPY:
5155 case BUILT_IN_STPCPY_CHK:
5156 case BUILT_IN_STPNCPY:
5157 case BUILT_IN_STPNCPY_CHK:
5158 case BUILT_IN_STRCAT:
5159 case BUILT_IN_STRCAT_CHK:
5160 case BUILT_IN_STRCPY:
5161 case BUILT_IN_STRCPY_CHK:
5162 case BUILT_IN_STRNCAT:
5163 case BUILT_IN_STRNCAT_CHK:
5164 case BUILT_IN_STRNCPY:
5165 case BUILT_IN_STRNCPY_CHK:
5166 offrng[0] = offrng[1] = 0;
5167 return gimple_call_arg (stmt, 0);
5168
 /* mempcpy returns the destination advanced by the copied size
    (argument 2); use its range as the offset range when it can be
    determined.  */
5169 case BUILT_IN_MEMPCPY:
5170 case BUILT_IN_MEMPCPY_CHK:
5171 {
5172 tree off = gimple_call_arg (stmt, 2);
5173 if (!get_offset_range (off, stmt, offrng, rvals))
5174 {
5175 offrng[0] = 0;
5176 offrng[1] = HOST_WIDE_INT_M1U-1UL;
5177 }
5178 return gimple_call_arg (stmt, 0);
5179 }
5180
 /* memchr returns a pointer at an offset no greater than the bound
    (argument 2), so only the upper bound of the offset range is
    useful; the lower bound is zero either way.  */
5181 case BUILT_IN_MEMCHR:
5182 {
5183 tree off = gimple_call_arg (stmt, 2);
5184 if (get_offset_range (off, stmt, offrng, rvals))
5185 offrng[0] = 0;
5186 else
5187 {
5188 offrng[0] = 0;
5189 offrng[1] = HOST_WIDE_INT_M1U-1UL;
5190 }
5191 return gimple_call_arg (stmt, 0);
5192 }
5193
 /* These return a pointer anywhere into the source string; the
    offset is unbounded above.  */
5194 case BUILT_IN_STRCHR:
5195 case BUILT_IN_STRRCHR:
5196 case BUILT_IN_STRSTR:
5197 {
5198 offrng[0] = 0;
5199 offrng[1] = HOST_WIDE_INT_M1U-1UL;
5200 }
5201 return gimple_call_arg (stmt, 0);
5202
5203 default:
5204 break;
5205 }
5206
5207 return NULL_TREE(tree) __null;
5208}
5209
5210/* A helper of compute_objsize() to determine the size from an assignment
5211 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. */
5212
5213static bool
5214handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
5215 ssa_name_limit_t &snlim, pointer_query *qry)
5216{
5217 tree_code code = gimple_assign_rhs_code (stmt);
5218
5219 tree ptr = gimple_assign_rhs1 (stmt);
5220
5221 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
5222 Determine the size/offset of each and use the one with more or less
5223 space remaining, respectively. If either fails, use the information
5224 determined from the other instead, adjusted up or down as appropriate
5225 for the expression. */
5226 access_ref aref[2] = { *pref, *pref };
5227 if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
5228 {
 /* First operand undetermined: fall back on the maximal offset
    and size ranges.  */
5229 aref[0].base0 = false;
5230 aref[0].offrng[0] = aref[0].offrng[1] = 0;
5231 aref[0].add_max_offset ();
5232 aref[0].set_max_size_range ();
5233 }
5234
5235 ptr = gimple_assign_rhs2 (stmt);
5236 if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
5237 {
 /* Likewise for the second operand.  */
5238 aref[1].base0 = false;
5239 aref[1].offrng[0] = aref[1].offrng[1] = 0;
5240 aref[1].add_max_offset ();
5241 aref[1].set_max_size_range ();
5242 }
5243
5244 if (!aref[0].ref && !aref[1].ref)
5245 /* Fail if the identity of neither argument could be determined. */
5246 return false;
5247
5248 bool i0 = false;
5249 if (aref[0].ref && aref[0].base0)
5250 {
5251 if (aref[1].ref && aref[1].base0)
5252 {
5253 /* If the object referenced by both arguments has been determined
5254 set *PREF to the one with more or less space remaining, whichever
5255 is appropriate for CODE.
5256 TODO: Indicate when the objects are distinct so it can be
5257 diagnosed. */
5258 i0 = code == MAX_EXPR;
5259 const bool i1 = !i0;
5260
5261 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
5262 *pref = aref[i1];
5263 else
5264 *pref = aref[i0];
5265 return true;
5266 }
5267
5268 /* If only the object referenced by one of the arguments could be
5269 determined, use it and... */
5270 *pref = aref[0];
5271 i0 = true;
5272 }
5273 else
5274 *pref = aref[1];
5275
5276 const bool i1 = !i0;
5277 /* ...see if the offset obtained from the other pointer can be used
5278 to tighten up the bound on the offset obtained from the first. */
5279 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
5280 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
5281 {
5282 pref->offrng[0] = aref[i0].offrng[0];
5283 pref->offrng[1] = aref[i0].offrng[1];
5284 }
5285 return true;
5286}
5287
5288/* Helper to compute the size of the object referenced by the PTR
5289 expression which must have pointer type, using Object Size type
5290 OSTYPE (only the least significant 2 bits are used).
5291 On success, sets PREF->REF to the DECL of the referenced object
5292 if it's unique, otherwise to null, PREF->OFFRNG to the range of
5293 offsets into it, and PREF->SIZRNG to the range of sizes of
5294 the object(s).
5295 SNLIM is used to avoid visiting the same PHI operand multiple
5296 times, and, when nonnull, RVALS to determine range information.
5297 Returns true on success, false when a meaningful size (or range)
5298 cannot be determined.
5299
5300 The function is intended for diagnostics and should not be used
5301 to influence code generation or optimization. */
5302
5303static bool
5304compute_objsize_r (tree ptr, int ostype, access_ref *pref,
5305 ssa_name_limit_t &snlim, pointer_query *qry)
5306{
5307 STRIP_NOPS (ptr)(ptr) = tree_strip_nop_conversions ((const_cast<union tree_node
*> (((ptr)))))
;
5308
5309 const bool addr = TREE_CODE (ptr)((enum tree_code) (ptr)->base.code) == ADDR_EXPR;
1
Assuming field 'code' is not equal to ADDR_EXPR
5310 if (addr
1.1
'addr' is false
1.1
'addr' is false
)
2
Taking false branch
5311 {
5312 --pref->deref;
5313 ptr = TREE_OPERAND (ptr, 0)(*((const_cast<tree*> (tree_operand_check ((ptr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5313, __FUNCTION__)))))
;
5314 }
5315
5316 if (DECL_P (ptr)(tree_code_type[(int) (((enum tree_code) (ptr)->base.code)
)] == tcc_declaration)
)
3
Assuming the condition is false
4
Taking false branch
5317 {
5318 pref->ref = ptr;
5319
5320 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr))(((enum tree_code) (((contains_struct_check ((ptr), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5320, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((ptr), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5320, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
5321 {
5322 /* Set the maximum size if the reference is to the pointer
5323 itself (as opposed to what it points to). */
5324 pref->set_max_size_range ();
5325 return true;
5326 }
5327
5328 if (tree size = decl_init_size (ptr, false))
5329 if (TREE_CODE (size)((enum tree_code) (size)->base.code) == INTEGER_CST)
5330 {
5331 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5332 return true;
5333 }
5334
5335 pref->set_max_size_range ();
5336 return true;
5337 }
5338
5339 const tree_code code = TREE_CODE (ptr)((enum tree_code) (ptr)->base.code);
5340 range_query *const rvals = qry ? qry->rvals : NULL__null;
5
Assuming 'qry' is null
6
'?' condition is false
5341
5342 if (code == BIT_FIELD_REF)
7
Assuming 'code' is not equal to BIT_FIELD_REF
8
Taking false branch
5343 {
5344 tree ref = TREE_OPERAND (ptr, 0)(*((const_cast<tree*> (tree_operand_check ((ptr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5344, __FUNCTION__)))))
;
5345 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5346 return false;
5347
5348 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)(*((const_cast<tree*> (tree_operand_check ((ptr), (2), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5348, __FUNCTION__)))))
));
5349 pref->add_offset (off / BITS_PER_UNIT(8));
5350 return true;
5351 }
5352
5353 if (code == COMPONENT_REF)
9
Assuming 'code' is not equal to COMPONENT_REF
10
Taking false branch
5354 {
5355 tree ref = TREE_OPERAND (ptr, 0)(*((const_cast<tree*> (tree_operand_check ((ptr), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5355, __FUNCTION__)))))
;
5356 if (TREE_CODE (TREE_TYPE (ref))((enum tree_code) (((contains_struct_check ((ref), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5356, __FUNCTION__))->typed.type))->base.code)
== UNION_TYPE)
5357 /* In accesses through union types consider the entire unions
5358 rather than just their members. */
5359 ostype = 0;
5360 tree field = TREE_OPERAND (ptr, 1)(*((const_cast<tree*> (tree_operand_check ((ptr), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5360, __FUNCTION__)))))
;
5361
5362 if (ostype == 0)
5363 {
5364 /* In OSTYPE zero (for raw memory functions like memcpy), use
5365 the maximum size instead if the identity of the enclosing
5366 object cannot be determined. */
5367 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5368 return false;
5369
5370 /* Otherwise, use the size of the enclosing object and add
5371 the offset of the member to the offset computed so far. */
5372 tree offset = byte_position (field);
5373 if (TREE_CODE (offset)((enum tree_code) (offset)->base.code) == INTEGER_CST)
5374 pref->add_offset (wi::to_offset (offset));
5375 else
5376 pref->add_max_offset ();
5377
5378 if (!pref->ref)
5379 /* REF may have been already set to an SSA_NAME earlier
5380 to provide better context for diagnostics. In that case,
5381 leave it unchanged. */
5382 pref->ref = ref;
5383 return true;
5384 }
5385
5386 pref->ref = field;
5387
5388 if (!addr && POINTER_TYPE_P (TREE_TYPE (field))(((enum tree_code) (((contains_struct_check ((field), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5388, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((field), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/builtins.c"
, 5388, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
5389 {
5390 /* Set maximum size if the reference is to the pointer member
5391 itself (as opposed to what it points to). */
5392 pref->set_max_size_range ();
5393 return true;
5394 }
5395
5396 /* SAM is set for array members that might need special treatment. */
5397 special_array_member sam;
5398 tree size = component_ref_size (ptr, &sam);
5399 if (sam == special_array_member::int_0)
5400 pref->sizrng[0] = pref->sizrng[1] = 0;
5401 else if (!pref->trail1special && sam == special_array_member::trail_1)
5402 pref->sizrng[0] = pref->sizrng[1] = 1;
5403 else if (size && TREE_CODE (size)((enum tree_code) (size)->base.code) == INTEGER_CST)
5404 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5405 else
5406 {