Bug Summary

File: build/gcc/wide-int.h
Warning: line 1283, column 3
Undefined or garbage value returned to caller

Annotated Source Code


clang -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name tree-ssa-ccp.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-osi3ap.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-ccp.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-ccp.c

/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000-2021 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four-level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

     UNINITIALIZED -> the initial state of the value.  This value
                      is replaced with a correct initial value
                      the first time the value is used, so the
                      rest of the pass does not need to care about
                      it.  Using this value simplifies initialization
                      of the pass, and prevents us from needlessly
                      scanning statements that are never reached.

     UNDEFINED     -> V_i is a local variable whose definition
                      has not been processed yet.  Therefore we
                      don't yet know if its value is a constant
                      or not.

     CONSTANT      -> V_i has been found to hold a constant
                      value C.

     VARYING       -> V_i cannot take a constant value, or if it
                      does, it is not possible to determine it
                      at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.


   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main shortcuts:

      - If an argument is flowing in through a non-executable edge, it
        is ignored.  This is useful in cases like this:

                        if (PRED)
                          a_9 = 3;
                        else
                          a_10 = 100;
                        a_11 = PHI (a_9, a_10)

        If PRED is known to always evaluate to false, then we can
        assume that a_11 will always take its value from a_10, meaning
        that instead of considering it VARYING (a_9 and a_10 have
        different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
        the outcome of the meet operation.  If a variable V_i has an
        UNDEFINED value, it means that either its defining statement
        hasn't been visited yet or V_i has no defining statement, in
        which case the original symbol 'V' is being used
        uninitialized.  Since 'V' is a local variable, the compiler
        may assume any initial value for it.


   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   This algorithm uses wide-ints at the max precision of the target.
   This means that, with one uninteresting exception, variables with
   UNSIGNED types never go to VARYING because the bits above the
   precision of the type of the variable are always zero.  The
   uninteresting case is a variable of UNSIGNED type that has the
   maximum precision of the target.  Such variables can go to VARYING,
   but this causes no loss of information since these variables will
   never be extended.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-propagate.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "cfgloop.h"
#include "stor-layout.h"
#include "optabs-query.h"
#include "tree-ssa-ccp.h"
#include "tree-dfa.h"
#include "diagnostic-core.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "cgraph.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "ipa-utils.h"
#include "ipa-prop.h"

/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

class ccp_prop_value_t {
public:
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For X
       with a CONSTANT lattice value X & ~mask == value & ~mask.  The
       zero bits in the mask cover constant values.  The ones mean no
       information.  */
    widest_int mask;
};
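
/* For illustration (not part of the pass): value == 1 with mask == 2
   describes every X with bit 0 known to be 1, bit 1 unknown, and all
   higher bits known to be 0, since X & ~2 == 1 & ~2; such an X is
   either 1 or 3.  */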

class ccp_propagate : public ssa_propagation_engine
{
 public:
  enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) FINAL OVERRIDE;
  enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE;
};

/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static ccp_prop_value_t *const_val;
static unsigned n_const_val;

static void canonicalize_value (ccp_prop_value_t *);
static void ccp_lattice_meet (ccp_prop_value_t *, ccp_prop_value_t *);

/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, ccp_prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      if (TREE_CODE (val.value) != INTEGER_CST
          || val.mask == 0)
        {
          fprintf (outf, "%sCONSTANT ", prefix);
          print_generic_expr (outf, val.value, dump_flags);
        }
      else
        {
          widest_int cval = wi::bit_and_not (wi::to_widest (val.value),
                                             val.mask);
          fprintf (outf, "%sCONSTANT ", prefix);
          print_hex (cval, outf);
          fprintf (outf, " (");
          print_hex (val.mask, outf);
          fprintf (outf, ")");
        }
      break;
    default:
      gcc_unreachable ();
    }
}
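
/* For illustration: a partially constant value like the one above
   (value 1, mask 2) dumps as "CONSTANT 0x1 (0x2)", i.e. the known
   bits first, then the mask of unknown bits in parentheses.  */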


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (ccp_prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (ccp_prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}

/* Extend NONZERO_BITS to a full mask, based on sgn.  */

static widest_int
extend_mask (const wide_int &nonzero_bits, signop sgn)
{
  return widest_int::from (nonzero_bits, sgn);
}
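
/* For illustration: an 8-bit NONZERO_BITS of 0xff extends to -1 (all
   bits unknown) when SGN is SIGNED but to 0xff when SGN is UNSIGNED,
   while 0x0f extends to 0x0f for either sign.  */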

/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */

static ccp_prop_value_t
get_default_value (tree var)
{
  ccp_prop_value_t val = { UNINITIALIZED, NULL_TREE, 0 };
  gimple *stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
         before being initialized.  If VAR is a local variable, we
         can assume initially that it is UNDEFINED, otherwise we must
         consider it VARYING.  */
      if (!virtual_operand_p (var)
          && SSA_NAME_VAR (var)
          && TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        val.lattice_val = UNDEFINED;
      else
        {
          val.lattice_val = VARYING;
          val.mask = -1;
          if (flag_tree_bit_ccp)
            {
              wide_int nonzero_bits = get_nonzero_bits (var);
              tree value;
              widest_int mask;

              if (SSA_NAME_VAR (var)
                  && TREE_CODE (SSA_NAME_VAR (var)) == PARM_DECL
                  && ipcp_get_parm_bits (SSA_NAME_VAR (var), &value, &mask))
                {
                  val.lattice_val = CONSTANT;
                  val.value = value;
                  widest_int ipa_value = wi::to_widest (value);
                  /* Unknown bits from IPA CP must be equal to zero.  */
                  gcc_assert (wi::bit_and (ipa_value, mask) == 0);
                  val.mask = mask;
                  if (nonzero_bits != -1)
                    val.mask &= extend_mask (nonzero_bits,
                                             TYPE_SIGN (TREE_TYPE (var)));
                }
              else if (nonzero_bits != -1)
                {
                  val.lattice_val = CONSTANT;
                  val.value = build_zero_cst (TREE_TYPE (var));
                  val.mask = extend_mask (nonzero_bits,
                                          TYPE_SIGN (TREE_TYPE (var)));
                }
            }
        }
    }
  else if (is_gimple_assign (stmt))
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
          && DECL_P (gimple_assign_rhs1 (stmt))
          && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
        {
          val.lattice_val = CONSTANT;
          val.value = cst;
        }
      else
        {
          /* Any other variable defined by an assignment is considered
             UNDEFINED.  */
          val.lattice_val = UNDEFINED;
        }
    }
  else if ((is_gimple_call (stmt)
            && gimple_call_lhs (stmt) != NULL_TREE)
           || gimple_code (stmt) == GIMPLE_PHI)
    {
      /* A variable defined by a call or a PHI node is considered
         UNDEFINED.  */
      val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = -1;
    }

  return val;
}


/* Get the constant value associated with variable VAR.  */

static inline ccp_prop_value_t *
get_value (tree var)
{
  ccp_prop_value_t *val;

  if (const_val == NULL
      || SSA_NAME_VERSION (var) >= n_const_val)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_value (val);

  return val;
}

/* Return the constant tree value associated with VAR.  */

static inline tree
get_constant_value (tree var)
{
  ccp_prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
        return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
          || val->mask == 0))
    return val->value;
  return NULL_TREE;
}

/* Sets the value associated with VAR to VARYING.  */

static inline void
set_value_varying (tree var)
{
  ccp_prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = -1;
}

/* For integer constants, make sure to drop TREE_OVERFLOW.  */

static void
canonicalize_value (ccp_prop_value_t *val)
{
  if (val->lattice_val != CONSTANT)
    return;

  if (TREE_OVERFLOW_P (val->value))
    val->value = drop_tree_overflow (val->value);
}

/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (ccp_prop_value_t old_val, ccp_prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow arbitrary copy changes as we might look through PHI <a_1, ...>
     when only a single copy edge is executable.  */
  if (TREE_CODE (old_val.value) == SSA_NAME
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from a constant to a copy.  */
  if (is_gimple_min_invariant (old_val.value)
      && TREE_CODE (new_val.value) == SSA_NAME)
    return true;

  /* Allow transitioning from PHI <&x, not executable> == &x
     to PHI <&x, &y> == common alignment.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return (wi::bit_and_not (wi::to_widest (old_val.value), new_val.mask)
            == wi::bit_and_not (wi::to_widest (new_val.value), new_val.mask));

  /* Otherwise constant values have to agree.  */
  if (operand_equal_p (old_val.value, new_val.value, 0))
    return true;

  /* At least the kinds and types should agree now.  */
  if (TREE_CODE (old_val.value) != TREE_CODE (new_val.value)
      || !types_compatible_p (TREE_TYPE (old_val.value),
                              TREE_TYPE (new_val.value)))
    return false;

  /* For floats and !HONOR_NANS allow transitions from (partial) NaN
     to non-NaN.  */
  tree type = TREE_TYPE (new_val.value);
  if (SCALAR_FLOAT_TYPE_P (type)
      && !HONOR_NANS (type))
    {
      if (REAL_VALUE_ISNAN (TREE_REAL_CST (old_val.value)))
        return true;
    }
  else if (VECTOR_FLOAT_TYPE_P (type)
           && !HONOR_NANS (type))
    {
      unsigned int count
        = tree_vector_builder::binary_encoded_nelts (old_val.value,
                                                     new_val.value);
      for (unsigned int i = 0; i < count; ++i)
        if (!REAL_VALUE_ISNAN
               (TREE_REAL_CST (VECTOR_CST_ENCODED_ELT (old_val.value, i)))
            && !operand_equal_p (VECTOR_CST_ENCODED_ELT (old_val.value, i),
                                 VECTOR_CST_ENCODED_ELT (new_val.value, i), 0))
          return false;
      return true;
    }
  else if (COMPLEX_FLOAT_TYPE_P (type)
           && !HONOR_NANS (type))
    {
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_REALPART (old_val.value)))
          && !operand_equal_p (TREE_REALPART (old_val.value),
                               TREE_REALPART (new_val.value), 0))
        return false;
      if (!REAL_VALUE_ISNAN (TREE_REAL_CST (TREE_IMAGPART (old_val.value)))
          && !operand_equal_p (TREE_IMAGPART (old_val.value),
                               TREE_IMAGPART (new_val.value), 0))
        return false;
      return true;
    }
  return false;
}

/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, ccp_prop_value_t *new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  ccp_prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_value (new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.  Instead of dropping to VARYING
     use the meet operator to retain a conservative value.
     Missed optimizations like PR65851 make this necessary.
     It also ensures we converge to a stable lattice solution.  */
  if (old_val->lattice_val != UNINITIALIZED)
    ccp_lattice_meet (new_val, old_val);

  gcc_checking_assert (valid_lattice_transition (*old_val, *new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val->lattice_val
      || (new_val->lattice_val == CONSTANT
          && (TREE_CODE (new_val->value) != TREE_CODE (old_val->value)
              || (TREE_CODE (new_val->value) == INTEGER_CST
                  && (new_val->mask != old_val->mask
                      || (wi::bit_and_not (wi::to_widest (old_val->value),
                                           new_val->mask)
                          != wi::bit_and_not (wi::to_widest (new_val->value),
                                              new_val->mask))))
              || (TREE_CODE (new_val->value) != INTEGER_CST
                  && !operand_equal_p (new_val->value, old_val->value, 0)))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
         partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          dump_lattice_value (dump_file, "Lattice value changed to ", *new_val);
          fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
        }

      *old_val = *new_val;

      gcc_assert (new_val->lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
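
/* For illustration: if the old value was 1 with mask 2 and the new
   evaluation is the exact constant 3, the meet above yields value 3
   with mask 2, keeping the same known bits instead of making an
   invalid upward move in the bitwise lattice.  */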

static ccp_prop_value_t get_value_for_expr (tree, bool);
static ccp_prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
void bit_value_binop (enum tree_code, signop, int, widest_int *, widest_int *,
                      signop, int, const widest_int &, const widest_int &,
                      signop, int, const widest_int &, const widest_int &);

/* Return a widest_int that can be used for bitwise simplifications
   from VAL.  */

static widest_int
value_to_wide_int (ccp_prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return wi::to_widest (val.value);

  return 0;
}

/* Return the value for the address expression EXPR based on alignment
   information.  */

static ccp_prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  ccp_prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_pointer_alignment_1 (expr, &align, &bitpos);
  val.mask = wi::bit_and_not
    (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
     ? wi::mask <widest_int> (TYPE_PRECISION (type), false)
     : -1,
     align / BITS_PER_UNIT - 1);
  val.lattice_val
    = wi::sext (val.mask, TYPE_PRECISION (type)) == -1 ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value = build_int_cstu (type, bitpos / BITS_PER_UNIT);
  else
    val.value = NULL_TREE;

  return val;
}
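
/* For illustration: for &x with x known to be 8-byte aligned and a
   BITPOS of 32 bits, only the low three bits of the address are
   known; VAL.VALUE is 4 (the byte misalignment) and VAL.MASK leaves
   every bit above bit 2 unknown.  */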

/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static ccp_prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  ccp_prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      ccp_prop_value_t *val_ = get_value (expr);
      if (val_)
        val = *val_;
      else
        {
          val.lattice_val = VARYING;
          val.value = NULL_TREE;
          val.mask = -1;
        }
      if (for_bits_p
          && val.lattice_val == CONSTANT)
        {
          if (TREE_CODE (val.value) == ADDR_EXPR)
            val = get_value_from_alignment (val.value);
          else if (TREE_CODE (val.value) != INTEGER_CST)
            {
              val.lattice_val = VARYING;
              val.value = NULL_TREE;
              val.mask = -1;
            }
        }
      /* Fall back to a copy value.  */
      if (!for_bits_p
          && val.lattice_val == VARYING
          && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (expr))
        {
          val.lattice_val = CONSTANT;
          val.value = expr;
          val.mask = -1;
        }
    }
  else if (is_gimple_min_invariant (expr)
           && (!for_bits_p || TREE_CODE (expr) == INTEGER_CST))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = 0;
      canonicalize_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = -1;
      val.value = NULL_TREE;
    }

  if (val.lattice_val == VARYING
      && TYPE_UNSIGNED (TREE_TYPE (expr)))
    val.mask = wi::zext (val.mask, TYPE_PRECISION (TREE_TYPE (expr)));

  return val;
}

/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT causes its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */

static ccp_lattice_t
likely_value (gimple *stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  bool has_nsa_operand;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  has_nsa_operand = false;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      ccp_prop_value_t *val = get_value (use);

      if (val && val->lattice_val == UNDEFINED)
        has_undefined_operand = true;
      else
        all_undefined_operands = false;

      if (val && val->lattice_val == CONSTANT)
        has_constant_operand = true;

      if (SSA_NAME_IS_DEFAULT_DEF (use)
          || !prop_simulate_again_p (SSA_NAME_DEF_STMT (use)))
        has_nsa_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
        continue;
      if (is_gimple_min_invariant (op))
        has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  if (has_undefined_operand
      && code == GIMPLE_CALL
      && gimple_call_internal_p (stmt))
    switch (gimple_call_internal_fn (stmt))
      {
      /* These 3 builtins use the first argument just as a magic
         way to find out a decl uid.  */
      case IFN_GOMP_SIMD_LANE:
      case IFN_GOMP_SIMD_VF:
      case IFN_GOMP_SIMD_LAST_LANE:
        has_undefined_operand = false;
        break;
      default:
        break;
      }

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
        {
        /* Unary operators are handled with all_undefined_operands.  */
        case PLUS_EXPR:
        case MINUS_EXPR:
        case POINTER_PLUS_EXPR:
        case BIT_XOR_EXPR:
          /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
             Not bitwise operators, one VARYING operand may specify the
             result completely.
             Not logical operators for the same reason, apart from XOR.
             Not COMPLEX_EXPR as one VARYING operand makes the result partly
             not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
             the undefined operand may be promoted.  */
          return UNDEFINED;

        case ADDR_EXPR:
          /* If any part of an address is UNDEFINED, like the index
             of an ARRAY_EXPR, then treat the result as UNDEFINED.  */
          return UNDEFINED;

        default:
          ;
        }
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to CONSTANT.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  Also we can combine the stmt with definitions from
     operands whose definitions are not simulated again.  */
  if (has_constant_operand
      || has_nsa_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
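
/* For illustration: for a_2 = b_3 ^ c_4 with b_3 UNDEFINED and c_4
   VARYING, the BIT_XOR_EXPR case above returns UNDEFINED, while for
   a bitwise AND the VARYING operand could determine the result by
   itself, so the fall-through to CONSTANT applies instead.  */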

/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple *stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call or a call to a function with
     assume_aligned/alloc_align attribute, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl, fntype = gimple_call_fntype (stmt);
      if (!gimple_call_lhs (stmt)
          || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
              && !fndecl_built_in_p (fndecl)
              && !lookup_attribute ("assume_aligned",
                                    TYPE_ATTRIBUTES (fntype))
              && !lookup_attribute ("alloc_align",
                                    TYPE_ATTRIBUTES (fntype))))
        return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps is not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}

/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  n_const_val = num_ssa_names;
  const_val = XCNEWVEC (ccp_prop_value_t, n_const_val);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gimple *stmt = gsi_stmt (i);
          bool is_varying;

          /* If the statement is a control insn, then we do not
             want to avoid simulating the statement once.  Failure
             to do so means that those edges will never get added.  */
          if (stmt_ends_bb_p (stmt))
            is_varying = false;
          else
            is_varying = surely_varying_stmt_p (stmt);

          if (is_varying)
            {
              tree def;
              ssa_op_iter iter;

              /* If the statement will not produce a constant, mark
                 all its outputs VARYING.  */
              FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
                set_value_varying (def);
            }
          prop_set_simulate_again (stmt, !is_varying);
        }
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
        {
          gphi *phi = i.phi ();

          if (virtual_operand_p (gimple_phi_result (phi)))
            prop_set_simulate_again (phi, false);
          else
            prop_set_simulate_again (phi, true);
        }
    }
}

/* Debug count support.  Reset the values of ssa names to VARYING when
   the total number of ssa names analyzed exceeds the specified debug
   count.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
        {
          const_val[i].lattice_val = VARYING;
          const_val[i].mask = -1;
          const_val[i].value = NULL_TREE;
        }
    }
}


/* We want to provide our own GET_VALUE and FOLD_STMT virtual methods.  */
class ccp_folder : public substitute_and_fold_engine
{
 public:
  tree value_of_expr (tree, gimple *) FINAL OVERRIDE;
  bool fold_stmt (gimple_stmt_iterator *) FINAL OVERRIDE;
};

/* This method just wraps GET_CONSTANT_VALUE for now.  Over time
   naked calls to GET_CONSTANT_VALUE should be eliminated in favor
   of calling member functions.  */

tree
ccp_folder::value_of_expr (tree op, gimple *)
{
  return get_constant_value (op);
}

/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.  If NONZERO_P, record nonzero bits.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (bool nonzero_p)
{
  bool something_changed;
  unsigned i;
  tree name;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice or nonzero bits from partially
     constant integers.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      ccp_prop_value_t *val;
      unsigned int tem, align;

      if (!POINTER_TYPE_P (TREE_TYPE (name))
          && (!INTEGRAL_TYPE_P (TREE_TYPE (name))
              /* Don't record nonzero bits before IPA to avoid
                 using too much memory.  */
              || !nonzero_p))
        continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
          || TREE_CODE (val->value) != INTEGER_CST
          || val->mask == 0)
        continue;

      if (POINTER_TYPE_P (TREE_TYPE (name)))
        {
          /* Trailing mask bits specify the alignment, trailing value
             bits the misalignment.  */
          tem = val->mask.to_uhwi ();
          align = least_bit_hwi (tem);
          if (align > 1)
            set_ptr_info_alignment (get_ptr_info (name), align,
                                    (TREE_INT_CST_LOW (val->value)
                                     & (align - 1)));
        }
      else
        {
          unsigned int precision = TYPE_PRECISION (TREE_TYPE (val->value));
          wide_int nonzero_bits
            = (wide_int::from (val->mask, precision, UNSIGNED)
               | wi::to_wide (val->value));
          nonzero_bits &= get_nonzero_bits (name);
          set_nonzero_bits (name, nonzero_bits);
        }
    }

  /* Perform substitutions based on the known constant values.  */
  class ccp_folder ccp_folder;
  something_changed = ccp_folder.substitute_and_fold ();

  free (const_val);
  const_val = NULL;
  return something_changed;
}


/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

     any M UNDEFINED = any
     any M VARYING   = VARYING
     Ci  M Cj        = Ci       if (i == j)
     Ci  M Cj        = VARYING  if (i != j)  */

static void
ccp_lattice_meet (ccp_prop_value_t *val1, ccp_prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED
      /* For UNDEFINED M SSA we can't always use the SSA name because
         its definition may not dominate the PHI node.  Doing optimistic
         copy propagation also causes a lot of gcc.dg/uninit-pred*.c
         FAILs.  */
      && (val2->lattice_val != CONSTANT
          || TREE_CODE (val2->value) != SSA_NAME))
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED
           /* See above.  */
           && (val1->lattice_val != CONSTANT
               || TREE_CODE (val1->value) != SSA_NAME))
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && TREE_CODE (val1->value) == INTEGER_CST
           && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
         drop to varying.  */
      val1->mask = (val1->mask | val2->mask
                    | (wi::to_widest (val1->value)
                       ^ wi::to_widest (val2->value)));
      if (wi::sext (val1->mask,
                    TYPE_PRECISION (TREE_TYPE (val1->value))) == -1)
        {
          val1->lattice_val = VARYING;
          val1->value = NULL_TREE;
        }
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && operand_equal_p (val1->value, val2->value, 0))
    {
      /* Ci M Cj = Ci       if (i == j)
         Ci M Cj = VARYING  if (i != j)

         VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
           && val2->lattice_val == CONSTANT
           && (TREE_CODE (val1->value) == ADDR_EXPR
               || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
         alignment.  */
      ccp_prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
        *val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
        tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = -1;
      val1->value = NULL_TREE;
    }
}
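
/* For illustration: meeting the exact constants 3 (mask 0) and 1
   (mask 0) gives mask 0 | 0 | (3 ^ 1) == 2, i.e. bit 0 known to be 1,
   bit 1 unknown and the higher bits known to be 0, a partially
   constant value rather than an immediate drop to VARYING.  */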
1119
1120
1121/* Loop through the PHI_NODE's parameters for BLOCK and compare their
1122 lattice values to determine PHI_NODE's lattice value. The value of a
1123 PHI node is determined calling ccp_lattice_meet with all the arguments
1124 of the PHI node that are incoming via executable edges. */
1125
1126enum ssa_prop_result
1127ccp_propagate::visit_phi (gphi *phi)
1128{
1129 unsigned i;
1130 ccp_prop_value_t new_val;
1131
1132 if (dump_file && (dump_flags & TDF_DETAILS))
1133 {
1134 fprintf (dump_file, "\nVisiting PHI node: ");
1135 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1136 }
1137
1138 new_val.lattice_val = UNDEFINED;
1139 new_val.value = NULL_TREE(tree) nullptr;
1140 new_val.mask = 0;
1141
1142 bool first = true;
1143 bool non_exec_edge = false;
1144 for (i = 0; i < gimple_phi_num_args (phi); i++)
1145 {
1146 /* Compute the meet operator over all the PHI arguments flowing
1147 through executable edges. */
1148 edge e = gimple_phi_arg_edge (phi, i);
1149
1150 if (dump_file && (dump_flags & TDF_DETAILS))
1151 {
1152 fprintf (dump_file,
1153 "\tArgument #%d (%d -> %d %sexecutable)\n",
1154 i, e->src->index, e->dest->index,
1155 (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
1156 }
1157
1158 /* If the incoming edge is executable, Compute the meet operator for
1159 the existing value of the PHI node and the current PHI argument. */
1160 if (e->flags & EDGE_EXECUTABLE)
1161 {
1162 tree arg = gimple_phi_arg (phi, i)->def;
1163 ccp_prop_value_t arg_val = get_value_for_expr (arg, false);
1164
1165 if (first)
1166 {
1167 new_val = arg_val;
1168 first = false;
1169 }
1170 else
1171 ccp_lattice_meet (&new_val, &arg_val);
1172
1173 if (dump_file && (dump_flags & TDF_DETAILS))
1174 {
1175 fprintf (dump_file, "\t");
1176 print_generic_expr (dump_file, arg, dump_flags);
1177 dump_lattice_value (dump_file, "\tValue: ", arg_val);
1178 fprintf (dump_file, "\n");
1179 }
1180
1181 if (new_val.lattice_val == VARYING)
1182 break;
1183 }
1184 else
1185 non_exec_edge = true;
1186 }
1187
1188 /* In case there were non-executable edges and the value is a copy
1189 make sure its definition dominates the PHI node. */
1190 if (non_exec_edge
1191 && new_val.lattice_val == CONSTANT
1192 && TREE_CODE (new_val.value) == SSA_NAME
1193 && ! SSA_NAME_IS_DEFAULT_DEF (new_val.value)
1194 && ! dominated_by_p (CDI_DOMINATORS, gimple_bb (phi),
1195 gimple_bb (SSA_NAME_DEF_STMT (new_val.value))))
1196 {
1197 new_val.lattice_val = VARYING;
1198 new_val.value = NULL_TREE;
1199 new_val.mask = -1;
1200 }
1201
1202 if (dump_file && (dump_flags & TDF_DETAILS))
1203 {
1204 dump_lattice_value (dump_file, "\n PHI node value: ", new_val);
1205 fprintf (dump_file, "\n\n");
1206 }
1207
1208 /* Make the transition to the new value. */
1209 if (set_lattice_value (gimple_phi_result (phi), &new_val))
1210 {
1211 if (new_val.lattice_val == VARYING)
1212 return SSA_PROP_VARYING;
1213 else
1214 return SSA_PROP_INTERESTING;
1215 }
1216 else
1217 return SSA_PROP_NOT_INTERESTING;
1218}
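As a rough model of the loop above, reusing the toy types from the earlier sketch: the PHI result starts at UNDEFINED, is met with each argument flowing in over an executable edge, and the walk stops as soon as the result reaches VARYING, since VARYING absorbs everything:

static toy_value visit_phi_sketch (const toy_value *args,
                                   const bool *executable, int n)
{
  toy_value result = { TOY_UNDEFINED, 0 };
  for (int i = 0; i < n; i++)
    {
      if (!executable[i])
        continue;                /* Skip edges not proven reachable.  */
      toy_meet (&result, args[i]);
      if (result.lat == TOY_VARYING)
        break;                   /* Early exit: VARYING is absorbing.  */
    }
  return result;
}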
1219
1220/* Return the constant value for OP, or OP itself otherwise. */
1221
1222static tree
1223valueize_op (tree op)
1224{
1225 if (TREE_CODE (op) == SSA_NAME)
1226 {
1227 tree tem = get_constant_value (op);
1228 if (tem)
1229 return tem;
1230 }
1231 return op;
1232}
1233
1234/* Return the constant value for OP, but signal to not follow SSA
1235 edges if the definition may be simulated again. */
1236
1237static tree
1238valueize_op_1 (tree op)
1239{
1240 if (TREE_CODE (op) == SSA_NAME)
1241 {
1242 /* If the definition may be simulated again we cannot follow
1243 this SSA edge as the SSA propagator does not necessarily
1244 re-visit the use. */
1245 gimple *def_stmt = SSA_NAME_DEF_STMT (op);
1246 if (!gimple_nop_p (def_stmt)
1247 && prop_simulate_again_p (def_stmt))
1248 return NULL_TREE;
1249 tree tem = get_constant_value (op);
1250 if (tem)
1251 return tem;
1252 }
1253 return op;
1254}
1255
1256/* CCP specific front-end to the non-destructive constant folding
1257 routines.
1258
1259 Attempt to simplify the RHS of STMT knowing that one or more
1260 operands are constants.
1261
1262 If simplification is possible, return the simplified RHS,
1263 otherwise return the original RHS or NULL_TREE. */
1264
1265static tree
1266ccp_fold (gimple *stmt)
1267{
1268 location_t loc = gimple_location (stmt);
1269 switch (gimple_code (stmt))
1270 {
1271 case GIMPLE_COND:
1272 {
1273 /* Handle comparison operators that can appear in GIMPLE form. */
1274 tree op0 = valueize_op (gimple_cond_lhs (stmt));
1275 tree op1 = valueize_op (gimple_cond_rhs (stmt));
1276 enum tree_code code = gimple_cond_code (stmt);
1277 return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
1278 }
1279
1280 case GIMPLE_SWITCH:
1281 {
1282 /* Return the constant switch index. */
1283 return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
1284 }
1285
1286 case GIMPLE_ASSIGN:
1287 case GIMPLE_CALL:
1288 return gimple_fold_stmt_to_constant_1 (stmt,
1289 valueize_op, valueize_op_1);
1290
1291 default:
1292 gcc_unreachable ();
1293 }
1294}
1295
1296/* Apply the operation CODE in type TYPE to the value, mask pair
1297 RVAL and RMASK representing a value of type RTYPE and set
1298 the value, mask pair *VAL and *MASK to the result. */
1299
1300void
1301bit_value_unop (enum tree_code code, signop type_sgn, int type_precision,
1302 widest_int *val, widest_int *mask,
1303 signop rtype_sgn, int rtype_precision,
1304 const widest_int &rval, const widest_int &rmask)
1305{
1306 switch (code)
13
Control jumps to the 'default' case at line 1337
1307 {
1308 case BIT_NOT_EXPR:
1309 *mask = rmask;
1310 *val = ~rval;
1311 break;
1312
1313 case NEGATE_EXPR:
1314 {
1315 widest_int temv, temm;
1316 /* Return ~rval + 1. */
1317 bit_value_unop (BIT_NOT_EXPR, type_sgn, type_precision, &temv, &temm,
1318 type_sgn, type_precision, rval, rmask);
1319 bit_value_binop (PLUS_EXPR, type_sgn, type_precision, val, mask,
1320 type_sgn, type_precision, temv, temm,
1321 type_sgn, type_precision, 1, 0);
1322 break;
1323 }
1324
1325 CASE_CONVERT:
1326 {
1327 /* First extend mask and value according to the original type. */
1328 *mask = wi::ext (rmask, rtype_precision, rtype_sgn);
1329 *val = wi::ext (rval, rtype_precision, rtype_sgn);
1330
1331 /* Then extend mask and value according to the target type. */
1332 *mask = wi::ext (*mask, type_precision, type_sgn);
1333 *val = wi::ext (*val, type_precision, type_sgn);
1334 break;
1335 }
1336
1337 default:
1338 *mask = -1;
1339 break;
14
Execution continues on line 1338
1340 }
1341}
15
Returning without writing to 'val->len'
1342
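The note above is the core of the reported defect: in the default case, bit_value_unop stores to *mask but never to *val, so the widest_int storage behind *val can flow back to the caller uninitialized. A stand-alone sketch of the transfer function, with plain int64_t standing in for widest_int and the default case writing both outputs; this is one possible shape of a fix, not GCC's actual change:

#include <cassert>
#include <cstdint>

enum toy_unop { TOY_BIT_NOT, TOY_UNSUPPORTED };

/* Known-bits transfer: a set bit in *mask means that value bit is
   unknown.  Sketch only.  */
static void unop_known_bits (toy_unop code, int64_t *val, int64_t *mask,
                             int64_t rval, int64_t rmask)
{
  switch (code)
    {
    case TOY_BIT_NOT:
      *mask = rmask;             /* Unknown bits stay unknown...  */
      *val = ~rval;              /* ...known bits are complemented.  */
      break;
    default:
      *mask = -1;                /* Everything unknown...  */
      *val = 0;                  /* ...and *val still gets a defined value.  */
      break;
    }
}

int main ()
{
  int64_t v, m;
  unop_known_bits (TOY_BIT_NOT, &v, &m, 0b1010, 0b0100);
  assert (m == 0b0100 && (v & 0xF) == 0b0101);
  unop_known_bits (TOY_UNSUPPORTED, &v, &m, 0, 0);
  assert (m == -1 && v == 0);    /* Nothing uninitialized escapes.  */
  return 0;
}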
1343/* Apply the operation CODE in type TYPE to the value, mask pairs
1344 R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
1345 and R2TYPE and set the value, mask pair *VAL and *MASK to the result. */
1346
1347void
1348bit_value_binop (enum tree_code code, signop sgn, int width,
1349 widest_int *val, widest_int *mask,
1350 signop r1type_sgn, int r1type_precision,
1351 const widest_int &r1val, const widest_int &r1mask,
1352 signop r2type_sgn, int r2type_precision,
1353 const widest_int &r2val, const widest_int &r2mask)
1354{
1355 bool swap_p = false;
1356
1357 /* Assume we'll get a constant result. Use an initial non-varying
1358 value; we fall back to varying in the end if necessary. */
1359 *mask = -1;
1360
1361 switch (code)
1362 {
1363 case BIT_AND_EXPR:
1364 /* The mask is constant where there is a known not
1365 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2)) */
1366 *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
1367 *val = r1val & r2val;
1368 break;
1369
1370 case BIT_IOR_EXPR:
1371 /* The mask is constant where there is a known
1372 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)). */
1373 *mask = wi::bit_and_not (r1mask | r2mask,
1374 wi::bit_and_not (r1val, r1mask)
1375 | wi::bit_and_not (r2val, r2mask));
1376 *val = r1val | r2val;
1377 break;
1378
1379 case BIT_XOR_EXPR:
1380 /* m1 | m2 */
1381 *mask = r1mask | r2mask;
1382 *val = r1val ^ r2val;
1383 break;
1384
1385 case LROTATE_EXPR:
1386 case RROTATE_EXPR:
1387 if (r2mask == 0)
1388 {
1389 widest_int shift = r2val;
1390 if (shift == 0)
1391 {
1392 *mask = r1mask;
1393 *val = r1val;
1394 }
1395 else
1396 {
1397 if (wi::neg_p (shift))
1398 {
1399 shift = -shift;
1400 if (code == RROTATE_EXPR)
1401 code = LROTATE_EXPR;
1402 else
1403 code = RROTATE_EXPR;
1404 }
1405 if (code == RROTATE_EXPR)
1406 {
1407 *mask = wi::rrotate (r1mask, shift, width);
1408 *val = wi::rrotate (r1val, shift, width);
1409 }
1410 else
1411 {
1412 *mask = wi::lrotate (r1mask, shift, width);
1413 *val = wi::lrotate (r1val, shift, width);
1414 }
1415 }
1416 }
1417 break;
1418
1419 case LSHIFT_EXPR:
1420 case RSHIFT_EXPR:
1421 /* ??? We can handle partially known shift counts if we know
1422 their sign. That way we can tell that (x << (y | 8)) & 255
1423 is zero. */
1424 if (r2mask == 0)
1425 {
1426 widest_int shift = r2val;
1427 if (shift == 0)
1428 {
1429 *mask = r1mask;
1430 *val = r1val;
1431 }
1432 else
1433 {
1434 if (wi::neg_p (shift))
1435 break;
1436 if (code == RSHIFT_EXPR)
1437 {
1438 *mask = wi::rshift (wi::ext (r1mask, width, sgn), shift, sgn);
1439 *val = wi::rshift (wi::ext (r1val, width, sgn), shift, sgn);
1440 }
1441 else
1442 {
1443 *mask = wi::ext (r1mask << shift, width, sgn);
1444 *val = wi::ext (r1val << shift, width, sgn);
1445 }
1446 }
1447 }
1448 break;
1449
1450 case PLUS_EXPR:
1451 case POINTER_PLUS_EXPR:
1452 {
1453 /* Do the addition with unknown bits set to zero, to give carry-ins of
1454 zero wherever possible. */
1455 widest_int lo = (wi::bit_and_not (r1val, r1mask)
1456 + wi::bit_and_not (r2val, r2mask));
1457 lo = wi::ext (lo, width, sgn);
1458 /* Do the addition with unknown bits set to one, to give carry-ins of
1459 one wherever possible. */
1460 widest_int hi = (r1val | r1mask) + (r2val | r2mask);
1461 hi = wi::ext (hi, width, sgn);
1462 /* Each bit in the result is known if (a) the corresponding bits in
1463 both inputs are known, and (b) the carry-in to that bit position
1464 is known. We can check condition (b) by seeing if we got the same
1465 result with minimised carries as with maximised carries. */
1466 *mask = r1mask | r2mask | (lo ^ hi);
1467 *mask = wi::ext (*mask, width, sgn);
1468 /* It shouldn't matter whether we choose lo or hi here. */
1469 *val = lo;
1470 break;
1471 }
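The carry argument in the comments above can be exercised with ordinary integers: add once with every unknown bit forced to 0 and once with every unknown bit forced to 1; a bit whose two sums disagree has an unknown carry-in. A hedged sketch with uint64_t standing in for widest_int:

#include <cassert>
#include <cstdint>

static void add_known_bits (uint64_t *val, uint64_t *mask,
                            uint64_t r1val, uint64_t r1mask,
                            uint64_t r2val, uint64_t r2mask)
{
  uint64_t lo = (r1val & ~r1mask) + (r2val & ~r2mask); /* carries minimised */
  uint64_t hi = (r1val | r1mask) + (r2val | r2mask);   /* carries maximised */
  *mask = r1mask | r2mask | (lo ^ hi); /* differing bit => unknown carry-in */
  *val = lo;
}

int main ()
{
  uint64_t v, m;
  /* 0b10?0 + 0b0001: bit 1 is unknown, but bits 0, 2 and 3 stay known.  */
  add_known_bits (&v, &m, 0b1000, 0b0010, 0b0001, 0);
  assert (m == 0b0010);
  assert ((v & 1) == 1);         /* bit 0 is known to be set */
  return 0;
}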
1472
1473 case MINUS_EXPR:
1474 {
1475 widest_int temv, temm;
1476 bit_value_unop (NEGATE_EXPR, r2type_sgn, r2type_precision, &temv, &temm,
1477 r2type_sgn, r2type_precision, r2val, r2mask);
1478 bit_value_binop (PLUS_EXPR, sgn, width, val, mask,
1479 r1type_sgn, r1type_precision, r1val, r1mask,
1480 r2type_sgn, r2type_precision, temv, temm);
1481 break;
1482 }
1483
1484 case MULT_EXPR:
1485 {
1486 /* Just track trailing zeros in both operands and transfer
1487 them to the other. */
1488 int r1tz = wi::ctz (r1val | r1mask);
1489 int r2tz = wi::ctz (r2val | r2mask);
1490 if (r1tz + r2tz >= width)
1491 {
1492 *mask = 0;
1493 *val = 0;
1494 }
1495 else if (r1tz + r2tz > 0)
1496 {
1497 *mask = wi::ext (wi::mask <widest_int> (r1tz + r2tz, true),
1498 width, sgn);
1499 *val = 0;
1500 }
1501 break;
1502 }
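The multiplication case above only propagates trailing zeros: if one factor is divisible by 2^a and the other by 2^b, the product is divisible by 2^(a+b), and wi::ctz of val | mask gives the number of trailing bits known to be zero. A small self-contained illustration:

#include <cassert>
#include <cstdint>

/* Trailing zeros of the "possibly set" bits; a stand-in for
   wi::ctz (val | mask).  Sketch only.  */
static int known_trailing_zeros (uint64_t val, uint64_t mask)
{
  uint64_t possibly_set = val | mask;
  return possibly_set ? __builtin_ctzll (possibly_set) : 64;
}

int main ()
{
  int r1tz = known_trailing_zeros (0b100, 0);        /* multiple of 4 */
  int r2tz = known_trailing_zeros (0b10000, 0b1110); /* multiple of 2 */
  /* The product is a known multiple of 2^(r1tz + r2tz) == 8, so its
     low three bits get value 0 and mask 0 in the code above.  */
  assert (r1tz == 2 && r2tz == 1);
  return 0;
}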
1503
1504 case EQ_EXPR:
1505 case NE_EXPR:
1506 {
1507 widest_int m = r1mask | r2mask;
1508 if (wi::bit_and_not (r1val, m) != wi::bit_and_not (r2val, m))
1509 {
1510 *mask = 0;
1511 *val = ((code == EQ_EXPR) ? 0 : 1);
1512 }
1513 else
1514 {
1515 /* We know the result of a comparison is always one or zero. */
1516 *mask = 1;
1517 *val = 0;
1518 }
1519 break;
1520 }
1521
1522 case GE_EXPR:
1523 case GT_EXPR:
1524 swap_p = true;
1525 code = swap_tree_comparison (code);
1526 /* Fall through. */
1527 case LT_EXPR:
1528 case LE_EXPR:
1529 {
1530 int minmax, maxmin;
1531
1532 const widest_int &o1val = swap_p ? r2val : r1val;
1533 const widest_int &o1mask = swap_p ? r2mask : r1mask;
1534 const widest_int &o2val = swap_p ? r1val : r2val;
1535 const widest_int &o2mask = swap_p ? r1mask : r2mask;
1536
1537 /* If the most significant bits are not known we know nothing. */
1538 if (wi::neg_p (o1mask) || wi::neg_p (o2mask))
1539 break;
1540
1541 /* For comparisons the signedness is in the comparison operands. */
1542 sgn = r1type_sgn;
1543
1544 /* If we know the most significant bits we know the value
1545 ranges by means of treating varying bits as zero
1546 or one. Do a cross comparison of the max/min pairs. */
1547 maxmin = wi::cmp (o1val | o1mask,
1548 wi::bit_and_not (o2val, o2mask), sgn);
1549 minmax = wi::cmp (wi::bit_and_not (o1val, o1mask),
1550 o2val | o2mask, sgn);
1551 if (maxmin < 0) /* o1 is less than o2. */
1552 {
1553 *mask = 0;
1554 *val = 1;
1555 }
1556 else if (minmax > 0) /* o1 is not less or equal to o2. */
1557 {
1558 *mask = 0;
1559 *val = 0;
1560 }
1561 else if (maxmin == minmax) /* o1 and o2 are equal. */
1562 {
1563 /* This probably should never happen as we'd have
1564 folded the thing during fully constant value folding. */
1565 *mask = 0;
1566 *val = (code == LE_EXPR ? 1 : 0);
1567 }
1568 else
1569 {
1570 /* We know the result of a comparison is always one or zero. */
1571 *mask = 1;
1572 *val = 0;
1573 }
1574 break;
1575 }
1576
1577 default:;
1578 }
1579}
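For the ordered comparisons above: once the sign bits are known, treating every varying bit as 0 yields each operand's minimum and treating it as 1 yields its maximum, and the cross comparison of those bounds decides the result whenever the two ranges do not overlap. A hedged, unsigned-only sketch, not GCC's code:

#include <cassert>
#include <cstdint>

/* Decide o1 < o2 from val/mask pairs: 1 = always, 0 = never,
   -1 = unknown.  */
static int known_less_than (uint64_t v1, uint64_t m1,
                            uint64_t v2, uint64_t m2)
{
  uint64_t max1 = v1 | m1, min1 = v1 & ~m1;
  uint64_t max2 = v2 | m2, min2 = v2 & ~m2;
  if (max1 < min2)
    return 1;                    /* even the largest o1 is below o2 */
  if (min1 >= max2)
    return 0;                    /* even the smallest o1 is not below o2 */
  return -1;                     /* ranges overlap: result unknown */
}

int main ()
{
  assert (known_less_than (0b0001, 0b0010, 0b1000, 0b0100) == 1);
  assert (known_less_than (0b1000, 0b0100, 0b0001, 0b0010) == 0);
  assert (known_less_than (0b0000, 0b1111, 0b1000, 0b0000) == -1);
  return 0;
}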
1580
1581/* Return the propagation value when applying the operation CODE to
1582 the value RHS yielding type TYPE. */
1583
1584static ccp_prop_value_t
1585bit_value_unop (enum tree_code code, tree type, tree rhs)
1586{
1587 ccp_prop_value_t rval = get_value_for_expr (rhs, true);
1588 widest_int value, mask;
1
Calling default constructor for 'generic_wide_int<fixed_wide_int_storage<192>>'
6
Returning from default constructor for 'generic_wide_int<fixed_wide_int_storage<192>>'
1589 ccp_prop_value_t val;
1590
1591 if (rval.lattice_val == UNDEFINED)
7
Assuming field 'lattice_val' is not equal to UNDEFINED
8
Taking false branch
1592 return rval;
1593
1594 gcc_assert ((rval.lattice_val == CONSTANT
9
Assuming field 'lattice_val' is equal to CONSTANT
10
Assuming field 'code' is equal to INTEGER_CST
11
'?' condition is false
1595 && TREE_CODE (rval.value) == INTEGER_CST)
1596 || wi::sext (rval.mask, TYPE_PRECISION (TREE_TYPE (rhs))) == -1);
1597 bit_value_unop (code, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
12
Calling 'bit_value_unop'
16
Returning from 'bit_value_unop'
1598 TYPE_SIGN (TREE_TYPE (rhs)), TYPE_PRECISION (TREE_TYPE (rhs)),
1599 value_to_wide_int (rval), rval.mask);
1600 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
17
Taking true branch
1601 {
1602 val.lattice_val = CONSTANT;
1603 val.mask = mask;
1604 /* ??? Delay building trees here. */
1605 val.value = wide_int_to_tree (type, value);
18
Calling constructor for 'poly_int<1, generic_wide_int<wide_int_ref_storage<false, true>>>'
1606 }
1607 else
1608 {
1609 val.lattice_val = VARYING;
1610 val.value = NULL_TREE;
1611 val.mask = -1;
1612 }
1613 return val;
1614}
1615
1616/* Return the propagation value when applying the operation CODE to
1617 the values RHS1 and RHS2 yielding type TYPE. */
1618
1619static ccp_prop_value_t
1620bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
1621{
1622 ccp_prop_value_t r1val = get_value_for_expr (rhs1, true);
1623 ccp_prop_value_t r2val = get_value_for_expr (rhs2, true);
1624 widest_int value, mask;
1625 ccp_prop_value_t val;
1626
1627 if (r1val.lattice_val == UNDEFINED
1628 || r2val.lattice_val == UNDEFINED)
1629 {
1630 val.lattice_val = VARYING;
1631 val.value = NULL_TREE;
1632 val.mask = -1;
1633 return val;
1634 }
1635
1636 gcc_assert ((r1val.lattice_val == CONSTANT
1637 && TREE_CODE (r1val.value) == INTEGER_CST)
1638 || wi::sext (r1val.mask,
1639 TYPE_PRECISION (TREE_TYPE (rhs1))) == -1);
1640 gcc_assert ((r2val.lattice_val == CONSTANT
1641 && TREE_CODE (r2val.value) == INTEGER_CST)
1642 || wi::sext (r2val.mask,
1643 TYPE_PRECISION (TREE_TYPE (rhs2))) == -1);
1644 bit_value_binop (code, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
1645 TYPE_SIGN (TREE_TYPE (rhs1)), TYPE_PRECISION (TREE_TYPE (rhs1)),
1646 value_to_wide_int (r1val), r1val.mask,
1647 TYPE_SIGN (TREE_TYPE (rhs2)), TYPE_PRECISION (TREE_TYPE (rhs2)),
1648 value_to_wide_int (r2val), r2val.mask);
1649
1650 /* (x * x) & 2 == 0. */
1651 if (code == MULT_EXPR && rhs1 == rhs2 && TYPE_PRECISION (type) > 1)
1652 {
1653 widest_int m = 2;
1654 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1655 value = wi::bit_and_not (value, m);
1656 else
1657 value = 0;
1658 mask = wi::bit_and_not (mask, m);
1659 }
1660
1661 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1662 {
1663 val.lattice_val = CONSTANT;
1664 val.mask = mask;
1665 /* ??? Delay building trees here. */
1666 val.value = wide_int_to_tree (type, value);
1667 }
1668 else
1669 {
1670 val.lattice_val = VARYING;
1671 val.value = NULL_TREE;
1672 val.mask = -1;
1673 }
1674 return val;
1675}
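The (x * x) & 2 == 0 refinement above follows from squares being congruent to 0 or 1 mod 4 (even x gives 4k², odd x gives 4k² + 4k + 1), so bit 1 of a square is always clear, even under wrap-around. A quick exhaustive check at 16 bits:

#include <cassert>
#include <cstdint>

int main ()
{
  for (uint32_t x = 0; x < (1u << 16); x++)
    {
      uint16_t sq = (uint16_t) (x * x);  /* modular, like a 16-bit type */
      assert ((sq & 2) == 0);            /* bit 1 of a square never sets */
    }
  return 0;
}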
1676
1677/* Return the propagation value for __builtin_assume_aligned
1678 and functions with assume_aligned or alloc_aligned attribute.
1679 For __builtin_assume_aligned, ATTR is NULL_TREE,
1680 for assume_aligned attribute ATTR is non-NULL and ALLOC_ALIGNED
1681 is false, for alloc_aligned attribute ATTR is non-NULL and
1682 ALLOC_ALIGNED is true. */
1683
1684static ccp_prop_value_t
1685bit_value_assume_aligned (gimple *stmt, tree attr, ccp_prop_value_t ptrval,
1686 bool alloc_aligned)
1687{
1688 tree align, misalign = NULL_TREE, type;
1689 unsigned HOST_WIDE_INT aligni, misaligni = 0;
1690 ccp_prop_value_t alignval;
1691 widest_int value, mask;
1692 ccp_prop_value_t val;
1693
1694 if (attr == NULL_TREE)
1695 {
1696 tree ptr = gimple_call_arg (stmt, 0);
1697 type = TREE_TYPE (ptr);
1698 ptrval = get_value_for_expr (ptr, true);
1699 }
1700 else
1701 {
1702 tree lhs = gimple_call_lhs (stmt);
1703 type = TREE_TYPE (lhs);
1704 }
1705
1706 if (ptrval.lattice_val == UNDEFINED)
1707 return ptrval;
1708 gcc_assert ((ptrval.lattice_val == CONSTANT
1709 && TREE_CODE (ptrval.value) == INTEGER_CST)
1710 || wi::sext (ptrval.mask, TYPE_PRECISION (type)) == -1);
1711 if (attr == NULL_TREE)
1712 {
1713 /* Get aligni and misaligni from __builtin_assume_aligned. */
1714 align = gimple_call_arg (stmt, 1);
1715 if (!tree_fits_uhwi_p (align))
1716 return ptrval;
1717 aligni = tree_to_uhwi (align);
1718 if (gimple_call_num_args (stmt) > 2)
1719 {
1720 misalign = gimple_call_arg (stmt, 2);
1721 if (!tree_fits_uhwi_p (misalign))
1722 return ptrval;
1723 misaligni = tree_to_uhwi (misalign);
1724 }
1725 }
1726 else
1727 {
1728 /* Get aligni and misaligni from assume_aligned or
1729 alloc_align attributes. */
1730 if (TREE_VALUE (attr) == NULL_TREE)
1731 return ptrval;
1732 attr = TREE_VALUE (attr);
1733 align = TREE_VALUE (attr);
1734 if (!tree_fits_uhwi_p (align))
1735 return ptrval;
1736 aligni = tree_to_uhwi (align);
1737 if (alloc_aligned)
1738 {
1739 if (aligni == 0 || aligni > gimple_call_num_args (stmt))
1740 return ptrval;
1741 align = gimple_call_arg (stmt, aligni - 1);
1742 if (!tree_fits_uhwi_p (align))
1743 return ptrval;
1744 aligni = tree_to_uhwi (align);
1745 }
1746 else if (TREE_CHAIN (attr) && TREE_VALUE (TREE_CHAIN (attr)))
1747 {
1748 misalign = TREE_VALUE (TREE_CHAIN (attr));
1749 if (!tree_fits_uhwi_p (misalign))
1750 return ptrval;
1751 misaligni = tree_to_uhwi (misalign);
1752 }
1753 }
1754 if (aligni <= 1 || (aligni & (aligni - 1)) != 0 || misaligni >= aligni)
1755 return ptrval;
1756
1757 align = build_int_cst_type (type, -aligni);
1758 alignval = get_value_for_expr (align, true);
1759 bit_value_binop (BIT_AND_EXPR, TYPE_SIGN (type), TYPE_PRECISION (type), &value, &mask,
1760 TYPE_SIGN (type), TYPE_PRECISION (type), value_to_wide_int (ptrval), ptrval.mask,
1761 TYPE_SIGN (type), TYPE_PRECISION (type), value_to_wide_int (alignval), alignval.mask);
1762
1763 if (wi::sext (mask, TYPE_PRECISION (type)) != -1)
1764 {
1765 val.lattice_val = CONSTANT;
1766 val.mask = mask;
1767 gcc_assert ((mask.to_uhwi () & (aligni - 1)) == 0);
1768 gcc_assert ((value.to_uhwi () & (aligni - 1)) == 0);
1769 value |= misaligni;
1770 /* ??? Delay building trees here. */
1771 val.value = wide_int_to_tree (type, value);
1772 }
1773 else
1774 {
1775 val.lattice_val = VARYING;
1776 val.value = NULL_TREE;
1777 val.mask = -1;
1778 }
1779 return val;
1780}
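The encoding produced above is easy to state directly: a pointer aligned to aligni (a power of two) with misalignment misaligni has its low log2(aligni) bits fully known and equal to misaligni, while the remaining bits stay unknown. A hedged sketch of that val/mask pair, using an illustrative representation rather than GCC's:

#include <cassert>
#include <cstdint>

struct known_bits { uint64_t val; uint64_t mask; }; /* set mask bit = unknown */

static known_bits assume_aligned_bits (uint64_t aligni, uint64_t misaligni)
{
  known_bits kb;
  kb.mask = -aligni;             /* == ~(aligni - 1): only high bits unknown */
  kb.val = misaligni;            /* low bits are exactly the misalignment */
  return kb;
}

int main ()
{
  known_bits kb = assume_aligned_bits (16, 4);
  assert ((kb.mask & 15) == 0);  /* the low four bits are known...  */
  assert ((kb.val & 15) == 4);   /* ...and equal to the misalignment */
  return 0;
}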
1781
1782/* Evaluate statement STMT.
1783 Valid only for assignments, calls, conditionals, and switches. */
1784
1785static ccp_prop_value_t
1786evaluate_stmt (gimple *stmt)
1787{
1788 ccp_prop_value_t val;
1789 tree simplified = NULL_TREE;
1790 ccp_lattice_t likelyvalue = likely_value (stmt);
1791 bool is_constant = false;
1792 unsigned int align;
1793 bool ignore_return_flags = false;
1794
1795 if (dump_file && (dump_flags & TDF_DETAILS))
1796 {
1797 fprintf (dump_file, "which is likely ");
1798 switch (likelyvalue)
1799 {
1800 case CONSTANT:
1801 fprintf (dump_file, "CONSTANT");
1802 break;
1803 case UNDEFINED:
1804 fprintf (dump_file, "UNDEFINED");
1805 break;
1806 case VARYING:
1807 fprintf (dump_file, "VARYING");
1808 break;
1809 default:;
1810 }
1811 fprintf (dump_file, "\n");
1812 }
1813
1814 /* If the statement is likely to have a CONSTANT result, then try
1815 to fold the statement to determine the constant value. */
1816 /* FIXME. This is the only place that we call ccp_fold.
1817 Since likely_value never returns CONSTANT for calls, we will
1818 not attempt to fold them, including builtins that may profit. */
1819 if (likelyvalue == CONSTANT)
1820 {
1821 fold_defer_overflow_warnings ();
1822 simplified = ccp_fold (stmt);
1823 if (simplified
1824 && TREE_CODE (simplified) == SSA_NAME)
1825 {
1826 /* We may not use values of something that may be simulated again,
1827 see valueize_op_1. */
1828 if (SSA_NAME_IS_DEFAULT_DEF (simplified)
1829 || ! prop_simulate_again_p (SSA_NAME_DEF_STMT (simplified)))
1830 {
1831 ccp_prop_value_t *val = get_value (simplified);
1832 if (val && val->lattice_val != VARYING)
1833 {
1834 fold_undefer_overflow_warnings (true, stmt, 0);
1835 return *val;
1836 }
1837 }
1838 else
1839 /* We may also not place a non-valueized copy in the lattice
1840 as that might become stale if we never re-visit this stmt. */
1841 simplified = NULL_TREE;
1842 }
1843 is_constant = simplified && is_gimple_min_invariant (simplified);
1844 fold_undefer_overflow_warnings (is_constant, stmt, 0);
1845 if (is_constant)
1846 {
1847 /* The statement produced a constant value. */
1848 val.lattice_val = CONSTANT;
1849 val.value = simplified;
1850 val.mask = 0;
1851 return val;
1852 }
1853 }
1854 /* If the statement is likely to have a VARYING result, then do not
1855 bother folding the statement. */
1856 else if (likelyvalue == VARYING)
1857 {
1858 enum gimple_code code = gimple_code (stmt);
1859 if (code == GIMPLE_ASSIGN)
1860 {
1861 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1862
1863 /* Other cases cannot satisfy is_gimple_min_invariant
1864 without folding. */
1865 if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
1866 simplified = gimple_assign_rhs1 (stmt);
1867 }
1868 else if (code == GIMPLE_SWITCH)
1869 simplified = gimple_switch_index (as_a <gswitch *> (stmt));
1870 else
1871 /* These cannot satisfy is_gimple_min_invariant without folding. */
1872 gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
1873 is_constant = simplified && is_gimple_min_invariant (simplified);
1874 if (is_constant)
1875 {
1876 /* The statement produced a constant value. */
1877 val.lattice_val = CONSTANT;
1878 val.value = simplified;
1879 val.mask = 0;
1880 }
1881 }
1882 /* If the statement result is likely UNDEFINED, make it so. */
1883 else if (likelyvalue == UNDEFINED)
1884 {
1885 val.lattice_val = UNDEFINED;
1886 val.value = NULL_TREE;
1887 val.mask = 0;
1888 return val;
1889 }
1890
1891 /* Resort to simplification for bitwise tracking. */
1892 if (flag_tree_bit_ccp
1893 && (likelyvalue == CONSTANT || is_gimple_call (stmt)
1894 || (gimple_assign_single_p (stmt)
1895 && gimple_assign_rhs_code (stmt) == ADDR_EXPR))
1896 && !is_constant)
1897 {
1898 enum gimple_code code = gimple_code (stmt);
1899 val.lattice_val = VARYING;
1900 val.value = NULL_TREE;
1901 val.mask = -1;
1902 if (code == GIMPLE_ASSIGN)
1903 {
1904 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1905 tree rhs1 = gimple_assign_rhs1 (stmt);
1906 tree lhs = gimple_assign_lhs (stmt);
1907 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1908 || POINTER_TYPE_P (TREE_TYPE (lhs)))
1909 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1910 || POINTER_TYPE_P (TREE_TYPE (rhs1))))
1911 switch (get_gimple_rhs_class (subcode))
1912 {
1913 case GIMPLE_SINGLE_RHS:
1914 val = get_value_for_expr (rhs1, true);
1915 break;
1916
1917 case GIMPLE_UNARY_RHS:
1918 val = bit_value_unop (subcode, TREE_TYPE (lhs), rhs1);
1919 break;
1920
1921 case GIMPLE_BINARY_RHS:
1922 val = bit_value_binop (subcode, TREE_TYPE (lhs), rhs1,
1923 gimple_assign_rhs2 (stmt));
1924 break;
1925
1926 default:;
1927 }
1928 }
1929 else if (code == GIMPLE_COND)
1930 {
1931 enum tree_code code = gimple_cond_code (stmt);
1932 tree rhs1 = gimple_cond_lhs (stmt);
1933 tree rhs2 = gimple_cond_rhs (stmt);
1934 if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
1935 || POINTER_TYPE_P (TREE_TYPE (rhs1)))
1936 val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
1937 }
1938 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1939 {
1940 tree fndecl = gimple_call_fndecl (stmt);
1941 switch (DECL_FUNCTION_CODE (fndecl))
1942 {
1943 case BUILT_IN_MALLOC:
1944 case BUILT_IN_REALLOC:
1945 case BUILT_IN_CALLOC:
1946 case BUILT_IN_STRDUP:
1947 case BUILT_IN_STRNDUP:
1948 val.lattice_val = CONSTANT;
1949 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1950 val.mask = ~((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT
1951 / BITS_PER_UNIT - 1);
1952 break;
1953
1954 CASE_BUILT_IN_ALLOCA:
1955 align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
1956 ? BIGGEST_ALIGNMENT
1957 : TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)));
1958 val.lattice_val = CONSTANT;
1959 val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
1960 val.mask = ~((HOST_WIDE_INT) align / BITS_PER_UNIT - 1);
1961 break;
1962
1963 case BUILT_IN_ASSUME_ALIGNED:
1964 val = bit_value_assume_aligned (stmt, NULL_TREE, val, false);
1965 ignore_return_flags = true;
1966 break;
1967
1968 case BUILT_IN_ALIGNED_ALLOC:
1969 case BUILT_IN_GOMP_ALLOC:
1970 {
1971 tree align = get_constant_value (gimple_call_arg (stmt, 0));
1972 if (align
1973 && tree_fits_uhwi_p (align))
1974 {
1975 unsigned HOST_WIDE_INT aligni = tree_to_uhwi (align);
1976 if (aligni > 1
1977 /* align must be power-of-two */
1978 && (aligni & (aligni - 1)) == 0)
1979 {
1980 val.lattice_val = CONSTANT;
1981 val.value = build_int_cst (ptr_type_node, 0);
1982 val.mask = -aligni;
1983 }
1984 }
1985 break;
1986 }
1987
1988 case BUILT_IN_BSWAP16:
1989 case BUILT_IN_BSWAP32:
1990 case BUILT_IN_BSWAP64:
1991 case BUILT_IN_BSWAP128:
1992 val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
1993 if (val.lattice_val == UNDEFINED)
1994 break;
1995 else if (val.lattice_val == CONSTANT
1996 && val.value
1997 && TREE_CODE (val.value) == INTEGER_CST)
1998 {
1999 tree type = TREE_TYPE (gimple_call_lhs (stmt));
2000 int prec = TYPE_PRECISION (type);
2001 wide_int wval = wi::to_wide (val.value);
2002 val.value
2003 = wide_int_to_tree (type,
2004 wide_int::from (wval, prec,
2005 UNSIGNED).bswap ());
2006 val.mask
2007 = widest_int::from (wide_int::from (val.mask, prec,
2008 UNSIGNED).bswap (),
2009 UNSIGNED);
2010 if (wi::sext (val.mask, prec) != -1)
2011 break;
2012 }
2013 val.lattice_val = VARYING;
2014 val.value = NULL_TREE;
2015 val.mask = -1;
2016 break;
2017
2018 default:;
2019 }
2020 }
2021 if (is_gimple_call (stmt) && gimple_call_lhs (stmt))
2022 {
2023 tree fntype = gimple_call_fntype (stmt);
2024 if (fntype)
2025 {
2026 tree attrs = lookup_attribute ("assume_aligned",
2027 TYPE_ATTRIBUTES (fntype));
2028 if (attrs)
2029 val = bit_value_assume_aligned (stmt, attrs, val, false);
2030 attrs = lookup_attribute ("alloc_align",
2031 TYPE_ATTRIBUTES (fntype));
2032 if (attrs)
2033 val = bit_value_assume_aligned (stmt, attrs, val, true);
2034 }
2035 int flags = ignore_return_flags
2036 ? 0 : gimple_call_return_flags (as_a <gcall *> (stmt));
2037 if (flags & ERF_RETURNS_ARG
2038 && (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
2039 {
2040 val = get_value_for_expr
2041 (gimple_call_arg (stmt,
2042 flags & ERF_RETURN_ARG_MASK), true);
2043 }
2044 }
2045 is_constant = (val.lattice_val == CONSTANT);
2046 }
2047
2048 if (flag_tree_bit_ccp
2049 && ((is_constant && TREE_CODE (val.value) == INTEGER_CST)
2050 || !is_constant)
2051 && gimple_get_lhs (stmt)
2052 && TREE_CODE (gimple_get_lhs (stmt)) == SSA_NAME)
2053 {
2054 tree lhs = gimple_get_lhs (stmt);
2055 wide_int nonzero_bits = get_nonzero_bits (lhs);
2056 if (nonzero_bits != -1)
2057 {
2058 if (!is_constant)
2059 {
2060 val.lattice_val = CONSTANT;
2061 val.value = build_zero_cst (TREE_TYPE (lhs));
2062 val.mask = extend_mask (nonzero_bits, TYPE_SIGN (TREE_TYPE (lhs)));
2063 is_constant = true;
2064 }
2065 else
2066 {
2067 if (wi::bit_and_not (wi::to_wide (val.value), nonzero_bits) != 0)
2068 val.value = wide_int_to_tree (TREE_TYPE (lhs),
2069 nonzero_bits
2070 & wi::to_wide (val.value));
2071 if (nonzero_bits == 0)
2072 val.mask = 0;
2073 else
2074 val.mask = val.mask & extend_mask (nonzero_bits,
2075 TYPE_SIGN (TREE_TYPE (lhs)));
2076 }
2077 }
2078 }
2079
2080 /* The statement produced a nonconstant value. */
2081 if (!is_constant)
2082 {
2083 /* The statement produced a copy. */
2084 if (simplified && TREE_CODE (simplified) == SSA_NAME
2085 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (simplified))
2086 {
2087 val.lattice_val = CONSTANT;
2088 val.value = simplified;
2089 val.mask = -1;
2090 }
2091 /* The statement is VARYING. */
2092 else
2093 {
2094 val.lattice_val = VARYING;
2095 val.value = NULL_TREE;
2096 val.mask = -1;
2097 }
2098 }
2099
2100 return val;
2101}
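The get_nonzero_bits adjustment above combines two independent facts: a bit can be known zero either from the lattice (value 0, mask 0) or from the nonzero-bits information attached to the SSA name. A hedged sketch of that intersection with plain integers; the helper name is made up:

#include <cassert>
#include <cstdint>

/* Bits outside NONZERO are known to be zero: clear them from the value
   and mark them as known in the mask.  Sketch only, not GCC's code.  */
static void apply_nonzero_bits (uint64_t *val, uint64_t *mask,
                                uint64_t nonzero)
{
  *val &= nonzero;               /* a must-be-zero bit cannot be set */
  *mask &= nonzero;              /* ...and is no longer unknown either */
}

int main ()
{
  uint64_t v = 0b1010, m = 0b0101;       /* bits 0 and 2 unknown */
  apply_nonzero_bits (&v, &m, 0b0011);   /* bits 2 and up known zero */
  assert (v == 0b0010 && m == 0b0001);
  return 0;
}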
2102
2103typedef hash_table<nofree_ptr_hash<gimple> > gimple_htab;
2104
2105/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
2106 each matching BUILT_IN_STACK_RESTORE. Mark visited phis in VISITED. */
2107
2108static void
2109insert_clobber_before_stack_restore (tree saved_val, tree var,
2110 gimple_htab **visited)
2111{
2112 gimple *stmt;
2113 gassign *clobber_stmt;
2114 tree clobber;
2115 imm_use_iterator iter;
2116 gimple_stmt_iterator i;
2117 gimple **slot;
2118
2119 FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
2120 if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
2121 {
2122 clobber = build_clobber (TREE_TYPE (var));
2123 clobber_stmt = gimple_build_assign (var, clobber);
2124
2125 i = gsi_for_stmt (stmt);
2126 gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
2127 }
2128 else if (gimple_code (stmt) == GIMPLE_PHI)
2129 {
2130 if (!*visited)
2131 *visited = new gimple_htab (10);
2132
2133 slot = (*visited)->find_slot (stmt, INSERT);
2134 if (*slot != NULL)
2135 continue;
2136
2137 *slot = stmt;
2138 insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
2139 visited);
2140 }
2141 else if (gimple_assign_ssa_name_copy_p (stmt))
2142 insert_clobber_before_stack_restore (gimple_assign_lhs (stmt), var,
2143 visited);
2144}
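The function above is a small graph walk: uses of the stack-save value are followed through PHI results and SSA copies, clobbers are inserted at each BUILT_IN_STACK_RESTORE, and the visited table makes cycles through PHIs terminate. A generic, hedged sketch of the same pattern, with made-up node types in place of GIMPLE:

#include <cassert>
#include <set>
#include <vector>

struct node
{
  bool is_sink;                  /* stands in for BUILT_IN_STACK_RESTORE */
  std::vector<node *> forwards;  /* stands in for PHI results / SSA copies */
};

static void walk (node *n, std::set<node *> &visited, int &acted)
{
  if (!visited.insert (n).second)
    return;                      /* already seen: the cycle is broken here */
  if (n->is_sink)
    acted++;                     /* the real code inserts the clobber here */
  for (node *next : n->forwards)
    walk (next, visited, acted);
}

int main ()
{
  node a = { false, {} }, b = { false, {} }, c = { true, {} };
  a.forwards = { &b, &c };
  b.forwards = { &a };           /* a <-> b cycle, like mutually fed PHIs */
  std::set<node *> visited;
  int acted = 0;
  walk (&a, visited, acted);
  assert (acted == 1);           /* terminates and acts on the sink once */
  return 0;
}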
2145
2146/* Advance the iterator to the previous non-debug gimple statement in the same
2147 or dominating basic block. */
2148
2149static inline void
2150gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
2151{
2152 basic_block dom;
2153
2154 gsi_prev_nondebug (i);
2155 while (gsi_end_p (*i))
2156 {
2157 dom = get_immediate_dominator (CDI_DOMINATORS, gsi_bb (*i));
2158 if (dom == NULL || dom == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2159 return;
2160
2161 *i = gsi_last_bb (dom);
2162 }
2163}
2164
2165/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
2166 a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.
2167
2168 It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when
2169 a previous pass (such as DOM) duplicated it along multiple paths to a BB.
2170 In that case the function gives up without inserting the clobbers. */
2171
2172static void
2173insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
2174{
2175 gimple *stmt;
2176 tree saved_val;
2177 gimple_htab *visited = NULL;
2178
2179 for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
2180 {
2181 stmt = gsi_stmt (i);
2182
2183 if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
2184 continue;
2185
2186 saved_val = gimple_call_lhs (stmt);
2187 if (saved_val == NULL_TREE)
2188 continue;
2189
2190 insert_clobber_before_stack_restore (saved_val, var, &visited);
2191 break;
2192 }
2193
2194 delete visited;
2195}
2196
2197/* Detects a __builtin_alloca_with_align with constant size argument. Declares
2198 a fixed-size array and returns its address if found; otherwise returns
2199 NULL_TREE. */
2200
2201static tree
2202fold_builtin_alloca_with_align (gimple *stmt)
2203{
2204 unsigned HOST_WIDE_INT size, threshold, n_elem;
2205 tree lhs, arg, block, var, elem_type, array_type;
2206
2207 /* Get lhs. */
2208 lhs = gimple_call_lhs (stmt);
2209 if (lhs == NULL_TREE)
2210 return NULL_TREE;
2211
2212 /* Detect constant argument. */
2213 arg = get_constant_value (gimple_call_arg (stmt, 0));
2214 if (arg == NULL_TREE
2215 || TREE_CODE (arg) != INTEGER_CST
2216 || !tree_fits_uhwi_p (arg))
2217 return NULL_TREE;
2218
2219 size = tree_to_uhwi (arg);
2220
2221 /* Heuristic: don't fold large allocas. */
2222 threshold = (unsigned HOST_WIDE_INT)param_large_stack_frame;
2223 /* In case the alloca is located at function entry, it has the same lifetime
2224 as a declared array, so we allow a larger size. */
2225 block = gimple_block (stmt);
2226 if (!(cfun->after_inlining
2227 && block
2228 && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
2229 threshold /= 10;
2230 if (size > threshold)
2231 return NULL_TREE;
2232
2233 /* We have to be able to move points-to info. We used to assert
2234 that we can but IPA PTA might end up with two UIDs here
2235 as it might need to handle more than one instance being
2236 live at the same time. Instead of trying to detect this case
2237 (using the first UID would be OK) just give up for now. */
2238 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
2239 unsigned uid = 0;
2240 if (pi != NULL
2241 && !pi->pt.anything
2242 && !pt_solution_singleton_or_null_p (&pi->pt, &uid))
2243 return NULL_TREE;
2244
2245 /* Declare array. */
2246 elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
2247 n_elem = size * 8 / BITS_PER_UNIT;
2248 array_type = build_array_type_nelts (elem_type, n_elem);
2249
2250 if (tree ssa_name = SSA_NAME_IDENTIFIER (lhs))
2251 {
2252 /* Give the temporary a name derived from the name of the VLA
2253 declaration so it can be referenced in diagnostics. */
2254 const char *name = IDENTIFIER_POINTER (ssa_name);
2255 var = create_tmp_var (array_type, name);
2256 }
2257 else
2258 var = create_tmp_var (array_type);
2259
2260 if (gimple *lhsdef = SSA_NAME_DEF_STMT (lhs))
2261 {
2262 /* Set the temporary's location to that of the VLA declaration
2263 so it can be pointed to in diagnostics. */
2264 location_t loc = gimple_location (lhsdef);
2265 DECL_SOURCE_LOCATION (var) = loc;
2266 }
2267
2268 SET_DECL_ALIGN (var, TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)));
2269 if (uid != 0)
2270 SET_DECL_PT_UID (var, uid);
2271
2272 /* Fold alloca to the address of the array. */
2273 return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
2274}
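
The size heuristic above is compact; here is a minimal standalone C sketch of the same decision (the parameter name large_stack_frame stands in for the value of --param large-stack-frame and is an assumption for illustration, not a GCC API):

    #include <stdbool.h>

    /* Mirror of the threshold check in fold_builtin_alloca_with_align:
       an alloca that does not have function-entry lifetime after inlining
       only gets a tenth of the budget, because it does not behave like a
       declared array.  */
    static bool
    alloca_foldable_p (unsigned long size, unsigned long large_stack_frame,
                       bool entry_lifetime)
    {
      unsigned long threshold = large_stack_frame;
      if (!entry_lifetime)
        threshold /= 10;
      return size <= threshold;
    }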
2275
2276/* Fold the stmt at *GSI with CCP specific information that propagating
2277 and regular folding does not catch. */
2278
2279bool
2280ccp_folder::fold_stmt (gimple_stmt_iterator *gsi)
2281{
2282 gimple *stmt = gsi_stmt (*gsi);
2283
2284 switch (gimple_code (stmt))
2285 {
2286 case GIMPLE_COND:
2287 {
2288 gcond *cond_stmt = as_a <gcond *> (stmt);
2289 ccp_prop_value_t val;
2290 /* Statement evaluation will handle type mismatches in constants
2291 more gracefully than the final propagation. This allows us to
2292 fold more conditionals here. */
2293 val = evaluate_stmt (stmt);
2294 if (val.lattice_val != CONSTANT
2295 || val.mask != 0)
2296 return false;
2297
2298 if (dump_file)
2299 {
2300 fprintf (dump_file, "Folding predicate ");
2301 print_gimple_expr (dump_file, stmt, 0);
2302 fprintf (dump_file, " to ");
2303 print_generic_expr (dump_file, val.value);
2304 fprintf (dump_file, "\n");
2305 }
2306
2307 if (integer_zerop (val.value))
2308 gimple_cond_make_false (cond_stmt);
2309 else
2310 gimple_cond_make_true (cond_stmt);
2311
2312 return true;
2313 }
2314
2315 case GIMPLE_CALL:
2316 {
2317 tree lhs = gimple_call_lhs (stmt);
2318 int flags = gimple_call_flags (stmt);
2319 tree val;
2320 tree argt;
2321 bool changed = false;
2322 unsigned i;
2323
2324 /* If the call was folded into a constant make sure it goes
2325 away even if we cannot propagate into all uses because of
2326 type issues. */
2327 if (lhs
2328 && TREE_CODE (lhs) == SSA_NAME
2329 && (val = get_constant_value (lhs))
2330 /* Don't optimize away calls that have side-effects. */
2331 && (flags & (ECF_CONST|ECF_PURE)) != 0
2332 && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
2333 {
2334 tree new_rhs = unshare_expr (val);
2335 bool res;
2336 if (!useless_type_conversion_p (TREE_TYPE (lhs),
2337 TREE_TYPE (new_rhs)))
2338 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
2339 res = update_call_from_tree (gsi, new_rhs);
2340 gcc_assert (res);
2341 return true;
2342 }
2343
2344 /* Internal calls provide no argument types, so the extra laxity
2345 for normal calls does not apply. */
2346 if (gimple_call_internal_p (stmt))
2347 return false;
2348
2349 /* The heuristic of fold_builtin_alloca_with_align differs before and
2350 after inlining, so we don't require the arg to be changed into a
2351 constant for folding, but just to be constant. */
2352 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN)
2353 || gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX))
2354 {
2355 tree new_rhs = fold_builtin_alloca_with_align (stmt);
2356 if (new_rhs)
2357 {
2358 bool res = update_call_from_tree (gsi, new_rhs);
2359 tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
2360 gcc_assert (res);
2361 insert_clobbers_for_var (*gsi, var);
2362 return true;
2363 }
2364 }
2365
2366 /* If there's no extra info from an assume_aligned call,
2367 drop it so it doesn't act as an otherwise useless dataflow
2368 barrier. */
2369 if (gimple_call_builtin_p (stmt, BUILT_IN_ASSUME_ALIGNED))
2370 {
2371 tree ptr = gimple_call_arg (stmt, 0);
2372 ccp_prop_value_t ptrval = get_value_for_expr (ptr, true);
2373 if (ptrval.lattice_val == CONSTANT
2374 && TREE_CODE (ptrval.value) == INTEGER_CST
2375 && ptrval.mask != 0)
2376 {
2377 ccp_prop_value_t val
2378 = bit_value_assume_aligned (stmt, NULL_TREE, ptrval, false);
2379 unsigned int ptralign = least_bit_hwi (ptrval.mask.to_uhwi ());
2380 unsigned int align = least_bit_hwi (val.mask.to_uhwi ());
2381 if (ptralign == align
2382 && ((TREE_INT_CST_LOW (ptrval.value) & (align - 1))
2383 == (TREE_INT_CST_LOW (val.value) & (align - 1))))
2384 {
2385 bool res = update_call_from_tree (gsi, ptr);
2386 gcc_assert (res);
2387 return true;
2388 }
2389 }
2390 }
2391
2392 /* Propagate into the call arguments. Compared to replace_uses_in
2393 this can use the argument slot types for type verification
2394 instead of the current argument type. We also can safely
2395 drop qualifiers here as we are dealing with constants anyway. */
2396 argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
2397 for (i = 0; i < gimple_call_num_args (stmt) && argt;
2398 ++i, argt = TREE_CHAIN (argt))
2399 {
2400 tree arg = gimple_call_arg (stmt, i);
2401 if (TREE_CODE (arg) == SSA_NAME
2402 && (val = get_constant_value (arg))
2403 && useless_type_conversion_p
2404 (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
2405 TYPE_MAIN_VARIANT (TREE_TYPE (val))))
2406 {
2407 gimple_call_set_arg (stmt, i, unshare_expr (val));
2408 changed = true;
2409 }
2410 }
2411
2412 return changed;
2413 }
2414
2415 case GIMPLE_ASSIGN:
2416 {
2417 tree lhs = gimple_assign_lhs (stmt);
2418 tree val;
2419
2420 /* If we have a load that turned out to be constant replace it
2421 as we cannot propagate into all uses in all cases. */
2422 if (gimple_assign_single_p (stmt)
2423 && TREE_CODE (lhs) == SSA_NAME
2424 && (val = get_constant_value (lhs)))
2425 {
2426 tree rhs = unshare_expr (val);
2427 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2428 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2429 gimple_assign_set_rhs_from_tree (gsi, rhs);
2430 return true;
2431 }
2432
2433 return false;
2434 }
2435
2436 default:
2437 return false;
2438 }
2439}
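
As a concrete illustration of the GIMPLE_COND arm above (a sketch, not output from this pass): once the lattice proves a value constant with an all-known mask, the conditional is rewritten in place.

    int
    demo (void)
    {
      int x = 4;        /* CCP's lattice records x as CONSTANT 4, mask 0 */
      if (x != 4)       /* evaluate_stmt folds the predicate to false;
                           gimple_cond_make_false rewrites the gcond and
                           TODO_cleanup_cfg removes the dead branch */
        return 1;
      return 0;
    }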
2440
2441/* Visit the assignment statement STMT. Set the value of its LHS to the
2442 value computed by the RHS and store LHS in *OUTPUT_P. If STMT
2443 creates virtual definitions, set the value of each new name to that
2444 of the RHS (if we can derive a constant out of the RHS).
2445 Value-returning call statements also perform an assignment, and
2446 are handled here. */
2447
2448static enum ssa_prop_result
2449visit_assignment (gimple *stmt, tree *output_p)
2450{
2451 ccp_prop_value_t val;
2452 enum ssa_prop_result retval = SSA_PROP_NOT_INTERESTING;
2453
2454 tree lhs = gimple_get_lhs (stmt);
2455 if (TREE_CODE (lhs) == SSA_NAME)
2456 {
2457 /* Evaluate the statement, which could be
2458 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2459 val = evaluate_stmt (stmt);
2460
2461 /* If STMT is an assignment to an SSA_NAME, we only have one
2462 value to set. */
2463 if (set_lattice_value (lhs, &val))
2464 {
2465 *output_p = lhs;
2466 if (val.lattice_val == VARYING)
2467 retval = SSA_PROP_VARYING;
2468 else
2469 retval = SSA_PROP_INTERESTING;
2470 }
2471 }
2472
2473 return retval;
2474}
2475
2476
2477/* Visit the conditional statement STMT. Return SSA_PROP_INTERESTING
2478 if it can determine which edge will be taken. Otherwise, return
2479 SSA_PROP_VARYING. */
2480
2481static enum ssa_prop_result
2482visit_cond_stmt (gimple *stmt, edge *taken_edge_p)
2483{
2484 ccp_prop_value_t val;
2485 basic_block block;
2486
2487 block = gimple_bb (stmt);
2488 val = evaluate_stmt (stmt);
2489 if (val.lattice_val != CONSTANT
2490 || val.mask != 0)
2491 return SSA_PROP_VARYING;
2492
2493 /* Find which edge out of the conditional block will be taken and add it
2494 to the worklist. If no single edge can be determined statically,
2495 return SSA_PROP_VARYING to feed all the outgoing edges to the
2496 propagation engine. */
2497 *taken_edge_p = find_taken_edge (block, val.value);
2498 if (*taken_edge_p)
2499 return SSA_PROP_INTERESTING;
2500 else
2501 return SSA_PROP_VARYING;
2502}
2503
2504
2505/* Evaluate statement STMT. If the statement produces an output value and
2506 its evaluation changes the lattice value of its output, return
2507 SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
2508 output value.
2509
2510 If STMT is a conditional branch and we can determine its truth
2511 value, set *TAKEN_EDGE_P accordingly. If STMT produces a varying
2512 value, return SSA_PROP_VARYING. */
2513
2514enum ssa_prop_result
2515ccp_propagate::visit_stmt (gimple *stmt, edge *taken_edge_p, tree *output_p)
2516{
2517 tree def;
2518 ssa_op_iter iter;
2519
2520 if (dump_file && (dump_flags & TDF_DETAILS))
2521 {
2522 fprintf (dump_file, "\nVisiting statement:\n");
2523 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
2524 }
2525
2526 switch (gimple_code (stmt))
2527 {
2528 case GIMPLE_ASSIGN:
2529 /* If the statement is an assignment that produces a single
2530 output value, evaluate its RHS to see if the lattice value of
2531 its output has changed. */
2532 return visit_assignment (stmt, output_p);
2533
2534 case GIMPLE_CALL:
2535 /* A value-returning call also performs an assignment. */
2536 if (gimple_call_lhs (stmt) != NULL_TREE)
2537 return visit_assignment (stmt, output_p);
2538 break;
2539
2540 case GIMPLE_COND:
2541 case GIMPLE_SWITCH:
2542 /* If STMT is a conditional branch, see if we can determine
2543 which branch will be taken. */
2544 /* FIXME. It appears that we should be able to optimize
2545 computed GOTOs here as well. */
2546 return visit_cond_stmt (stmt, taken_edge_p);
2547
2548 default:
2549 break;
2550 }
2551
2552 /* Any other kind of statement is not interesting for constant
2553 propagation and, therefore, not worth simulating. */
2554 if (dump_file && (dump_flags & TDF_DETAILS))
2555 fprintf (dump_file, "No interesting values produced. Marked VARYING.\n");
2556
2557 /* Definitions made by statements other than assignments to
2558 SSA_NAMEs represent unknown modifications to their outputs.
2559 Mark them VARYING. */
2560 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
2561 set_value_varying (def);
2562
2563 return SSA_PROP_VARYING;
2564}
2565
2566
2567/* Main entry point for SSA Conditional Constant Propagation. If NONZERO_P,
2568 record nonzero bits. */
2569
2570static unsigned int
2571do_ssa_ccp (bool nonzero_p)
2572{
2573 unsigned int todo = 0;
2574 calculate_dominance_info (CDI_DOMINATORS);
2575
2576 ccp_initialize ();
2577 class ccp_propagate ccp_propagate;
2578 ccp_propagate.ssa_propagate ();
2579 if (ccp_finalize (nonzero_p || flag_ipa_bit_cp))
2580 {
2581 todo = (TODO_cleanup_cfg | TODO_update_ssa);
2582
2583 /* ccp_finalize does not preserve loop-closed ssa. */
2584 loops_state_clear (LOOP_CLOSED_SSA);
2585 }
2586
2587 free_dominance_info (CDI_DOMINATORS);
2588 return todo;
2589}
2590
2591
2592namespace {
2593
2594const pass_data pass_data_ccp =
2595{
2596 GIMPLE_PASS, /* type */
2597 "ccp", /* name */
2598 OPTGROUP_NONE, /* optinfo_flags */
2599 TV_TREE_CCP, /* tv_id */
2600 ( PROP_cfg | PROP_ssa ), /* properties_required */
2601 0, /* properties_provided */
2602 0, /* properties_destroyed */
2603 0, /* todo_flags_start */
2604 TODO_update_address_taken, /* todo_flags_finish */
2605};
2606
2607class pass_ccp : public gimple_opt_pass
2608{
2609public:
2610 pass_ccp (gcc::context *ctxt)
2611 : gimple_opt_pass (pass_data_ccp, ctxt), nonzero_p (false)
2612 {}
2613
2614 /* opt_pass methods: */
2615 opt_pass * clone () { return new pass_ccp (m_ctxt); }
2616 void set_pass_param (unsigned int n, bool param)
2617 {
2618 gcc_assert (n == 0);
2619 nonzero_p = param;
2620 }
2621 virtual bool gate (function *) { return flag_tree_ccp != 0; }
2622 virtual unsigned int execute (function *) { return do_ssa_ccp (nonzero_p); }
2623
2624 private:
2625 /* Determines whether the pass instance records nonzero bits. */
2626 bool nonzero_p;
2627}; // class pass_ccp
2628
2629} // anon namespace
2630
2631gimple_opt_pass *
2632make_pass_ccp (gcc::context *ctxt)
2633{
2634 return new pass_ccp (ctxt);
2635}
2636
2637
2638
2639/* Try to optimize out __builtin_stack_restore. Optimize it out
2640 if there is another __builtin_stack_restore in the same basic
2641 block and no calls or ASM_EXPRs are in between, or if this block's
2642 only outgoing edge is to EXIT_BLOCK and there are no calls or
2643 ASM_EXPRs after this __builtin_stack_restore. */
2644
2645static tree
2646optimize_stack_restore (gimple_stmt_iterator i)
2647{
2648 tree callee;
2649 gimple *stmt;
2650
2651 basic_block bb = gsi_bb (i);
2652 gimple *call = gsi_stmt (i);
2653
2654 if (gimple_code (call) != GIMPLE_CALL
2655 || gimple_call_num_args (call) != 1
2656 || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
2657 || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
2658 return NULL_TREE;
2659
2660 for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
2661 {
2662 stmt = gsi_stmt (i);
2663 if (gimple_code (stmt) == GIMPLE_ASM)
2664 return NULL_TREE;
2665 if (gimple_code (stmt) != GIMPLE_CALL)
2666 continue;
2667
2668 callee = gimple_call_fndecl (stmt);
2669 if (!callee
2670 || !fndecl_built_in_p (callee, BUILT_IN_NORMAL)
2671 /* All regular builtins are ok, just obviously not alloca. */
2672 || ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (callee)))
2673 return NULL_TREE;
2674
2675 if (fndecl_built_in_p (callee, BUILT_IN_STACK_RESTORE))
2676 goto second_stack_restore;
2677 }
2678
2679 if (!gsi_end_p (i))
2680 return NULL_TREE;
2681
2682 /* Allow a single successor that is the exit block, or zero successors. */
2683 switch (EDGE_COUNT (bb->succs))
2684 {
2685 case 0:
2686 break;
2687 case 1:
2688 if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2689 return NULL_TREE;
2690 break;
2691 default:
2692 return NULL_TREE;
2693 }
2694 second_stack_restore:
2695
2696 /* If there's exactly one use, then zap the call to __builtin_stack_save.
2697 If there are multiple uses, then the last one should remove the call.
2698 In any case, whether the call to __builtin_stack_save can be removed
2699 or not is irrelevant to removing the call to __builtin_stack_restore. */
2700 if (has_single_use (gimple_call_arg (call, 0)))
2701 {
2702 gimple *stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
2703 if (is_gimple_call (stack_save))
2704 {
2705 callee = gimple_call_fndecl (stack_save);
2706 if (callee && fndecl_built_in_p (callee, BUILT_IN_STACK_SAVE))
2707 {
2708 gimple_stmt_iterator stack_save_gsi;
2709 tree rhs;
2710
2711 stack_save_gsi = gsi_for_stmt (stack_save);
2712 rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
2713 update_call_from_tree (&stack_save_gsi, rhs);
2714 }
2715 }
2716 }
2717
2718 /* No effect, so the statement will be deleted. */
2719 return integer_zero_node;
2720}
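
A hedged C-level example of the case this function handles (illustrative names; the builtins shown are GCC's actual lowering of VLAs):

    void consume (char *);

    void
    demo (int n)
    {
      {
        char buf[n];    /* block entry: p = __builtin_stack_save ();
                           block exit: __builtin_stack_restore (p); */
        consume (buf);
      }
      /* Nothing after the restore can grow the stack (no call, no asm)
         and the block falls through to the exit block, so the restore is
         deleted; its now single-use __builtin_stack_save is rewritten to
         a constant and becomes dead.  */
    }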
2721
2722/* If va_list type is a simple pointer and nothing special is needed,
2723 optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
2724 __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
2725 pointer assignment. */
2726
2727static tree
2728optimize_stdarg_builtin (gimple *call)
2729{
2730 tree callee, lhs, rhs, cfun_va_list;
2731 bool va_list_simple_ptr;
2732 location_t loc = gimple_location (call);
2733
2734 callee = gimple_call_fndecl (call);
2735
2736 cfun_va_list = targetm.fn_abi_va_list (callee);
2737 va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
2738 && (TREE_TYPE (cfun_va_list) == void_type_node
2739 || TREE_TYPE (cfun_va_list) == char_type_node);
2740
2741 switch (DECL_FUNCTION_CODE (callee))
2742 {
2743 case BUILT_IN_VA_START:
2744 if (!va_list_simple_ptr
2745 || targetm.expand_builtin_va_start != NULL
2746 || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
2747 return NULL_TREE;
2748
2749 if (gimple_call_num_args (call) != 2)
2750 return NULL_TREE;
2751
2752 lhs = gimple_call_arg (call, 0);
2753 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2754 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2755 != TYPE_MAIN_VARIANT (cfun_va_list))
2756 return NULL_TREE;
2757
2758 lhs = build_fold_indirect_ref_loc (loc, lhs);
2759 rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
2760 1, integer_zero_node);
2761 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2762 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2763
2764 case BUILT_IN_VA_COPY:
2765 if (!va_list_simple_ptr)
2766 return NULL_TREE;
2767
2768 if (gimple_call_num_args (call) != 2)
2769 return NULL_TREE;
2770
2771 lhs = gimple_call_arg (call, 0);
2772 if (!POINTER_TYPE_P (TREE_TYPE (lhs))
2773 || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
2774 != TYPE_MAIN_VARIANT (cfun_va_list))
2775 return NULL_TREE;
2776
2777 lhs = build_fold_indirect_ref_loc (loc, lhs);
2778 rhs = gimple_call_arg (call, 1);
2779 if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
2780 != TYPE_MAIN_VARIANT (cfun_va_list))
2781 return NULL_TREE;
2782
2783 rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
2784 return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
2785
2786 case BUILT_IN_VA_END:
2787 /* No effect, so the statement will be deleted. */
2788 return integer_zero_node;
2789
2790 default:
2791 gcc_unreachable ();
2792 }
2793}
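
For targets whose va_list really is a simple pointer (an assumption; on x86-64, for example, va_list is a structure and this optimization does not apply), the rewrites above amount to:

    #include <stdarg.h>

    int
    sum (int count, ...)
    {
      va_list ap;
      int total = 0;
      va_start (ap, count);   /* -> ap = __builtin_next_arg (0); */
      for (int i = 0; i < count; i++)
        total += va_arg (ap, int);
      va_end (ap);            /* -> deleted (integer_zero_node result);
                                 va_copy would become a plain pointer
                                 assignment */
      return total;
    }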
2794
2795/* Attempt to make the block of the __builtin_unreachable call at I unreachable
2796 by changing the incoming jumps. Return true if at least one jump was changed. */
2797
2798static bool
2799optimize_unreachable (gimple_stmt_iterator i)
2800{
2801 basic_block bb = gsi_bb (i);
2802 gimple_stmt_iterator gsi;
2803 gimple *stmt;
2804 edge_iterator ei;
2805 edge e;
2806 bool ret;
2807
2808 if (flag_sanitize & SANITIZE_UNREACHABLE)
2809 return false;
2810
2811 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2812 {
2813 stmt = gsi_stmt (gsi);
2814
2815 if (is_gimple_debug (stmt))
2816 continue;
2817
2818 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2819 {
2820 /* Verify we do not need to preserve the label. */
2821 if (FORCED_LABEL (gimple_label_label (label_stmt)))
2822 return false;
2823
2824 continue;
2825 }
2826
2827 /* Only handle the case that __builtin_unreachable is the first statement
2828 in the block. We rely on DCE to remove stmts without side-effects
2829 before __builtin_unreachable. */
2830 if (gsi_stmt (gsi) != gsi_stmt (i))
2831 return false;
2832 }
2833
2834 ret = false;
2835 FOR_EACH_EDGE (e, ei, bb->preds)
2836 {
2837 gsi = gsi_last_bb (e->src);
2838 if (gsi_end_p (gsi))
2839 continue;
2840
2841 stmt = gsi_stmt (gsi);
2842 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
2843 {
2844 if (e->flags & EDGE_TRUE_VALUE)
2845 gimple_cond_make_false (cond_stmt);
2846 else if (e->flags & EDGE_FALSE_VALUE)
2847 gimple_cond_make_true (cond_stmt);
2848 else
2849 gcc_unreachable ();
2850 update_stmt (cond_stmt);
2851 }
2852 else
2853 {
2854 /* Todo: handle other cases. Note that unreachable switch case
2855 statements have already been removed. */
2856 continue;
2857 }
2858
2859 ret = true;
2860 }
2861
2862 return ret;
2863}
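
A sketch of the effect (illustrative only): when a block consists of nothing but a __builtin_unreachable call, each conditional jump into it is rewritten so the edge can never be taken.

    int
    demo (int x)
    {
      if (x < 0)
        __builtin_unreachable ();  /* sole statement in its block */
      /* The gcond feeding the EDGE_TRUE_VALUE edge is forced false by
         gimple_cond_make_false, so the branch disappears and x >= 0
         may be assumed below.  */
      return x + 1;
    }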
2864
2865/* Optimize
2866 mask_2 = 1 << cnt_1;
2867 _4 = __atomic_fetch_or_* (ptr_6, mask_2, _3);
2868 _5 = _4 & mask_2;
2869 to
2870 _4 = ATOMIC_BIT_TEST_AND_SET (ptr_6, cnt_1, 0, _3);
2871 _5 = _4;
2872 If _5 is only used in _5 != 0 or _5 == 0 comparisons, 1
2873 is passed instead of 0, and the builtin just returns a zero
2874 or 1 value instead of the actual bit.
2875 Similarly for __sync_fetch_and_or_* (without the ", _3" part
2876 in there), and/or if mask_2 is a power of 2 constant.
2877 Similarly for xor instead of or, use ATOMIC_BIT_TEST_AND_COMPLEMENT
2878 in that case. And similarly for and instead of or, except that
2879 the second argument to the builtin needs to be one's complement
2880 of the mask instead of mask. */
2881
2882static void
2883optimize_atomic_bit_test_and (gimple_stmt_iterator *gsip,
2884 enum internal_fn fn, bool has_model_arg,
2885 bool after)
2886{
2887 gimple *call = gsi_stmt (*gsip);
2888 tree lhs = gimple_call_lhs (call);
2889 use_operand_p use_p;
2890 gimple *use_stmt;
2891 tree mask, bit;
2892 optab optab;
2893
2894 if (!flag_inline_atomics
2895 || optimize_debug
2896 || !gimple_call_builtin_p (call, BUILT_IN_NORMAL)
2897 || !lhs
2898 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)
2899 || !single_imm_use (lhs, &use_p, &use_stmt)
2900 || !is_gimple_assign (use_stmt)
2901 || gimple_assign_rhs_code (use_stmt) != BIT_AND_EXPR
2902 || !gimple_vdef (call))
2903 return;
2904
2905 switch (fn)
2906 {
2907 case IFN_ATOMIC_BIT_TEST_AND_SET:
2908 optab = atomic_bit_test_and_set_optab;
2909 break;
2910 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
2911 optab = atomic_bit_test_and_complement_optab;
2912 break;
2913 case IFN_ATOMIC_BIT_TEST_AND_RESET:
2914 optab = atomic_bit_test_and_reset_optab;
2915 break;
2916 default:
2917 return;
2918 }
2919
2920 if (optab_handler (optab, TYPE_MODE (TREE_TYPE (lhs))) == CODE_FOR_nothing)
2921 return;
2922
2923 mask = gimple_call_arg (call, 1);
2924 tree use_lhs = gimple_assign_lhs (use_stmt);
2925 if (!use_lhs)
2926 return;
2927
2928 if (TREE_CODE (mask) == INTEGER_CST)
2929 {
2930 if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
2931 mask = const_unop (BIT_NOT_EXPR, TREE_TYPE (mask), mask);
2932 mask = fold_convert (TREE_TYPE (lhs), mask);
2933 int ibit = tree_log2 (mask);
2934 if (ibit < 0)
2935 return;
2936 bit = build_int_cst (TREE_TYPE (lhs), ibit);
2937 }
2938 else if (TREE_CODE (mask) == SSA_NAME)
2939 {
2940 gimple *g = SSA_NAME_DEF_STMT (mask);
2941 if (fn == IFN_ATOMIC_BIT_TEST_AND_RESET)
2942 {
2943 if (!is_gimple_assign (g)
2944 || gimple_assign_rhs_code (g) != BIT_NOT_EXPR)
2945 return;
2946 mask = gimple_assign_rhs1 (g);
2947 if (TREE_CODE (mask) != SSA_NAME)
2948 return;
2949 g = SSA_NAME_DEF_STMT (mask);
2950 }
2951 if (!is_gimple_assign (g)
2952 || gimple_assign_rhs_code (g) != LSHIFT_EXPR
2953 || !integer_onep (gimple_assign_rhs1 (g)))
2954 return;
2955 bit = gimple_assign_rhs2 (g);
2956 }
2957 else
2958 return;
2959
2960 if (gimple_assign_rhs1 (use_stmt) == lhs)
2961 {
2962 if (!operand_equal_p (gimple_assign_rhs2 (use_stmt), mask, 0))
2963 return;
2964 }
2965 else if (gimple_assign_rhs2 (use_stmt) != lhs
2966 || !operand_equal_p (gimple_assign_rhs1 (use_stmt), mask, 0))
2967 return;
2968
2969 bool use_bool = true;
2970 bool has_debug_uses = false;
2971 imm_use_iterator iter;
2972 gimple *g;
2973
2974 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs))
2975 use_bool = false;
2976 FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
2977 {
2978 enum tree_code code = ERROR_MARK;
2979 tree op0 = NULL_TREE, op1 = NULL_TREE;
2980 if (is_gimple_debug (g))
2981 {
2982 has_debug_uses = true;
2983 continue;
2984 }
2985 else if (is_gimple_assign (g))
2986 switch (gimple_assign_rhs_code (g))
2987 {
2988 case COND_EXPR:
2989 op1 = gimple_assign_rhs1 (g);
2990 code = TREE_CODE (op1);
2991 op0 = TREE_OPERAND (op1, 0);
2992 op1 = TREE_OPERAND (op1, 1);
2993 break;
2994 case EQ_EXPR:
2995 case NE_EXPR:
2996 code = gimple_assign_rhs_code (g);
2997 op0 = gimple_assign_rhs1 (g);
2998 op1 = gimple_assign_rhs2 (g);
2999 break;
3000 default:
3001 break;
3002 }
3003 else if (gimple_code (g) == GIMPLE_COND)
3004 {
3005 code = gimple_cond_code (g);
3006 op0 = gimple_cond_lhs (g);
3007 op1 = gimple_cond_rhs (g);
3008 }
3009
3010 if ((code == EQ_EXPR || code == NE_EXPR)
3011 && op0 == use_lhs
3012 && integer_zerop (op1))
3013 {
3014 use_operand_p use_p;
3015 int n = 0;
3016 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3017 n++;
3018 if (n == 1)
3019 continue;
3020 }
3021
3022 use_bool = false;
3023 break;
3024 }
3025
3026 tree new_lhs = make_ssa_name (TREE_TYPE (lhs));
3027 tree flag = build_int_cst (TREE_TYPE (lhs), use_bool);
3028 if (has_model_arg)
3029 g = gimple_build_call_internal (fn, 4, gimple_call_arg (call, 0),
3030 bit, flag, gimple_call_arg (call, 2));
3031 else
3032 g = gimple_build_call_internal (fn, 3, gimple_call_arg (call, 0),
3033 bit, flag);
3034 gimple_call_set_lhs (g, new_lhs);
3035 gimple_set_location (g, gimple_location (call));
3036 gimple_move_vops (g, call);
3037 bool throws = stmt_can_throw_internal (cfun, call);
3038 gimple_call_set_nothrow (as_a <gcall *> (g),
3039 gimple_call_nothrow_p (as_a <gcall *> (call)));
3040 gimple_stmt_iterator gsi = *gsip;
3041 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3042 edge e = NULL;
3043 if (throws)
3044 {
3045 maybe_clean_or_replace_eh_stmt (call, g);
3046 if (after || (use_bool && has_debug_uses))
3047 e = find_fallthru_edge (gsi_bb (gsi)->succs);
3048 }
3049 if (after)
3050 {
3051 /* The internal function returns the value of the specified bit
3052 before the atomic operation. If we are interested in the value
3053 of the specified bit after the atomic operation (makes only sense
3054 for xor, otherwise the bit content is compile time known),
3055 we need to invert the bit. */
3056 g = gimple_build_assign (make_ssa_name (TREE_TYPE (lhs)),
3057 BIT_XOR_EXPR, new_lhs,
3058 use_bool ? build_int_cst (TREE_TYPE (lhs), 1)
3059 : mask);
3060 new_lhs = gimple_assign_lhs (g);
3061 if (throws)
3062 {
3063 gsi_insert_on_edge_immediate (e, g);
3064 gsi = gsi_for_stmt (g);
3065 }
3066 else
3067 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3068 }
3069 if (use_bool && has_debug_uses)
3070 {
3071 tree temp = NULL_TREE;
3072 if (!throws || after || single_pred_p (e->dest))
3073 {
3074 temp = make_node (DEBUG_EXPR_DECL);
3075 DECL_ARTIFICIAL (temp) = 1;
3076 TREE_TYPE (temp) = TREE_TYPE (lhs);
3077 SET_DECL_MODE (temp, TYPE_MODE (TREE_TYPE (lhs)));
3078 tree t = build2 (LSHIFT_EXPR, TREE_TYPE (lhs), new_lhs, bit);
3079 g = gimple_build_debug_bind (temp, t, g);
3080 if (throws && !after)
3081 {
3082 gsi = gsi_after_labels (e->dest);
3083 gsi_insert_before (&gsi, g, GSI_SAME_STMT);
3084 }
3085 else
3086 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3087 }
3088 FOR_EACH_IMM_USE_STMT (g, iter, use_lhs)
3089 if (is_gimple_debug (g))
3090 {
3091 use_operand_p use_p;
3092 if (temp == NULL_TREE)
3093 gimple_debug_bind_reset_value (g);
3094 else
3095 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3096 SET_USE (use_p, temp);
3097 update_stmt (g);
3098 }
3099 }
3100 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_lhs)
3101 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use_lhs);
3102 replace_uses_by (use_lhs, new_lhs);
3103 gsi = gsi_for_stmt (use_stmt);
3104 gsi_remove (&gsi, true);
3105 release_defs (use_stmt);
3106 gsi_remove (gsip, true);
3107 release_ssa_name (lhs);
3108}
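
A source-level view of the pattern from the comment at the top of this function (a sketch; ATOMIC_BIT_TEST_AND_SET is the internal function named above, and a single lock bts instruction is a typical x86 outcome, not a guarantee):

    _Bool
    set_bit_and_test (unsigned long *ptr, unsigned cnt)
    {
      unsigned long mask = 1ul << cnt;
      /* Before: _4 = __atomic_fetch_or (ptr, mask, SEQ_CST);
                 _5 = _4 & mask;
         After:  _4 = ATOMIC_BIT_TEST_AND_SET (ptr, cnt, 1, SEQ_CST);
         The third argument is 1 because _5 only feeds a != 0 test, so
         the internal function may return a plain 0/1 value.  */
      return (__atomic_fetch_or (ptr, mask, __ATOMIC_SEQ_CST) & mask) != 0;
    }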
3109
3110/* Optimize
3111 a = {};
3112 b = a;
3113 into
3114 a = {};
3115 b = {};
3116 Similarly for memset (&a, ..., sizeof (a)); instead of a = {};
3117 and/or memcpy (&b, &a, sizeof (a)); instead of b = a; */
3118
3119static void
3120optimize_memcpy (gimple_stmt_iterator *gsip, tree dest, tree src, tree len)
3121{
3122 gimple *stmt = gsi_stmt (*gsip);
3123 if (gimple_has_volatile_ops (stmt))
3124 return;
3125
3126 tree vuse = gimple_vuse (stmt);
3127 if (vuse == NULL)
3128 return;
3129
3130 gimple *defstmt = SSA_NAME_DEF_STMT (vuse);
3131 tree src2 = NULL_TREE, len2 = NULL_TREE;
3132 poly_int64 offset, offset2;
3133 tree val = integer_zero_node;
3134 if (gimple_store_p (defstmt)
3135 && gimple_assign_single_p (defstmt)
3136 && TREE_CODE (gimple_assign_rhs1 (defstmt)) == CONSTRUCTOR
3137 && !gimple_clobber_p (defstmt))
3138 src2 = gimple_assign_lhs (defstmt);
3139 else if (gimple_call_builtin_p (defstmt, BUILT_IN_MEMSET)
3140 && TREE_CODE (gimple_call_arg (defstmt, 0)) == ADDR_EXPR
3141 && TREE_CODE (gimple_call_arg (defstmt, 1)) == INTEGER_CST)
3142 {
3143 src2 = TREE_OPERAND (gimple_call_arg (defstmt, 0), 0);
3144 len2 = gimple_call_arg (defstmt, 2);
3145 val = gimple_call_arg (defstmt, 1);
3146 /* For non-0 val, we'd have to transform stmt from assignment
3147 into memset (only if dest is addressable). */
3148 if (!integer_zerop (val) && is_gimple_assign (stmt))
3149 src2 = NULL_TREE;
3150 }
3151
3152 if (src2 == NULL_TREE)
3153 return;
3154
3155 if (len == NULL_TREE)
3156 len = (TREE_CODE (src) == COMPONENT_REF
3157 ? DECL_SIZE_UNIT (TREE_OPERAND (src, 1))
3158 : TYPE_SIZE_UNIT (TREE_TYPE (src)));
3159 if (len2 == NULL_TREE)
3160 len2 = (TREE_CODE (src2) == COMPONENT_REF
3161 ? DECL_SIZE_UNIT (TREE_OPERAND (src2, 1))
3162 : TYPE_SIZE_UNIT (TREE_TYPE (src2)));
3163 if (len == NULL_TREE
3164 || !poly_int_tree_p (len)
3165 || len2 == NULL_TREE
3166 || !poly_int_tree_p (len2))
3167 return;
3168
3169 src = get_addr_base_and_unit_offset (src, &offset);
3170 src2 = get_addr_base_and_unit_offset (src2, &offset2);
3171 if (src == NULL_TREE
3172 || src2 == NULL_TREE
3173 || maybe_lt (offset, offset2))
3174 return;
3175
3176 if (!operand_equal_p (src, src2, 0))
3177 return;
3178
3179 /* [ src + offset2, src + offset2 + len2 - 1 ] is set to val.
3180 Make sure that
3181 [ src + offset, src + offset + len - 1 ] is a subset of that. */
3182 if (maybe_gt (wi::to_poly_offset (len) + (offset - offset2),
3183 wi::to_poly_offset (len2)))
3184 return;
3185
3186 if (dump_file && (dump_flags & TDF_DETAILS))
3187 {
3188 fprintf (dump_file, "Simplified\n ");
3189 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3190 fprintf (dump_file, "after previous\n ");
3191 print_gimple_stmt (dump_file, defstmt, 0, dump_flags);
3192 }
3193
3194 /* For simplicity, don't change the kind of the stmt,
3195 turn dest = src; into dest = {}; and memcpy (&dest, &src, len);
3196 into memset (&dest, val, len);
3197 In theory we could change dest = src into memset if dest
3198 is addressable (maybe beneficial if val is not 0), or
3199 memcpy (&dest, &src, len) into dest = {} if len is the size
3200 of dest, dest isn't volatile. */
3201 if (is_gimple_assign (stmt))
3202 {
3203 tree ctor = build_constructor (TREE_TYPE (dest), NULL);
3204 gimple_assign_set_rhs_from_tree (gsip, ctor);
3205 update_stmt (stmt);
3206 }
3207 else /* If stmt is memcpy, transform it into memset. */
3208 {
3209 gcall *call = as_a <gcall *> (stmt);
3210 tree fndecl = builtin_decl_implicit (BUILT_IN_MEMSET);
3211 gimple_call_set_fndecl (call, fndecl);
3212 gimple_call_set_fntype (call, TREE_TYPE (fndecl));
3213 gimple_call_set_arg (call, 1, val);
3214 update_stmt (stmt);
3215 }
3216
3217 if (dump_file && (dump_flags & TDF_DETAILS))
3218 {
3219 fprintf (dump_file, "into\n ");
3220 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3221 }
3222}
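
A minimal C example of the aggregate-copy case (illustrative): the vuse walk above proves the source bytes were just set by a constructor store or memset, so the copy can reuse that value directly.

    struct S { int a[8]; };

    void
    demo (struct S *d)
    {
      struct S tmp = { 0 };   /* defstmt: tmp = {} (CONSTRUCTOR store) */
      *d = tmp;               /* stmt: its source range is covered by the
                                 zeroing above, so it is rewritten to
                                 *d = {}; a memcpy here would instead
                                 become memset (d, 0, len) */
    }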
3223
3224/* A simple pass that attempts to fold all builtin functions. This pass
3225 is run after we've propagated as many constants as we can. */
3226
3227namespace {
3228
3229const pass_data pass_data_fold_builtins =
3230{
3231 GIMPLE_PASS, /* type */
3232 "fab", /* name */
3233 OPTGROUP_NONE, /* optinfo_flags */
3234 TV_NONE, /* tv_id */
3235 ( PROP_cfg | PROP_ssa ), /* properties_required */
3236 0, /* properties_provided */
3237 0, /* properties_destroyed */
3238 0, /* todo_flags_start */
3239 TODO_update_ssa, /* todo_flags_finish */
3240};
3241
3242class pass_fold_builtins : public gimple_opt_pass
3243{
3244public:
3245 pass_fold_builtins (gcc::context *ctxt)
3246 : gimple_opt_pass (pass_data_fold_builtins, ctxt)
3247 {}
3248
3249 /* opt_pass methods: */
3250 opt_pass * clone () { return new pass_fold_builtins (m_ctxt); }
3251 virtual unsigned int execute (function *);
3252
3253}; // class pass_fold_builtins
3254
3255unsigned int
3256pass_fold_builtins::execute (function *fun)
3257{
3258 bool cfg_changed = false;
3259 basic_block bb;
3260 unsigned int todoflags = 0;
3261
3262 FOR_EACH_BB_FN (bb, fun)
3263 {
3264 gimple_stmt_iterator i;
3265 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
3266 {
3267 gimple *stmt, *old_stmt;
3268 tree callee;
3269 enum built_in_function fcode;
3270
3271 stmt = gsi_stmt (i);
3272
3273 if (gimple_code (stmt) != GIMPLE_CALL)
3274 {
3275 /* Remove all *ssaname_N ={v} {CLOBBER}; stmts,
3276 after the last GIMPLE DSE they aren't needed and might
3277 unnecessarily keep the SSA_NAMEs live. */
3278 if (gimple_clobber_p (stmt))
3279 {
3280 tree lhs = gimple_assign_lhs (stmt);
3281 if (TREE_CODE (lhs) == MEM_REF
3282 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
3283 {
3284 unlink_stmt_vdef (stmt);
3285 gsi_remove (&i, true);
3286 release_defs (stmt);
3287 continue;
3288 }
3289 }
3290 else if (gimple_assign_load_p (stmt) && gimple_store_p (stmt))
3291 optimize_memcpy (&i, gimple_assign_lhs (stmt),
3292 gimple_assign_rhs1 (stmt), NULL_TREE);
3293 gsi_next (&i);
3294 continue;
3295 }
3296
3297 callee = gimple_call_fndecl (stmt);
3298 if (!callee || !fndecl_built_in_p (callee, BUILT_IN_NORMAL))
3299 {
3300 gsi_next (&i);
3301 continue;
3302 }
3303
3304 fcode = DECL_FUNCTION_CODE (callee);
3305 if (fold_stmt (&i))
3306 ;
3307 else
3308 {
3309 tree result = NULL_TREE;
3310 switch (DECL_FUNCTION_CODE (callee))
3311 {
3312 case BUILT_IN_CONSTANT_P:
3313 /* Resolve __builtin_constant_p. If it hasn't been
3314 folded to integer_one_node by now, it's fairly
3315 certain that the value simply isn't constant. */
3316 result = integer_zero_node;
3317 break;
3318
3319 case BUILT_IN_ASSUME_ALIGNED:
3320 /* Remove __builtin_assume_aligned. */
3321 result = gimple_call_arg (stmt, 0);
3322 break;
3323
3324 case BUILT_IN_STACK_RESTORE:
3325 result = optimize_stack_restore (i);
3326 if (result)
3327 break;
3328 gsi_next (&i);
3329 continue;
3330
3331 case BUILT_IN_UNREACHABLE:
3332 if (optimize_unreachable (i))
3333 cfg_changed = true;
3334 break;
3335
3336 case BUILT_IN_ATOMIC_FETCH_OR_1:
3337 case BUILT_IN_ATOMIC_FETCH_OR_2:
3338 case BUILT_IN_ATOMIC_FETCH_OR_4:
3339 case BUILT_IN_ATOMIC_FETCH_OR_8:
3340 case BUILT_IN_ATOMIC_FETCH_OR_16:
3341 optimize_atomic_bit_test_and (&i,
3342 IFN_ATOMIC_BIT_TEST_AND_SET,
3343 true, false);
3344 break;
3345 case BUILT_IN_SYNC_FETCH_AND_OR_1:
3346 case BUILT_IN_SYNC_FETCH_AND_OR_2:
3347 case BUILT_IN_SYNC_FETCH_AND_OR_4:
3348 case BUILT_IN_SYNC_FETCH_AND_OR_8:
3349 case BUILT_IN_SYNC_FETCH_AND_OR_16:
3350 optimize_atomic_bit_test_and (&i,
3351 IFN_ATOMIC_BIT_TEST_AND_SET,
3352 false, false);
3353 break;
3354
3355 case BUILT_IN_ATOMIC_FETCH_XOR_1:
3356 case BUILT_IN_ATOMIC_FETCH_XOR_2:
3357 case BUILT_IN_ATOMIC_FETCH_XOR_4:
3358 case BUILT_IN_ATOMIC_FETCH_XOR_8:
3359 case BUILT_IN_ATOMIC_FETCH_XOR_16:
3360 optimize_atomic_bit_test_and
3361 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, false);
3362 break;
3363 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
3364 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
3365 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
3366 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
3367 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
3368 optimize_atomic_bit_test_and
3369 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, false);
3370 break;
3371
3372 case BUILT_IN_ATOMIC_XOR_FETCH_1:
3373 case BUILT_IN_ATOMIC_XOR_FETCH_2:
3374 case BUILT_IN_ATOMIC_XOR_FETCH_4:
3375 case BUILT_IN_ATOMIC_XOR_FETCH_8:
3376 case BUILT_IN_ATOMIC_XOR_FETCH_16:
3377 optimize_atomic_bit_test_and
3378 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, true, true);
3379 break;
3380 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
3381 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
3382 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
3383 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
3384 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
3385 optimize_atomic_bit_test_and
3386 (&i, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT, false, true);
3387 break;
3388
3389 case BUILT_IN_ATOMIC_FETCH_AND_1:
3390 case BUILT_IN_ATOMIC_FETCH_AND_2:
3391 case BUILT_IN_ATOMIC_FETCH_AND_4:
3392 case BUILT_IN_ATOMIC_FETCH_AND_8:
3393 case BUILT_IN_ATOMIC_FETCH_AND_16:
3394 optimize_atomic_bit_test_and (&i,
3395 IFN_ATOMIC_BIT_TEST_AND_RESET,
3396 true, false);
3397 break;
3398 case BUILT_IN_SYNC_FETCH_AND_AND_1:
3399 case BUILT_IN_SYNC_FETCH_AND_AND_2:
3400 case BUILT_IN_SYNC_FETCH_AND_AND_4:
3401 case BUILT_IN_SYNC_FETCH_AND_AND_8:
3402 case BUILT_IN_SYNC_FETCH_AND_AND_16:
3403 optimize_atomic_bit_test_and (&i,
3404 IFN_ATOMIC_BIT_TEST_AND_RESET,
3405 false, false);
3406 break;
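[Editor's note: these atomic/sync cases recognize the idiom of fetching into a
single-bit mask, e.g. "x = __atomic_fetch_or (p, 1 << n, model); ... x & (1 << n)
...", and, when profitable, rewrite the pair into one call to the named internal
function (IFN_ATOMIC_BIT_TEST_AND_SET here, IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT and
IFN_ATOMIC_BIT_TEST_AND_RESET below), which can map to a single bit-test
instruction. This is a description of intent, not of the exact matching
conditions.]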
3407
3408 case BUILT_IN_MEMCPY:
3409 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
3410 && TREE_CODE (gimple_call_arg (stmt, 0)) == ADDR_EXPR
3411 && TREE_CODE (gimple_call_arg (stmt, 1)) == ADDR_EXPR
3412 && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST)
3413 {
3414 tree dest = TREE_OPERAND (gimple_call_arg (stmt, 0), 0);
3415 tree src = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
3416 tree len = gimple_call_arg (stmt, 2);
3417 optimize_memcpy (&i, dest, src, len);
3418 }
3419 break;
3420
3421 case BUILT_IN_VA_START:
3422 case BUILT_IN_VA_END:
3423 case BUILT_IN_VA_COPY:
3424 /* These shouldn't be folded before pass_stdarg. */
3425 result = optimize_stdarg_builtin (stmt);
3426 break;
3427
3428 default:;
3429 }
3430
3431 if (!result)
3432 {
3433 gsi_next (&i);
3434 continue;
3435 }
3436
3437 if (!update_call_from_tree (&i, result))
3438 gimplify_and_update_call_from_tree (&i, result);
3439 }
3440
3441 todoflags |= TODO_update_address_taken;
3442
3443 if (dump_file && (dump_flags & TDF_DETAILS))
3444 {
3445 fprintf (dump_file, "Simplified\n ");
3446 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3447 }
3448
3449 old_stmt = stmt;
3450 stmt = gsi_stmt (i);
3451 update_stmt (stmt);
3452
3453 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
3454 && gimple_purge_dead_eh_edges (bb))
3455 cfg_changed = true;
3456
3457 if (dump_file && (dump_flags & TDF_DETAILS))
3458 {
3459 fprintf (dump_file, "to\n ");
3460 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
3461 fprintf (dump_file, "\n");
3462 }
3463
3464 /* Retry the same statement if it changed into another
3465 builtin; there might be new opportunities now. */
3466 if (gimple_code (stmt) != GIMPLE_CALL)
3467 {
3468 gsi_next (&i);
3469 continue;
3470 }
3471 callee = gimple_call_fndecl (stmt);
3472 if (!callee
3473 || !fndecl_built_in_p (callee, fcode))
3474 gsi_next (&i);
3475 }
3476 }
3477
3478 /* Delete unreachable blocks. */
3479 if (cfg_changed)
3480 todoflags |= TODO_cleanup_cfg;
3481
3482 return todoflags;
3483}
3484
3485} // anon namespace
3486
3487gimple_opt_pass *
3488make_pass_fold_builtins (gcc::context *ctxt)
3489{
3490 return new pass_fold_builtins (ctxt);
3491}
3492
3493/* A simple pass that emits some warnings post IPA. */
3494
3495namespace {
3496
3497const pass_data pass_data_post_ipa_warn =
3498{
3499 GIMPLE_PASS, /* type */
3500 "post_ipa_warn", /* name */
3501 OPTGROUP_NONE, /* optinfo_flags */
3502 TV_NONE, /* tv_id */
3503 ( PROP_cfg(1 << 3) | PROP_ssa(1 << 5) ), /* properties_required */
3504 0, /* properties_provided */
3505 0, /* properties_destroyed */
3506 0, /* todo_flags_start */
3507 0, /* todo_flags_finish */
3508};
3509
3510class pass_post_ipa_warn : public gimple_opt_pass
3511{
3512public:
3513 pass_post_ipa_warn (gcc::context *ctxt)
3514 : gimple_opt_pass (pass_data_post_ipa_warn, ctxt)
3515 {}
3516
3517 /* opt_pass methods: */
3518 opt_pass * clone () { return new pass_post_ipa_warn (m_ctxt); }
3519 virtual bool gate (function *) { return warn_nonnull != 0; }
3520 virtual unsigned int execute (function *);
3521
3522}; // class pass_post_ipa_warn
3523
3524unsigned int
3525pass_post_ipa_warn::execute (function *fun)
3526{
3527 basic_block bb;
3528
3529 FOR_EACH_BB_FN (bb, fun)
3530 {
3531 gimple_stmt_iterator gsi;
3532 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3533 {
3534 gimple *stmt = gsi_stmt (gsi);
3535 if (!is_gimple_call (stmt) || gimple_no_warning_p (stmt))
3536 continue;
3537
3538 tree fntype = gimple_call_fntype (stmt);
3539 bitmap nonnullargs = get_nonnull_args (fntype);
3540 if (!nonnullargs)
3541 continue;
3542
3543 tree fndecl = gimple_call_fndecl (stmt);
3544
3545 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
3546 {
3547 tree arg = gimple_call_arg (stmt, i);
3548 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
3549 continue;
3550 if (!integer_zerop (arg))
3551 continue;
3552 if (!bitmap_empty_p (nonnullargs)
3553 && !bitmap_bit_p (nonnullargs, i))
3554 continue;
3555
3556 /* In C++ non-static member functions argument 0 refers
3557 to the implicit this pointer. Use the same one-based
3558 numbering for ordinary arguments. */
3559 unsigned argno = TREE_CODE (fntype) == METHOD_TYPE ? i : i + 1;
3560 location_t loc = (EXPR_HAS_LOCATION (arg)
3561 ? EXPR_LOCATION (arg)
3562 : gimple_location (stmt));
3563 auto_diagnostic_group d;
3564 if (argno == 0)
3565 {
3566 if (warning_at (loc, OPT_Wnonnull,
3567 "%G%qs pointer null", stmt, "this")
3568 && fndecl)
3569 inform (DECL_SOURCE_LOCATION (fndecl),
3570 "in a call to non-static member function %qD",
3571 fndecl);
3572 continue;
3573 }
3574
3575 if (!warning_at (loc, OPT_Wnonnull,
3576 "%Gargument %u null where non-null "
3577 "expected", stmt, argno))
3578 continue;
3579
3580 tree fndecl = gimple_call_fndecl (stmt);
3581 if (fndecl && DECL_IS_UNDECLARED_BUILTIN (fndecl))
3582 inform (loc, "in a call to built-in function %qD",
3583 fndecl);
3584 else if (fndecl)
3585 inform (DECL_SOURCE_LOCATION (fndecl),
3586 "in a call to function %qD declared %qs",
3587 fndecl, "nonnull");
3588 }
3589 BITMAP_FREE (nonnullargs);
3590 }
3591 }
3592 return 0;
3593}
3594
3595} // anon namespace
3596
3597gimple_opt_pass *
3598make_pass_post_ipa_warn (gcc::context *ctxt)
3599{
3600 return new pass_post_ipa_warn (ctxt);
3601}

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/wide-int.h

1/* Operations with very long integers. -*- C++ -*-
2 Copyright (C) 2012-2021 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
8Free Software Foundation; either version 3, or (at your option) any
9later version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20#ifndef WIDE_INT_H
21#define WIDE_INT_H
22
23/* wide-int.[cc|h] implements a class that efficiently performs
24 mathematical operations on finite precision integers. wide_ints
25 are designed to be transient - they are not for long term storage
26 of values. There is tight integration between wide_ints and the
27 other longer storage GCC representations (rtl and tree).
28
29 The actual precision of a wide_int depends on the flavor. There
30 are three predefined flavors:
31
32 1) wide_int (the default). This flavor does the math in the
33 precision of its input arguments. It is assumed (and checked)
34 that the precisions of the operands and results are consistent.
35 This is the most efficient flavor. It is not possible to examine
36 bits above the precision that has been specified. Because of
37 this, the default flavor has semantics that are simple to
38 understand and in general model the underlying hardware that the
39 compiler is targeted for.
40
41 This flavor must be used at the RTL level of gcc because there
42 is, in general, not enough information in the RTL representation
43 to extend a value beyond the precision specified in the mode.
44
45 This flavor should also be used at the TREE and GIMPLE levels of
46 the compiler except for the circumstances described in the
47 descriptions of the other two flavors.
48
49 The default wide_int representation does not contain any
50 information inherent about signedness of the represented value,
51 so it can be used to represent both signed and unsigned numbers.
52 For operations where the results depend on signedness (full width
53 multiply, division, shifts, comparisons, and operations that need
54 overflow detected), the signedness must be specified separately.
55
56 2) offset_int. This is a fixed-precision integer that can hold
57 any address offset, measured in either bits or bytes, with at
58 least one extra sign bit. At the moment the maximum address
59 size GCC supports is 64 bits. With 8-bit bytes and an extra
60 sign bit, offset_int therefore needs to have at least 68 bits
61 of precision. We round this up to 128 bits for efficiency.
62 Values of type T are converted to this precision by sign- or
63 zero-extending them based on the signedness of T.
64
65 The extra sign bit means that offset_int is effectively a signed
66 128-bit integer, i.e. it behaves like int128_t.
67
68 Since the values are logically signed, there is no need to
69 distinguish between signed and unsigned operations. Sign-sensitive
70 comparison operators <, <=, > and >= are therefore supported.
71 Shift operators << and >> are also supported, with >> being
72 an _arithmetic_ right shift.
73
74 [ Note that, even though offset_int is effectively int128_t,
75 it can still be useful to use unsigned comparisons like
76 wi::leu_p (a, b) as a more efficient short-hand for
77 "a >= 0 && a <= b". ]
78
79 3) widest_int. This representation is an approximation of
80 infinite precision math. However, it is not really infinite
81 precision math as in the GMP library. It is really finite
82 precision math where the precision is 4 times the size of the
83 largest integer that the target port can represent.
84
85 Like offset_int, widest_int is wider than all the values that
86 it needs to represent, so the integers are logically signed.
87 Sign-sensitive comparison operators <, <=, > and >= are supported,
88 as are << and >>.
89
90 There are several places in GCC where this should/must be used:
91
92 * Code that does induction variable optimizations. This code
93 works with induction variables of many different types at the
94 same time. Because of this, it ends up doing many different
95 calculations where the operands are not compatible types. The
96 widest_int makes this easy, because it provides a field where
97 nothing is lost when converting from any variable.
98
99 * There are a small number of passes that currently use the
100 widest_int that should use the default. These should be
101 changed.
102
103 There are surprising features of offset_int and widest_int
104 that the users should be careful about:
105
106 1) Shifts and rotations are just weird. You have to specify the
107 precision in which the shift or rotate is to happen. The bits
108 above this precision are zeroed. While this is what you
109 want, it is clearly non-obvious.
110
111 2) Larger precision math sometimes does not produce the same
112 answer as would be expected for doing the math at the proper
113 precision. In particular, a multiply followed by a divide will
114 produce a different answer if the first product is larger than
115 what can be represented in the input precision.
116
117 The offset_int and the widest_int flavors are more expensive
118 than the default wide int, so in addition to the caveats with these
119 two, the default is the preferred representation.
120
121 All three flavors of wide_int are represented as a vector of
122 HOST_WIDE_INTs. The default and widest_int vectors contain enough elements
123 to hold a value of MAX_BITSIZE_MODE_ANY_INT bits. offset_int contains only
124 enough elements to hold ADDR_MAX_PRECISION bits. The values are stored
125 in the vector with the least significant HOST_BITS_PER_WIDE_INT bits
126 in element 0.
127
128 The default wide_int contains three fields: the vector (VAL),
129 the precision and a length (LEN). The length is the number of HWIs
130 needed to represent the value. widest_int and offset_int have a
131 constant precision that cannot be changed, so they only store the
132 VAL and LEN fields.
133
134 Since most integers used in a compiler are small values, it is
135 generally profitable to use a representation of the value that is
136 as small as possible. LEN is used to indicate the number of
137 elements of the vector that are in use. The numbers are stored as
138 sign extended numbers as a means of compression. Leading
139 HOST_WIDE_INTs that contain strings of either -1 or 0 are removed
140 as long as they can be reconstructed from the top bit that is being
141 represented.
142
143 The precision and length of a wide_int are always greater than 0.
144 Any bits in a wide_int above the precision are sign-extended from the
145 most significant bit. For example, a 4-bit value 0x8 is represented as
146 VAL = { 0xf...fff8 }. However, as an optimization, we allow other integer
147 constants to be represented with undefined bits above the precision.
148 This allows INTEGER_CSTs to be pre-extended according to TYPE_SIGN,
149 so that the INTEGER_CST representation can be used both in TYPE_PRECISION
150 and in wider precisions.
151
152 There are constructors to create the various forms of wide_int from
153 trees, rtl and constants. For trees the options are:
154
155 tree t = ...;
156 wi::to_wide (t) // Treat T as a wide_int
157 wi::to_offset (t) // Treat T as an offset_int
158 wi::to_widest (t) // Treat T as a widest_int
159
160 All three are light-weight accessors that should have no overhead
161 in release builds. If it is useful for readability reasons to
162 store the result in a temporary variable, the preferred method is:
163
164 wi::tree_to_wide_ref twide = wi::to_wide (t);
165 wi::tree_to_offset_ref toffset = wi::to_offset (t);
166 wi::tree_to_widest_ref twidest = wi::to_widest (t);
167
168 To make an rtx into a wide_int, you have to pair it with a mode.
169 The canonical way to do this is with rtx_mode_t as in:
170
171 rtx r = ...
172 wide_int x = rtx_mode_t (r, mode);
173
174 Similarly, a wide_int can only be constructed from a host value if
175 the target precision is given explicitly, such as in:
176
177 wide_int x = wi::shwi (c, prec); // sign-extend C if necessary
178 wide_int y = wi::uhwi (c, prec); // zero-extend C if necessary
179
180 However, offset_int and widest_int have an inherent precision and so
181 can be initialized directly from a host value:
182
183 offset_int x = (int) c; // sign-extend C
184 widest_int x = (unsigned int) c; // zero-extend C
185
186 It is also possible to do arithmetic directly on rtx_mode_ts and
187 constants. For example:
188
189 wi::add (r1, r2); // add equal-sized rtx_mode_ts r1 and r2
190 wi::add (r1, 1); // add 1 to rtx_mode_t r1
191 wi::lshift (1, 100); // 1 << 100 as a widest_int
192
193 Many binary operations place restrictions on the combinations of inputs,
194 using the following rules:
195
196 - {rtx, wide_int} op {rtx, wide_int} -> wide_int
197 The inputs must be the same precision. The result is a wide_int
198 of the same precision
199
200 - {rtx, wide_int} op (un)signed HOST_WIDE_INT -> wide_int
201 (un)signed HOST_WIDE_INT op {rtx, wide_int} -> wide_int
202 The HOST_WIDE_INT is extended or truncated to the precision of
203 the other input. The result is a wide_int of the same precision
204 as that input.
205
206 - (un)signed HOST_WIDE_INT op (un)signed HOST_WIDE_INT -> widest_int
207 The inputs are extended to widest_int precision and produce a
208 widest_int result.
209
210 - offset_int op offset_int -> offset_int
211 offset_int op (un)signed HOST_WIDE_INT -> offset_int
212 (un)signed HOST_WIDE_INT op offset_int -> offset_int
213
214 - widest_int op widest_int -> widest_int
215 widest_int op (un)signed HOST_WIDE_INT -> widest_int
216 (un)signed HOST_WIDE_INT op widest_int -> widest_int
217
218 Other combinations like:
219
220 - widest_int op offset_int and
221 - wide_int op offset_int
222
223 are not allowed. The inputs should instead be extended or truncated
224 so that they match.
225
226 The inputs to comparison functions like wi::eq_p and wi::lts_p
227 follow the same compatibility rules, although their return types
228 are different. Unary functions on X produce the same result as
229 a binary operation X + X. Shift functions X op Y also produce
230 the same result as X + X; the precision of the shift amount Y
231 can be arbitrarily different from X. */
232
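[Editor's note: a minimal sketch of the construction and combination rules
described above, assuming the usual GCC-internal context; the variable names are
illustrative only.]

    wide_int a = wi::shwi (-1, 32);        // 32-bit wide_int, sign-extended
    wide_int b = wi::uhwi (7, 32);         // 32-bit wide_int, zero-extended
    wide_int sum = wi::add (a, b);         // same precision in, same out
    offset_int off = (int) 16;             // inherent 128-bit precision
    offset_int bits = off << 3;            // arithmetic shift, offset_int result
    widest_int big = wi::lshift (1, 100);  // "1 << 100" as a widest_int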
233/* The MAX_BITSIZE_MODE_ANY_INT is automatically generated by a very
234 early examination of the target's mode file. The WIDE_INT_MAX_ELTS
235 can accommodate at least 1 more bit so that unsigned numbers of that
236 mode can be represented as a signed value. Note that it is still
237 possible to create fixed_wide_ints that have precisions greater than
238 MAX_BITSIZE_MODE_ANY_INT. This can be useful when representing a
239 double-width multiplication result, for example. */
240#define WIDE_INT_MAX_ELTS \
241 ((MAX_BITSIZE_MODE_ANY_INT + HOST_BITS_PER_WIDE_INT) / HOST_BITS_PER_WIDE_INT)
242
243#define WIDE_INT_MAX_PRECISION (WIDE_INT_MAX_ELTS * HOST_BITS_PER_WIDE_INT)
244
245/* This is the max size of any pointer on any machine. It does not
246 seem to be as easy to sniff this out of the machine description as
247 it is for MAX_BITSIZE_MODE_ANY_INT since targets may support
248 multiple address sizes and may have different address sizes for
249 different address spaces. However, currently the largest pointer
250 on any platform is 64 bits. When that changes, then it is likely
251 that a target hook should be defined so that targets can make this
252 value larger for those targets. */
253#define ADDR_MAX_BITSIZE 64
254
255/* This is the internal precision used when doing any address
256 arithmetic. The '4' is really 3 + 1. Three of the bits are for
257 the number of extra bits needed to do bit addresses and the other bit
258 is to allow everything to be signed without losing any precision.
259 Then everything is rounded up to the next HWI for efficiency. */
260#define ADDR_MAX_PRECISION \
261 ((ADDR_MAX_BITSIZE + 4 + HOST_BITS_PER_WIDE_INT - 1) \
262 & ~(HOST_BITS_PER_WIDE_INT - 1))
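[Editor's note: with 64-bit HWIs the expression above evaluates to
(64 + 4 + 63) & ~63 = 131 & ~63 = 128, so offset_int occupies two 64-bit blocks.
A compile-time check of that arithmetic:]

    static_assert (((64 + 4 + 64 - 1) & ~(64 - 1)) == 128,
                   "ADDR_MAX_PRECISION rounds up to 128 with 64-bit HWIs");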
263
264/* The number of HWIs needed to store an offset_int. */
265#define OFFSET_INT_ELTS (ADDR_MAX_PRECISION / HOST_BITS_PER_WIDE_INT)
266
267/* The type of result produced by a binary operation on types T1 and T2.
268 Defined purely for brevity. */
269#define WI_BINARY_RESULT(T1, T2) \
270 typename wi::binary_traits <T1, T2>::result_type
271
272/* Likewise for binary operators, which excludes the case in which neither
273 T1 nor T2 is a wide-int-based type. */
274#define WI_BINARY_OPERATOR_RESULT(T1, T2) \
275 typename wi::binary_traits <T1, T2>::operator_result
276
277/* The type of result produced by T1 << T2. Leads to substitution failure
278 if the operation isn't supported. Defined purely for brevity. */
279#define WI_SIGNED_SHIFT_RESULT(T1, T2) \
280 typename wi::binary_traits <T1, T2>::signed_shift_result_type
281
282/* The type of result produced by a sign-agnostic binary predicate on
283 types T1 and T2. This is bool if wide-int operations make sense for
284 T1 and T2 and leads to substitution failure otherwise. */
285#define WI_BINARY_PREDICATE_RESULT(T1, T2) \
286 typename wi::binary_traits <T1, T2>::predicate_result
287
288/* The type of result produced by a signed binary predicate on types T1 and T2.
289 This is bool if signed comparisons make sense for T1 and T2 and leads to
290 substitution failure otherwise. */
291#define WI_SIGNED_BINARY_PREDICATE_RESULT(T1, T2) \
292 typename wi::binary_traits <T1, T2>::signed_predicate_result
293
294/* The type of result produced by a unary operation on type T. */
295#define WI_UNARY_RESULT(T) \
296 typename wi::binary_traits <T, T>::result_type
297
298/* Define a variable RESULT to hold the result of a binary operation on
299 X and Y, which have types T1 and T2 respectively. Define VAL to
300 point to the blocks of RESULT. Once the user of the macro has
301 filled in VAL, it should call RESULT.set_len to set the number
302 of initialized blocks. */
303#define WI_BINARY_RESULT_VAR(RESULT, VAL, T1, X, T2, Y) \
304 WI_BINARY_RESULT (T1, T2) RESULT = \
305 wi::int_traits <WI_BINARY_RESULT (T1, T2)>::get_binary_result (X, Y); \
306 HOST_WIDE_INT *VAL = RESULT.write_val ()
307
308/* Similar for the result of a unary operation on X, which has type T. */
309#define WI_UNARY_RESULT_VAR(RESULT, VAL, T, X) \
310 WI_UNARY_RESULT (T) RESULT = \
311 wi::int_traits <WI_UNARY_RESULT (T)>::get_binary_result (X, X); \
312 HOST_WIDE_INT *VAL = RESULT.write_val ()
313
314template <typename T> class generic_wide_int;
315template <int N> class fixed_wide_int_storage;
316class wide_int_storage;
317
318/* An N-bit integer. Until we can use typedef templates, use this instead. */
319#define FIXED_WIDE_INT(N) \
320 generic_wide_int < fixed_wide_int_storage <N> >
321
322typedef generic_wide_int <wide_int_storage> wide_int;
323typedef FIXED_WIDE_INT (ADDR_MAX_PRECISION) offset_int;
324typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION) widest_int;
325/* Spelled out explicitly (rather than through FIXED_WIDE_INT)
326 so as not to confuse gengtype. */
327typedef generic_wide_int < fixed_wide_int_storage <WIDE_INT_MAX_PRECISION * 2> > widest2_int;
328
329/* wi::storage_ref can be a reference to a primitive type,
330 so this is the conservatively-correct setting. */
331template <bool SE, bool HDP = true>
332class wide_int_ref_storage;
333
334typedef generic_wide_int <wide_int_ref_storage <false> > wide_int_ref;
335
336/* This can be used instead of wide_int_ref if the referenced value is
337 known to have type T. It carries across properties of T's representation,
338 such as whether excess upper bits in a HWI are defined, and can therefore
339 help avoid redundant work.
340
341 The macro could be replaced with a template typedef, once we're able
342 to use those. */
343#define WIDE_INT_REF_FOR(T) \
344 generic_wide_int \
345 <wide_int_ref_storage <wi::int_traits <T>::is_sign_extended, \
346 wi::int_traits <T>::host_dependent_precision> >
347
348namespace wi
349{
350 /* Operations that calculate overflow do so even for
351 TYPE_OVERFLOW_WRAPS types. For example, adding 1 to +MAX_INT in
352 an unsigned int is 0 and does not overflow in C/C++, but wi::add
353 will set the overflow argument in case it's needed for further
354 analysis.
355
356 For operations that require overflow, these are the different
357 types of overflow. */
358 enum overflow_type {
359 OVF_NONE = 0,
360 OVF_UNDERFLOW = -1,
361 OVF_OVERFLOW = 1,
362 /* There was an overflow, but we are unsure whether it was an
363 overflow or an underflow. */
364 OVF_UNKNOWN = 2
365 };
366
367 /* Classifies an integer based on its precision. */
368 enum precision_type {
369 /* The integer has both a precision and defined signedness. This allows
370 the integer to be converted to any width, since we know whether to fill
371 any extra bits with zeros or signs. */
372 FLEXIBLE_PRECISION,
373
374 /* The integer has a variable precision but no defined signedness. */
375 VAR_PRECISION,
376
377 /* The integer has a constant precision (known at GCC compile time)
378 and is signed. */
379 CONST_PRECISION
380 };
381
382 /* This class, which has no default implementation, is expected to
383 provide the following members:
384
385 static const enum precision_type precision_type;
386 Classifies the type of T.
387
388 static const unsigned int precision;
389 Only defined if precision_type == CONST_PRECISION. Specifies the
390 precision of all integers of type T.
391
392 static const bool host_dependent_precision;
393 True if the precision of T depends (or can depend) on the host.
394
395 static unsigned int get_precision (const T &x)
396 Return the number of bits in X.
397
398 static wi::storage_ref *decompose (HOST_WIDE_INT *scratch,
399 unsigned int precision, const T &x)
400 Decompose X as a PRECISION-bit integer, returning the associated
401 wi::storage_ref. SCRATCH is available as scratch space if needed.
402 The routine should assert that PRECISION is acceptable. */
403 template <typename T> struct int_traits;
404
405 /* This class provides a single type, result_type, which specifies the
406 type of integer produced by a binary operation whose inputs have
407 types T1 and T2. The definition should be symmetric. */
408 template <typename T1, typename T2,
409 enum precision_type P1 = int_traits <T1>::precision_type,
410 enum precision_type P2 = int_traits <T2>::precision_type>
411 struct binary_traits;
412
413 /* Specify the result type for each supported combination of binary
414 inputs. Note that CONST_PRECISION and VAR_PRECISION cannot be
415 mixed, in order to give stronger type checking. When both inputs
416 are CONST_PRECISION, they must have the same precision. */
417 template <typename T1, typename T2>
418 struct binary_traits <T1, T2, FLEXIBLE_PRECISION, FLEXIBLE_PRECISION>
419 {
420 typedef widest_int result_type;
421 /* Don't define operators for this combination. */
422 };
423
424 template <typename T1, typename T2>
425 struct binary_traits <T1, T2, FLEXIBLE_PRECISION, VAR_PRECISION>
426 {
427 typedef wide_int result_type;
428 typedef result_type operator_result;
429 typedef bool predicate_result;
430 };
431
432 template <typename T1, typename T2>
433 struct binary_traits <T1, T2, FLEXIBLE_PRECISION, CONST_PRECISION>
434 {
435 /* Spelled out explicitly (rather than through FIXED_WIDE_INT)
436 so as not to confuse gengtype. */
437 typedef generic_wide_int < fixed_wide_int_storage
438 <int_traits <T2>::precision> > result_type;
439 typedef result_type operator_result;
440 typedef bool predicate_result;
441 typedef result_type signed_shift_result_type;
442 typedef bool signed_predicate_result;
443 };
444
445 template <typename T1, typename T2>
446 struct binary_traits <T1, T2, VAR_PRECISION, FLEXIBLE_PRECISION>
447 {
448 typedef wide_int result_type;
449 typedef result_type operator_result;
450 typedef bool predicate_result;
451 };
452
453 template <typename T1, typename T2>
454 struct binary_traits <T1, T2, CONST_PRECISION, FLEXIBLE_PRECISION>
455 {
456 /* Spelled out explicitly (rather than through FIXED_WIDE_INT)
457 so as not to confuse gengtype. */
458 typedef generic_wide_int < fixed_wide_int_storage
459 <int_traits <T1>::precision> > result_type;
460 typedef result_type operator_result;
461 typedef bool predicate_result;
462 typedef result_type signed_shift_result_type;
463 typedef bool signed_predicate_result;
464 };
465
466 template <typename T1, typename T2>
467 struct binary_traits <T1, T2, CONST_PRECISION, CONST_PRECISION>
468 {
469 STATIC_ASSERT (int_traits <T1>::precision == int_traits <T2>::precision);
470 /* Spelled out explicitly (rather than through FIXED_WIDE_INT)
471 so as not to confuse gengtype. */
472 typedef generic_wide_int < fixed_wide_int_storage
473 <int_traits <T1>::precision> > result_type;
474 typedef result_type operator_result;
475 typedef bool predicate_result;
476 typedef result_type signed_shift_result_type;
477 typedef bool signed_predicate_result;
478 };
479
480 template <typename T1, typename T2>
481 struct binary_traits <T1, T2, VAR_PRECISION, VAR_PRECISION>
482 {
483 typedef wide_int result_type;
484 typedef result_type operator_result;
485 typedef bool predicate_result;
486 };
487}
488
489/* Public functions for querying and operating on integers. */
490namespace wi
491{
492 template <typename T>
493 unsigned int get_precision (const T &);
494
495 template <typename T1, typename T2>
496 unsigned int get_binary_precision (const T1 &, const T2 &);
497
498 template <typename T1, typename T2>
499 void copy (T1 &, const T2 &);
500
501#define UNARY_PREDICATE \
502 template <typename T> bool
503#define UNARY_FUNCTION \
504 template <typename T> WI_UNARY_RESULT (T)
505#define BINARY_PREDICATE \
506 template <typename T1, typename T2> bool
507#define BINARY_FUNCTION \
508 template <typename T1, typename T2> WI_BINARY_RESULT (T1, T2)
509#define SHIFT_FUNCTION \
510 template <typename T1, typename T2> WI_UNARY_RESULT (T1)
511
512 UNARY_PREDICATE fits_shwi_p (const T &);
513 UNARY_PREDICATE fits_uhwi_p (const T &);
514 UNARY_PREDICATE neg_p (const T &, signop = SIGNED);
515
516 template <typename T>
517 HOST_WIDE_INT sign_mask (const T &);
518
519 BINARY_PREDICATE eq_p (const T1 &, const T2 &);
520 BINARY_PREDICATE ne_p (const T1 &, const T2 &);
521 BINARY_PREDICATE lt_p (const T1 &, const T2 &, signop);
522 BINARY_PREDICATE lts_p (const T1 &, const T2 &);
523 BINARY_PREDICATE ltu_p (const T1 &, const T2 &);
524 BINARY_PREDICATE le_p (const T1 &, const T2 &, signop);
525 BINARY_PREDICATE les_p (const T1 &, const T2 &);
526 BINARY_PREDICATE leu_p (const T1 &, const T2 &);
527 BINARY_PREDICATE gt_p (const T1 &, const T2 &, signop);
528 BINARY_PREDICATE gts_p (const T1 &, const T2 &);
529 BINARY_PREDICATE gtu_p (const T1 &, const T2 &);
530 BINARY_PREDICATE ge_p (const T1 &, const T2 &, signop);
531 BINARY_PREDICATE ges_p (const T1 &, const T2 &);
532 BINARY_PREDICATE geu_p (const T1 &, const T2 &);
533
534 template <typename T1, typename T2>
535 int cmp (const T1 &, const T2 &, signop);
536
537 template <typename T1, typename T2>
538 int cmps (const T1 &, const T2 &);
539
540 template <typename T1, typename T2>
541 int cmpu (const T1 &, const T2 &);
542
543 UNARY_FUNCTION bit_not (const T &);
544 UNARY_FUNCTION neg (const T &);
545 UNARY_FUNCTION neg (const T &, overflow_type *);
546 UNARY_FUNCTION abs (const T &);
547 UNARY_FUNCTION ext (const T &, unsigned int, signop);
548 UNARY_FUNCTION sext (const T &, unsigned int);
549 UNARY_FUNCTION zext (const T &, unsigned int);
550 UNARY_FUNCTION set_bit (const T &, unsigned int);
551
552 BINARY_FUNCTION min (const T1 &, const T2 &, signop);
553 BINARY_FUNCTION smin (const T1 &, const T2 &);
554 BINARY_FUNCTION umin (const T1 &, const T2 &);
555 BINARY_FUNCTION max (const T1 &, const T2 &, signop);
556 BINARY_FUNCTION smax (const T1 &, const T2 &);
557 BINARY_FUNCTION umax (const T1 &, const T2 &);
558
559 BINARY_FUNCTION bit_and (const T1 &, const T2 &);
560 BINARY_FUNCTION bit_and_not (const T1 &, const T2 &);
561 BINARY_FUNCTION bit_or (const T1 &, const T2 &);
562 BINARY_FUNCTION bit_or_not (const T1 &, const T2 &);
563 BINARY_FUNCTION bit_xor (const T1 &, const T2 &);
564 BINARY_FUNCTION add (const T1 &, const T2 &);
565 BINARY_FUNCTION add (const T1 &, const T2 &, signop, overflow_type *);
566 BINARY_FUNCTION sub (const T1 &, const T2 &);
567 BINARY_FUNCTION sub (const T1 &, const T2 &, signop, overflow_type *);
568 BINARY_FUNCTION mul (const T1 &, const T2 &);
569 BINARY_FUNCTION mul (const T1 &, const T2 &, signop, overflow_type *);
570 BINARY_FUNCTION smul (const T1 &, const T2 &, overflow_type *);
571 BINARY_FUNCTION umul (const T1 &, const T2 &, overflow_type *);
572 BINARY_FUNCTION mul_high (const T1 &, const T2 &, signop);
573 BINARY_FUNCTION div_trunc (const T1 &, const T2 &, signop,
574 overflow_type * = 0);
575 BINARY_FUNCTION sdiv_trunc (const T1 &, const T2 &);
576 BINARY_FUNCTION udiv_trunc (const T1 &, const T2 &);
577 BINARY_FUNCTION div_floor (const T1 &, const T2 &, signop,
578 overflow_type * = 0);
579 BINARY_FUNCTION udiv_floor (const T1 &, const T2 &);
580 BINARY_FUNCTION sdiv_floor (const T1 &, const T2 &);
581 BINARY_FUNCTION div_ceil (const T1 &, const T2 &, signop,
582 overflow_type * = 0);
583 BINARY_FUNCTION udiv_ceil (const T1 &, const T2 &);
584 BINARY_FUNCTION div_round (const T1 &, const T2 &, signop,
585 overflow_type * = 0);
586 BINARY_FUNCTION divmod_trunc (const T1 &, const T2 &, signop,
587 WI_BINARY_RESULT (T1, T2) *);
588 BINARY_FUNCTION gcd (const T1 &, const T2 &, signop = UNSIGNED);
589 BINARY_FUNCTION mod_trunc (const T1 &, const T2 &, signop,
590 overflow_type * = 0);
591 BINARY_FUNCTION smod_trunc (const T1 &, const T2 &);
592 BINARY_FUNCTION umod_trunc (const T1 &, const T2 &);
593 BINARY_FUNCTION mod_floor (const T1 &, const T2 &, signop,
594 overflow_type * = 0);
595 BINARY_FUNCTION umod_floor (const T1 &, const T2 &);
596 BINARY_FUNCTION mod_ceil (const T1 &, const T2 &, signop,
597 overflow_type * = 0);
598 BINARY_FUNCTION mod_round (const T1 &, const T2 &, signop,
599 overflow_type * = 0);
600
601 template <typename T1, typename T2>
602 bool multiple_of_p (const T1 &, const T2 &, signop);
603
604 template <typename T1, typename T2>
605 bool multiple_of_p (const T1 &, const T2 &, signop,
606 WI_BINARY_RESULT (T1, T2) *);
607
608 SHIFT_FUNCTION lshift (const T1 &, const T2 &);
609 SHIFT_FUNCTION lrshift (const T1 &, const T2 &);
610 SHIFT_FUNCTION arshift (const T1 &, const T2 &);
611 SHIFT_FUNCTION rshift (const T1 &, const T2 &, signop sgn);
612 SHIFT_FUNCTION lrotate (const T1 &, const T2 &, unsigned int = 0);
613 SHIFT_FUNCTION rrotate (const T1 &, const T2 &, unsigned int = 0);
614
615#undef SHIFT_FUNCTION
616#undef BINARY_PREDICATE
617#undef BINARY_FUNCTION
618#undef UNARY_PREDICATE
619#undef UNARY_FUNCTION
620
621 bool only_sign_bit_p (const wide_int_ref &, unsigned int);
622 bool only_sign_bit_p (const wide_int_ref &);
623 int clz (const wide_int_ref &);
624 int clrsb (const wide_int_ref &);
625 int ctz (const wide_int_ref &);
626 int exact_log2 (const wide_int_ref &);
627 int floor_log2 (const wide_int_ref &);
628 int ffs (const wide_int_ref &);
629 int popcount (const wide_int_ref &);
630 int parity (const wide_int_ref &);
631
632 template <typename T>
633 unsigned HOST_WIDE_INT extract_uhwi (const T &, unsigned int, unsigned int);
634
635 template <typename T>
636 unsigned int min_precision (const T &, signop);
637
638 static inline void accumulate_overflow (overflow_type &, overflow_type);
639}
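[Editor's note: a few illustrative calls into the API declared above, with values
worked out by hand; GCC-internal context assumed.]

    wide_int x = wi::shwi (0xff, 32);
    int ones = wi::popcount (x);                          // 8
    bool fits = wi::fits_uhwi_p (x);                      // true
    unsigned int prec = wi::min_precision (x, UNSIGNED);  // 8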
640
641namespace wi
642{
643 /* Contains the components of a decomposed integer for easy, direct
644 access. */
645 class storage_ref
646 {
647 public:
648 storage_ref () {}
649 storage_ref (const HOST_WIDE_INT *, unsigned int, unsigned int);
650
651 const HOST_WIDE_INT *val;
652 unsigned int len;
653 unsigned int precision;
654
655 /* Provide enough trappings for this class to act as storage for
656 generic_wide_int. */
657 unsigned int get_len () const;
658 unsigned int get_precision () const;
659 const HOST_WIDE_INT *get_val () const;
660 };
661}
662
663inline::wi::storage_ref::storage_ref (const HOST_WIDE_INT *val_in,
664 unsigned int len_in,
665 unsigned int precision_in)
666 : val (val_in), len (len_in), precision (precision_in)
667{
668}
669
670inline unsigned int
671wi::storage_ref::get_len () const
672{
673 return len;
674}
675
676inline unsigned int
677wi::storage_ref::get_precision () const
678{
679 return precision;
680}
681
682inline const HOST_WIDE_INT *
683wi::storage_ref::get_val () const
684{
685 return val;
686}
687
688/* This class defines an integer type using the storage provided by the
689 template argument. The storage class must provide the following
690 functions:
691
692 unsigned int get_precision () const
693 Return the number of bits in the integer.
694
695 HOST_WIDE_INT *get_val () const
696 Return a pointer to the array of blocks that encodes the integer.
697
698 unsigned int get_len () const
699 Return the number of blocks in get_val (). If this is smaller
700 than the number of blocks implied by get_precision (), the
701 remaining blocks are sign extensions of block get_len () - 1.
702
703 Although not required by generic_wide_int itself, writable storage
704 classes can also provide the following functions:
705
706 HOST_WIDE_INT *write_val ()
707 Get a modifiable version of get_val ()
708
709 unsigned int set_len (unsigned int len)
710 Set the value returned by get_len () to LEN. */
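[Editor's note: a minimal read-only storage class conforming to the contract
above. The class is hypothetical and purely illustrative; the real storage
classes follow below.]

    class hwi_storage_example
    {
      HOST_WIDE_INT val[1];  // single uninitialized block, for illustration
    public:
      unsigned int get_precision () const { return HOST_BITS_PER_WIDE_INT; }
      const HOST_WIDE_INT *get_val () const { return val; }
      unsigned int get_len () const { return 1; }
    };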
711template <typename storage>
712class GTY(()) generic_wide_int : public storage
713{
714public:
715 generic_wide_int ();
    [2] Calling default constructor for 'fixed_wide_int_storage<192>'
    [4] Returning from default constructor for 'fixed_wide_int_storage<192>'
716
717 template <typename T>
718 generic_wide_int (const T &);
719
720 template <typename T>
721 generic_wide_int (const T &, unsigned int);
722
723 /* Conversions. */
724 HOST_WIDE_INT to_shwi (unsigned int) const;
725 HOST_WIDE_INT to_shwi () const;
726 unsigned HOST_WIDE_INT to_uhwi (unsigned int) const;
727 unsigned HOST_WIDE_INT to_uhwi () const;
728 HOST_WIDE_INT to_short_addr () const;
729
730 /* Public accessors for the interior of a wide int. */
731 HOST_WIDE_INT sign_mask () const;
732 HOST_WIDE_INT elt (unsigned int) const;
733 HOST_WIDE_INT sext_elt (unsigned int) const;
734 unsigned HOST_WIDE_INT ulow () const;
735 unsigned HOST_WIDE_INT uhigh () const;
736 HOST_WIDE_INT slow () const;
737 HOST_WIDE_INT shigh () const;
738
739 template <typename T>
740 generic_wide_int &operator = (const T &);
741
742#define ASSIGNMENT_OPERATOR(OP, F) \
743 template <typename T> \
744 generic_wide_int &OP (const T &c) { return (*this = wi::F (*this, c)); }
745
746/* Restrict these to cases where the shift operator is defined. */
747#define SHIFT_ASSIGNMENT_OPERATOR(OP, OP2) \
748 template <typename T> \
749 generic_wide_int &OP (const T &c) { return (*this = *this OP2 c); }
750
751#define INCDEC_OPERATOR(OP, DELTA) \
752 generic_wide_int &OP () { *this += DELTA; return *this; }
753
754 ASSIGNMENT_OPERATOR (operator &=, bit_and)
755 ASSIGNMENT_OPERATOR (operator |=, bit_or)
756 ASSIGNMENT_OPERATOR (operator ^=, bit_xor)
757 ASSIGNMENT_OPERATOR (operator +=, add)
758 ASSIGNMENT_OPERATOR (operator -=, sub)
759 ASSIGNMENT_OPERATOR (operator *=, mul)
760 ASSIGNMENT_OPERATOR (operator <<=, lshift)
761 SHIFT_ASSIGNMENT_OPERATOR (operator >>=, >>)
762 INCDEC_OPERATOR (operator ++, 1)
763 INCDEC_OPERATOR (operator --, -1)
764
765#undef SHIFT_ASSIGNMENT_OPERATOR
766#undef ASSIGNMENT_OPERATOR
767#undef INCDEC_OPERATOR
768
769 /* Debugging functions. */
770 void dump () const;
771
772 static const bool is_sign_extended
773 = wi::int_traits <generic_wide_int <storage> >::is_sign_extended;
774};
775
776template <typename storage>
777inline generic_wide_int <storage>::generic_wide_int () {}
    [5] Returning without writing to 'this->len'
778
779template <typename storage>
780template <typename T>
781inline generic_wide_int <storage>::generic_wide_int (const T &x)
782 : storage (x)
    [21] Calling constructor for 'wide_int_ref_storage<false, true>'
783{
784}
785
786template <typename storage>
787template <typename T>
788inline generic_wide_int <storage>::generic_wide_int (const T &x,
789 unsigned int precision)
790 : storage (x, precision)
791{
792}
793
794/* Return THIS as a signed HOST_WIDE_INT, sign-extending from PRECISION.
795 If THIS does not fit in PRECISION, the information is lost. */
796template <typename storage>
797inline HOST_WIDE_INT
798generic_wide_int <storage>::to_shwi (unsigned int precision) const
799{
800 if (precision < HOST_BITS_PER_WIDE_INT)
801 return sext_hwi (this->get_val ()[0], precision);
802 else
803 return this->get_val ()[0];
804}
805
806/* Return THIS as a signed HOST_WIDE_INT, in its natural precision. */
807template <typename storage>
808inline HOST_WIDE_INT
809generic_wide_int <storage>::to_shwi () const
810{
811 if (is_sign_extended)
812 return this->get_val ()[0];
813 else
814 return to_shwi (this->get_precision ());
815}
816
817/* Return THIS as an unsigned HOST_WIDE_INT, zero-extending from
818 PRECISION. If THIS does not fit in PRECISION, the information
819 is lost. */
820template <typename storage>
821inline unsigned HOST_WIDE_INT
822generic_wide_int <storage>::to_uhwi (unsigned int precision) const
823{
824 if (precision < HOST_BITS_PER_WIDE_INT)
825 return zext_hwi (this->get_val ()[0], precision);
826 else
827 return this->get_val ()[0];
828}
829
830/* Return THIS as an unsigned HOST_WIDE_INT, in its natural precision. */
831template <typename storage>
832inline unsigned HOST_WIDE_INT
833generic_wide_int <storage>::to_uhwi () const
834{
835 return to_uhwi (this->get_precision ());
836}
837
838/* TODO: The compiler is half converted from using HOST_WIDE_INT to
839 represent addresses to using offset_int to represent addresses.
840 We use to_short_addr at the interface from new code to old,
841 unconverted code. */
842template <typename storage>
843inline HOST_WIDE_INT
844generic_wide_int <storage>::to_short_addr () const
845{
846 return this->get_val ()[0];
847}
848
849/* Return the implicit value of blocks above get_len (). */
850template <typename storage>
851inline HOST_WIDE_INT
852generic_wide_int <storage>::sign_mask () const
853{
854 unsigned int len = this->get_len ();
855 gcc_assert (len > 0);
856
857 unsigned HOST_WIDE_INT high = this->get_val ()[len - 1];
858 if (!is_sign_extended)
859 {
860 unsigned int precision = this->get_precision ();
861 int excess = len * HOST_BITS_PER_WIDE_INT - precision;
862 if (excess > 0)
863 high <<= excess;
864 }
865 return (HOST_WIDE_INT) (high) < 0 ? -1 : 0;
866}
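[Editor's note: a worked example of the shift above. For precision 69 and len 2,
excess = 2 * 64 - 69 = 59; shifting the top block left by 59 moves the value's
bit 68 into the HWI sign bit, so the final '< 0' test reads the numerically most
significant bit even when the excess bits above the precision are undefined.]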
867
868/* Return the signed value of the least-significant explicitly-encoded
869 block. */
870template <typename storage>
871inline HOST_WIDE_INTlong
872generic_wide_int <storage>::slow () const
873{
874 return this->get_val ()[0];
875}
876
877/* Return the signed value of the most-significant explicitly-encoded
878 block. */
879template <typename storage>
880inline HOST_WIDE_INT
881generic_wide_int <storage>::shigh () const
882{
883 return this->get_val ()[this->get_len () - 1];
884}
885
886/* Return the unsigned value of the least-significant
887 explicitly-encoded block. */
888template <typename storage>
889inline unsigned HOST_WIDE_INT
890generic_wide_int <storage>::ulow () const
891{
892 return this->get_val ()[0];
893}
894
895/* Return the unsigned value of the most-significant
896 explicitly-encoded block. */
897template <typename storage>
898inline unsigned HOST_WIDE_INT
899generic_wide_int <storage>::uhigh () const
900{
901 return this->get_val ()[this->get_len () - 1];
902}
903
904/* Return block I, which might be implicitly or explicitly encoded. */
905template <typename storage>
906inline HOST_WIDE_INT
907generic_wide_int <storage>::elt (unsigned int i) const
908{
909 if (i >= this->get_len ())
910 return sign_mask ();
911 else
912 return this->get_val ()[i];
913}
914
915/* Like elt, but sign-extend beyond the upper bit, instead of returning
916 the raw encoding. */
917template <typename storage>
918inline HOST_WIDE_INT
919generic_wide_int <storage>::sext_elt (unsigned int i) const
920{
921 HOST_WIDE_INT elt_i = elt (i);
922 if (!is_sign_extended)
923 {
924 unsigned int precision = this->get_precision ();
925 unsigned int lsb = i * HOST_BITS_PER_WIDE_INT;
926 if (precision - lsb < HOST_BITS_PER_WIDE_INT)
927 elt_i = sext_hwi (elt_i, precision - lsb);
928 }
929 return elt_i;
930}
931
932template <typename storage>
933template <typename T>
934inline generic_wide_int <storage> &
935generic_wide_int <storage>::operator = (const T &x)
936{
937 storage::operator = (x);
938 return *this;
939}
940
941/* Dump the contents of the integer to stderr, for debugging. */
942template <typename storage>
943void
944generic_wide_int <storage>::dump () const
945{
946 unsigned int len = this->get_len ();
947 const HOST_WIDE_INT *val = this->get_val ();
948 unsigned int precision = this->get_precision ();
949 fprintf (stderr, "[");
950 if (len * HOST_BITS_PER_WIDE_INT < precision)
951 fprintf (stderr, "...,");
952 for (unsigned int i = 0; i < len - 1; ++i)
953 fprintf (stderr, HOST_WIDE_INT_PRINT_HEX ",", val[len - 1 - i]);
954 fprintf (stderr, HOST_WIDE_INT_PRINT_HEX "], precision = %d\n",
955 val[0], precision);
956}
957
958namespace wi
959{
960 template <typename storage>
961 struct int_traits < generic_wide_int <storage> >
962 : public wi::int_traits <storage>
963 {
964 static unsigned int get_precision (const generic_wide_int <storage> &);
965 static wi::storage_ref decompose (HOST_WIDE_INT *, unsigned int,
966 const generic_wide_int <storage> &);
967 };
968}
969
970template <typename storage>
971inline unsigned int
972wi::int_traits < generic_wide_int <storage> >::
973get_precision (const generic_wide_int <storage> &x)
974{
975 return x.get_precision ();
976}
977
978template <typename storage>
979inline wi::storage_ref
980wi::int_traits < generic_wide_int <storage> >::
981decompose (HOST_WIDE_INT *, unsigned int precision,
982 const generic_wide_int <storage> &x)
983{
984 gcc_checking_assert (precision == x.get_precision ());
    [23] '?' condition is false
985 return wi::storage_ref (x.get_val (), x.get_len (), precision);
    [24] Calling 'fixed_wide_int_storage::get_len'
986}
987
988/* Provide the storage for a wide_int_ref. This acts like a read-only
989 wide_int, with the optimization that VAL is normally a pointer to
990 another integer's storage, so that no array copy is needed. */
991template <bool SE, bool HDP>
992class wide_int_ref_storage : public wi::storage_ref
993{
994private:
995 /* Scratch space that can be used when decomposing the original integer.
996 It must live as long as this object. */
997 HOST_WIDE_INT scratch[2];
998
999public:
1000 wide_int_ref_storage () {}
1001
1002 wide_int_ref_storage (const wi::storage_ref &);
1003
1004 template <typename T>
1005 wide_int_ref_storage (const T &);
1006
1007 template <typename T>
1008 wide_int_ref_storage (const T &, unsigned int);
1009};
1010
1011/* Create a reference from an existing reference. */
1012template <bool SE, bool HDP>
1013inline wide_int_ref_storage <SE, HDP>::
1014wide_int_ref_storage (const wi::storage_ref &x)
1015 : storage_ref (x)
1016{}
1017
1018/* Create a reference to integer X in its natural precision. Note
1019 that the natural precision is host-dependent for primitive
1020 types. */
1021template <bool SE, bool HDP>
1022template <typename T>
1023inline wide_int_ref_storage <SE, HDP>::wide_int_ref_storage (const T &x)
1024 : storage_ref (wi::int_traits <T>::decompose (scratch,
    [22] Calling 'int_traits::decompose'
1025 wi::get_precision (x), x))
1026{
1027}
1028
1029/* Create a reference to integer X in precision PRECISION. */
1030template <bool SE, bool HDP>
1031template <typename T>
1032inline wide_int_ref_storage <SE, HDP>::
1033wide_int_ref_storage (const T &x, unsigned int precision)
1034 : storage_ref (wi::int_traits <T>::decompose (scratch, precision, x))
1035{
1036}
1037
1038namespace wi
1039{
1040 template <bool SE, bool HDP>
1041 struct int_traits <wide_int_ref_storage <SE, HDP> >
1042 {
1043 static const enum precision_type precision_type = VAR_PRECISION;
1044 static const bool host_dependent_precision = HDP;
1045 static const bool is_sign_extended = SE;
1046 };
1047}
1048
1049namespace wi
1050{
1051 unsigned int force_to_size (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1052 unsigned int, unsigned int, unsigned int,
1053 signop sgn);
1054 unsigned int from_array (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1055 unsigned int, unsigned int, bool = true);
1056}
1057
1058/* The storage used by wide_int. */
1059class GTY(()) wide_int_storage
1060{
1061private:
1062 HOST_WIDE_INT val[WIDE_INT_MAX_ELTS];
1063 unsigned int len;
1064 unsigned int precision;
1065
1066public:
1067 wide_int_storage ();
1068 template <typename T>
1069 wide_int_storage (const T &);
1070
1071 /* The standard generic_wide_int storage methods. */
1072 unsigned int get_precision () const;
1073 const HOST_WIDE_INT *get_val () const;
1074 unsigned int get_len () const;
1075 HOST_WIDE_INT *write_val ();
1076 void set_len (unsigned int, bool = false);
1077
1078 template <typename T>
1079 wide_int_storage &operator = (const T &);
1080
1081 static wide_int from (const wide_int_ref &, unsigned int, signop);
1082 static wide_int from_array (const HOST_WIDE_INT *, unsigned int,
1083 unsigned int, bool = true);
1084 static wide_int create (unsigned int);
1085
1086 /* FIXME: target-dependent, so should disappear. */
1087 wide_int bswap () const;
1088};
1089
1090namespace wi
1091{
1092 template <>
1093 struct int_traits <wide_int_storage>
1094 {
1095 static const enum precision_type precision_type = VAR_PRECISION;
1096 /* Guaranteed by a static assert in the wide_int_storage constructor. */
1097 static const bool host_dependent_precision = false;
1098 static const bool is_sign_extended = true;
1099 template <typename T1, typename T2>
1100 static wide_int get_binary_result (const T1 &, const T2 &);
1101 };
1102}
1103
1104inline wide_int_storage::wide_int_storage () {}
1105
1106/* Initialize the storage from integer X, in its natural precision.
1107 Note that we do not allow integers with host-dependent precision
1108 to become wide_ints; wide_ints must always be logically independent
1109 of the host. */
1110template <typename T>
1111inline wide_int_storage::wide_int_storage (const T &x)
1112{
1113 { STATIC_ASSERT (!wi::int_traits<T>::host_dependent_precision); }
1114 { STATIC_ASSERT (wi::int_traits<T>::precision_type != wi::CONST_PRECISION); }
1115 WIDE_INT_REF_FOR (T) xi (x);
1116 precision = xi.precision;
1117 wi::copy (*this, xi);
1118}
1119
1120template <typename T>
1121inline wide_int_storage&
1122wide_int_storage::operator = (const T &x)
1123{
1124 { STATIC_ASSERT (!wi::int_traits<T>::host_dependent_precision); }
1125 { STATIC_ASSERT (wi::int_traits<T>::precision_type != wi::CONST_PRECISION); }
1126 WIDE_INT_REF_FOR (T) xi (x);
1127 precision = xi.precision;
1128 wi::copy (*this, xi);
1129 return *this;
1130}
1131
1132inline unsigned int
1133wide_int_storage::get_precision () const
1134{
1135 return precision;
1136}
1137
1138inline const HOST_WIDE_INT *
1139wide_int_storage::get_val () const
1140{
1141 return val;
1142}
1143
1144inline unsigned int
1145wide_int_storage::get_len () const
1146{
1147 return len;
1148}
1149
1150inline HOST_WIDE_INT *
1151wide_int_storage::write_val ()
1152{
1153 return val;
1154}
1155
1156inline void
1157wide_int_storage::set_len (unsigned int l, bool is_sign_extended)
1158{
1159 len = l;
1160  if (!is_sign_extended && len * HOST_BITS_PER_WIDE_INT > precision)
1161    val[len - 1] = sext_hwi (val[len - 1],
1162			     precision % HOST_BITS_PER_WIDE_INT);
1163}
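To make the canonicalization concrete, here is a worked sketch (hypothetical values, assuming the 64-bit HOST_WIDE_INT used throughout this file) of what the sext_hwi call does to excess bits in the top block:

// Hypothetical: precision == 70, len == 2, so len * 64 > 70 and the top
// block carries only 70 % 64 == 6 significant bits.
// If val[1] == 0x2a (binary 101010), bit 5 is the sign bit, and
// sext_hwi (0x2a, 6) replicates it upwards:
//   0x2a  ->  0xffffffffffffffea   (-22 in 6 bits, sign-extended to 64)
// When the caller passes is_sign_extended == true, the fixup is skipped.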
1164
1165/* Treat X as having signedness SGN and convert it to a PRECISION-bit
1166 number. */
1167inline wide_int
1168wide_int_storage::from (const wide_int_ref &x, unsigned int precision,
1169 signop sgn)
1170{
1171 wide_int result = wide_int::create (precision);
1172 result.set_len (wi::force_to_size (result.write_val (), x.val, x.len,
1173 x.precision, precision, sgn));
1174 return result;
1175}
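A usage sketch (the precision 128 and SIGNED are illustrative, not taken from this file):

// Hypothetical caller: widen an existing value X to 128 bits, treating
// it as signed.  force_to_size fills the blocks; set_len canonicalizes.
wide_int w = wide_int::from (x, 128, SIGNED);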
1176
1177/* Create a wide_int from the explicit block encoding given by VAL and
1178 LEN. PRECISION is the precision of the integer. NEED_CANON_P is
1179 true if the encoding may have redundant trailing blocks. */
1180inline wide_int
1181wide_int_storage::from_array (const HOST_WIDE_INT *val, unsigned int len,
1182 unsigned int precision, bool need_canon_p)
1183{
1184 wide_int result = wide_int::create (precision);
1185 result.set_len (wi::from_array (result.write_val (), val, len, precision,
1186 need_canon_p));
1187 return result;
1188}
1189
1190/* Return an uninitialized wide_int with precision PRECISION. */
1191inline wide_int
1192wide_int_storage::create (unsigned int precision)
1193{
1194 wide_int x;
1195 x.precision = precision;
1196 return x;
1197}
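Note that create sets only the precision; len and val are left untouched, which is why every caller in this file pairs it with write_val and set_len before any read. A sketch of the obligatory pattern (the single-block value is hypothetical):

// Obligatory pattern after create: write the blocks, then set the
// length.  Reading the value before set_len would be undefined.
wide_int r = wide_int::create (prec);
HOST_WIDE_INT *p = r.write_val ();
p[0] = 42;	/* hypothetical block contents */
r.set_len (1);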
1198
1199template <typename T1, typename T2>
1200inline wide_int
1201wi::int_traits <wide_int_storage>::get_binary_result (const T1 &x, const T2 &y)
1202{
1203 /* This shouldn't be used for two flexible-precision inputs. */
1204  STATIC_ASSERT (wi::int_traits <T1>::precision_type != FLEXIBLE_PRECISION
1205		 || wi::int_traits <T2>::precision_type != FLEXIBLE_PRECISION);
1206 if (wi::int_traits <T1>::precision_type == FLEXIBLE_PRECISION)
1207 return wide_int::create (wi::get_precision (y));
1208 else
1209 return wide_int::create (wi::get_precision (x));
1210}
1211
1212/* The storage used by FIXED_WIDE_INT (N). */
1213template <int N>
1214class GTY(()) fixed_wide_int_storage
1215{
1216private:
1217  HOST_WIDE_INT val[(N + HOST_BITS_PER_WIDE_INT + 1) / HOST_BITS_PER_WIDE_INT];
1218 unsigned int len;
1219
1220public:
1221 fixed_wide_int_storage ();
1222 template <typename T>
1223 fixed_wide_int_storage (const T &);
1224
1225 /* The standard generic_wide_int storage methods. */
1226 unsigned int get_precision () const;
1227  const HOST_WIDE_INT *get_val () const;
1228  unsigned int get_len () const;
1229  HOST_WIDE_INT *write_val ();
1230 void set_len (unsigned int, bool = false);
1231
1232  static FIXED_WIDE_INT (N) from (const wide_int_ref &, signop);
1233  static FIXED_WIDE_INT (N) from_array (const HOST_WIDE_INT *, unsigned int,
1234					bool = true);
1235};
1236
1237namespace wi
1238{
1239 template <int N>
1240 struct int_traits < fixed_wide_int_storage <N> >
1241 {
1242 static const enum precision_type precision_type = CONST_PRECISION;
1243 static const bool host_dependent_precision = false;
1244 static const bool is_sign_extended = true;
1245 static const unsigned int precision = N;
1246 template <typename T1, typename T2>
1247    static FIXED_WIDE_INT (N) get_binary_result (const T1 &, const T2 &);
1248 };
1249}
1250
1251template <int N>
1252inline fixed_wide_int_storage <N>::fixed_wide_int_storage () {}
3
Returning without writing to 'this->len'
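Step 3 refers to this empty constructor: like wide_int_storage's default constructor above, it deliberately leaves val and len uninitialized for speed and relies on every caller to invoke set_len before any read. A sketch of the intended contract (the 128-bit width and block values are hypothetical):

// Default-construct, then fill via write_val/set_len, mirroring
// from_array below.  len holds garbage until set_len runs.
HOST_WIDE_INT blocks[2] = { 1, 0 };
FIXED_WIDE_INT (128) r;
r.set_len (wi::from_array (r.write_val (), blocks, 2, 128, true));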
1253
1254/* Initialize the storage from integer X, in precision N. */
1255template <int N>
1256template <typename T>
1257inline fixed_wide_int_storage <N>::fixed_wide_int_storage (const T &x)
1258{
1259 /* Check for type compatibility. We don't want to initialize a
1260 fixed-width integer from something like a wide_int. */
1261  WI_BINARY_RESULT (T, FIXED_WIDE_INT (N)) *assertion ATTRIBUTE_UNUSED;
1262  wi::copy (*this, WIDE_INT_REF_FOR (T) (x, N));
1264
1265template <int N>
1266inline unsigned int
1267fixed_wide_int_storage <N>::get_precision () const
1268{
1269 return N;
1270}
1271
1272template <int N>
1273inline const HOST_WIDE_INT *
1274fixed_wide_int_storage <N>::get_val () const
1275{
1276 return val;
1277}
1278
1279template <int N>
1280inline unsigned int
1281fixed_wide_int_storage <N>::get_len () const
1282{
1283 return len;
25
Undefined or garbage value returned to caller
1284}
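This is the line the bug summary points at (wide-int.h:1283, step 25): if the object was default-constructed and no set_len intervened, the len read here is uninitialized. In the reported path, get_binary_result below returns exactly such an object (see line 1334), and the caller in tree-ssa-ccp.c then reads its length before anything has written it. A condensed sketch of that flow (hypothetical driver code; the 128-bit width is illustrative):

// Condensed, hypothetical version of the analyzer's trace:
FIXED_WIDE_INT (128) r = FIXED_WIDE_INT (128) ();  // as line 1334 does
unsigned int n = r.get_len ();                     // garbage value returned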
1285
1286template <int N>
1287inline HOST_WIDE_INT *
1288fixed_wide_int_storage <N>::write_val ()
1289{
1290 return val;
1291}
1292
1293template <int N>
1294inline void
1295fixed_wide_int_storage <N>::set_len (unsigned int l, bool)
1296{
1297 len = l;
1298 /* There are no excess bits in val[len - 1]. */
1299  STATIC_ASSERT (N % HOST_BITS_PER_WIDE_INT == 0);
1300}
1301
1302/* Treat X as having signedness SGN and convert it to an N-bit number. */
1303template <int N>
1304inline FIXED_WIDE_INT (N)
1305fixed_wide_int_storage <N>::from (const wide_int_ref &x, signop sgn)
1306{
1307  FIXED_WIDE_INT (N) result;
1308 result.set_len (wi::force_to_size (result.write_val (), x.val, x.len,
1309 x.precision, N, sgn));
1310 return result;
1311}
1312
1313/* Create a FIXED_WIDE_INT (N) from the explicit block encoding given by
1314 VAL and LEN. NEED_CANON_P is true if the encoding may have redundant
1315 trailing blocks. */
1316template <int N>
1317inline FIXED_WIDE_INT (N)
1318fixed_wide_int_storage <N>::from_array (const HOST_WIDE_INT *val,
1319 unsigned int len,
1320 bool need_canon_p)
1321{
1322  FIXED_WIDE_INT (N) result;
1323 result.set_len (wi::from_array (result.write_val (), val, len,
1324 N, need_canon_p));
1325 return result;
1326}
1327
1328template <int N>
1329template <typename T1, typename T2>
1330inline FIXED_WIDE_INT (N)
1331wi::int_traits < fixed_wide_int_storage <N> >::
1332get_binary_result (const T1 &, const T2 &)
1333{
1334  return FIXED_WIDE_INT (N) ();
1335}
1336
1337/* A reference to one element of a trailing_wide_ints structure. */
1338class trailing_wide_int_storage
1339{
1340private:
1341 /* The precision of the integer, which is a fixed property of the
1342 parent trailing_wide_ints. */
1343 unsigned int m_precision;
1344
1345 /* A pointer to the length field. */
1346 unsigned char *m_len;
1347
1348 /* A pointer to the HWI array. There are enough elements to hold all
1349 values of precision M_PRECISION. */
1350  HOST_WIDE_INT *m_val;
1351
1352public:
1353  trailing_wide_int_storage (unsigned int, unsigned char *, HOST_WIDE_INT *);
1354
1355 /* The standard generic_wide_int storage methods. */
1356 unsigned int get_len () const;
1357 unsigned int get_precision () const;
1358  const HOST_WIDE_INT *get_val () const;
1359  HOST_WIDE_INT *write_val ();
1360 void set_len (unsigned int, bool = false);
1361
1362 template <typename T>
1363 trailing_wide_int_storage &operator = (const T &);
1364};
1365
1366typedef generic_wide_int <trailing_wide_int_storage> trailing_wide_int;
1367
1368/* trailing_wide_int behaves like a wide_int. */
1369namespace wi
1370{
1371 template <>
1372 struct int_traits <trailing_wide_int_storage>
1373 : public int_traits <wide_int_storage> {};
1374}
1375
1376/* An array of N wide_int-like objects that can be put at the end of
1377 a variable-sized structure. Use extra_size to calculate how many
1378 bytes beyond the sizeof need to be allocated. Use set_precision
1379 to initialize the structure. */
1380template <int N>
1381struct GTY((user)) trailing_wide_ints
1382{
1383private:
1384 /* The shared precision of each number. */
1385 unsigned short m_precision;
1386
1387 /* The shared maximum length of each number. */
1388 unsigned char m_max_len;
1389
1390 /* The current length of each number.
1391 Avoid char array so the whole structure is not a typeless storage
1392 that will, in turn, turn off TBAA on gimple, trees and RTL. */
1393 struct {unsigned char len;} m_len[N];
1394
1395 /* The variable-length part of the structure, which always contains
1396 at least one HWI. Element I starts at index I * M_MAX_LEN. */
1397  HOST_WIDE_INT m_val[1];
1398
1399public:
1400  typedef WIDE_INT_REF_FOR (trailing_wide_int_storage) const_reference;
1401
1402 void set_precision (unsigned int);
1403 unsigned int get_precision () const { return m_precision; }
1404 trailing_wide_int operator [] (unsigned int);
1405 const_reference operator [] (unsigned int) const;
1406 static size_t extra_size (unsigned int);
1407 size_t extra_size () const { return extra_size (m_precision); }
1408};
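A sketch of the allocation protocol the comment above describes (the struct name foo, the element count, the precision, and the use of xmalloc are illustrative only):

// Hypothetical container ending in a trailing_wide_ints field.
struct foo
{
  int other_data;
  trailing_wide_ints <2> ints;	/* must be the last member */
};

// sizeof (foo) already covers one HWI; extra_size adds the rest.
foo *f = (foo *) xmalloc (sizeof (foo)
			  + trailing_wide_ints <2>::extra_size (128));
f->ints.set_precision (128);	  /* must precede any element access */
f->ints[0] = wi::shwi (42, 128);  /* elements now act like wide_ints */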
1409
1410inline trailing_wide_int_storage::
1411trailing_wide_int_storage (unsigned int precision, unsigned char *len,
1412			   HOST_WIDE_INT *val)
1413 : m_precision (precision), m_len (len), m_val (val)
1414{
1415}
1416
1417inline unsigned int
1418trailing_wide_int_storage::get_len () const
1419{
1420 return *m_len;
1421}
1422
1423inline unsigned int
1424trailing_wide_int_storage::get_precision () const
1425{
1426 return m_precision;
1427}
1428
1429inline const HOST_WIDE_INT *
1430trailing_wide_int_storage::get_val () const
1431{
1432 return m_val;
1433}
1434
1435inline HOST_WIDE_INT *
1436trailing_wide_int_storage::write_val ()
1437{
1438 return m_val;
1439}
1440
1441inline void
1442trailing_wide_int_storage::set_len (unsigned int len, bool is_sign_extended)
1443{
1444 *m_len = len;
1445  if (!is_sign_extended && len * HOST_BITS_PER_WIDE_INT > m_precision)
1446    m_val[len - 1] = sext_hwi (m_val[len - 1],
1447			       m_precision % HOST_BITS_PER_WIDE_INT);
1448}
1449
1450template <typename T>
1451inline trailing_wide_int_storage &
1452trailing_wide_int_storage::operator = (const T &x)
1453{
1454  WIDE_INT_REF_FOR (T) xi (x, m_precision);
1455 wi::copy (*this, xi);
1456 return *this;
1457}
1458
1459/* Initialize the structure and record that all elements have precision
1460 PRECISION. */
1461template <int N>
1462inline void
1463trailing_wide_ints <N>::set_precision (unsigned int precision)
1464{
1465 m_precision = precision;
1466  m_max_len = ((precision + HOST_BITS_PER_WIDE_INT - 1)
1467	       / HOST_BITS_PER_WIDE_INT);
1468}
1469
1470/* Return a reference to element INDEX. */
1471template <int N>
1472inline trailing_wide_int
1473trailing_wide_ints <N>::operator [] (unsigned int index)
1474{
1475 return trailing_wide_int_storage (m_precision, &m_len[index].len,
1476 &m_val[index * m_max_len]);
1477}
1478
1479template <int N>
1480inline typename trailing_wide_ints <N>::const_reference
1481trailing_wide_ints <N>::operator [] (unsigned int index) const
1482{
1483 return wi::storage_ref (&m_val[index * m_max_len],
1484 m_len[index].len, m_precision);
1485}
1486
1487/* Return how many extra bytes need to be added to the end of the structure
1488 in order to handle N wide_ints of precision PRECISION. */
1489template <int N>
1490inline size_t
1491trailing_wide_ints <N>::extra_size (unsigned int precision)
1492{
1493  unsigned int max_len = ((precision + HOST_BITS_PER_WIDE_INT - 1)
1494			   / HOST_BITS_PER_WIDE_INT);
1495  return (N * max_len - 1) * sizeof (HOST_WIDE_INT);
1496}
1497
1498/* This macro is used in structures that end with a trailing_wide_ints field
1499 called FIELD. It declares get_NAME() and set_NAME() methods to access
1500 element I of FIELD. */
1501#define TRAILING_WIDE_INT_ACCESSOR(NAME, FIELD, I) \
1502  trailing_wide_int get_##NAME () { return FIELD[I]; } \
1503  template <typename T> void set_##NAME (const T &x) { FIELD[I] = x; }
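A sketch of the macro in use (the struct and accessor names are hypothetical):

// Hypothetical structure using the accessor macro.
struct bar
{
  trailing_wide_ints <2> bounds;
  TRAILING_WIDE_INT_ACCESSOR (min, bounds, 0)
  TRAILING_WIDE_INT_ACCESSOR (max, bounds, 1)
};
// With bar *b in scope: b->set_min (wi::zero (prec));
// trailing_wide_int lo = b->get_min ();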
1504
1505namespace wi
1506{
1507 /* Implementation of int_traits for primitive integer types like "int". */
1508 template <typename T, bool signed_p>
1509 struct primitive_int_traits
1510 {
1511 static const enum precision_type precision_type = FLEXIBLE_PRECISION;
1512 static const bool host_dependent_precision = true;
1513 static const bool is_sign_extended = true;
1514 static unsigned int get_precision (T);
1515    static wi::storage_ref decompose (HOST_WIDE_INT *, unsigned int, T);
1516 };
1517}
1518
1519template <typename T, bool signed_p>
1520inline unsigned int
1521wi::primitive_int_traits <T, signed_p>::get_precision (T)
1522{
1523  return sizeof (T) * CHAR_BIT;
1524}
1525
1526template <typename T, bool signed_p>
1527inline wi::storage_ref
1528wi::primitive_int_traits <T, signed_p>::decompose (HOST_WIDE_INT *scratch,
1529 unsigned int precision, T x)
1530{
1531 scratch[0] = x;
1532  if (signed_p || scratch[0] >= 0 || precision <= HOST_BITS_PER_WIDE_INT)
1533 return wi::storage_ref (scratch, 1, precision);
1534 scratch[1] = 0;
1535 return wi::storage_ref (scratch, 2, precision);
1536}
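A worked sketch of why the second block is needed (the values are hypothetical; a 64-bit HOST_WIDE_INT is assumed):

// unsigned long x = 1ul << 63 at precision 128:
// scratch[0] = x copies the bits, but as a signed HWI scratch[0] < 0,
// so the sign-extended reading of a one-block encoding would be a
// negative value.  Appending scratch[1] = 0 yields the two-block
// encoding { 0x8000000000000000, 0 }, which correctly denotes 2^63.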
1537
1538/* Allow primitive C types to be used in wi:: routines. */
1539namespace wi
1540{
1541 template <>
1542 struct int_traits <unsigned char>
1543 : public primitive_int_traits <unsigned char, false> {};
1544
1545 template <>
1546 struct int_traits <unsigned short>
1547 : public primitive_int_traits <unsigned short, false> {};
1548
1549 template <>
1550 struct int_traits <int>
1551 : public primitive_int_traits <int, true> {};
1552
1553 template <>
1554 struct int_traits <unsigned int>
1555 : public primitive_int_traits <unsigned int, false> {};
1556
1557 template <>
1558 struct int_traits <long>
1559 : public primitive_int_traits <long, true> {};
1560
1561 template <>
1562 struct int_traits <unsigned long>
1563 : public primitive_int_traits <unsigned long, false> {};
1564
1565#if defined HAVE_LONG_LONG
1566 template <>
1567 struct int_traits <long long>
1568 : public primitive_int_traits <long long, true> {};
1569
1570 template <>
1571 struct int_traits <unsigned long long>
1572 : public primitive_int_traits <unsigned long long, false> {};
1573#endif
1574}
1575
1576namespace wi
1577{
1578 /* Stores HWI-sized integer VAL, treating it as having signedness SGN
1579 and precision PRECISION. */
1580 class hwi_with_prec
1581 {
1582 public:
1583 hwi_with_prec () {}
1584    hwi_with_prec (HOST_WIDE_INT, unsigned int, signop);
1585    HOST_WIDE_INT val;
1586 unsigned int precision;
1587 signop sgn;
1588 };
1589
1590  hwi_with_prec shwi (HOST_WIDE_INT, unsigned int);
1591  hwi_with_prec uhwi (unsigned HOST_WIDE_INT, unsigned int);
1592
1593 hwi_with_prec minus_one (unsigned int);
1594 hwi_with_prec zero (unsigned int);
1595 hwi_with_prec one (unsigned int);
1596 hwi_with_prec two (unsigned int);
1597}
1598
1599inline wi::hwi_with_prec::hwi_with_prec (HOST_WIDE_INT v, unsigned int p,
1600 signop s)
1601 : precision (p), sgn (s)
1602{
1603  if (precision < HOST_BITS_PER_WIDE_INT)
1604 val = sext_hwi (v, precision);
1605 else
1606 val = v;
1607}
1608
1609/* Return a signed integer that has value VAL and precision PRECISION. */
1610inline wi::hwi_with_prec
1611wi::shwi (HOST_WIDE_INT val, unsigned int precision)
1612{
1613 return hwi_with_prec (val, precision, SIGNED);
1614}
1615
1616/* Return an unsigned integer that has value VAL and precision PRECISION. */
1617inline wi::hwi_with_prec
1618wi::uhwi (unsigned HOST_WIDE_INT val, unsigned int precision)
1619{
1620 return hwi_with_prec (val, precision, UNSIGNED);
1621}
1622
1623/* Return a wide int of -1 with precision PRECISION. */
1624inline wi::hwi_with_prec
1625wi::minus_one (unsigned int precision)
1626{
1627 return wi::shwi (-1, precision);
1628}
1629
1630/* Return a wide int of 0 with precision PRECISION. */
1631inline wi::hwi_with_prec
1632wi::zero (unsigned int precision)
1633{
1634 return wi::shwi (0, precision);
1635}
1636
1637/* Return a wide int of 1 with precision PRECISION. */
1638inline wi::hwi_with_prec
1639wi::one (unsigned int precision)
1640{
1641 return wi::shwi (1, precision);
1642}
1643
1644/* Return a wide int of 2 with precision PRECISION. */
1645inline wi::hwi_with_prec
1646wi::two (unsigned int precision)
1647{
1648 return wi::shwi (2, precision);
1649}
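Usage is straightforward (a sketch; the precision 32 is illustrative):

// Build small constants at a given precision; the hwi_with_prec
// results convert to wide_int through the constructor above.
unsigned int prec = 32;
wide_int z = wi::zero (prec);	    /* value 0 at 32 bits */
wide_int m = wi::minus_one (prec);  /* value -1, i.e. 32 one bits */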
1650
1651namespace wi
1652{
1653  /* ints_for<T>::zero (X) returns a zero that, when assigned to a T,
1654 gives that T the same precision as X. */
1655 template<typename T, precision_type = int_traits<T>::precision_type>
1656 struct ints_for
1657 {
1658 static int zero (const T &) { return 0; }
1659 };
1660
1661 template<typename T>
1662 struct ints_for<T, VAR_PRECISION>
1663 {
1664 static hwi_with_prec zero (const T &);
1665 };
1666}
1667
1668template<typename T>
1669inline wi::hwi_with_prec
1670wi::ints_for<T, wi::VAR_PRECISION>::zero (const T &x)
1671{
1672 return wi::zero (wi::get_precision (x));
1673}
1674
1675namespace wi
1676{
1677 template <>
1678 struct int_traits <wi::hwi_with_prec>
1679 {
1680 static const enum precision_type precision_type = VAR_PRECISION;
1681 /* hwi_with_prec has an explicitly-given precision, rather than the
1682 precision of HOST_WIDE_INT. */
1683 static const bool host_dependent_precision = false;
1684 static const bool is_sign_extended = true;
1685 static unsigned int get_precision (const wi::hwi_with_prec &);
1686    static wi::storage_ref decompose (HOST_WIDE_INT *, unsigned int,
1687 const wi::hwi_with_prec &);
1688 };
1689}
1690
1691inline unsigned int
1692wi::int_traits <wi::hwi_with_prec>::get_precision (const wi::hwi_with_prec &x)
1693{
1694 return x.precision;
1695}
1696
1697inline wi::storage_ref
1698wi::int_traits <wi::hwi_with_prec>::
1699decompose (HOST_WIDE_INT *scratch, unsigned int precision,
1700 const wi::hwi_with_prec &x)
1701{
1702  gcc_checking_assert (precision == x.precision);
1703 scratch[0] = x.val;
1704  if (x.sgn == SIGNED || x.val >= 0 || precision <= HOST_BITS_PER_WIDE_INT)
1705 return wi::storage_ref (scratch, 1, precision);
1706 scratch[1] = 0;
1707 return wi::storage_ref (scratch, 2, precision);
1708}
1709
1710/* Private functions for handling large cases out of line. They take
1711 individual length and array parameters because that is cheaper for
1712 the inline caller than constructing an object on the stack and
1713 passing a reference to it. (Although many callers use wide_int_refs,
1714 we generally want those to be removed by SRA.) */
1715namespace wi
1716{
1717  bool eq_p_large (const HOST_WIDE_INT *, unsigned int,
1718		   const HOST_WIDE_INT *, unsigned int, unsigned int);
1719  bool lts_p_large (const HOST_WIDE_INT *, unsigned int, unsigned int,
1720		    const HOST_WIDE_INT *, unsigned int);
1721  bool ltu_p_large (const HOST_WIDE_INT *, unsigned int, unsigned int,
1722		    const HOST_WIDE_INT *, unsigned int);
1723  int cmps_large (const HOST_WIDE_INT *, unsigned int, unsigned int,
1724		  const HOST_WIDE_INT *, unsigned int);
1725  int cmpu_large (const HOST_WIDE_INT *, unsigned int, unsigned int,
1726		  const HOST_WIDE_INT *, unsigned int);
1727  unsigned int sext_large (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1728			   unsigned int,
1729			   unsigned int, unsigned int);
1730  unsigned int zext_large (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1731			   unsigned int,
1732			   unsigned int, unsigned int);
1733  unsigned int set_bit_large (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1734			      unsigned int, unsigned int, unsigned int);
1735  unsigned int lshift_large (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1736			     unsigned int, unsigned int, unsigned int);
1737  unsigned int lrshift_large (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1738			      unsigned int, unsigned int, unsigned int,
1739			      unsigned int);
1740  unsigned int arshift_large (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1741			      unsigned int, unsigned int, unsigned int,
1742			      unsigned int);
1743  unsigned int and_large (HOST_WIDE_INT *, const HOST_WIDE_INT *, unsigned int,
1744			  const HOST_WIDE_INT *, unsigned int, unsigned int);
1745  unsigned int and_not_large (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1746			      unsigned int, const HOST_WIDE_INT *,
1747			      unsigned int, unsigned int);
1748  unsigned int or_large (HOST_WIDE_INT *, const HOST_WIDE_INT *, unsigned int,
1749			 const HOST_WIDE_INT *, unsigned int, unsigned int);
1750  unsigned int or_not_large (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1751			     unsigned int, const HOST_WIDE_INT *,
1752			     unsigned int, unsigned int);
1753  unsigned int xor_large (HOST_WIDE_INT *, const HOST_WIDE_INT *, unsigned int,
1754			  const HOST_WIDE_INT *, unsigned int, unsigned int);
1755  unsigned int add_large (HOST_WIDE_INT *, const HOST_WIDE_INT *, unsigned int,
1756			  const HOST_WIDE_INT *, unsigned int, unsigned int,
1757			  signop, overflow_type *);
1758  unsigned int sub_large (HOST_WIDE_INT *, const HOST_WIDE_INT *, unsigned int,
1759			  const HOST_WIDE_INT *, unsigned int, unsigned int,
1760			  signop, overflow_type *);
1761  unsigned int mul_internal (HOST_WIDE_INT *, const HOST_WIDE_INT *,
1762			     unsigned int, const HOST_WIDE_INT *,
1763			     unsigned int, unsigned int, signop,
1764			     overflow_type *, bool);
1765  unsigned int divmod_internal (HOST_WIDE_INT *, unsigned int *,
1766				HOST_WIDE_INT *, const HOST_WIDE_INT *,
1767				unsigned int, unsigned int,
1768				const HOST_WIDE_INT *,
1769				unsigned int, unsigned int,
1770				signop, overflow_type *);
1771}
1772
1773/* Return the number of bits that integer X can hold. */
1774template <typename T>
1775inline unsigned int
1776wi::get_precision (const T &x)
1777{
1778 return wi::int_traits <T>::get_precision (x);
1779}
1780
1781/* Return the number of bits that the result of a binary operation can
1782 hold when the input operands are X and Y. */
1783template <typename T1, typename T2>
1784inline unsigned int
1785wi::get_binary_precision (const T1 &x, const T2 &y)
1786{
1787  return get_precision (wi::int_traits <WI_BINARY_RESULT (T1, T2)>::
1788			get_binary_result (x, y));
1789}
1790
1791/* Copy the contents of Y to X, but keeping X's current precision. */
1792template <typename T1, typename T2>
1793inline void
1794wi::copy (T1 &x, const T2 &y)
1795{
1796  HOST_WIDE_INT *xval = x.write_val ();
1797  const HOST_WIDE_INT *yval = y.get_val ();
1798 unsigned int len = y.get_len ();
1799 unsigned int i = 0;
1800 do
1801 xval[i] = yval[i];
1802 while (++i < len);
1803 x.set_len (len, y.is_sign_extended);
1804}
1805
1806/* Return true if X fits in a HOST_WIDE_INT with no loss of precision. */
1807template <typename T>
1808inline bool
1809wi::fits_shwi_p (const T &x)
1810{
1811  WIDE_INT_REF_FOR (T) xi (x);
1812 return xi.len == 1;
1813}
1814
1815/* Return true if X fits in an unsigned HOST_WIDE_INT with no loss of
1816 precision. */
1817template <typename T>
1818inline bool
1819wi::fits_uhwi_p (const T &x)
1820{
1821  WIDE_INT_REF_FOR (T) xi (x);
1822  if (xi.precision <= HOST_BITS_PER_WIDE_INT)
1823 return true;
1824 if (xi.len == 1)
1825 return xi.slow () >= 0;
1826 return xi.len == 2 && xi.uhigh () == 0;
1827}
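Concretely, the three cases are (a sketch; 64-bit blocks assumed, values hypothetical):

// len == 1, top bit clear: e.g. 5 at precision 128 -> fits.
// len == 1, top bit set: sign extension makes the value negative
//   (e.g. -1), which no unsigned HWI can represent -> does not fit.
// len == 2, uhigh () == 0: e.g. 2^63 encoded as { 1 << 63, 0 } -> fits.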
1828
1829/* Return true if X is negative based on the interpretation of SGN.
1830 For UNSIGNED, this is always false. */
1831template <typename T>
1832inline bool
1833wi::neg_p (const T &x, signop sgn)
1834{
1835  WIDE_INT_REF_FOR (T) xi (x);
1836 if (sgn == UNSIGNED)
1837 return false;
1838 return xi.sign_mask () < 0;
1839}
1840
1841/* Return -1 if the top bit of X is set and 0 if the top bit is clear. */
1842template <typename T>
1843inline HOST_WIDE_INT
1844wi::sign_mask (const T &x)
1845{
1846  WIDE_INT_REF_FOR (T) xi (x);
1847 return xi.sign_mask ();
1848}
1849
1850/* Return true if X == Y. X and Y must be binary-compatible. */
1851template <typename T1, typename T2>
1852inline bool
1853wi::eq_p (const T1 &x, const T2 &y)
1854{
1855 unsigned int precision = get_binary_precision (x, y);
1856  WIDE_INT_REF_FOR (T1) xi (x, precision);
1857  WIDE_INT_REF_FOR (T2) yi (y, precision);
1858 if (xi.is_sign_extended && yi.is_sign_extended)
1859 {
1860 /* This case reduces to array equality. */
1861 if (xi.len != yi.len)
1862 return false;
1863 unsigned int i = 0;
1864 do
1865 if (xi.val[i] != yi.val[i])
1866 return false;
1867 while (++i != xi.len);
1868 return true;
1869 }
1870 if (__builtin_expect (yi.len == 1, true))
1871 {
1872 /* XI is only equal to YI if it too has a single HWI. */
1873 if (xi.len != 1)
1874 return false;
1875 /* Excess bits in xi.val[0] will be signs or zeros, so comparisons
1876 with 0 are simple. */
1877      if (STATIC_CONSTANT_P (yi.val[0] == 0))
1878 return xi.val[0] == 0;
1879 /* Otherwise flush out any excess bits first. */
1880      unsigned HOST_WIDE_INT diff = xi.val[0] ^ yi.val[0];
1881      int excess = HOST_BITS_PER_WIDE_INT - precision;
1882 if (excess > 0)
1883 diff <<= excess;
1884 return diff == 0;
1885 }
1886 return eq_p_large (xi.val, xi.len, yi.val, yi.len, precision);
1887}
1888
1889/* Return true if X != Y. X and Y must be binary-compatible. */
1890template <typename T1, typename T2>
1891inline bool
1892wi::ne_p (const T1 &x, const T2 &y)
1893{
1894 return !eq_p (x, y);
1895}
1896
1897/* Return true if X < Y when both are treated as signed values. */
1898template <typename T1, typename T2>
1899inline bool
1900wi::lts_p (const T1 &x, const T2 &y)
1901{
1902 unsigned int precision = get_binary_precision (x, y);
1903  WIDE_INT_REF_FOR (T1) xi (x, precision);
1904  WIDE_INT_REF_FOR (T2) yi (y, precision);
1905 /* We optimize x < y, where y is 64 or fewer bits. */
1906 if (wi::fits_shwi_p (yi))
1907 {
1908 /* Make lts_p (x, 0) as efficient as wi::neg_p (x). */
1909      if (STATIC_CONSTANT_P (yi.val[0] == 0))
1910 return neg_p (xi);
1911 /* If x fits directly into a shwi, we can compare directly. */
1912 if (wi::fits_shwi_p (xi))
1913 return xi.to_shwi () < yi.to_shwi ();
1914 /* If x doesn't fit and is negative, then it must be more
1915 negative than any value in y, and hence smaller than y. */
1916 if (neg_p (xi))
1917 return true;
1918 /* If x is positive, then it must be larger than any value in y,
1919 and hence greater than y. */
1920 return false;
1921 }
1922 /* Optimize the opposite case, if it can be detected at compile time. */
1923  if (STATIC_CONSTANT_P (xi.len == 1))
1924 /* If YI is negative it is lower than the least HWI.
1925 If YI is positive it is greater than the greatest HWI. */
1926 return !neg_p (yi);
1927 return lts_p_large (xi.val, xi.len, precision, yi.val, yi.len);
1928}
1929
1930/* Return true if X < Y when both are treated as unsigned values. */
1931template <typename T1, typename T2>
1932inline bool
1933wi::ltu_p (const T1 &x, const T2 &y)
1934{
1935 unsigned int precision = get_binary_precision (x, y);
1936  WIDE_INT_REF_FOR (T1) xi (x, precision);
1937  WIDE_INT_REF_FOR (T2) yi (y, precision);
1938 /* Optimize comparisons with constants. */
1939  if (STATIC_CONSTANT_P (yi.len == 1 && yi.val[0] >= 0))
1940    return xi.len == 1 && xi.to_uhwi () < (unsigned HOST_WIDE_INT) yi.val[0];
1941  if (STATIC_CONSTANT_P (xi.len == 1 && xi.val[0] >= 0))
1942    return yi.len != 1 || yi.to_uhwi () > (unsigned HOST_WIDE_INT) xi.val[0];
1943 /* Optimize the case of two HWIs. The HWIs are implicitly sign-extended
1944     for precisions greater than HOST_BITS_PER_WIDE_INT, but sign-extending both
1945 values does not change the result. */
1946 if (__builtin_expect (xi.len + yi.len == 2, true))
1947 {
1948      unsigned HOST_WIDE_INT xl = xi.to_uhwi ();
1949      unsigned HOST_WIDE_INT yl = yi.to_uhwi ();
1950 return xl < yl;
1951 }
1952 return ltu_p_large (xi.val, xi.len, precision, yi.val, yi.len);
1953}
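The signed and unsigned orderings can disagree on the same bit pattern; a sketch (values hypothetical, precision 8 for brevity):

// At precision 8, the pattern 0xff is -1 signed but 255 unsigned:
wide_int a = wi::shwi (-1, 8);	/* bits 11111111 */
wide_int b = wi::shwi (1, 8);	/* bits 00000001 */
// wi::lts_p (a, b) == true    (-1 <  1 as signed)
// wi::ltu_p (a, b) == false   (255 > 1 as unsigned)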
1954
1955/* Return true if X < Y. Signedness of X and Y is indicated by SGN. */
1956template <typename T1, typename T2>
1957inline bool
1958wi::lt_p (const T1 &x, const T2 &y, signop sgn)
1959{
1960 if (sgn == SIGNED)
1961 return lts_p (x, y);
1962 else
1963 return ltu_p (x, y);
1964}
1965
1966/* Return true if X <= Y when both are treated as signed values. */
1967template <typename T1, typename T2>
1968inline bool
1969wi::les_p (const T1 &x, const T2 &y)
1970{
1971 return !lts_p (y, x);
1972}
1973
1974/* Return true if X <= Y when both are treated as unsigned values. */
1975template <typename T1, typename T2>
1976inline bool
1977wi::leu_p (const T1 &x, const T2 &y)
1978{
1979 return !ltu_p (y, x);
1980}
1981
1982/* Return true if X <= Y. Signedness of X and Y is indicated by SGN. */
1983template <typename T1, typename T2>
1984inline bool
1985wi::le_p (const T1 &x, const T2 &y, signop sgn)
1986{
1987 if (sgn == SIGNED)
1988 return les_p (x, y);
1989 else
1990 return leu_p (x, y);
1991}
1992
1993/* Return true if X > Y when both are treated as signed values. */
1994template <typename T1, typename T2>
1995inline bool
1996wi::gts_p (const T1 &x, const T2 &y)
1997{
1998 return lts_p (y, x);
1999}
2000
2001/* Return true if X > Y when both are treated as unsigned values. */
2002template <typename T1, typename T2>
2003inline bool
2004wi::gtu_p (const T1 &x, const T2 &y)
2005{
2006 return ltu_p (y, x);
2007}
2008
2009/* Return true if X > Y. Signedness of X and Y is indicated by SGN. */
2010template <typename T1, typename T2>
2011inline bool
2012wi::gt_p (const T1 &x, const T2 &y, signop sgn)
2013{
2014 if (sgn == SIGNED)
2015 return gts_p (x, y);
2016 else
2017 return gtu_p (x, y);
2018}
2019
2020/* Return true if X >= Y when both are treated as signed values. */
2021template <typename T1, typename T2>
2022inline bool
2023wi::ges_p (const T1 &x, const T2 &y)
2024{
2025 return !lts_p (x, y);
2026}
2027
2028/* Return true if X >= Y when both are treated as unsigned values. */
2029template <typename T1, typename T2>
2030inline bool
2031wi::geu_p (const T1 &x, const T2 &y)
2032{
2033 return !ltu_p (x, y);
2034}
2035
2036/* Return true if X >= Y. Signedness of X and Y is indicated by SGN. */
2037template <typename T1, typename T2>
2038inline bool
2039wi::ge_p (const T1 &x, const T2 &y, signop sgn)
2040{
2041 if (sgn == SIGNED)
2042 return ges_p (x, y);
2043 else
2044 return geu_p (x, y);
2045}
2046
2047/* Return -1 if X < Y, 0 if X == Y and 1 if X > Y. Treat both X and Y
2048 as signed values. */
2049template <typename T1, typename T2>
2050inline int
2051wi::cmps (const T1 &x, const T2 &y)
2052{
2053 unsigned int precision = get_binary_precision (x, y);