Bug Summary

File: build/gcc/wide-int.h
Warning: line 1160, column 57
The right operand of '>' is a garbage value (i.e., the analyzer believes the value read on the right-hand side of the comparison may be uninitialized)

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name tree-ssa-sccvn.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-bVsJER.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c

1/* SCC value numbering for trees
2 Copyright (C) 2006-2021 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "splay-tree.h"
25#include "backend.h"
26#include "rtl.h"
27#include "tree.h"
28#include "gimple.h"
29#include "ssa.h"
30#include "expmed.h"
31#include "insn-config.h"
32#include "memmodel.h"
33#include "emit-rtl.h"
34#include "cgraph.h"
35#include "gimple-pretty-print.h"
36#include "alias.h"
37#include "fold-const.h"
38#include "stor-layout.h"
39#include "cfganal.h"
40#include "tree-inline.h"
41#include "internal-fn.h"
42#include "gimple-fold.h"
43#include "tree-eh.h"
44#include "gimplify.h"
45#include "flags.h"
46#include "dojump.h"
47#include "explow.h"
48#include "calls.h"
49#include "varasm.h"
50#include "stmt.h"
51#include "expr.h"
52#include "tree-dfa.h"
53#include "tree-ssa.h"
54#include "dumpfile.h"
55#include "cfgloop.h"
56#include "tree-ssa-propagate.h"
57#include "tree-cfg.h"
58#include "domwalk.h"
59#include "gimple-iterator.h"
60#include "gimple-match.h"
61#include "stringpool.h"
62#include "attribs.h"
63#include "tree-pass.h"
64#include "statistics.h"
65#include "langhooks.h"
66#include "ipa-utils.h"
67#include "dbgcnt.h"
68#include "tree-cfgcleanup.h"
69#include "tree-ssa-loop.h"
70#include "tree-scalar-evolution.h"
71#include "tree-ssa-loop-niter.h"
72#include "builtins.h"
73#include "tree-ssa-sccvn.h"
74
75/* This algorithm is based on the SCC algorithm presented by Keith
76 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
77 (http://citeseer.ist.psu.edu/41805.html). In
78 straight line code, it is equivalent to a regular hash based value
79 numbering that is performed in reverse postorder.
80
81 For code with cycles, there are two alternatives, both of which
82 require keeping the hashtables separate from the actual list of
83 value numbers for SSA names.
84
85 1. Iterate value numbering in an RPO walk of the blocks, removing
86 all the entries from the hashtable after each iteration (but
87 keeping the SSA name->value number mapping between iterations).
88 Iterate until it does not change.
89
90 2. Perform value numbering as part of an SCC walk on the SSA graph,
91 iterating only the cycles in the SSA graph until they do not change
92 (using a separate, optimistic hashtable for value numbering the SCC
93 operands).
94
95 The second is not just faster in practice (because most SSA graph
96 cycles do not involve all the variables in the graph), it also has
97 some nice properties.
98
99 One of these nice properties is that when we pop an SCC off the
100 stack, we are guaranteed to have processed all the operands coming from
101 *outside of that SCC*, so we do not need to do anything special to
102 ensure they have value numbers.
103
104 Another nice property is that the SCC walk is done as part of a DFS
105 of the SSA graph, which makes it easy to perform combining and
106 simplifying operations at the same time.
107
108 The code below is deliberately written in a way that makes it easy
109 to separate the SCC walk from the other work it does.
110
111 In order to propagate constants through the code, we track which
112 expressions contain constants, and use those while folding. In
113 theory, we could also track expressions whose value numbers are
114 replaced, in case we end up folding based on expression
115 identities.
116
117 In order to value number memory, we assign value numbers to vuses.
118 This enables us to note that, for example, stores to the same
119 address of the same value from the same starting memory states are
120 equivalent.
121 TODO:
122
123 1. We can iterate only the changing portions of the SCC's, but
124 I have not seen an SCC big enough for this to be a win.
125 2. If you differentiate between phi nodes for loops and phi nodes
126 for if-then-else, you can properly consider phi nodes in different
127 blocks for equivalence.
128 3. We could value number vuses in more cases, particularly, whole
129 structure copies.
130*/
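
[For orientation, a minimal sketch of alternative 1 above: iterate value numbering in an RPO walk until nothing changes, flushing the expression tables between iterations while keeping the SSA name -> value number map. This is illustrative only; vn_visit_stmt and clear_expression_tables are hypothetical stand-ins for the pass's real workers, while the CFG helpers are the real GCC APIs.]

extern bool vn_visit_stmt (gimple *);		/* hypothetical */
extern void clear_expression_tables (void);	/* hypothetical */

static void
rpo_vn_iterate_sketch (function *fn)
{
  /* Compute a reverse postorder over the blocks of FN.  */
  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn));
  int n = pre_and_rev_post_order_compute_fn (fn, NULL, rpo, false);
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (int i = 0; i < n; i++)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	       !gsi_end_p (gsi); gsi_next (&gsi))
	    /* Record whether any SSA name changed its value number.  */
	    changed |= vn_visit_stmt (gsi_stmt (gsi));
	}
      /* Per alternative 1: drop the hashtable entries but keep the
	 SSA name -> value number mapping between iterations.  */
      clear_expression_tables ();
    }
  free (rpo);
}
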
131
132/* There's no BB_EXECUTABLE but we can use BB_VISITED. */
133#define BB_EXECUTABLE BB_VISITED
134
135static vn_lookup_kind default_vn_walk_kind;
136
137/* vn_nary_op hashtable helpers. */
138
139struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140{
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
144};
145
146/* Return the computed hashcode for nary operation P1. */
147
148inline hashval_t
149vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150{
151 return vno1->hashcode;
152}
153
154/* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
156
157inline bool
158vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159{
160 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
161}
162
163typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
165
166
167/* vn_phi hashtable helpers. */
168
169static int
170vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171
172struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
173{
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176};
177
178/* Return the computed hashcode for phi operation P1. */
179
180inline hashval_t
181vn_phi_hasher::hash (const vn_phi_s *vp1)
182{
183 return vp1->hashcode;
184}
185
186/* Compare two phi entries for equality, ignoring VN_TOP arguments. */
187
188inline bool
189vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
190{
191 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
192}
193
194typedef hash_table<vn_phi_hasher> vn_phi_table_type;
195typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196
197
198/* Compare two reference operands P1 and P2 for equality. Return true if
199 they are equal, and false otherwise. */
200
201static int
202vn_reference_op_eq (const void *p1, const void *p2)
203{
204 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
205 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206
207 return (vro1->opcode == vro2->opcode
208 /* We do not care for differences in type qualification. */
209 && (vro1->type == vro2->type
210 || (vro1->type && vro2->type
211 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
212 TYPE_MAIN_VARIANT (vro2->type))))
213 && expressions_equal_p (vro1->op0, vro2->op0)
214 && expressions_equal_p (vro1->op1, vro2->op1)
215 && expressions_equal_p (vro1->op2, vro2->op2));
216}
217
218/* Free a reference operation structure VP. */
219
220static inline void
221free_reference (vn_reference_s *vr)
222{
223 vr->operands.release ();
224}
225
226
227/* vn_reference hashtable helpers. */
228
229struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
230{
231 static inline hashval_t hash (const vn_reference_s *);
232 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
233};
234
235/* Return the hashcode for a given reference operation P1. */
236
237inline hashval_t
238vn_reference_hasher::hash (const vn_reference_s *vr1)
239{
240 return vr1->hashcode;
241}
242
243inline bool
244vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245{
246 return v == c || vn_reference_eq (v, c);
247}
248
249typedef hash_table<vn_reference_hasher> vn_reference_table_type;
250typedef vn_reference_table_type::iterator vn_reference_iterator_type;
251
252
253/* The set of VN hashtables. */
254
255typedef struct vn_tables_s
256{
257 vn_nary_op_table_type *nary;
258 vn_phi_table_type *phis;
259 vn_reference_table_type *references;
260} *vn_tables_t;
261
262
263/* vn_constant hashtable helpers. */
264
265struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
266{
267 static inline hashval_t hash (const vn_constant_s *);
268 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
269};
270
271/* Hash table hash function for vn_constant_t. */
272
273inline hashval_t
274vn_constant_hasher::hash (const vn_constant_s *vc1)
275{
276 return vc1->hashcode;
277}
278
279/* Hash table equality function for vn_constant_t. */
280
281inline bool
282vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
283{
284 if (vc1->hashcode != vc2->hashcode)
285 return false;
286
287 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
288}
289
290static hash_table<vn_constant_hasher> *constant_to_value_id;
291
292
293/* Obstack we allocate the vn-tables elements from. */
294static obstack vn_tables_obstack;
295/* Special obstack we never unwind. */
296static obstack vn_tables_insert_obstack;
297
298static vn_reference_t last_inserted_ref;
299static vn_phi_t last_inserted_phi;
300static vn_nary_op_t last_inserted_nary;
301
302/* Valid hashtables storing information we have proven to be
303 correct. */
304static vn_tables_t valid_info;
305
306
307/* Valueization hook for simplify_replace_tree. Valueize NAME if it is
308 an SSA name, otherwise just return it. */
309tree (*vn_valueize) (tree);
310static tree
311vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
312{
313 basic_block saved_vn_context_bb = vn_context_bb;
314 /* Look for sth available at the definition block of the argument.
315 This avoids inconsistencies between availability there which
316 decides if the stmt can be removed and availability at the
317 use site. The SSA property ensures that things available
318 at the definition are also available at uses. */
319 if (!SSA_NAME_IS_DEFAULT_DEF (t))
320 vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
321 tree res = vn_valueize (t);
322 vn_context_bb = saved_vn_context_bb;
323 return res;
324}
325
326
327/* This represents the top of the VN lattice, which is the universal
328 value. */
329
330tree VN_TOP;
331
332/* Unique counter for our value ids. */
333
334static unsigned int next_value_id;
335static int next_constant_value_id;
336
337
338/* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
339 are allocated on an obstack for locality reasons, and to free them
340 without looping over the vec. */
341
342struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
343{
344 typedef vn_ssa_aux_t value_type;
345 typedef tree compare_type;
346 static inline hashval_t hash (const value_type &);
347 static inline bool equal (const value_type &, const compare_type &);
348 static inline void mark_deleted (value_type &) {}
349 static const bool empty_zero_p = true;
350 static inline void mark_empty (value_type &e) { e = NULL; }
351 static inline bool is_deleted (value_type &) { return false; }
352 static inline bool is_empty (value_type &e) { return e == NULL; }
353};
354
355hashval_t
356vn_ssa_aux_hasher::hash (const value_type &entry)
357{
358 return SSA_NAME_VERSION (entry->name);
359}
360
361bool
362vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
363{
364 return name == entry->name;
365}
366
367static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
368typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
369static struct obstack vn_ssa_aux_obstack;
370
371static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
372static unsigned int vn_nary_length_from_stmt (gimple *);
373static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
374static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
375 vn_nary_op_table_type *, bool);
376static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
377static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
378 enum tree_code, tree, tree *);
379static tree vn_lookup_simplify_result (gimple_match_op *);
380static vn_reference_t vn_reference_lookup_or_insert_for_pieces
381 (tree, alias_set_type, alias_set_type, tree,
382 vec<vn_reference_op_s, va_heap>, tree);
383
384/* Return whether there is value numbering information for a given SSA name. */
385
386bool
387has_VN_INFO (tree name)
388{
389 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
390}
391
392vn_ssa_aux_t
393VN_INFO (tree name)
394{
395 vn_ssa_aux_t *res
396 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
397 INSERT);
398 if (*res != NULL)
399 return *res;
400
401 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux)
402 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
403 newinfo->name = name;
404 newinfo->valnum = VN_TOP;
405 /* We are using the visited flag to handle uses with defs not within the
406 region being value-numbered. */
407 newinfo->visited = false;
408
409 /* Given we create the VN_INFOs on-demand now we have to do initialization
410 different than VN_TOP here. */
411 if (SSA_NAME_IS_DEFAULT_DEF (name))
412 switch (TREE_CODE (SSA_NAME_VAR (name)))
413 {
414 case VAR_DECL:
415 /* All undefined vars are VARYING. */
416 newinfo->valnum = name;
417 newinfo->visited = true;
418 break;
419
420 case PARM_DECL:
421 /* Parameters are VARYING but we can record a condition
422 if we know it is a non-NULL pointer. */
423 newinfo->visited = true;
424 newinfo->valnum = name;
425 if (POINTER_TYPE_P (TREE_TYPE (name))
426 && nonnull_arg_p (SSA_NAME_VAR (name)))
427 {
428 tree ops[2];
429 ops[0] = name;
430 ops[1] = build_int_cst (TREE_TYPE (name), 0);
431 vn_nary_op_t nary;
432 /* Allocate from non-unwinding stack. */
433 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
434 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
435 boolean_type_node, ops);
436 nary->predicated_values = 0;
437 nary->u.result = boolean_true_node;
438 vn_nary_op_insert_into (nary, valid_info->nary, true);
439 gcc_assert (nary->unwind_to == NULL);
440 /* Also do not link it into the undo chain. */
441 last_inserted_nary = nary->next;
442 nary->next = (vn_nary_op_t)(void *)-1;
443 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
444 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
445 boolean_type_node, ops);
446 nary->predicated_values = 0;
447 nary->u.result = boolean_false_node;
448 vn_nary_op_insert_into (nary, valid_info->nary, true);
449 gcc_assert (nary->unwind_to == NULL);
450 last_inserted_nary = nary->next;
451 nary->next = (vn_nary_op_t)(void *)-1;
452 if (dump_file && (dump_flags & TDF_DETAILS))
453 {
454 fprintf (dump_file, "Recording ");
455 print_generic_expr (dump_file, name, TDF_SLIM);
456 fprintf (dump_file, " != 0\n");
457 }
458 }
459 break;
460
461 case RESULT_DECL:
462 /* If the result is passed by invisible reference the default
463 def is initialized, otherwise it's uninitialized. Still
464 undefined is varying. */
465 newinfo->visited = true;
466 newinfo->valnum = name;
467 break;
468
469 default:
470 gcc_unreachable ();
471 }
472 return newinfo;
473}
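
[The PARM_DECL case above records two nary predicates for a pointer parameter that nonnull_arg_p proves non-null: name != 0 is true and name == 0 is false. A minimal C example of the effect this enables, assuming FRE/VN runs at -O2; the attribute spelling is standard GCC:]

/* The nonnull attribute makes nonnull_arg_p hold for p's default
   definition, so value numbering can fold the test via the recorded
   NE_EXPR predicate.  */
extern int f (int *p) __attribute__ ((nonnull));

int
f (int *p)
{
  return p != 0;  /* can be value-numbered to 1 */
}
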
474
475/* Return the SSA value of X. */
476
477inline tree
478SSA_VAL (tree x, bool *visited = NULL)
479{
480 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
481 if (visited)
482 *visited = tem && tem->visited;
483 return tem && tem->visited ? tem->valnum : x;
484}
485
486/* Return the SSA value of the VUSE x, supporting released VDEFs
487 during elimination which will value-number the VDEF to the
488 associated VUSE (but not substitute in the whole lattice). */
489
490static inline tree
491vuse_ssa_val (tree x)
492{
493 if (!x)
494 return NULL_TREE;
495
496 do
497 {
498 x = SSA_VAL (x);
499 gcc_assert (x != VN_TOP);
500 }
501 while (SSA_NAME_IN_FREE_LIST (x));
502
503 return x;
504}
505
506/* Similar to the above but used as callback for walk_non_aliases_vuses
507 and thus should stop at unvisited VUSE to not walk across region
508 boundaries. */
509
510static tree
511vuse_valueize (tree vuse)
512{
513 do
514 {
515 bool visited;
516 vuse = SSA_VAL (vuse, &visited);
517 if (!visited)
518 return NULL_TREE;
519 gcc_assert (vuse != VN_TOP);
520 }
521 while (SSA_NAME_IN_FREE_LIST (vuse));
522 return vuse;
523}
524
525
526/* Return the vn_kind the expression computed by the stmt should be
527 associated with. */
528
529enum vn_kind
530vn_get_stmt_kind (gimple *stmt)
531{
532 switch (gimple_code (stmt))
533 {
534 case GIMPLE_CALL:
535 return VN_REFERENCE;
536 case GIMPLE_PHI:
537 return VN_PHI;
538 case GIMPLE_ASSIGN:
539 {
540 enum tree_code code = gimple_assign_rhs_code (stmt);
541 tree rhs1 = gimple_assign_rhs1 (stmt);
542 switch (get_gimple_rhs_class (code))
543 {
544 case GIMPLE_UNARY_RHS:
545 case GIMPLE_BINARY_RHS:
546 case GIMPLE_TERNARY_RHS:
547 return VN_NARY;
548 case GIMPLE_SINGLE_RHS:
549 switch (TREE_CODE_CLASS (code))
550 {
551 case tcc_reference:
552 /* VOP-less references can go through unary case. */
553 if ((code == REALPART_EXPR
554 || code == IMAGPART_EXPR
555 || code == VIEW_CONVERT_EXPR
556 || code == BIT_FIELD_REF)
557 && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
558 || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
559 return VN_NARY;
560
561 /* Fallthrough. */
562 case tcc_declaration:
563 return VN_REFERENCE;
564
565 case tcc_constant:
566 return VN_CONSTANT;
567
568 default:
569 if (code == ADDR_EXPR)
570 return (is_gimple_min_invariant (rhs1)
571 ? VN_CONSTANT : VN_REFERENCE);
572 else if (code == CONSTRUCTOR)
573 return VN_NARY;
574 return VN_NONE;
575 }
576 default:
577 return VN_NONE;
578 }
579 }
580 default:
581 return VN_NONE;
582 }
583}
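
[As an illustration of the classification above, comments below show the vn_kind vn_get_stmt_kind would pick for the GIMPLE statements arising from a simple function. A sketch, not analyzer output:]

extern int foo (int);

int
g (int a, int b, int *p)
{
  int x = a + b;	/* GIMPLE_ASSIGN, GIMPLE_BINARY_RHS -> VN_NARY */
  int y = *p;		/* tcc_reference single rhs -> VN_REFERENCE */
  int z = foo (a);	/* GIMPLE_CALL -> VN_REFERENCE */
  int w = 42;		/* tcc_constant single rhs -> VN_CONSTANT */
  /* PHI nodes created at control-flow merges -> VN_PHI.  */
  return x + y + z + w;
}
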
584
585/* Lookup a value id for CONSTANT and return it. If it does not
586 exist returns 0. */
587
588unsigned int
589get_constant_value_id (tree constant)
590{
591 vn_constant_s **slot;
592 struct vn_constant_s vc;
593
594 vc.hashcode = vn_hash_constant_with_type (constant);
595 vc.constant = constant;
596 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
597 if (slot)
598 return (*slot)->value_id;
599 return 0;
600}
601
602/* Lookup a value id for CONSTANT, and if it does not exist, create a
603 new one and return it. If it does exist, return it. */
604
605unsigned int
606get_or_alloc_constant_value_id (tree constant)
607{
608 vn_constant_s **slot;
609 struct vn_constant_s vc;
610 vn_constant_t vcp;
611
612 /* If the hashtable isn't initialized we're not running from PRE and thus
613 do not need value-ids. */
614 if (!constant_to_value_id)
615 return 0;
616
617 vc.hashcode = vn_hash_constant_with_type (constant);
618 vc.constant = constant;
619 slot = constant_to_value_id->find_slot (&vc, INSERT);
620 if (*slot)
621 return (*slot)->value_id;
622
623 vcp = XNEW (struct vn_constant_s);
624 vcp->hashcode = vc.hashcode;
625 vcp->constant = constant;
626 vcp->value_id = get_next_constant_value_id ();
627 *slot = vcp;
628 return vcp->value_id;
629}
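
[A usage sketch for the two lookups above, assuming PRE has initialized constant_to_value_id; build_int_cst, integer_type_node, and gcc_assert are the real GCC APIs, the wrapper function is hypothetical:]

static void
constant_value_id_sketch (void)
{
  tree seven = build_int_cst (integer_type_node, 7);
  unsigned int id1 = get_or_alloc_constant_value_id (seven);
  /* A second, structurally equal constant of compatible type maps to
     the same value id via vn_hash_constant_with_type and
     vn_constant_eq_with_type.  */
  unsigned int id2 = get_constant_value_id (build_int_cst (integer_type_node, 7));
  gcc_assert (id1 == id2);
}
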
630
631/* Compute the hash for a reference operand VRO1. */
632
633static void
634vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
635{
636 hstate.add_int (vro1->opcode);
637 if (vro1->op0)
638 inchash::add_expr (vro1->op0, hstate);
639 if (vro1->op1)
640 inchash::add_expr (vro1->op1, hstate);
641 if (vro1->op2)
642 inchash::add_expr (vro1->op2, hstate);
643}
644
645/* Compute a hash for the reference operation VR1 and return it. */
646
647static hashval_t
648vn_reference_compute_hash (const vn_reference_t vr1)
649{
650 inchash::hash hstate;
651 hashval_t result;
652 int i;
653 vn_reference_op_t vro;
654 poly_int64 off = -1;
655 bool deref = false;
656
657 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
658 {
659 if (vro->opcode == MEM_REF)
660 deref = true;
661 else if (vro->opcode != ADDR_EXPR)
662 deref = false;
663 if (maybe_ne (vro->off, -1))
664 {
665 if (known_eq (off, -1))
666 off = 0;
667 off += vro->off;
668 }
669 else
670 {
671 if (maybe_ne (off, -1)
672 && maybe_ne (off, 0))
673 hstate.add_poly_int (off);
674 off = -1;
675 if (deref
676 && vro->opcode == ADDR_EXPR)
677 {
678 if (vro->op0)
679 {
680 tree op = TREE_OPERAND (vro->op0, 0);
681 hstate.add_int (TREE_CODE (op));
682 inchash::add_expr (op, hstate);
683 }
684 }
685 else
686 vn_reference_op_compute_hash (vro, hstate);
687 }
688 }
689 result = hstate.end ();
690 /* ??? We would ICE later if we hash instead of adding that in. */
691 if (vr1->vuse)
692 result += SSA_NAME_VERSION (vr1->vuse);
693
694 return result;
695}
696
697/* Return true if reference operations VR1 and VR2 are equivalent. This
698 means they have the same set of operands and vuses. */
699
700bool
701vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
702{
703 unsigned i, j;
704
705 /* Early out if this is not a hash collision. */
706 if (vr1->hashcode != vr2->hashcode)
707 return false;
708
709 /* The VOP needs to be the same. */
710 if (vr1->vuse != vr2->vuse)
711 return false;
712
713 /* If the operands are the same we are done. */
714 if (vr1->operands == vr2->operands)
715 return true;
716
717 if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
718 || (COMPLETE_TYPE_P (vr1->type)
719 && !expressions_equal_p (TYPE_SIZE (vr1->type),
720 TYPE_SIZE (vr2->type))))
721 return false;
722
723 if (INTEGRAL_TYPE_P (vr1->type)
724 && INTEGRAL_TYPE_P (vr2->type))
725 {
726 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
727 return false;
728 }
729 else if (INTEGRAL_TYPE_P (vr1->type)
730 && (TYPE_PRECISION (vr1->type)
731 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
732 return false;
733 else if (INTEGRAL_TYPE_P (vr2->type)
734 && (TYPE_PRECISION (vr2->type)
735 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
737
738 i = 0;
739 j = 0;
740 do
741 {
742 poly_int64 off1 = 0, off2 = 0;
743 vn_reference_op_t vro1, vro2;
744 vn_reference_op_s tem1, tem2;
745 bool deref1 = false, deref2 = false;
746 for (; vr1->operands.iterate (i, &vro1); i++)
747 {
748 if (vro1->opcode == MEM_REF)
749 deref1 = true;
750 /* Do not look through a storage order barrier. */
751 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
752 return false;
753 if (known_eq (vro1->off, -1))
754 break;
755 off1 += vro1->off;
756 }
757 for (; vr2->operands.iterate (j, &vro2); j++)
758 {
759 if (vro2->opcode == MEM_REF)
760 deref2 = true;
761 /* Do not look through a storage order barrier. */
762 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
763 return false;
764 if (known_eq (vro2->off, -1))
765 break;
766 off2 += vro2->off;
767 }
768 if (maybe_ne (off1, off2))
769 return false;
770 if (deref1 && vro1->opcode == ADDR_EXPR)
771 {
772 memset (&tem1, 0, sizeof (tem1));
773 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
774 tem1.type = TREE_TYPE (tem1.op0);
775 tem1.opcode = TREE_CODE (tem1.op0);
776 vro1 = &tem1;
777 deref1 = false;
778 }
779 if (deref2 && vro2->opcode == ADDR_EXPR)
780 {
781 memset (&tem2, 0, sizeof (tem2));
782 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
783 tem2.type = TREE_TYPE (tem2.op0);
784 tem2.opcode = TREE_CODE (tem2.op0);
785 vro2 = &tem2;
786 deref2 = false;
787 }
788 if (deref1 != deref2)
789 return false;
790 if (!vn_reference_op_eq (vro1, vro2))
791 return false;
792 ++j;
793 ++i;
794 }
795 while (vr1->operands.length () != i
796 || vr2->operands.length () != j);
797
798 return true;
799}
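
[The deref1/deref2 handling above is what lets a direct decl access compare equal to its canonical MEM_REF form. A small C example in which the two loads end up with equal reference representations, assuming ptr is valueized to &a. Sketch only:]

int a;

int
h (void)
{
  int *ptr = &a;
  int x = *ptr;	/* becomes MEM[(int *)&a] after valueization */
  int y = a;	/* direct decl read, canonicalized to MEM[&a] */
  return x + y;	/* vn_reference_eq lets FRE reuse the first load */
}
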
800
801/* Copy the operations present in load/store REF into RESULT, a vector of
802 vn_reference_op_s's. */
803
804static void
805copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
806{
807 /* For non-calls, store the information that makes up the address. */
808 tree orig = ref;
809 while (ref)
810 {
811 vn_reference_op_s temp;
812
813 memset (&temp, 0, sizeof (temp));
814 temp.type = TREE_TYPE (ref);
815 temp.opcode = TREE_CODE (ref);
816 temp.off = -1;
817
818 switch (temp.opcode)
819 {
820 case MODIFY_EXPR:
821 temp.op0 = TREE_OPERAND (ref, 1);
822 break;
823 case WITH_SIZE_EXPR:
824 temp.op0 = TREE_OPERAND (ref, 1);
825 temp.off = 0;
826 break;
827 case MEM_REF:
828 /* The base address gets its own vn_reference_op_s structure. */
829 temp.op0 = TREE_OPERAND (ref, 1);
830 if (!mem_ref_offset (ref).to_shwi (&temp.off))
831 temp.off = -1;
832 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
833 temp.base = MR_DEPENDENCE_BASE (ref);
834 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
835 break;
836 case TARGET_MEM_REF:
837 /* The base address gets its own vn_reference_op_s structure. */
838 temp.op0 = TMR_INDEX (ref);
839 temp.op1 = TMR_STEP (ref);
840 temp.op2 = TMR_OFFSET (ref);
841 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
842 temp.base = MR_DEPENDENCE_BASE (ref);
843 result->safe_push (temp);
844 memset (&temp, 0, sizeof (temp));
845 temp.type = NULL_TREE;
846 temp.opcode = ERROR_MARK;
847 temp.op0 = TMR_INDEX2 (ref);
848 temp.off = -1;
849 break;
850 case BIT_FIELD_REF:
851 /* Record bits, position and storage order. */
852 temp.op0 = TREE_OPERAND (ref, 1);
853 temp.op1 = TREE_OPERAND (ref, 2);
854 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
855 temp.off = -1;
856 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
857 break;
858 case COMPONENT_REF:
859 /* The field decl is enough to unambiguously specify the field,
860 a matching type is not necessary and a mismatching type
861 is always a spurious difference. */
862 temp.type = NULL_TREE;
863 temp.op0 = TREE_OPERAND (ref, 1);
864 temp.op1 = TREE_OPERAND (ref, 2);
865 {
866 tree this_offset = component_ref_field_offset (ref);
867 if (this_offset
868 && poly_int_tree_p (this_offset))
869 {
870 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
871 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
872 {
873 poly_offset_int off
874 = (wi::to_poly_offset (this_offset)
875 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
876 /* Prohibit value-numbering zero offset components
877 of addresses the same before the pass folding
878 __builtin_object_size had a chance to run
879 (checking cfun->after_inlining does the
880 trick here). */
881 if (TREE_CODE (orig) != ADDR_EXPR
882 || maybe_ne (off, 0)
883 || cfun->after_inlining)
884 off.to_shwi (&temp.off);
885 }
886 }
887 }
888 break;
889 case ARRAY_RANGE_REF:
890 case ARRAY_REF:
891 {
892 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
893 /* Record index as operand. */
894 temp.op0 = TREE_OPERAND (ref, 1);
895 /* Always record lower bounds and element size. */
896 temp.op1 = array_ref_low_bound (ref);
897 /* But record element size in units of the type alignment. */
898 temp.op2 = TREE_OPERAND (ref, 3);
899 temp.align = eltype->type_common.align;
900 if (! temp.op2)
901 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
902 size_int (TYPE_ALIGN_UNIT (eltype)));
903 if (poly_int_tree_p (temp.op0)
904 && poly_int_tree_p (temp.op1)
905 && TREE_CODE (temp.op2) == INTEGER_CST)
906 {
907 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
908 - wi::to_poly_offset (temp.op1))
909 * wi::to_offset (temp.op2)
910 * vn_ref_op_align_unit (&temp));
911 off.to_shwi (&temp.off);
912 }
913 }
914 break;
915 case VAR_DECL:
916 if (DECL_HARD_REGISTER (ref))
917 {
918 temp.op0 = ref;
919 break;
920 }
921 /* Fallthru. */
922 case PARM_DECL:
923 case CONST_DECL:
924 case RESULT_DECL:
925 /* Canonicalize decls to MEM[&decl] which is what we end up with
926 when valueizing MEM[ptr] with ptr = &decl. */
927 temp.opcode = MEM_REF;
928 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
929 temp.off = 0;
930 result->safe_push (temp);
931 temp.opcode = ADDR_EXPR;
932 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
933 temp.type = TREE_TYPE (temp.op0);
934 temp.off = -1;
935 break;
936 case STRING_CST:
937 case INTEGER_CST:
938 case POLY_INT_CST:
939 case COMPLEX_CST:
940 case VECTOR_CST:
941 case REAL_CST:
942 case FIXED_CST:
943 case CONSTRUCTOR:
944 case SSA_NAME:
945 temp.op0 = ref;
946 break;
947 case ADDR_EXPR:
948 if (is_gimple_min_invariant (ref))
949 {
950 temp.op0 = ref;
951 break;
952 }
953 break;
954 /* These are only interesting for their operands, their
955 existence, and their type. They will never be the last
956 ref in the chain of references (IE they require an
957 operand), so we don't have to put anything
958 for op* as it will be handled by the iteration */
959 case REALPART_EXPR:
960 temp.off = 0;
961 break;
962 case VIEW_CONVERT_EXPR:
963 temp.off = 0;
964 temp.reverse = storage_order_barrier_p (ref);
965 break;
966 case IMAGPART_EXPR:
967 /* This is only interesting for its constant offset. */
968 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
969 break;
970 default:
971 gcc_unreachable ();
972 }
973 result->safe_push (temp);
974
975 if (REFERENCE_CLASS_P (ref)
976 || TREE_CODE (ref) == MODIFY_EXPR
977 || TREE_CODE (ref) == WITH_SIZE_EXPR
978 || (TREE_CODE (ref) == ADDR_EXPR
979 && !is_gimple_min_invariant (ref)))
980 ref = TREE_OPERAND (ref, 0);
981 else
982 ref = NULL_TREE;
983 }
984}
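
[To make the decomposition concrete, an informal sketch (not analyzer output) of the operand records this function pushes for the load s.arr[i], outermost reference first:]

/* For 'int t = s.arr[i];' with 'struct S { int arr[4]; } s;':

     ARRAY_REF      op0 = i, op1 = lower bound 0, op2 = element size
                    (in units of the element type's alignment)
     COMPONENT_REF  op0 = FIELD_DECL arr
     MEM_REF        op0 = (struct S *) 0  (decl canonicalized to MEM[&s])
     ADDR_EXPR      op0 = &s

   temp.off is filled in wherever the offset is a compile-time constant
   and left as -1 otherwise (e.g. for a variable index i).  */
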
985
986/* Build an alias-oracle reference abstraction in *REF from the vn_reference
987 operands in *OPS, the reference alias set SET and the reference type TYPE.
988 Return true if something useful was produced. */
989
990bool
991ao_ref_init_from_vn_reference (ao_ref *ref,
992 alias_set_type set, alias_set_type base_set,
993 tree type, vec<vn_reference_op_s> ops)
994{
995 vn_reference_op_t op;
996 unsigned i;
997 tree base = NULL_TREE;
998 tree *op0_p = &base;
999 poly_offset_int offset = 0;
1000 poly_offset_int max_size;
1001 poly_offset_int size = -1;
1002 tree size_tree = NULL_TREE;
1003
1004 /* First get the final access size from just the outermost expression. */
1005 op = &ops[0];
1006 if (op->opcode == COMPONENT_REF)
1007 size_tree = DECL_SIZE (op->op0);
1008 else if (op->opcode == BIT_FIELD_REF)
1009 size_tree = op->op0;
1010 else
1011 {
1012 machine_mode mode = TYPE_MODE (type);
1013 if (mode == BLKmode)
1014 size_tree = TYPE_SIZE (type);
1015 else
1016 size = GET_MODE_BITSIZE (mode);
1017 }
1018 if (size_tree != NULL_TREE
1019 && poly_int_tree_p (size_tree))
1020 size = wi::to_poly_offset (size_tree);
1021
1022 /* Initially, maxsize is the same as the accessed element size.
1023 In the following it will only grow (or become -1). */
1024 max_size = size;
1025
1026 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1027 and find the ultimate containing object. */
1028 FOR_EACH_VEC_ELT (ops, i, op)
1029 {
1030 switch (op->opcode)
1031 {
1032 /* These may be in the reference ops, but we cannot do anything
1033 sensible with them here. */
1034 case ADDR_EXPR:
1035 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1036 if (base != NULL_TREE
1037 && TREE_CODE (base) == MEM_REF
1038 && op->op0
1039 && DECL_P (TREE_OPERAND (op->op0, 0)))
1040 {
1041 vn_reference_op_t pop = &ops[i-1];
1042 base = TREE_OPERAND (op->op0, 0);
1043 if (known_eq (pop->off, -1))
1044 {
1045 max_size = -1;
1046 offset = 0;
1047 }
1048 else
1049 offset += pop->off * BITS_PER_UNIT;
1050 op0_p = NULL;
1051 break;
1052 }
1053 /* Fallthru. */
1054 case CALL_EXPR:
1055 return false;
1056
1057 /* Record the base objects. */
1058 case MEM_REF:
1059 *op0_p = build2 (MEM_REF, op->type,
1060 NULL_TREE, op->op0);
1061 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1062 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1063 op0_p = &TREE_OPERAND (*op0_p, 0);
1064 break;
1065
1066 case VAR_DECL:
1067 case PARM_DECL:
1068 case RESULT_DECL:
1069 case SSA_NAME:
1070 *op0_p = op->op0;
1071 op0_p = NULL;
1072 break;
1073
1074 /* And now the usual component-reference style ops. */
1075 case BIT_FIELD_REF:
1076 offset += wi::to_poly_offset (op->op1);
1077 break;
1078
1079 case COMPONENT_REF:
1080 {
1081 tree field = op->op0;
1082 /* We do not have a complete COMPONENT_REF tree here so we
1083 cannot use component_ref_field_offset. Do the interesting
1084 parts manually. */
1085 tree this_offset = DECL_FIELD_OFFSET (field);
1086
1087 if (op->op1 || !poly_int_tree_p (this_offset))
1088 max_size = -1;
1089 else
1090 {
1091 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1092 << LOG2_BITS_PER_UNIT);
1093 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1094 offset += woffset;
1095 }
1096 break;
1097 }
1098
1099 case ARRAY_RANGE_REF:
1100 case ARRAY_REF:
1101 /* We recorded the lower bound and the element size. */
1102 if (!poly_int_tree_p (op->op0)
1103 || !poly_int_tree_p (op->op1)
1104 || TREE_CODE (op->op2) != INTEGER_CST)
1105 max_size = -1;
1106 else
1107 {
1108 poly_offset_int woffset
1109 = wi::sext (wi::to_poly_offset (op->op0)
1110 - wi::to_poly_offset (op->op1),
1111 TYPE_PRECISION (TREE_TYPE (op->op0)));
1112 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1113 woffset <<= LOG2_BITS_PER_UNIT;
1114 offset += woffset;
1115 }
1116 break;
1117
1118 case REALPART_EXPR:
1119 break;
1120
1121 case IMAGPART_EXPR:
1122 offset += size;
1123 break;
1124
1125 case VIEW_CONVERT_EXPR:
1126 break;
1127
1128 case STRING_CST:
1129 case INTEGER_CST:
1130 case COMPLEX_CST:
1131 case VECTOR_CST:
1132 case REAL_CST:
1133 case CONSTRUCTOR:
1134 case CONST_DECL:
1135 return false;
1136
1137 default:
1138 return false;
1139 }
1140 }
1141
1142 if (base == NULL_TREE)
1143 return false;
1144
1145 ref->ref = NULL_TREE;
1146 ref->base = base;
1147 ref->ref_alias_set = set;
1148 ref->base_alias_set = base_set;
1149 /* We discount volatiles from value-numbering elsewhere. */
1150 ref->volatile_p = false;
1151
1152 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1153 {
1154 ref->offset = 0;
1155 ref->size = -1;
1156 ref->max_size = -1;
1157 return true;
1158 }
1159
1160 if (!offset.to_shwi (&ref->offset))
1161 {
1162 ref->offset = 0;
1163 ref->max_size = -1;
1164 return true;
1165 }
1166
1167 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1168 ref->max_size = -1;
1169
1170 return true;
1171}
1172
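The ARRAY_REF/ARRAY_RANGE_REF case above contributes (index - low_bound) * element_size * BITS_PER_UNIT to the running bit offset, and COMPONENT_REF adds the field's byte offset shifted into bits plus its bit offset. A minimal standalone sketch of that accumulation, with plain int64_t standing in for poly_offset_int (names and values are illustrative, not GCC API):

  #include <cstdint>
  #include <cstdio>

  // Accumulate the bit offset of a[5].f the way the ARRAY_REF and
  // COMPONENT_REF cases above do; int64_t stands in for poly_offset_int.
  static int64_t array_ref_bit_offset (int64_t index, int64_t low_bound,
                                       int64_t elt_size_units)
  {
    const int64_t bits_per_unit = 8;
    return (index - low_bound) * elt_size_units * bits_per_unit;
  }

  int main ()
  {
    int64_t off = array_ref_bit_offset (5, 0, 4); // a[5], 4-byte elements
    off += 2 * 8;                                 // field f at byte 2
    printf ("%lld\n", (long long) off);           // prints 176
    return 0;
  }
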
1173/* Copy the operations present in load/store/call REF into RESULT, a vector of
1174 vn_reference_op_s's. */
1175
1176static void
1177copy_reference_ops_from_call (gcall *call,
1178 vec<vn_reference_op_s> *result)
1179{
1180 vn_reference_op_s temp;
1181 unsigned i;
1182 tree lhs = gimple_call_lhs (call);
1183 int lr;
1184
1185 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1186 different. By adding the lhs here in the vector, we ensure that the
1187 hashcode is different, guaranteeing a different value number. */
1188 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1189 {
1190 memset (&temp, 0, sizeof (temp));
1191 temp.opcode = MODIFY_EXPR;
1192 temp.type = TREE_TYPE (lhs);
1193 temp.op0 = lhs;
1194 temp.off = -1;
1195 result->safe_push (temp);
1196 }
1197
1198 /* Copy the type, opcode, function, static chain and EH region, if any. */
1199 memset (&temp, 0, sizeof (temp));
1200 temp.type = gimple_call_fntype (call);
1201 temp.opcode = CALL_EXPR;
1202 temp.op0 = gimple_call_fn (call);
1203 temp.op1 = gimple_call_chain (call);
1204 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1205 temp.op2 = size_int (lr);
1206 temp.off = -1;
1207 result->safe_push (temp);
1208
1209 /* Copy the call arguments. As they can be references as well,
1210 just chain them together. */
1211 for (i = 0; i < gimple_call_num_args (call); ++i)
1212 {
1213 tree callarg = gimple_call_arg (call, i);
1214 copy_reference_ops_from_ref (callarg, result);
1215 }
1216}
1217
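The comment above relies on the hashcode covering every pushed operand: one extra distinguishing element is enough to give two otherwise-identical calls different value numbers. A hypothetical stand-in (not the GCC hasher) showing the effect:

  #include <cstdio>
  #include <functional>
  #include <string>

  // Mix the non-SSA lhs into the hash the way an extra operand would be.
  static size_t hash_call (const std::string &fn, const std::string &lhs)
  {
    size_t h = std::hash<std::string>{} (fn);
    return h * 31 + std::hash<std::string>{} (lhs);
  }

  int main ()
  {
    // Same callee, different non-SSA destinations => different hashes.
    printf ("%zu\n%zu\n", hash_call ("foo", "x.a"), hash_call ("foo", "x.b"));
    return 0;
  }
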
1218/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1219 *I_P to point to the last element of the replacement. */
1220static bool
1221vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1222 unsigned int *i_p)
1223{
1224 unsigned int i = *i_p;
1225 vn_reference_op_t op = &(*ops)[i];
1226 vn_reference_op_t mem_op = &(*ops)[i - 1];
1227 tree addr_base;
1228 poly_int64 addr_offset = 0;
1229
1230 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1231 from .foo.bar to the preceding MEM_REF offset and replace the
1232 address with &OBJ. */
1233 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1234 &addr_offset, vn_valueize);
1235 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1236 if (addr_base != TREE_OPERAND (op->op0, 0))
1237 {
1238 poly_offset_int off
1239 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1240 SIGNED)
1241 + addr_offset);
1242 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1243 op->op0 = build_fold_addr_expr (addr_base);
1244 if (tree_fits_shwi_p (mem_op->op0))
1245 mem_op->off = tree_to_shwi (mem_op->op0);
1246 else
1247 mem_op->off = -1;
1248 return true;
1249 }
1250 return false;
1251}
1252
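The transformation above turns MEM_REF[&obj.foo.bar, c] into MEM_REF[&obj, c + unit_offset(.foo.bar)]. A sketch of the offset arithmetic with plain integers standing in for the tree constants (names are illustrative only):

  #include <cstdint>
  #include <cstdio>

  struct mem_ref { int64_t offset; };  // constant byte offset of the MEM_REF

  // Matches "off = mem_op->op0 + addr_offset" in the code above.
  static void fold_indirect (mem_ref &mr, int64_t addr_unit_offset)
  {
    mr.offset += addr_unit_offset;
  }

  int main ()
  {
    mem_ref mr = { 4 };        // *(p + 4) where p = &obj.foo.bar
    fold_indirect (mr, 12);    // .foo.bar sits 12 bytes into obj
    printf ("%lld\n", (long long) mr.offset);  // now *(&obj + 16)
    return 0;
  }
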
1253/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1254 *I_P to point to the last element of the replacement. */
1255static bool
1256vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1257 unsigned int *i_p)
1258{
1259 bool changed = false;
1260 vn_reference_op_t op;
1261
1262 do
1263 {
1264 unsigned int i = *i_p;
1265 op = &(*ops)[i];
1266 vn_reference_op_t mem_op = &(*ops)[i - 1];
1267 gimple *def_stmt;
1268 enum tree_code code;
1269 poly_offset_int off;
1270
1271 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1272 if (!is_gimple_assign (def_stmt))
1273 return changed;
1274
1275 code = gimple_assign_rhs_code (def_stmt);
1276 if (code != ADDR_EXPR
1277 && code != POINTER_PLUS_EXPR)
1278 return changed;
1279
1280 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1281
1282 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1283 from .foo.bar to the preceding MEM_REF offset and replace the
1284 address with &OBJ. */
1285 if (code == ADDR_EXPR)
1286 {
1287 tree addr, addr_base;
1288 poly_int64 addr_offset;
1289
1290 addr = gimple_assign_rhs1 (def_stmt);
1291 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1292 &addr_offset,
1293 vn_valueize);
1294 /* If that didn't work because the address isn't invariant propagate
1295 the reference tree from the address operation in case the current
1296 dereference isn't offsetted. */
1297 if (!addr_base
1298 && *i_p == ops->length () - 1
1299 && known_eq (off, 0)
1300 /* This makes us disable this transform for PRE where the
1301 reference ops might be also used for code insertion which
1302 is invalid. */
1303 && default_vn_walk_kind == VN_WALKREWRITE)
1304 {
1305 auto_vec<vn_reference_op_s, 32> tem;
1306 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1307 /* Make sure to preserve TBAA info. The only objects not
1308 wrapped in MEM_REFs that can have their address taken are
1309 STRING_CSTs. */
1310 if (tem.length () >= 2
1311 && tem[tem.length () - 2].opcode == MEM_REF)
1312 {
1313 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1314 new_mem_op->op0
1315 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1316 wi::to_poly_wide (new_mem_op->op0));
1317 }
1318 else
1319 gcc_assert (tem.last ().opcode == STRING_CST);
1320 ops->pop ();
1321 ops->pop ();
1322 ops->safe_splice (tem);
1323 --*i_p;
1324 return true;
1325 }
1326 if (!addr_base
1327 || TREE_CODE (addr_base) != MEM_REF
1328 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1329 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1330 0))))
1331 return changed;
1332
1333 off += addr_offset;
1334 off += mem_ref_offset (addr_base);
1335 op->op0 = TREE_OPERAND (addr_base, 0);
1336 }
1337 else
1338 {
1339 tree ptr, ptroff;
1340 ptr = gimple_assign_rhs1 (def_stmt);
1341 ptroff = gimple_assign_rhs2 (def_stmt);
1342 if (TREE_CODE (ptr) != SSA_NAME
1343 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1344 /* Make sure to not endlessly recurse.
1345 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1346 happen when we value-number a PHI to its backedge value. */
1347 || SSA_VAL (ptr) == op->op0
1348 || !poly_int_tree_p (ptroff))
1349 return changed;
1350
1351 off += wi::to_poly_offset (ptroff);
1352 op->op0 = ptr;
1353 }
1354
1355 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1356 if (tree_fits_shwi_p (mem_op->op0))
1357 mem_op->off = tree_to_shwi (mem_op->op0);
1358 else
1359 mem_op->off = -1;
1360 /* ??? Can end up with endless recursion here!?
1361 gcc.c-torture/execute/strcmp-1.c */
1362 if (TREE_CODE (op->op0) == SSA_NAME)
1363 op->op0 = SSA_VAL (op->op0);
1364 if (TREE_CODE (op->op0) != SSA_NAME)
1365 op->opcode = TREE_CODE (op->op0);
1366
1367 changed = true;
1368 }
1369 /* Tail-recurse. */
1370 while (TREE_CODE (op->op0) == SSA_NAME);
1371
1372 /* Fold a remaining *&. */
1373 if (TREE_CODE (op->op0) == ADDR_EXPR)
1374 vn_reference_fold_indirect (ops, i_p);
1375
1376 return changed;
1377}
1378
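The POINTER_PLUS_EXPR branch above keeps folding "base = inner + c" definitions into the MEM_REF offset until the base is no longer an SSA name; the real loop also guards against value-number cycles via SSA_VAL. A sketch of the chain walk with a map standing in for SSA_NAME_DEF_STMT (all names hypothetical):

  #include <cstdint>
  #include <cstdio>
  #include <map>
  #include <string>

  // "q = p + 8" and "p = base + 16" play the role of POINTER_PLUS_EXPR
  // definitions; the map is a stand-in for SSA_NAME_DEF_STMT.
  struct pointer_plus { std::string inner; int64_t add; };

  static int64_t forwprop_offset (std::string ptr, int64_t off,
                                  const std::map<std::string, pointer_plus> &defs)
  {
    // Fold each constant step into the offset and continue from the
    // inner pointer, like the do-while loop above.
    for (auto it = defs.find (ptr); it != defs.end (); it = defs.find (ptr))
      {
        off += it->second.add;    // off += wi::to_poly_offset (ptroff)
        ptr = it->second.inner;   // op->op0 = ptr
      }
    return off;
  }

  int main ()
  {
    std::map<std::string, pointer_plus> defs
      = { { "q", { "p", 8 } }, { "p", { "base", 16 } } };
    printf ("%lld\n", (long long) forwprop_offset ("q", 4, defs));  // 28
    return 0;
  }
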
1379/* Optimize the reference REF to a constant if possible or return
1380 NULL_TREE if not. */
1381
1382tree
1383fully_constant_vn_reference_p (vn_reference_t ref)
1384{
1385 vec<vn_reference_op_s> operands = ref->operands;
1386 vn_reference_op_t op;
1387
1388 /* Try to simplify the translated expression if it is
1389 a call to a builtin function with at most two arguments. */
1390 op = &operands[0];
1391 if (op->opcode == CALL_EXPR
1392 && TREE_CODE (op->op0) == ADDR_EXPR
1393 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1394 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1395 && operands.length () >= 2
1396 && operands.length () <= 3)
1397 {
1398 vn_reference_op_t arg0, arg1 = NULL;
1399 bool anyconst = false;
1400 arg0 = &operands[1];
1401 if (operands.length () > 2)
1402 arg1 = &operands[2];
1403 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1404 || (arg0->opcode == ADDR_EXPR
1405 && is_gimple_min_invariant (arg0->op0)))
1406 anyconst = true;
1407 if (arg1
1408 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1409 || (arg1->opcode == ADDR_EXPR
1410 && is_gimple_min_invariant (arg1->op0))))
1411 anyconst = true;
1412 if (anyconst)
1413 {
1414 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1415 arg1 ? 2 : 1,
1416 arg0->op0,
1417 arg1 ? arg1->op0 : NULL);
1418 if (folded
1419 && TREE_CODE (folded) == NOP_EXPR)
1420 folded = TREE_OPERAND (folded, 0);
1421 if (folded
1422 && is_gimple_min_invariant (folded))
1423 return folded;
1424 }
1425 }
1426
1427 /* Simplify reads from constants or constant initializers. */
1428 else if (BITS_PER_UNIT == 8
1429 && COMPLETE_TYPE_P (ref->type)
1430 && is_gimple_reg_type (ref->type))
1431 {
1432 poly_int64 off = 0;
1433 HOST_WIDE_INT size;
1434 if (INTEGRAL_TYPE_P (ref->type))
1435 size = TYPE_PRECISION (ref->type);
1436 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1437 size = tree_to_shwi (TYPE_SIZE (ref->type));
1438 else
1439 return NULL_TREE;
1440 if (size % BITS_PER_UNIT != 0
1441 || size > MAX_BITSIZE_MODE_ANY_MODE)
1442 return NULL_TREE;
1443 size /= BITS_PER_UNIT;
1444 unsigned i;
1445 for (i = 0; i < operands.length (); ++i)
1446 {
1447 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1448 {
1449 ++i;
1450 break;
1451 }
1452 if (known_eq (operands[i].off, -1))
1453 return NULL_TREE;
1454 off += operands[i].off;
1455 if (operands[i].opcode == MEM_REF)
1456 {
1457 ++i;
1458 break;
1459 }
1460 }
1461 vn_reference_op_t base = &operands[--i];
1462 tree ctor = error_mark_node;
1463 tree decl = NULL_TREE;
1464 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1465 ctor = base->op0;
1466 else if (base->opcode == MEM_REF
1467 && base[1].opcode == ADDR_EXPR
1468 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1469 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1470 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1471 {
1472 decl = TREE_OPERAND (base[1].op0, 0);
1473 if (TREE_CODE (decl) == STRING_CST)
1474 ctor = decl;
1475 else
1476 ctor = ctor_for_folding (decl);
1477 }
1478 if (ctor == NULL_TREE)
1479 return build_zero_cst (ref->type);
1480 else if (ctor != error_mark_node)
1481 {
1482 HOST_WIDE_INT const_off;
1483 if (decl)
1484 {
1485 tree res = fold_ctor_reference (ref->type, ctor,
1486 off * BITS_PER_UNIT,
1487 size * BITS_PER_UNIT, decl);
1488 if (res)
1489 {
1490 STRIP_USELESS_TYPE_CONVERSION (res);
1491 if (is_gimple_min_invariant (res))
1492 return res;
1493 }
1494 }
1495 else if (off.is_constant (&const_off))
1496 {
1497 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1498 int len = native_encode_expr (ctor, buf, size, const_off);
1499 if (len > 0)
1500 return native_interpret_expr (ref->type, buf, len);
1501 }
1502 }
1503 }
1504
1505 return NULL_TREE;
1506}
1507
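The constant-folding path above serializes the initializer to bytes with native_encode_expr and reads the accessed sub-range back with native_interpret_expr. A host-endian sketch of the same round trip using memcpy (the GCC routines are target-aware; this is illustrative only):

  #include <cstdint>
  #include <cstdio>
  #include <cstring>

  int main ()
  {
    // "Constant initializer": a 64-bit value, encoded to raw bytes.
    uint64_t ctor = 0x1122334455667788ULL;
    unsigned char buf[8];
    memcpy (buf, &ctor, 8);          // plays the native_encode_expr role

    // Read a 4-byte scalar at byte offset 2 of the initializer.
    uint32_t val;
    memcpy (&val, buf + 2, 4);       // plays the native_interpret_expr role
    printf ("0x%08x\n", val);        // 0x33445566 on little-endian hosts
    return 0;
  }
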
1508/* Return true if OPS contain a storage order barrier. */
1509
1510static bool
1511contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1512{
1513 vn_reference_op_t op;
1514 unsigned i;
1515
1516 FOR_EACH_VEC_ELT (ops, i, op)
1517 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1518 return true;
1519
1520 return false;
1521}
1522
1523/* Transform any SSA_NAME's in a vector of vn_reference_op_s
1524 structures into their value numbers. This is done in-place, and
1525 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1526 whether any operands were valueized. */
1527
1528static vec<vn_reference_op_s>
1529valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1530 bool with_avail = false)
1531{
1532 vn_reference_op_t vro;
1533 unsigned int i;
1534
1535 *valueized_anything = false;
1536
1537 FOR_EACH_VEC_ELT (orig, i, vro)
1538 {
1539 if (vro->opcode == SSA_NAME
1540 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1541 {
1542 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1543 if (tem != vro->op0)
1544 {
1545 *valueized_anything = true;
1546 vro->op0 = tem;
1547 }
1548 /* If it transforms from an SSA_NAME to a constant, update
1549 the opcode. */
1550 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1551 vro->opcode = TREE_CODE (vro->op0);
1552 }
1553 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1554 {
1555 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1556 if (tem != vro->op1)
1557 {
1558 *valueized_anything = true;
1559 vro->op1 = tem;
1560 }
1561 }
1562 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1563 {
1564 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1565 if (tem != vro->op2)
1566 {
1567 *valueized_anything = true;
1568 vro->op2 = tem;
1569 }
1570 }
1571 /* If it transforms from an SSA_NAME to an address, fold with
1572 a preceding indirect reference. */
1573 if (i > 0
1574 && vro->op0
1575 && TREE_CODE (vro->op0) == ADDR_EXPR
1576 && orig[i - 1].opcode == MEM_REF)
1577 {
1578 if (vn_reference_fold_indirect (&orig, &i))
1579 *valueized_anything = true;
1580 }
1581 else if (i > 0
1582 && vro->opcode == SSA_NAME
1583 && orig[i - 1].opcode == MEM_REF)
1584 {
1585 if (vn_reference_maybe_forwprop_address (&orig, &i))
1586 *valueized_anything = true;
1587 }
1588 /* If it transforms a non-constant ARRAY_REF into a constant
1589 one, adjust the constant offset. */
1590 else if (vro->opcode == ARRAY_REF
1591 && known_eq (vro->off, -1)
1592 && poly_int_tree_p (vro->op0)
1593 && poly_int_tree_p (vro->op1)
1594 && TREE_CODE (vro->op2) == INTEGER_CST)
1595 {
1596 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1597 - wi::to_poly_offset (vro->op1))
1598 * wi::to_offset (vro->op2)
1599 * vn_ref_op_align_unit (vro));
1600 off.to_shwi (&vro->off);
1601 }
1602 }
1603
1604 return orig;
1605}
1606
1607static vec<vn_reference_op_s>
1608valueize_refs (vec<vn_reference_op_s> orig)
1609{
1610 bool tem;
1611 return valueize_refs_1 (orig, &tem);
1612}
1613
1614static vec<vn_reference_op_s> shared_lookup_references;
1615
1616/* Create a vector of vn_reference_op_s structures from REF, a
1617 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1618 this function. *VALUEIZED_ANYTHING will specify whether any
1619 operands were valueized. */
1620
1621static vec<vn_reference_op_s>
1622valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1623{
1624 if (!ref)
1625 return vNULL;
1626 shared_lookup_references.truncate (0);
1627 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1628 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1629 valueized_anything);
1630 return shared_lookup_references;
1631}
1632
1633/* Create a vector of vn_reference_op_s structures from CALL, a
1634 call statement. The vector is shared among all callers of
1635 this function. */
1636
1637static vec<vn_reference_op_s>
1638valueize_shared_reference_ops_from_call (gcall *call)
1639{
1640 if (!call)
1641 return vNULL;
1642 shared_lookup_references.truncate (0);
1643 copy_reference_ops_from_call (call, &shared_lookup_references);
1644 shared_lookup_references = valueize_refs (shared_lookup_references);
1645 return shared_lookup_references;
1646}
1647
1648/* Lookup a SCCVN reference operation VR in the current hash table.
1649 Returns the resulting value number if it exists in the hash table,
1650 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1651 vn_reference_t stored in the hashtable if something is found. */
1652
1653static tree
1654vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1655{
1656 vn_reference_s **slot;
1657 hashval_t hash;
1658
1659 hash = vr->hashcode;
1660 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1661 if (slot)
1662 {
1663 if (vnresult)
1664 *vnresult = (vn_reference_t)*slot;
1665 return ((vn_reference_t)*slot)->result;
1666 }
1667
1668 return NULL_TREE;
1669}
1670
1671
1672/* Partial definition tracking support. */
1673
1674struct pd_range
1675{
1676 HOST_WIDE_INT offset;
1677 HOST_WIDE_INT size;
1678};
1679
1680struct pd_data
1681{
1682 tree rhs;
1683 HOST_WIDE_INT offset;
1684 HOST_WIDE_INT size;
1685};
1686
1687/* Context for alias walking. */
1688
1689struct vn_walk_cb_data
1690{
1691 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1692 vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
1693 : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1694 mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
1695 tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
1696 first_base_set (-2), known_ranges (NULL)
1697 {
1698 if (!last_vuse_ptr)
20. Assuming field 'last_vuse_ptr' is non-null
21. Taking false branch
1699 last_vuse_ptr = &last_vuse;
1700 ao_ref_init (&orig_ref, orig_ref_);
1701 if (mask)
22. Assuming field 'mask' is non-null
23. Taking true branch
1702 {
1703 wide_int w = wi::to_wide (mask);
24. Calling constructor for 'generic_wide_int<wide_int_storage>'
1704 unsigned int pos = 0, prec = w.get_precision ();
1705 pd_data pd;
1706 pd.rhs = build_constructor (NULL_TREE, NULL);
1707 /* When bitwise and with a constant is done on a memory load,
1708 we don't really need all the bits to be defined or defined
1709 to constants, we don't really care what is in the position
1710 corresponding to 0 bits in the mask.
1711 So, push the ranges of those 0 bits in the mask as artificial
1712 zero stores and let the partial def handling code do the
1713 rest. */
1714 while (pos < prec)
1715 {
1716 int tz = wi::ctz (w);
1717 if (pos + tz > prec)
1718 tz = prec - pos;
1719 if (tz)
1720 {
1721 if (BYTES_BIG_ENDIAN)
1722 pd.offset = prec - pos - tz;
1723 else
1724 pd.offset = pos;
1725 pd.size = tz;
1726 void *r = push_partial_def (pd, 0, 0, 0, prec);
1727 gcc_assert (r == NULL_TREE);
1728 }
1729 pos += tz;
1730 if (pos == prec)
1731 break;
1732 w = wi::lrshift (w, tz);
1733 tz = wi::ctz (wi::bit_not (w));
1734 if (pos + tz > prec)
1735 tz = prec - pos;
1736 pos += tz;
1737 w = wi::lrshift (w, tz);
1738 }
1739 }
1740 }
1741 ~vn_walk_cb_data ();
1742 void *finish (alias_set_type, alias_set_type, tree);
1743 void *push_partial_def (pd_data pd,
1744 alias_set_type, alias_set_type, HOST_WIDE_INT,
1745 HOST_WIDE_INT);
1746
1747 vn_reference_t vr;
1748 ao_ref orig_ref;
1749 tree *last_vuse_ptr;
1750 tree last_vuse;
1751 tree mask;
1752 tree masked_result;
1753 vn_lookup_kind vn_walk_kind;
1754 bool tbaa_p;
1755 vec<vn_reference_op_s> saved_operands;
1756
1757 /* The VDEFs of partial defs we come along. */
1758 auto_vec<pd_data, 2> partial_defs;
1759 /* The first defs range to avoid splay tree setup in most cases. */
1760 pd_range first_range;
1761 alias_set_type first_set;
1762 alias_set_type first_base_set;
1763 splay_tree known_ranges;
1764 obstack ranges_obstack;
1765};
1766
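The while loop in the vn_walk_cb_data constructor above scans the mask with wi::ctz, pushing each run of 0 bits as an artificial zero store. A standalone sketch of the same scan over a uint64_t (push_zero_runs is a hypothetical name, not GCC API):

  #include <cstdint>
  #include <cstdio>

  // Report every run of 0 bits in the low PREC bits of W, the runs the
  // constructor above pushes as artificial zero partial defs.
  static void push_zero_runs (uint64_t w, unsigned prec,
                              void (*push) (unsigned pos, unsigned size))
  {
    unsigned pos = 0;
    while (pos < prec)
      {
        // Length of the run of 0 bits starting at pos (wi::ctz analogue).
        unsigned tz = w ? __builtin_ctzll (w) : prec - pos;
        if (pos + tz > prec)
          tz = prec - pos;
        if (tz)
          push (pos, tz);               // the artificial zero store
        pos += tz;
        if (pos == prec)
          break;
        w >>= tz;
        // Skip the following run of 1 bits (wi::ctz (wi::bit_not (w))).
        tz = ~w ? __builtin_ctzll (~w) : prec - pos;
        if (pos + tz > prec)
          tz = prec - pos;
        pos += tz;
        if (pos == prec)
          break;
        w >>= tz;
      }
  }

  int main ()
  {
    // Mask 0x00ff00ff over 32 bits has zero runs at [8,16) and [24,32).
    push_zero_runs (0x00ff00ffULL, 32,
                    [] (unsigned pos, unsigned size)
                    { printf ("zero bits at %u, size %u\n", pos, size); });
    return 0;
  }
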
1767vn_walk_cb_data::~vn_walk_cb_data ()
1768{
1769 if (known_ranges)
1770 {
1771 splay_tree_delete (known_ranges);
1772 obstack_free (&ranges_obstack, NULL);
1773 }
1774 saved_operands.release ();
1775}
1776
1777void *
1778vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1779{
1780 if (first_set != -2)
1781 {
1782 set = first_set;
1783 base_set = first_base_set;
1784 }
1785 if (mask)
1786 {
1787 masked_result = val;
1788 return (void *) -1;
1789 }
1790 vec<vn_reference_op_s> &operands
1791 = saved_operands.exists () ? saved_operands : vr->operands;
1792 return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1793 vr->type, operands, val);
1794}
1795
1796/* pd_range splay-tree helpers. */
1797
1798static int
1799pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1800{
1801 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1802 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1803 if (offset1 < offset2)
1804 return -1;
1805 else if (offset1 > offset2)
1806 return 1;
1807 return 0;
1808}
1809
1810static void *
1811pd_tree_alloc (int size, void *data_)
1812{
1813 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1814 return obstack_alloc (&data->ranges_obstack, size);
1816
1817static void
1818pd_tree_dealloc (void *, void *)
1819{
1820}
1821
1822/* Push PD to the vector of partial definitions returning a
1823 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1824 NULL when we want to continue looking for partial defs or -1
1825 on failure. */
1826
1827void *
1828vn_walk_cb_data::push_partial_def (pd_data pd,
1829 alias_set_type set, alias_set_type base_set,
1830 HOST_WIDE_INT offseti,
1831 HOST_WIDE_INT maxsizei)
1832 {
1833 const HOST_WIDE_INT bufsize = 64;
1834 /* We're using a fixed buffer for encoding so fail early if the object
1835 we want to interpret is bigger. */
1836 if (maxsizei > bufsize * BITS_PER_UNIT
1837 || CHAR_BIT != 8
1838 || BITS_PER_UNIT != 8
1839 /* Not prepared to handle PDP endian. */
1840 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1841 return (void *)-1;
1842
1843 /* Turn too large constant stores into non-constant stores. */
1844 if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1845 pd.rhs = error_mark_node;
1846
1847 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1848 most a partial byte before and/or after the region. */
1849 if (!CONSTANT_CLASS_P (pd.rhs))
1850 {
1851 if (pd.offset < offseti)
1852 {
1853 HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1854 gcc_assert (pd.size > o);
1855 pd.size -= o;
1856 pd.offset += o;
1857 }
1858 if (pd.size > maxsizei)
1859 pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1860 }
1861
1862 pd.offset -= offseti;
1863
1864 bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1865 || CONSTANT_CLASS_P (pd.rhs));
1866 if (partial_defs.is_empty ())
1867 {
1868 /* If we get a clobber upfront, fail. */
1869 if (TREE_CLOBBER_P (pd.rhs))
1870 return (void *)-1;
1871 if (!pd_constant_p)
1872 return (void *)-1;
1873 partial_defs.safe_push (pd);
1874 first_range.offset = pd.offset;
1875 first_range.size = pd.size;
1876 first_set = set;
1877 first_base_set = base_set;
1878 last_vuse_ptr = NULL;
1879 /* Continue looking for partial defs. */
1880 return NULL;
1881 }
1882
1883 if (!known_ranges)
1884 {
1885 /* ??? Optimize the case where the 2nd partial def completes things. */
1886 gcc_obstack_init (&ranges_obstack);
1887 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1888 pd_tree_alloc,
1889 pd_tree_dealloc, this);
1890 splay_tree_insert (known_ranges,
1891 (splay_tree_key)&first_range.offset,
1892 (splay_tree_value)&first_range);
1893 }
1894
1895 pd_range newr = { pd.offset, pd.size };
1896 splay_tree_node n;
1897 pd_range *r;
1898 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
1899 HOST_WIDE_INT loffset = newr.offset + 1;
1900 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1901 && ((r = (pd_range *)n->value), true)
1902 && ranges_known_overlap_p (r->offset, r->size + 1,
1903 newr.offset, newr.size))
1904 {
1905 /* Ignore partial defs already covered. Here we also drop shadowed
1906 clobbers arriving here at the floor. */
1907 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1908 return NULL;
1909 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
1910 }
1911 else
1912 {
1913 /* newr.offset wasn't covered yet, insert the range. */
1914 r = XOBNEW (&ranges_obstack, pd_range);
1915 *r = newr;
1916 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1917 (splay_tree_value)r);
1918 }
1919 /* Merge r which now contains newr and is a member of the splay tree with
1920 adjacent overlapping ranges. */
1921 pd_range *rafter;
1922 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
1923 && ((rafter = (pd_range *)n->value), true)
1924 && ranges_known_overlap_p (r->offset, r->size + 1,
1925 rafter->offset, rafter->size))
1926 {
1927 r->size = MAX (r->offset + r->size,
1928 rafter->offset + rafter->size) - r->offset;
1929 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
1930 }
1931 /* If we get a clobber, fail. */
1932 if (TREE_CLOBBER_P (pd.rhs))
1933 return (void *)-1;
1934 /* Non-constants are OK as long as they are shadowed by a constant. */
1935 if (!pd_constant_p)
1936 return (void *)-1;
1937 partial_defs.safe_push (pd);
1938
1939 /* Now we have merged newr into the range tree. When we have covered
1940 [offseti, sizei] then the tree will contain exactly one node which has
1941 the desired properties and it will be 'r'. */
1942 if (!known_subrange_p (0, maxsizei, r->offset, r->size))
1943 /* Continue looking for partial defs. */
1944 return NULL;
1945
1946 /* Now simply native encode all partial defs in reverse order. */
1947 unsigned ndefs = partial_defs.length ();
1948 /* We support up to 512-bit values (for V8DFmode). */
1949 unsigned char buffer[bufsize + 1];
1950 unsigned char this_buffer[bufsize + 1];
1951 int len;
1952
1953 memset (buffer, 0, bufsize + 1);
1954 unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
1955 while (!partial_defs.is_empty ())
1956 {
1957 pd_data pd = partial_defs.pop ();
1958 unsigned int amnt;
1959 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
1960 {
1961 /* Empty CONSTRUCTOR. */
1962 if (pd.size >= needed_len * BITS_PER_UNIT)
1963 len = needed_len;
1964 else
1965 len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
1966 memset (this_buffer, 0, len);
1967 }
1968 else
1969 {
1970 len = native_encode_expr (pd.rhs, this_buffer, bufsize,
1971 MAX (0, -pd.offset) / BITS_PER_UNIT);
1972 if (len <= 0
1973 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
1974 - MAX (0, -pd.offset) / BITS_PER_UNIT))
1975 {
1976 if (dump_file && (dump_flags & TDF_DETAILS))
1977 fprintf (dump_file, "Failed to encode %u "
1978 "partial definitions\n", ndefs);
1979 return (void *)-1;
1980 }
1981 }
1982
1983 unsigned char *p = buffer;
1984 HOST_WIDE_INT size = pd.size;
1985 if (pd.offset < 0)
1986 size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
1987 this_buffer[len] = 0;
1988 if (BYTES_BIG_ENDIAN)
1989 {
1990 /* LSB of this_buffer[len - 1] byte should be at
1991 pd.offset + pd.size - 1 bits in buffer. */
1992 amnt = ((unsigned HOST_WIDE_INT) pd.offset
1993 + pd.size) % BITS_PER_UNIT;
1994 if (amnt)
1995 shift_bytes_in_array_right (this_buffer, len + 1, amnt);
1996 unsigned char *q = this_buffer;
1997 unsigned int off = 0;
1998 if (pd.offset >= 0)
1999 {
2000 unsigned int msk;
2001 off = pd.offset / BITS_PER_UNIT;
2002 gcc_assert (off < needed_len);
2003 p = buffer + off;
2004 if (size <= amnt)
2005 {
2006 msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2007 *p = (*p & ~msk) | (this_buffer[len] & msk);
2008 size = 0;
2009 }
2010 else
2011 {
2012 if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2013 q = (this_buffer + len
2014 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2015 / BITS_PER_UNIT));
2016 if (pd.offset % BITS_PER_UNIT)
2017 {
2018 msk = -1U << (BITS_PER_UNIT
2019 - (pd.offset % BITS_PER_UNIT));
2020 *p = (*p & msk) | (*q & ~msk);
2021 p++;
2022 q++;
2023 off++;
2024 size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2025 gcc_assert (size >= 0);
2026 }
2027 }
2028 }
2029 else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2030 {
2031 q = (this_buffer + len
2032 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2033 / BITS_PER_UNIT));
2034 if (pd.offset % BITS_PER_UNIT)
2035 {
2036 q++;
2037 size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2038 % BITS_PER_UNIT);
2039 gcc_assert (size >= 0);
2040 }
2041 }
2042 if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2043 > needed_len)
2044 size = (needed_len - off) * BITS_PER_UNIT;
2045 memcpy (p, q, size / BITS_PER_UNIT);
2046 if (size % BITS_PER_UNIT)
2047 {
2048 unsigned int msk
2049 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2050 p += size / BITS_PER_UNIT;
2051 q += size / BITS_PER_UNIT;
2052 *p = (*q & msk) | (*p & ~msk);
2053 }
2054 }
2055 else
2056 {
2057 if (pd.offset >= 0)
2058 {
2059 /* LSB of this_buffer[0] byte should be at pd.offset bits
2060 in buffer. */
2061 unsigned int msk;
2062 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2063 amnt = pd.offset % BITS_PER_UNIT;
2064 if (amnt)
2065 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2066 unsigned int off = pd.offset / BITS_PER_UNIT;
2067 gcc_assert (off < needed_len);
2068 size = MIN (size,
2069 (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2070 p = buffer + off;
2071 if (amnt + size < BITS_PER_UNIT)
2072 {
2073 /* Low amnt bits come from *p, then size bits
2074 from this_buffer[0] and the remaining again from
2075 *p. */
2076 msk = ((1 << size) - 1) << amnt;
2077 *p = (*p & ~msk) | (this_buffer[0] & msk);
2078 size = 0;
2079 }
2080 else if (amnt)
2081 {
2082 msk = -1U << amnt;
2083 *p = (*p & ~msk) | (this_buffer[0] & msk);
2084 p++;
2085 size -= (BITS_PER_UNIT - amnt);
2086 }
2087 }
2088 else
2089 {
2090 amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2091 if (amnt)
2092 size -= BITS_PER_UNIT - amnt;
2093 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2094 if (amnt)
2095 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2096 }
2097 memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2098 p += size / BITS_PER_UNIT;
2099 if (size % BITS_PER_UNIT)
2100 {
2101 unsigned int msk = -1U << (size % BITS_PER_UNIT);
2102 *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2103 & ~msk) | (*p & msk);
2104 }
2105 }
2106 }
2107
2108 tree type = vr->type;
2109 /* Make sure to interpret in a type that has a range covering the whole
2110 access size. */
2111 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2112 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2113 tree val;
2114 if (BYTES_BIG_ENDIAN)
2115 {
2116 unsigned sz = needed_len;
2117 if (maxsizei % BITS_PER_UNIT)
2118 shift_bytes_in_array_right (buffer, needed_len,
2119 BITS_PER_UNIT
2120 - (maxsizei % BITS_PER_UNIT));
2121 if (INTEGRAL_TYPE_P (type))
2122 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2123 if (sz > needed_len)
2124 {
2125 memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2126 val = native_interpret_expr (type, this_buffer, sz);
2127 }
2128 else
2129 val = native_interpret_expr (type, buffer, needed_len);
2130 }
2131 else
2132 val = native_interpret_expr (type, buffer, bufsize);
2133 /* If we chop off bits because the type's precision doesn't match the
2134 memory access size, this is OK when optimizing reads but not when
2135 called from the DSE code during elimination. */
2136 if (val && type != vr->type)
2137 {
2138 if (! int_fits_type_p (val, vr->type))
2139 val = NULL_TREE;
2140 else
2141 val = fold_convert (vr->type, val);
2142 }
2143
2144 if (val)
2145 {
2146 if (dump_file && (dump_flags & TDF_DETAILS))
2147 fprintf (dump_file,
2148 "Successfully combined %u partial definitions\n", ndefs);
2149 /* We are using the alias-set of the first store we encounter which
2150 should be appropriate here. */
2151 return finish (first_set, first_base_set, val);
2152 }
2153 else
2154 {
2155 if (dump_file && (dump_flags & TDF_DETAILS))
2156 fprintf (dump_file,
2157 "Failed to interpret %u encoded partial definitions\n", ndefs);
2158 return (void *)-1;
2159 }
2160}
2161
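push_partial_def above keeps the covered ranges in a splay tree: a new range is merged with a predecessor that reaches it, then swallows any successors it now touches, and the walk stops once a single range covers [0, maxsizei). A sketch of the same bookkeeping with std::map standing in for the splay tree (illustrative only):

  #include <cstdint>
  #include <cstdio>
  #include <map>

  struct pd_ranges
  {
    std::map<int64_t, int64_t> m;  // offset -> size

    void add (int64_t offset, int64_t size)
    {
      // Merge with a predecessor that reaches offset (the "r->size + 1"
      // in the original lets exactly-adjacent ranges merge as well).
      std::map<int64_t, int64_t>::iterator it;
      auto next = m.upper_bound (offset);
      if (next != m.begin ()
          && (it = std::prev (next), it->first + it->second >= offset))
        it->second = std::max (it->first + it->second, offset + size)
                     - it->first;
      else
        it = m.emplace (offset, size).first;
      // Swallow successors the grown range now touches or overlaps.
      for (next = std::next (it);
           next != m.end () && it->first + it->second >= next->first;
           next = m.erase (next))
        it->second = std::max (it->first + it->second,
                               next->first + next->second) - it->first;
    }

    bool covers (int64_t maxsize) const
    {
      auto it = m.find (0);
      return it != m.end () && it->second >= maxsize;
    }
  };

  int main ()
  {
    pd_ranges r;
    r.add (0, 8); r.add (16, 8); r.add (8, 8);  // out-of-order partial defs
    printf ("%s\n", r.covers (24) ? "covered" : "incomplete");
    return 0;
  }
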
2162/* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2163 with the current VUSE and performs the expression lookup. */
2164
2165static void *
2166 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2167{
2168 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2169 vn_reference_t vr = data->vr;
2170 vn_reference_s **slot;
2171 hashval_t hash;
2172
2173 /* If we have partial definitions recorded we have to go through
2174 vn_reference_lookup_3. */
2175 if (!data->partial_defs.is_empty ())
2176 return NULL;
2177
2178 if (data->last_vuse_ptr)
2179 {
2180 *data->last_vuse_ptr = vuse;
2181 data->last_vuse = vuse;
2182 }
2183
2184 /* Fixup vuse and hash. */
2185 if (vr->vuse)
2186 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2187 vr->vuse = vuse_ssa_val (vuse);
2188 if (vr->vuse)
2189 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2190
2191 hash = vr->hashcode;
2192 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2193 if (slot)
2194 {
2195 if ((*slot)->result && data->saved_operands.exists ())
2196 return data->finish (vr->set, vr->base_set, (*slot)->result);
2197 return *slot;
2198 }
2199
2200 return NULL;
2201}
2202
2203/* Lookup an existing or insert a new vn_reference entry into the
2204 value table for the VUSE, SET, TYPE, OPERANDS reference which
2205 has the value VALUE which is either a constant or an SSA name. */
2206
2207static vn_reference_t
2208vn_reference_lookup_or_insert_for_pieces (tree vuse,
2209 alias_set_type set,
2210 alias_set_type base_set,
2211 tree type,
2212 vec<vn_reference_op_s,
2213 va_heap> operands,
2214 tree value)
2215{
2216 vn_reference_s vr1;
2217 vn_reference_t result;
2218 unsigned value_id;
2219 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2220 vr1.operands = operands;
2221 vr1.type = type;
2222 vr1.set = set;
2223 vr1.base_set = base_set;
2224 vr1.hashcode = vn_reference_compute_hash (&vr1);
2225 if (vn_reference_lookup_1 (&vr1, &result))
2226 return result;
2227 if (TREE_CODE (value) == SSA_NAME)
2228 value_id = VN_INFO (value)->value_id;
2229 else
2230 value_id = get_or_alloc_constant_value_id (value);
2231 return vn_reference_insert_pieces (vuse, set, base_set, type,
2232 operands.copy (), value, value_id);
2233}
2234
2235/* Return a value-number for RCODE OPS... either by looking up an existing
2236 value-number for the simplified result or by inserting the operation if
2237 INSERT is true. */
2238
2239static tree
2240vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
2241{
2242 tree result = NULL_TREE;
2243 /* We will be creating a value number for
2244 RCODE (OPS...).
2245 So first simplify and lookup this expression to see if it
2246 is already available. */
2247 /* For simplification valueize. */
2248 unsigned i;
2249 for (i = 0; i < res_op->num_ops; ++i)
2250 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2251 {
2252 tree tem = vn_valueize (res_op->ops[i]);
2253 if (!tem)
2254 break;
2255 res_op->ops[i] = tem;
2256 }
2257 /* If valueization of an operand fails (it is not available), skip
2258 simplification. */
2259 bool res = false;
2260 if (i == res_op->num_ops)
2261 {
2262 mprts_hook = vn_lookup_simplify_result;
2263 res = res_op->resimplify (NULL, vn_valueize);
2264 mprts_hook = NULL;
2265 }
2266 gimple *new_stmt = NULL;
2267 if (res
2268 && gimple_simplified_result_is_gimple_val (res_op))
2269 {
2270 /* The expression is already available. */
2271 result = res_op->ops[0];
2272 /* Valueize it, simplification returns sth in AVAIL only. */
2273 if (TREE_CODE (result) == SSA_NAME)
2274 result = SSA_VAL (result);
2275 }
2276 else
2277 {
2278 tree val = vn_lookup_simplify_result (res_op);
2279 if (!val && insert)
2280 {
2281 gimple_seq stmts = NULL;
2282 result = maybe_push_res_to_seq (res_op, &stmts);
2283 if (result)
2284 {
2285 gcc_assert (gimple_seq_singleton_p (stmts));
2286 new_stmt = gimple_seq_first_stmt (stmts);
2287 }
2288 }
2289 else
2290 /* The expression is already available. */
2291 result = val;
2292 }
2293 if (new_stmt)
2294 {
2295 /* The expression is not yet available, value-number lhs to
2296 the new SSA_NAME we created. */
2297 /* Initialize value-number information properly. */
2298 vn_ssa_aux_t result_info = VN_INFO (result);
2299 result_info->valnum = result;
2300 result_info->value_id = get_next_value_id ();
2301 result_info->visited = 1;
2302 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2303 new_stmt);
2304 result_info->needs_insertion = true;
2305 /* ??? PRE phi-translation inserts NARYs without corresponding
2306 SSA name result. Re-use those but set their result according
2307 to the stmt we just built. */
2308 vn_nary_op_t nary = NULL;
2309 vn_nary_op_lookup_stmt (new_stmt, &nary);
2310 if (nary)
2311 {
2312 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2313 nary->u.result = gimple_assign_lhs (new_stmt);
2314 }
2315 /* As all "inserted" statements are singleton SCCs, insert
2316 to the valid table. This is strictly needed to
2317 avoid re-generating new value SSA_NAMEs for the same
2318 expression during SCC iteration over and over (the
2319 optimistic table gets cleared after each iteration).
2320 We do not need to insert into the optimistic table, as
2321 lookups there will fall back to the valid table. */
2322 else
2323 {
2324 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2325 vn_nary_op_t vno1
2326 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2327 vno1->value_id = result_info->value_id;
2328 vno1->length = length;
2329 vno1->predicated_values = 0;
2330 vno1->u.result = result;
2331 init_vn_nary_op_from_stmt (vno1, new_stmt);
2332 vn_nary_op_insert_into (vno1, valid_info->nary, true);
2333 /* Also do not link it into the undo chain. */
2334 last_inserted_nary = vno1->next;
2335 vno1->next = (vn_nary_op_t)(void *)-1;
2336 }
2337 if (dump_file && (dump_flags & TDF_DETAILS))
2338 {
2339 fprintf (dump_file, "Inserting name ");
2340 print_generic_expr (dump_file, result);
2341 fprintf (dump_file, " for expression ");
2342 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2343 fprintf (dump_file, "\n");
2344 }
2345 }
2346 return result;
2347}
2348
2349/* Return a value-number for RCODE OPS... either by looking up an existing
2350 value-number for the simplified result or by inserting the operation. */
2351
2352static tree
2353vn_nary_build_or_lookup (gimple_match_op *res_op)
2354{
2355 return vn_nary_build_or_lookup_1 (res_op, true);
2356}
2357
2358/* Try to simplify the expression RCODE OPS... of type TYPE and return
2359 its value if present. */
2360
2361tree
2362vn_nary_simplify (vn_nary_op_t nary)
2363{
2364 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2365 return NULL_TREE;
2366 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2367 nary->type, nary->length);
2368 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2369 return vn_nary_build_or_lookup_1 (&op, false);
2370}
2371
2372/* Elimination engine. */
2373
2374class eliminate_dom_walker : public dom_walker
2375{
2376public:
2377 eliminate_dom_walker (cdi_direction, bitmap);
2378 ~eliminate_dom_walker ();
2379
2380 virtual edge before_dom_children (basic_block);
2381 virtual void after_dom_children (basic_block);
2382
2383 virtual tree eliminate_avail (basic_block, tree op);
2384 virtual void eliminate_push_avail (basic_block, tree op);
2385 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2386
2387 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2388
2389 unsigned eliminate_cleanup (bool region_p = false);
2390
2391 bool do_pre;
2392 unsigned int el_todo;
2393 unsigned int eliminations;
2394 unsigned int insertions;
2395
2396 /* SSA names that had their defs inserted by PRE if do_pre. */
2397 bitmap inserted_exprs;
2398
2399 /* Blocks with statements that have had their EH properties changed. */
2400 bitmap need_eh_cleanup;
2401
2402 /* Blocks with statements that have had their AB properties changed. */
2403 bitmap need_ab_cleanup;
2404
2405 /* Local state for the eliminate domwalk. */
2406 auto_vec<gimple *> to_remove;
2407 auto_vec<gimple *> to_fixup;
2408 auto_vec<tree> avail;
2409 auto_vec<tree> avail_stack;
2410};
2411
2412/* Adaptor to the elimination engine using RPO availability. */
2413
2414class rpo_elim : public eliminate_dom_walker
2415{
2416public:
2417 rpo_elim(basic_block entry_)
2418 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2419 m_avail_freelist (NULL) {}
2420
2421 virtual tree eliminate_avail (basic_block, tree op);
2422
2423 virtual void eliminate_push_avail (basic_block, tree);
2424
2425 basic_block entry;
2426 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2427 obstack. */
2428 vn_avail *m_avail_freelist;
2429};
2430
2431/* Global RPO state for access from hooks. */
2432static eliminate_dom_walker *rpo_avail;
2433basic_block vn_context_bb;
2434
2435/* Return true if BASE1 and BASE2 can be adjusted so they have the
2436 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2437 Otherwise return false. */
2438
2439static bool
2440adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2441 tree base2, poly_int64 *offset2)
2442{
2443 poly_int64 soff;
2444 if (TREE_CODE (base1) == MEM_REF
2445 && TREE_CODE (base2) == MEM_REF)
2446 {
2447 if (mem_ref_offset (base1).to_shwi (&soff))
2448 {
2449 base1 = TREE_OPERAND (base1, 0);
2450 *offset1 += soff * BITS_PER_UNIT;
2451 }
2452 if (mem_ref_offset (base2).to_shwi (&soff))
2453 {
2454 base2 = TREE_OPERAND (base2, 0);
2455 *offset2 += soff * BITS_PER_UNIT;
2456 }
2457 return operand_equal_p (base1, base2, 0);
2458 }
2459 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2460}
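Worked example with hypothetical operands: for base1 = MEM[p + 4] with
*offset1 = 0 and base2 = MEM[p] with *offset2 = 32, both MEM_REF offsets are
folded into the bit offsets, so base1 becomes p with
*offset1 = 4 * BITS_PER_UNIT = 32 and base2 becomes p with *offset2 = 32;
operand_equal_p then compares the stripped bases directly.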
2461
2462/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2463 from the statement defining VUSE and if not successful tries to
2464 translate *REF and *VR through an aggregate copy at the definition
2465 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2466 of *REF and *VR. If only disambiguation was performed then
2467 *DISAMBIGUATE_ONLY is set to true. */
2468
2469static void *
2470vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2471 translate_flags *disambiguate_only)
2472{
2473 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2474 vn_reference_t vr = data->vr;
2475 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2476 tree base = ao_ref_base (ref);
2477 HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2478 static vec<vn_reference_op_s> lhs_ops;
2479 ao_ref lhs_ref;
2480 bool lhs_ref_ok = false;
2481 poly_int64 copy_size;
2482
2483 /* First try to disambiguate after value-replacing in the definitions LHS. */
2484 if (is_gimple_assign (def_stmt))
2485 {
2486 tree lhs = gimple_assign_lhs (def_stmt);
2487 bool valueized_anything = false;
2488 /* Avoid re-allocation overhead. */
2489 lhs_ops.truncate (0);
2490 basic_block saved_rpo_bb = vn_context_bb;
2491 vn_context_bb = gimple_bb (def_stmt);
2492 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2493 {
2494 copy_reference_ops_from_ref (lhs, &lhs_ops);
2495 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
2496 }
2497 vn_context_bb = saved_rpo_bb;
2498 ao_ref_init (&lhs_ref, lhs);
2499 lhs_ref_ok = true;
2500 if (valueized_anything
2501 && ao_ref_init_from_vn_reference
2502 (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2503 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2504 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2505 {
2506 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2507 return NULL;
2508 }
2509
2510 /* Besides valueizing the LHS we can also use access-path based
2511 disambiguation on the original non-valueized ref. */
2512 if (!ref->ref
2513 && lhs_ref_ok
2514 && data->orig_ref.ref)
2515 {
2516 /* We want to use the non-valueized LHS for this, but avoid redundant
2517 work. */
2518 ao_ref *lref = &lhs_ref;
2519 ao_ref lref_alt;
2520 if (valueized_anything)
2521 {
2522 ao_ref_init (&lref_alt, lhs);
2523 lref = &lref_alt;
2524 }
2525 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2526 {
2527 *disambiguate_only = (valueized_anything
2528 ? TR_VALUEIZE_AND_DISAMBIGUATE
2529 : TR_DISAMBIGUATE);
2530 return NULL;
2531 }
2532 }
2533
2534 /* If we reach a clobbering statement try to skip it and see if
2535 we find a VN result with exactly the same value as the
2536 possible clobber. In this case we can ignore the clobber
2537 and return the found value. */
2538 if (is_gimple_reg_type (TREE_TYPE (lhs))
2539 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2540 && (ref->ref || data->orig_ref.ref))
2541 {
2542 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2543 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2544 data->last_vuse_ptr = NULL;
2545 tree saved_vuse = vr->vuse;
2546 hashval_t saved_hashcode = vr->hashcode;
2547 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2548 /* Need to restore vr->vuse and vr->hashcode. */
2549 vr->vuse = saved_vuse;
2550 vr->hashcode = saved_hashcode;
2551 data->last_vuse_ptr = saved_last_vuse_ptr;
2552 if (res && res != (void *)-1)
2553 {
2554 vn_reference_t vnresult = (vn_reference_t) res;
2555 tree rhs = gimple_assign_rhs1 (def_stmt);
2556 if (TREE_CODE (rhs) == SSA_NAME)
2557 rhs = SSA_VAL (rhs);
2558 if (vnresult->result
2559 && operand_equal_p (vnresult->result, rhs, 0)
2560 /* We have to honor our promise about union type punning
2561 and also support arbitrary overlaps with
2562 -fno-strict-aliasing. So simply resort to alignment to
2563 rule out overlaps. Do this check last because it is
2564 quite expensive compared to the hash-lookup above. */
2565 && multiple_p (get_object_alignment
2566 (ref->ref ? ref->ref : data->orig_ref.ref),
2567 ref->size)
2568 && multiple_p (get_object_alignment (lhs), ref->size))
2569 return res;
2570 }
2571 }
2572 }
2573 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2574 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2575 && gimple_call_num_args (def_stmt) <= 4)
2576 {
2577 /* For builtin calls valueize its arguments and call the
2578 alias oracle again. Valueization may improve points-to
2579 info of pointers and constify size and position arguments.
2580 Originally this was motivated by PR61034 which has
2581 conditional calls to free falsely clobbering ref because
2582 of imprecise points-to info of the argument. */
2583 tree oldargs[4];
2584 bool valueized_anything = false;
2585 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2586 {
2587 oldargs[i] = gimple_call_arg (def_stmt, i);
2588 tree val = vn_valueize (oldargs[i]);
2589 if (val != oldargs[i])
2590 {
2591 gimple_call_set_arg (def_stmt, i, val);
2592 valueized_anything = true;
2593 }
2594 }
2595 if (valueized_anything)
2596 {
2597 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2598 ref, data->tbaa_p);
2599 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2600 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2601 if (!res)
2602 {
2603 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2604 return NULL;
2605 }
2606 }
2607 }
2608
2609 if (*disambiguate_only > TR_TRANSLATE)
2610 return (void *)-1;
2611
2612 /* If we cannot constrain the size of the reference we cannot
2613 test if anything kills it. */
2614 if (!ref->max_size_known_p ())
2615 return (void *)-1;
2616
2617 poly_int64 offset = ref->offset;
2618 poly_int64 maxsize = ref->max_size;
2619
2620 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2621 from that definition.
2622 1) Memset. */
2623 if (is_gimple_reg_type (vr->type)
2624 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2625 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2626 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2627 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2628 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2629 && CHAR_BIT == 8
2630 && BITS_PER_UNIT == 8
2631 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2632 && offset.is_constant (&offseti)
2633 && ref->size.is_constant (&sizei)
2634 && (offseti % BITS_PER_UNIT == 0
2635 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2636 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2637 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2638 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2639 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2640 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2641 {
2642 tree base2;
2643 poly_int64 offset2, size2, maxsize2;
2644 bool reverse;
2645 tree ref2 = gimple_call_arg (def_stmt, 0);
2646 if (TREE_CODE (ref2) == SSA_NAME)
2647 {
2648 ref2 = SSA_VAL (ref2);
2649 if (TREE_CODE (ref2) == SSA_NAME
2650 && (TREE_CODE (base) != MEM_REF
2651 || TREE_OPERAND (base, 0) != ref2))
2652 {
2653 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2654 if (gimple_assign_single_p (def_stmt)
2655 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2656 ref2 = gimple_assign_rhs1 (def_stmt);
2657 }
2658 }
2659 if (TREE_CODE (ref2) == ADDR_EXPR)
2660 {
2661 ref2 = TREE_OPERAND (ref2, 0);
2662 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2663 &reverse);
2664 if (!known_size_p (maxsize2)
2665 || !known_eq (maxsize2, size2)
2666 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2667 return (void *)-1;
2668 }
2669 else if (TREE_CODE (ref2) == SSA_NAME)
2670 {
2671 poly_int64 soff;
2672 if (TREE_CODE (base) != MEM_REF
2673 || !(mem_ref_offset (base)
2674 << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2675 return (void *)-1;
2676 offset += soff;
2677 offset2 = 0;
2678 if (TREE_OPERAND (base, 0) != ref2)
2679 {
2680 gimple *def = SSA_NAME_DEF_STMT (ref2);
2681 if (is_gimple_assign (def)
2682 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2683 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2684 && poly_int_tree_p (gimple_assign_rhs2 (def)))
2685 {
2686 tree rhs2 = gimple_assign_rhs2 (def);
2687 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2688 SIGNED)
2689 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2690 return (void *)-1;
2691 ref2 = gimple_assign_rhs1 (def);
2692 if (TREE_CODE (ref2) == SSA_NAME)
2693 ref2 = SSA_VAL (ref2);
2694 }
2695 else
2696 return (void *)-1;
2697 }
2698 }
2699 else
2700 return (void *)-1;
2701 tree len = gimple_call_arg (def_stmt, 2);
2702 HOST_WIDE_INT leni, offset2i;
2703 if (TREE_CODE (len) == SSA_NAME)
2704 len = SSA_VAL (len);
2705 /* Sometimes the above trickery is smarter than alias analysis. Take
2706 advantage of that. */
2707 if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2708 (wi::to_poly_offset (len)
2709 << LOG2_BITS_PER_UNIT)))
2710 return NULL;
2711 if (data->partial_defs.is_empty ()
2712 && known_subrange_p (offset, maxsize, offset2,
2713 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2714 {
2715 tree val;
2716 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2717 val = build_zero_cst (vr->type);
2718 else if (INTEGRAL_TYPE_P (vr->type)
2719 && known_eq (ref->size, 8)
2720 && offseti % BITS_PER_UNIT == 0)
2721 {
2722 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2723 vr->type, gimple_call_arg (def_stmt, 1));
2724 val = vn_nary_build_or_lookup (&res_op);
2725 if (!val
2726 || (TREE_CODE (val) == SSA_NAME
2727 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2728 return (void *)-1;
2729 }
2730 else
2731 {
2732 unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2733 if (INTEGRAL_TYPE_P (vr->type))
2734 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2735 unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2736 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2737 buflen);
2738 if (BYTES_BIG_ENDIAN)
2739 {
2740 unsigned int amnt
2741 = (((unsigned HOST_WIDE_INT) offseti + sizei)
2742 % BITS_PER_UNIT);
2743 if (amnt)
2744 {
2745 shift_bytes_in_array_right (buf, buflen,
2746 BITS_PER_UNIT - amnt);
2747 buf++;
2748 buflen--;
2749 }
2750 }
2751 else if (offseti % BITS_PER_UNIT != 0)
2752 {
2753 unsigned int amnt
2754 = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2755 % BITS_PER_UNIT);
2756 shift_bytes_in_array_left (buf, buflen, amnt);
2757 buf++;
2758 buflen--;
2759 }
2760 val = native_interpret_expr (vr->type, buf, buflen);
2761 if (!val)
2762 return (void *)-1;
2763 }
2764 return data->finish (0, 0, val);
2765 }
2766 /* For now handle clearing memory with partial defs. */
2767 else if (known_eq (ref->size, maxsize)
2768 && integer_zerop (gimple_call_arg (def_stmt, 1))
2769 && tree_fits_poly_int64_p (len)
2770 && tree_to_poly_int64 (len).is_constant (&leni)
2771 && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2772 && offset.is_constant (&offseti)
2773 && offset2.is_constant (&offset2i)
2774 && maxsize.is_constant (&maxsizei)
2775 && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2776 leni << LOG2_BITS_PER_UNIT))
2777 {
2778 pd_data pd;
2779 pd.rhs = build_constructor (NULL_TREE, NULL);
2780 pd.offset = offset2i;
2781 pd.size = leni << LOG2_BITS_PER_UNIT;
2782 return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2783 }
2784 }
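A minimal sketch of the source pattern case 1 handles (illustration only,
not part of the GCC sources):

  char buf[16];
  memset (buf, 0, sizeof buf);  /* def_stmt: the may-def looked through  */
  int x = *(int *) buf;         /* *ref: the load folds to the constant 0  */

For a non-zero constant fill byte, the NOP_EXPR and native_interpret_expr
paths above synthesize the loaded value instead.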
2785
2786 /* 2) Assignment from an empty CONSTRUCTOR. */
2787 else if (is_gimple_reg_type (vr->type)
2788 && gimple_assign_single_p (def_stmt)
2789 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2790 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2791 {
2792 tree base2;
2793 poly_int64 offset2, size2, maxsize2;
2794 HOST_WIDE_INT offset2i, size2i;
2795 gcc_assert (lhs_ref_ok);
2796 base2 = ao_ref_base (&lhs_ref);
2797 offset2 = lhs_ref.offset;
2798 size2 = lhs_ref.size;
2799 maxsize2 = lhs_ref.max_size;
2800 if (known_size_p (maxsize2)
2801 && known_eq (maxsize2, size2)
2802 && adjust_offsets_for_equal_base_address (base, &offset,
2803 base2, &offset2))
2804 {
2805 if (data->partial_defs.is_empty ()
2806 && known_subrange_p (offset, maxsize, offset2, size2))
2807 {
2808 /* While technically undefined behavior do not optimize
2809 a full read from a clobber. */
2810 if (gimple_clobber_p (def_stmt))
2811 return (void *)-1;
2812 tree val = build_zero_cst (vr->type);
2813 return data->finish (ao_ref_alias_set (&lhs_ref),
2814 ao_ref_base_alias_set (&lhs_ref), val);
2815 }
2816 else if (known_eq (ref->size, maxsize)
2817 && maxsize.is_constant (&maxsizei)
2818 && offset.is_constant (&offseti)
2819 && offset2.is_constant (&offset2i)
2820 && size2.is_constant (&size2i)
2821 && ranges_known_overlap_p (offseti, maxsizei,
2822 offset2i, size2i))
2823 {
2824 /* Let clobbers be consumed by the partial-def tracker
2825 which can choose to ignore them if they are shadowed
2826 by a later def. */
2827 pd_data pd;
2828 pd.rhs = gimple_assign_rhs1 (def_stmt);
2829 pd.offset = offset2i;
2830 pd.size = size2i;
2831 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2832 ao_ref_base_alias_set (&lhs_ref),
2833 offseti, maxsizei);
2834 }
2835 }
2836 }
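A minimal sketch of case 2 (illustration only, not part of the GCC sources):

  struct S { int a, b; } s = {};  /* RHS is an empty CONSTRUCTOR  */
  int x = s.a;                    /* the read folds to build_zero_cst: 0  */

Reads only partially covered by the store are instead queued as partial defs.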
2837
2838 /* 3) Assignment from a constant.  We can use fold's native encode/interpret
2839 routines to extract the assigned bits.  */
2840 else if (known_eq (ref->size, maxsize)
2841 && is_gimple_reg_type (vr->type)
2842 && !contains_storage_order_barrier_p (vr->operands)
2843 && gimple_assign_single_p (def_stmt)
2844 && CHAR_BIT == 8
2845 && BITS_PER_UNIT == 8
2846 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2847 /* native_encode and native_decode operate on arrays of bytes
2848 and so fundamentally need a compile-time size and offset.  */
2849 && maxsize.is_constant (&maxsizei)
2850 && offset.is_constant (&offseti)
2851 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2852 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2853 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2854 {
2855 tree lhs = gimple_assign_lhs (def_stmt);
2856 tree base2;
2857 poly_int64 offset2, size2, maxsize2;
2858 HOST_WIDE_INT offset2i, size2i;
2859 bool reverse;
2860 gcc_assert (lhs_ref_ok);
2861 base2 = ao_ref_base (&lhs_ref);
2862 offset2 = lhs_ref.offset;
2863 size2 = lhs_ref.size;
2864 maxsize2 = lhs_ref.max_size;
2865 reverse = reverse_storage_order_for_component_p (lhs);
2866 if (base2
2867 && !reverse
2868 && !storage_order_barrier_p (lhs)
2869 && known_eq (maxsize2, size2)
2870 && adjust_offsets_for_equal_base_address (base, &offset,
2871 base2, &offset2)
2872 && offset.is_constant (&offseti)
2873 && offset2.is_constant (&offset2i)
2874 && size2.is_constant (&size2i))
2875 {
2876 if (data->partial_defs.is_empty ()
2877 && known_subrange_p (offseti, maxsizei, offset2, size2))
2878 {
2879 /* We support up to 512-bit values (for V8DFmode). */
2880 unsigned char buffer[65];
2881 int len;
2882
2883 tree rhs = gimple_assign_rhs1 (def_stmt);
2884 if (TREE_CODE (rhs) == SSA_NAME)
2885 rhs = SSA_VAL (rhs);
2886 len = native_encode_expr (rhs,
2887 buffer, sizeof (buffer) - 1,
2888 (offseti - offset2i) / BITS_PER_UNIT);
2889 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2890 {
2891 tree type = vr->type;
2892 unsigned char *buf = buffer;
2893 unsigned int amnt = 0;
2894 /* Make sure to interpret in a type that has a range
2895 covering the whole access size. */
2896 if (INTEGRAL_TYPE_P (vr->type)
2897 && maxsizei != TYPE_PRECISION (vr->type))
2898 type = build_nonstandard_integer_type (maxsizei,
2899 TYPE_UNSIGNED (type));
2900 if (BYTES_BIG_ENDIAN)
2901 {
2902 /* For big-endian native_encode_expr stored the rhs
2903 such that the LSB of it is the LSB of buffer[len - 1].
2904 That bit is stored into memory at position
2905 offset2 + size2 - 1, i.e. in byte
2906 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
2907 E.g. for offset2 1 and size2 14, rhs -1 and memory
2908 previously cleared that is:
2909 0 1
2910 01111111|11111110
2911 Now, if we want to extract offset 2 and size 12 from
2912 it using native_interpret_expr (which actually works
2913 for integral bitfield types in terms of byte size of
2914 the mode), the native_encode_expr stored the value
2915 into buffer as
2916 XX111111|11111111
2917 and returned len 2 (the X bits are outside of
2918 precision).
2919 Let sz be maxsize / BITS_PER_UNIT if not extracting
2920 a bitfield, and GET_MODE_SIZE otherwise.
2921 We need to align the LSB of the value we want to
2922 extract as the LSB of buf[sz - 1].
2923 The LSB from memory we need to read is at position
2924 offset + maxsize - 1. */
2925 HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
2926 if (INTEGRAL_TYPE_P (type))
2927 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2928 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2929 - offseti - maxsizei) % BITS_PER_UNIT;
2930 if (amnt)
2931 shift_bytes_in_array_right (buffer, len, amnt);
2932 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2933 - offseti - maxsizei - amnt) / BITS_PER_UNIT;
2934 if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
2935 len = 0;
2936 else
2937 {
2938 buf = buffer + len - sz - amnt;
2939 len -= (buf - buffer);
2940 }
2941 }
2942 else
2943 {
2944 amnt = ((unsigned HOST_WIDE_INT) offset2i
2945 - offseti) % BITS_PER_UNIT;
2946 if (amnt)
2947 {
2948 buffer[len] = 0;
2949 shift_bytes_in_array_left (buffer, len + 1, amnt);
2950 buf = buffer + 1;
2951 }
2952 }
2953 tree val = native_interpret_expr (type, buf, len);
2954 /* If we chop off bits because the type's precision doesn't
2955 match the memory access size this is ok when optimizing
2956 reads but not when called from the DSE code during
2957 elimination.  */
2958 if (val
2959 && type != vr->type)
2960 {
2961 if (! int_fits_type_p (val, vr->type))
2962 val = NULL_TREE;
2963 else
2964 val = fold_convert (vr->type, val);
2965 }
2966
2967 if (val)
2968 return data->finish (ao_ref_alias_set (&lhs_ref),
2969 ao_ref_base_alias_set (&lhs_ref), val);
2970 }
2971 }
2972 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
2973 size2i))
2974 {
2975 pd_data pd;
2976 tree rhs = gimple_assign_rhs1 (def_stmt);
2977 if (TREE_CODE (rhs) == SSA_NAME)
2978 rhs = SSA_VAL (rhs);
2979 pd.rhs = rhs;
2980 pd.offset = offset2i;
2981 pd.size = size2i;
2982 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2983 ao_ref_base_alias_set (&lhs_ref),
2984 offseti, maxsizei);
2985 }
2986 }
2987 }
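A minimal sketch of case 3 (illustration only, not part of the GCC sources):

  union U { float f; unsigned u; } v;
  v.f = 1.0f;        /* constant store: native_encode_expr emits its bytes  */
  unsigned x = v.u;  /* the load re-reads them via native_interpret_expr  */

This is how type-punning loads of constants are value-numbered without
requiring compatible types.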
2988
2989 /* 4) Assignment from an SSA name whose definition we may be able
2990 to access pieces from or we can combine to a larger entity.  */
2991 else if (known_eq (ref->size, maxsize)
2992 && is_gimple_reg_type (vr->type)
2993 && !contains_storage_order_barrier_p (vr->operands)
2994 && gimple_assign_single_p (def_stmt)
2995 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2996 {
2997 tree lhs = gimple_assign_lhs (def_stmt);
2998 tree base2;
2999 poly_int64 offset2, size2, maxsize2;
3000 HOST_WIDE_INT offset2i, size2i, offseti;
3001 bool reverse;
3002 gcc_assert (lhs_ref_ok);
3003 base2 = ao_ref_base (&lhs_ref);
3004 offset2 = lhs_ref.offset;
3005 size2 = lhs_ref.size;
3006 maxsize2 = lhs_ref.max_size;
3007 reverse = reverse_storage_order_for_component_p (lhs);
3008 tree def_rhs = gimple_assign_rhs1 (def_stmt);
3009 if (!reverse
3010 && !storage_order_barrier_p (lhs)
3011 && known_size_p (maxsize2)
3012 && known_eq (maxsize2, size2)
3013 && adjust_offsets_for_equal_base_address (base, &offset,
3014 base2, &offset2))
3015 {
3016 if (data->partial_defs.is_empty ()
3017 && known_subrange_p (offset, maxsize, offset2, size2)
3018 /* ??? We can't handle bitfield precision extracts without
3019 either using an alternate type for the BIT_FIELD_REF and
3020 then doing a conversion or possibly adjusting the offset
3021 according to endianness. */
3022 && (! INTEGRAL_TYPE_P (vr->type)
3023 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3024 && multiple_p (ref->size, BITS_PER_UNIT))
3025 {
3026 tree val = NULL_TREE;
3027 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3028 || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3029 {
3030 gimple_match_op op (gimple_match_cond::UNCOND,
3031 BIT_FIELD_REF, vr->type,
3032 SSA_VAL (def_rhs),
3033 bitsize_int (ref->size),
3034 bitsize_int (offset - offset2));
3035 val = vn_nary_build_or_lookup (&op);
3036 }
3037 else if (known_eq (ref->size, size2))
3038 {
3039 gimple_match_op op (gimple_match_cond::UNCOND,
3040 VIEW_CONVERT_EXPR, vr->type,
3041 SSA_VAL (def_rhs));
3042 val = vn_nary_build_or_lookup (&op);
3043 }
3044 if (val
3045 && (TREE_CODE (val) != SSA_NAME
3046 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3047 return data->finish (ao_ref_alias_set (&lhs_ref),
3048 ao_ref_base_alias_set (&lhs_ref), val);
3049 }
3050 else if (maxsize.is_constant (&maxsizei)
3051 && offset.is_constant (&offseti)
3052 && offset2.is_constant (&offset2i)
3053 && size2.is_constant (&size2i)
3054 && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3055 {
3056 pd_data pd;
3057 pd.rhs = SSA_VAL (def_rhs);
3058 pd.offset = offset2i;
3059 pd.size = size2i;
3060 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3061 ao_ref_base_alias_set (&lhs_ref),
3062 offseti, maxsizei);
3063 }
3064 }
3065 }
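A minimal sketch of case 4 (illustration only, not part of the GCC sources;
val_1 is a hypothetical SSA name):

  s = val_1;     /* whole-object store of an SSA name  */
  x = s.lo;      /* re-expressed as BIT_FIELD_REF <val_1, 32, 0>  */

A read of the full size uses VIEW_CONVERT_EXPR instead of BIT_FIELD_REF, and
the result is value-numbered through vn_nary_build_or_lookup.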
3066
3067 /* 5) For aggregate copies translate the reference through them if
3068 the copy kills ref. */
3069 else if (data->vn_walk_kind == VN_WALKREWRITE
3070 && gimple_assign_single_p (def_stmt)
3071 && (DECL_P (gimple_assign_rhs1 (def_stmt))
3072 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3073 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3074 {
3075 tree base2;
3076 int i, j, k;
3077 auto_vec<vn_reference_op_s> rhs;
3078 vn_reference_op_t vro;
3079 ao_ref r;
3080
3081 gcc_assert (lhs_ref_ok);
3082
3083 /* See if the assignment kills REF. */
3084 base2 = ao_ref_base (&lhs_ref);
3085 if (!lhs_ref.max_size_known_p ()
3086 || (base != base2
3087 && (TREE_CODE (base) != MEM_REF
3088 || TREE_CODE (base2) != MEM_REF
3089 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3090 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3091 TREE_OPERAND (base2, 1))))
3092 || !stmt_kills_ref_p (def_stmt, ref))
3093 return (void *)-1;
3094
3095 /* Find the common base of ref and the lhs. lhs_ops already
3096 contains valueized operands for the lhs. */
3097 i = vr->operands.length () - 1;
3098 j = lhs_ops.length () - 1;
3099 while (j >= 0 && i >= 0
3100 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3101 {
3102 i--;
3103 j--;
3104 }
3105
3106 /* ??? The innermost op should always be a MEM_REF and we already
3107 checked that the assignment to the lhs kills vr. Thus for
3108 aggregate copies using char[] types the vn_reference_op_eq
3109 may fail when comparing types for compatibility. But we really
3110 don't care here - further lookups with the rewritten operands
3111 will simply fail if we messed up types too badly. */
3112 poly_int64 extra_off = 0;
3113 if (j == 0 && i >= 0
3114 && lhs_ops[0].opcode == MEM_REF
3115 && maybe_ne (lhs_ops[0].off, -1))
3116 {
3117 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3118 i--, j--;
3119 else if (vr->operands[i].opcode == MEM_REF
3120 && maybe_ne (vr->operands[i].off, -1))
3121 {
3122 extra_off = vr->operands[i].off - lhs_ops[0].off;
3123 i--, j--;
3124 }
3125 }
3126
3127 /* i now points to the first additional op.
3128 ??? LHS may not be completely contained in VR, one or more
3129 VIEW_CONVERT_EXPRs could be in its way. We could at least
3130 try handling outermost VIEW_CONVERT_EXPRs. */
3131 if (j != -1)
3132 return (void *)-1;
3133
3134 /* Punt if the additional ops contain a storage order barrier. */
3135 for (k = i; k >= 0; k--)
3136 {
3137 vro = &vr->operands[k];
3138 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3139 return (void *)-1;
3140 }
3141
3142 /* Now re-write REF to be based on the rhs of the assignment. */
3143 tree rhs1 = gimple_assign_rhs1 (def_stmt);
3144 copy_reference_ops_from_ref (rhs1, &rhs);
3145
3146 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3147 if (maybe_ne (extra_off, 0))
3148 {
3149 if (rhs.length () < 2)
3150 return (void *)-1;
3151 int ix = rhs.length () - 2;
3152 if (rhs[ix].opcode != MEM_REF
3153 || known_eq (rhs[ix].off, -1))
3154 return (void *)-1;
3155 rhs[ix].off += extra_off;
3156 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3157 build_int_cst (TREE_TYPE (rhs[ix].op0),
3158 extra_off));
3159 }
3160
3161 /* Save the operands since we need to use the original ones for
3162 the hash entry we use. */
3163 if (!data->saved_operands.exists ())
3164 data->saved_operands = vr->operands.copy ();
3165
3166 /* We need to pre-pend vr->operands[0..i] to rhs. */
3167 vec<vn_reference_op_s> old = vr->operands;
3168 if (i + 1 + rhs.length () > vr->operands.length ())
3169 vr->operands.safe_grow (i + 1 + rhs.length (), true);
3170 else
3171 vr->operands.truncate (i + 1 + rhs.length ());
3172 FOR_EACH_VEC_ELT (rhs, j, vro)
3173 vr->operands[i + 1 + j] = *vro;
3174 vr->operands = valueize_refs (vr->operands);
3175 if (old == shared_lookup_references)
3176 shared_lookup_references = vr->operands;
3177 vr->hashcode = vn_reference_compute_hash (vr);
3178
3179 /* Try folding the new reference to a constant. */
3180 tree val = fully_constant_vn_reference_p (vr);
3181 if (val)
3182 {
3183 if (data->partial_defs.is_empty ())
3184 return data->finish (ao_ref_alias_set (&lhs_ref),
3185 ao_ref_base_alias_set (&lhs_ref), val);
3186 /* This is the only interesting case for partial-def handling
3187 coming from targets that like to gimplify init-ctors as
3188 aggregate copies from constant data like aarch64 for
3189 PR83518. */
3190 if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3191 {
3192 pd_data pd;
3193 pd.rhs = val;
3194 pd.offset = 0;
3195 pd.size = maxsizei;
3196 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3197 ao_ref_base_alias_set (&lhs_ref),
3198 0, maxsizei);
3199 }
3200 }
3201
3202 /* Continuing with partial defs isn't easily possible here, we
3203 have to find a full def from further lookups from here. Probably
3204 not worth the special-casing everywhere. */
3205 if (!data->partial_defs.is_empty ())
3206 return (void *)-1;
3207
3208 /* Adjust *ref from the new operands. */
3209 ao_ref rhs1_ref;
3210 ao_ref_init (&rhs1_ref, rhs1);
3211 if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3212 ao_ref_base_alias_set (&rhs1_ref),
3213 vr->type, vr->operands))
3214 return (void *)-1;
3215 /* This can happen with bitfields. */
3216 if (maybe_ne (ref->size, r.size))
3217 return (void *)-1;
3218 *ref = r;
3219
3220 /* Do not update last seen VUSE after translating. */
3221 data->last_vuse_ptr = NULL;
3222 /* Invalidate the original access path since it now contains
3223 the wrong base. */
3224 data->orig_ref.ref = NULL_TREE;
3225 /* Use the alias-set of this LHS for recording an eventual result. */
3226 if (data->first_set == -2)
3227 {
3228 data->first_set = ao_ref_alias_set (&lhs_ref);
3229 data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3230 }
3231
3232 /* Keep looking for the adjusted *REF / VR pair. */
3233 return NULL;
3234 }
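A minimal sketch of case 5 (illustration only, not part of the GCC sources):

  *p = *q;       /* aggregate copy whose LHS kills the reference  */
  x = p->f;      /* the lookup is re-based to continue as q->f  */

The operands of VR are rewritten onto the copy's RHS and the walk resumes,
which is why last_vuse_ptr and orig_ref are invalidated above.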
3235
3236 /* 6) For memcpy copies translate the reference through them if the copy
3237 kills ref. But we cannot (easily) do this translation if the memcpy is
3238 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3239 can modify the storage order of objects (see storage_order_barrier_p). */
3240 else if (data->vn_walk_kind == VN_WALKREWRITE
3241 && is_gimple_reg_type (vr->type)
3242 /* ??? Handle BCOPY as well. */
3243 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3244 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3245 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3246 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3247 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3248 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3249 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3250 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3251 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3252 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3253 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3254 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3255 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3256 &copy_size)))
3257 /* Handling this is more complicated, give up for now. */
3258 && data->partial_defs.is_empty ())
3259 {
3260 tree lhs, rhs;
3261 ao_ref r;
3262 poly_int64 rhs_offset, lhs_offset;
3263 vn_reference_op_s op;
3264 poly_uint64 mem_offset;
3265 poly_int64 at, byte_maxsize;
3266
3267 /* Only handle non-variable, addressable refs. */
3268 if (maybe_ne (ref->size, maxsize)
3269 || !multiple_p (offset, BITS_PER_UNIT, &at)
3270 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3271 return (void *)-1;
3272
3273 /* Extract a pointer base and an offset for the destination. */
3274 lhs = gimple_call_arg (def_stmt, 0);
3275 lhs_offset = 0;
3276 if (TREE_CODE (lhs) == SSA_NAME)
3277 {
3278 lhs = vn_valueize (lhs);
3279 if (TREE_CODE (lhs) == SSA_NAME)
3280 {
3281 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3282 if (gimple_assign_single_p (def_stmt)
3283 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3284 lhs = gimple_assign_rhs1 (def_stmt);
3285 }
3286 }
3287 if (TREE_CODE (lhs) == ADDR_EXPR)
3288 {
3289 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3290 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3291 return (void *)-1;
3292 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3293 &lhs_offset);
3294 if (!tem)
3295 return (void *)-1;
3296 if (TREE_CODE (tem) == MEM_REF
3297 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3298 {
3299 lhs = TREE_OPERAND (tem, 0);
3300 if (TREE_CODE (lhs) == SSA_NAME)
3301 lhs = vn_valueize (lhs);
3302 lhs_offset += mem_offset;
3303 }
3304 else if (DECL_P (tem))
3305 lhs = build_fold_addr_expr (tem);
3306 else
3307 return (void *)-1;
3308 }
3309 if (TREE_CODE (lhs) != SSA_NAME
3310 && TREE_CODE (lhs) != ADDR_EXPR)
3311 return (void *)-1;
3312
3313 /* Extract a pointer base and an offset for the source. */
3314 rhs = gimple_call_arg (def_stmt, 1);
3315 rhs_offset = 0;
3316 if (TREE_CODE (rhs) == SSA_NAME)
3317 rhs = vn_valueize (rhs);
3318 if (TREE_CODE (rhs) == ADDR_EXPR)
3319 {
3320 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3321 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3322 return (void *)-1;
3323 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3324 &rhs_offset);
3325 if (!tem)
3326 return (void *)-1;
3327 if (TREE_CODE (tem) == MEM_REF
3328 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3329 {
3330 rhs = TREE_OPERAND (tem, 0);
3331 rhs_offset += mem_offset;
3332 }
3333 else if (DECL_P (tem)
3334 || TREE_CODE (tem) == STRING_CST)
3335 rhs = build_fold_addr_expr (tem);
3336 else
3337 return (void *)-1;
3338 }
3339 if (TREE_CODE (rhs) == SSA_NAME)
3340 rhs = SSA_VAL (rhs);
3341 else if (TREE_CODE (rhs) != ADDR_EXPR)
3342 return (void *)-1;
3343
3344 /* The bases of the destination and the references have to agree. */
3345 if (TREE_CODE (base) == MEM_REF)
3346 {
3347 if (TREE_OPERAND (base, 0) != lhs
3348 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3349 return (void *) -1;
3350 at += mem_offset;
3351 }
3352 else if (!DECL_P (base)
3353 || TREE_CODE (lhs) != ADDR_EXPR
3354 || TREE_OPERAND (lhs, 0) != base)
3355 return (void *)-1;
3356
3357 /* If the access is completely outside of the memcpy destination
3358 area there is no aliasing. */
3359 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3360 return NULL;
3361 /* And the access has to be contained within the memcpy destination. */
3362 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3363 return (void *)-1;
3364
3365 /* Save the operands since we need to use the original ones for
3366 the hash entry we use. */
3367 if (!data->saved_operands.exists ())
3368 data->saved_operands = vr->operands.copy ();
3369
3370 /* Make room for 2 operands in the new reference. */
3371 if (vr->operands.length () < 2)
3372 {
3373 vec<vn_reference_op_s> old = vr->operands;
3374 vr->operands.safe_grow_cleared (2, true);
3375 if (old == shared_lookup_references)
3376 shared_lookup_references = vr->operands;
3377 }
3378 else
3379 vr->operands.truncate (2);
3380
3381 /* The looked-through reference is a simple MEM_REF. */
3382 memset (&op, 0, sizeof (op));
3383 op.type = vr->type;
3384 op.opcode = MEM_REF;
3385 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3386 op.off = at - lhs_offset + rhs_offset;
3387 vr->operands[0] = op;
3388 op.type = TREE_TYPE (rhs);
3389 op.opcode = TREE_CODE (rhs);
3390 op.op0 = rhs;
3391 op.off = -1;
3392 vr->operands[1] = op;
3393 vr->hashcode = vn_reference_compute_hash (vr);
3394
3395 /* Try folding the new reference to a constant. */
3396 tree val = fully_constant_vn_reference_p (vr);
3397 if (val)
3398 return data->finish (0, 0, val);
3399
3400 /* Adjust *ref from the new operands. */
3401 if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3402 return (void *)-1;
3403 /* This can happen with bitfields. */
3404 if (maybe_ne (ref->size, r.size))
3405 return (void *)-1;
3406 *ref = r;
3407
3408 /* Do not update last seen VUSE after translating. */
3409 data->last_vuse_ptr = NULL;
3410 /* Invalidate the original access path since it now contains
3411 the wrong base. */
3412 data->orig_ref.ref = NULL_TREE;
3413 /* Use the alias-set of this stmt for recording an eventual result. */
3414 if (data->first_set == -2)
3415 {
3416 data->first_set = 0;
3417 data->first_base_set = 0;
3418 }
3419
3420 /* Keep looking for the adjusted *REF / VR pair. */
3421 return NULL;
3422 }
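A minimal sketch of case 6 (illustration only, not part of the GCC sources):

  memcpy (&d, &s, sizeof d);  /* the copy must fully cover the access  */
  x = d.f;                    /* rewritten as a MEM_REF based on &s and re-walked  */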
3423
3424 /* Bail out and stop walking. */
3425 return (void *)-1;
3426}
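To summarize the return protocol above, as consumed by
walk_non_aliased_vuses: a data->finish (...) result ends the walk with a
value, NULL ("keep looking") continues to the next dominating VUSE, and
(void *)-1 aborts the walk.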
3427
3428/* Return a reference op vector from OP that can be used for
3429 vn_reference_lookup_pieces. The caller is responsible for releasing
3430 the vector. */
3431
3432vec<vn_reference_op_s>
3433vn_reference_operands_for_lookup (tree op)
3434{
3435 bool valueized;
3436 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3437}
3438
3439/* Lookup a reference operation by its parts, in the current hash table.
3440 Returns the resulting value number if it exists in the hash table,
3441 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3442 vn_reference_t stored in the hashtable if something is found. */
3443
3444tree
3445vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3446 alias_set_type base_set, tree type,
3447 vec<vn_reference_op_s> operands,
3448 vn_reference_t *vnresult, vn_lookup_kind kind)
3449{
3450 struct vn_reference_s vr1;
3451 vn_reference_t tmp;
3452 tree cst;
3453
3454 if (!vnresult)
3455 vnresult = &tmp;
3456 *vnresult = NULL;
3457
3458 vr1.vuse = vuse_ssa_val (vuse);
3459 shared_lookup_references.truncate (0);
3460 shared_lookup_references.safe_grow (operands.length (), true);
3461 memcpy (shared_lookup_references.address (),
3462 operands.address (),
3463 sizeof (vn_reference_op_s)
3464 * operands.length ());
3465 vr1.operands = operands = shared_lookup_references
3466 = valueize_refs (shared_lookup_references);
3467 vr1.type = type;
3468 vr1.set = set;
3469 vr1.base_set = base_set;
3470 vr1.hashcode = vn_reference_compute_hash (&vr1);
3471 if ((cst = fully_constant_vn_reference_p (&vr1)))
3472 return cst;
3473
3474 vn_reference_lookup_1 (&vr1, vnresult);
3475 if (!*vnresult
3476 && kind != VN_NOWALK
3477 && vr1.vuse)
3478 {
3479 ao_ref r;
3480 unsigned limit = param_sccvn_max_alias_queries_per_access;
3481 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3482 if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3483 vr1.operands))
3484 *vnresult
3485 = ((vn_reference_t)
3486 walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3487 vn_reference_lookup_3, vuse_valueize,
3488 limit, &data));
3489 gcc_checking_assert (vr1.operands == shared_lookup_references);
3490 }
3491
3492 if (*vnresult)
3493 return (*vnresult)->result;
3494
3495 return NULL_TREE;
3496}
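A minimal usage sketch tying the two entry points above together
(illustration only; op, vuse, set and base_set are hypothetical):

  vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (op);
  vn_reference_t vnres;
  tree val = vn_reference_lookup_pieces (vuse, set, base_set,
                                         TREE_TYPE (op), ops, &vnres,
                                         VN_WALK);
  ops.release ();  /* the caller owns the vector  */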
3497
3498/* Lookup OP in the current hash table, and return the resulting value
3499 number if it exists in the hash table. Return NULL_TREE if it does
3500 not exist in the hash table or if the result field of the structure
3501 was NULL.  VNRESULT will be filled in with the vn_reference_t
3502 stored in the hashtable if one exists.  When TBAA_P is false assume
3503 we are looking up a store and treat it as having alias-set zero.
3504 *LAST_VUSE_PTR will be updated with the VUSE at which the value lookup succeeded.
3505 MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
3506 load is bitwise anded with MASK and so we are only interested in a subset
3507 of the bits and can ignore if the other bits are uninitialized or
3508 not initialized with constants. */
3509
3510tree
3511vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3512 vn_reference_t *vnresult, bool tbaa_p,
3513 tree *last_vuse_ptr, tree mask)
3514{
3515 vec<vn_reference_op_s> operands;
3516 struct vn_reference_s vr1;
3517 bool valuezied_anything;
3518
3519 if (vnresult)
11.1
'vnresult' is non-null
12
Taking true branch
3520 *vnresult = NULL;
3521
3522 vr1.vuse = vuse_ssa_val (vuse);
3523 vr1.operands = operands
3524 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
3525 vr1.type = TREE_TYPE (op);
3526 ao_ref op_ref;
3527 ao_ref_init (&op_ref, op);
3528 vr1.set = ao_ref_alias_set (&op_ref);
3529 vr1.base_set = ao_ref_base_alias_set (&op_ref);
3530 vr1.hashcode = vn_reference_compute_hash (&vr1);
3531 if (mask == NULL_TREE)
13
Taking true branch
3532 if (tree cst
13.1
'cst' is null
= fully_constant_vn_reference_p (&vr1))
14
Taking false branch
3533 return cst;
3534
3535 if (kind
14.1
'kind' is not equal to VN_NOWALK
!= VN_NOWALK && vr1.vuse)
15
Assuming field 'vuse' is non-null
16
Taking true branch
3536 {
3537 vn_reference_t wvnresult;
3538 ao_ref r;
3539 unsigned limit = param_sccvn_max_alias_queries_per_access;
3540 /* Make sure to use a valueized reference if we valueized anything.
3541 Otherwise preserve the full reference for advanced TBAA. */
3542 if (!valuezied_anything
16.1
'valuezied_anything' is false
3543     || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3544                                        vr1.type, vr1.operands))
3545   ao_ref_init (&r, op);
3546 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
17
Assuming field 'ref' is non-null
18
'?' condition is true
19
Calling constructor for 'vn_walk_cb_data'
3547                       last_vuse_ptr, kind, tbaa_p, mask);
3548
3549 wvnresult
3550   = ((vn_reference_t)
3551      walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3552                              vn_reference_lookup_3, vuse_valueize, limit,
3553                              &data));
3554 gcc_checking_assert (vr1.operands == shared_lookup_references);
3555 if (wvnresult)
3556   {
3557     gcc_assert (mask == NULL_TREE);
3558     if (vnresult)
3559       *vnresult = wvnresult;
3560     return wvnresult->result;
3561   }
3562 else if (mask)
3563   return data.masked_result;
3564
3565 return NULL_TREE;
3566 }
3567
3568 if (last_vuse_ptr)
3569   *last_vuse_ptr = vr1.vuse;
3570 if (mask)
3571   return NULL_TREE;
3572 return vn_reference_lookup_1 (&vr1, vnresult);
3573}
3574
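
The MASK parameter documented above means only a subset of a load's bits matters: the result is bitwise anded with MASK, so bits outside it may be uninitialized without affecting the lookup. A hypothetical one-liner capturing that notion of equivalence:

#include <cstdint>

// Two loads are interchangeable for a masked consumer as soon as they
// agree on the masked bits; bits outside MASK are ignored.
bool
equal_under_mask (uint64_t a, uint64_t b, uint64_t mask)
{
  return ((a ^ b) & mask) == 0;
}
// equal_under_mask (0xab12, 0xcd12, 0xff) is true: the low byte agrees.
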
3575/* Lookup CALL in the current hash table and return the entry in
3576   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
3577
3578void
3579vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3580                          vn_reference_t vr)
3581{
3582 if (vnresult)
3583   *vnresult = NULL;
3584
3585 tree vuse = gimple_vuse (call);
3586
3587 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3588 vr->operands = valueize_shared_reference_ops_from_call (call);
3589 vr->type = gimple_expr_type (call);
3590 vr->punned = false;
3591 vr->set = 0;
3592 vr->base_set = 0;
3593 vr->hashcode = vn_reference_compute_hash (vr);
3594 vn_reference_lookup_1 (vr, vnresult);
3595}
3596
3597/* Insert OP into the current hash table with a value number of RESULT.  */
3598
3599static void
3600vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3601{
3602 vn_reference_s **slot;
3603 vn_reference_t vr1;
3604 bool tem;
3605
3606 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3607 if (TREE_CODE (result) == SSA_NAME)
3608   vr1->value_id = VN_INFO (result)->value_id;
3609 else
3610   vr1->value_id = get_or_alloc_constant_value_id (result);
3611 vr1->vuse = vuse_ssa_val (vuse);
3612 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3613 vr1->type = TREE_TYPE (op);
3614 vr1->punned = false;
3615 ao_ref op_ref;
3616 ao_ref_init (&op_ref, op);
3617 vr1->set = ao_ref_alias_set (&op_ref);
3618 vr1->base_set = ao_ref_base_alias_set (&op_ref);
3619 vr1->hashcode = vn_reference_compute_hash (vr1);
3620 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3621 vr1->result_vdef = vdef;
3622
3623 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3624                                                     INSERT);
3625
3626 /* Because IL walking on reference lookup can end up visiting
3627    a def that is only to be visited later in iteration order
3628    when we are about to make an irreducible region reducible
3629    the def can be effectively processed and its ref being inserted
3630    by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
3631    but save a lookup if we deal with already inserted refs here.  */
3632 if (*slot)
3633   {
3634     /* We cannot assert that we have the same value either because
3635        when disentangling an irreducible region we may end up visiting
3636        a use before the corresponding def.  That's a missed optimization
3637        only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
3638     if (dump_file && (dump_flags & TDF_DETAILS)
3639         && !operand_equal_p ((*slot)->result, vr1->result, 0))
3640       {
3641         fprintf (dump_file, "Keeping old value ");
3642         print_generic_expr (dump_file, (*slot)->result);
3643         fprintf (dump_file, " because of collision\n");
3644       }
3645     free_reference (vr1);
3646     obstack_free (&vn_tables_obstack, vr1);
3647     return;
3648   }
3649
3650 *slot = vr1;
3651 vr1->next = last_inserted_ref;
3652 last_inserted_ref = vr1;
3653}
3654
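
The insert policy above tolerates collisions: when a reference is already in the table, the existing entry is kept and the new record is released. A minimal sketch of that "keep old value on collision" behavior, with hypothetical names and a plain std::unordered_map in place of GCC's obstack-backed table:

#include <cstdint>
#include <unordered_map>

uint64_t
insert_keep_old (std::unordered_map<uint64_t, uint64_t> &tab,
                 uint64_t key, uint64_t valnum)
{
  // emplace is a no-op if KEY is already present, mirroring the
  // "save a lookup if we deal with already inserted refs" path.
  auto res = tab.emplace (key, valnum);
  return res.first->second;   // old value on collision, VALNUM otherwise
}
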
3655/* Insert a reference by its pieces into the current hash table with
3656   a value number of RESULT.  Return the resulting reference
3657   structure we created.  */
3658
3659vn_reference_t
3660vn_reference_insert_pieces (tree vuse, alias_set_type set,
3661                            alias_set_type base_set, tree type,
3662                            vec<vn_reference_op_s> operands,
3663                            tree result, unsigned int value_id)
3664
3665{
3666 vn_reference_s **slot;
3667 vn_reference_t vr1;
3668
3669 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3670 vr1->value_id = value_id;
3671 vr1->vuse = vuse_ssa_val (vuse);
3672 vr1->operands = valueize_refs (operands);
3673 vr1->type = type;
3674 vr1->punned = false;
3675 vr1->set = set;
3676 vr1->base_set = base_set;
3677 vr1->hashcode = vn_reference_compute_hash (vr1);
3678 if (result && TREE_CODE (result) == SSA_NAME)
3679   result = SSA_VAL (result);
3680 vr1->result = result;
3681
3682 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3683                                                     INSERT);
3684
3685 /* At this point we should have all the things inserted that we have
3686    seen before, and we should never try inserting something that
3687    already exists.  */
3688 gcc_assert (!*slot);
3689
3690 *slot = vr1;
3691 vr1->next = last_inserted_ref;
3692 last_inserted_ref = vr1;
3693 return vr1;
3694}
3695
3696/* Compute and return the hash value for nary operation VNO1.  */
3697
3698static hashval_t
3699vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3700{
3701 inchash::hash hstate;
3702 unsigned i;
3703
3704 for (i = 0; i < vno1->length; ++i)
3705   if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3706     vno1->op[i] = SSA_VAL (vno1->op[i]);
3707
3708 if (((vno1->length == 2
3709       && commutative_tree_code (vno1->opcode))
3710      || (vno1->length == 3
3711          && commutative_ternary_tree_code (vno1->opcode)))
3712     && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3713   std::swap (vno1->op[0], vno1->op[1]);
3714 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3715          && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3716   {
3717     std::swap (vno1->op[0], vno1->op[1]);
3718     vno1->opcode = swap_tree_comparison (vno1->opcode);
3719   }
3720
3721 hstate.add_int (vno1->opcode);
3722 for (i = 0; i < vno1->length; ++i)
3723   inchash::add_expr (vno1->op[i], hstate);
3724
3725 return hstate.end ();
3726}
3727
3728/* Compare nary operations VNO1 and VNO2 and return true if they are
3729   equivalent.  */
3730
3731bool
3732vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3733{
3734 unsigned i;
3735
3736 if (vno1->hashcode != vno2->hashcode)
3737   return false;
3738
3739 if (vno1->length != vno2->length)
3740   return false;
3741
3742 if (vno1->opcode != vno2->opcode
3743     || !types_compatible_p (vno1->type, vno2->type))
3744   return false;
3745
3746 for (i = 0; i < vno1->length; ++i)
3747   if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3748     return false;
3749
3750 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3751    of op1.  Need to check to make sure they are the same.  */
3752 if (vno1->opcode == BIT_INSERT_EXPR
3753     && TREE_CODE (vno1->op[1]) == INTEGER_CST
3754     && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3755        != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3756   return false;
3757
3758 return true;
3759}
3760
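
vn_nary_op_compute_hash above canonicalizes operand order for commutative opcodes before hashing, so ADD(a, b) and ADD(b, a) land in the same bucket. A self-contained sketch of that step; the opcode set, the swap criterion (GCC uses tree_swap_operands_p, not a plain comparison) and the hash mix are all hypothetical simplifications:

#include <algorithm>
#include <cstdint>
#include <functional>

enum opcode { OP_ADD, OP_SUB };

static bool
commutative_p (opcode op)
{
  return op == OP_ADD;
}

size_t
hash_binary (opcode op, uint64_t op0, uint64_t op1)
{
  if (commutative_p (op) && op1 < op0)
    std::swap (op0, op1);   // canonical operand order
  size_t h = std::hash<int> () (op);
  h = h * 1000003u ^ std::hash<uint64_t> () (op0);
  h = h * 1000003u ^ std::hash<uint64_t> () (op1);
  return h;
}
// hash_binary (OP_ADD, 3, 7) == hash_binary (OP_ADD, 7, 3)
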
3761/* Initialize VNO from the pieces provided.  */
3762
3763static void
3764init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3765                             enum tree_code code, tree type, tree *ops)
3766{
3767 vno->opcode = code;
3768 vno->length = length;
3769 vno->type = type;
3770 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3771}
3772
3773/* Return the number of operands for a vn_nary ops structure from STMT.  */
3774
3775static unsigned int
3776vn_nary_length_from_stmt (gimple *stmt)
3777{
3778 switch (gimple_assign_rhs_code (stmt))
3779   {
3780   case REALPART_EXPR:
3781   case IMAGPART_EXPR:
3782   case VIEW_CONVERT_EXPR:
3783     return 1;
3784
3785   case BIT_FIELD_REF:
3786     return 3;
3787
3788   case CONSTRUCTOR:
3789     return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3790
3791   default:
3792     return gimple_num_ops (stmt) - 1;
3793   }
3794}
3795
3796/* Initialize VNO from STMT.  */
3797
3798static void
3799init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3800{
3801 unsigned i;
3802
3803 vno->opcode = gimple_assign_rhs_code (stmt);
3804 vno->type = gimple_expr_type (stmt);
3805 switch (vno->opcode)
3806   {
3807   case REALPART_EXPR:
3808   case IMAGPART_EXPR:
3809   case VIEW_CONVERT_EXPR:
3810     vno->length = 1;
3811     vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3812     break;
3813
3814   case BIT_FIELD_REF:
3815     vno->length = 3;
3816     vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3817     vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3818     vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3819     break;
3820
3821   case CONSTRUCTOR:
3822     vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3823     for (i = 0; i < vno->length; ++i)
3824       vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3825     break;
3826
3827   default:
3828     gcc_checking_assert (!gimple_assign_single_p (stmt));
3829     vno->length = gimple_num_ops (stmt) - 1;
3830     for (i = 0; i < vno->length; ++i)
3831       vno->op[i] = gimple_op (stmt, i + 1);
3832   }
3833}
3834
3835/* Compute the hashcode for VNO and look for it in the hash table;
3836   return the resulting value number if it exists in the hash table.
3837   Return NULL_TREE if it does not exist in the hash table or if the
3838   result field of the operation is NULL.  VNRESULT will contain the
3839   vn_nary_op_t from the hashtable if it exists.  */
3840
3841static tree
3842vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3843{
3844 vn_nary_op_s **slot;
3845
3846 if (vnresult)
3847   *vnresult = NULL;
3848
3849 vno->hashcode = vn_nary_op_compute_hash (vno);
3850 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3851 if (!slot)
3852   return NULL_TREE;
3853 if (vnresult)
3854   *vnresult = *slot;
3855 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3856}
3857
3858/* Lookup an n-ary operation by its pieces and return the resulting value
3859   number if it exists in the hash table.  Return NULL_TREE if it does
3860   not exist in the hash table or if the result field of the operation
3861   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
3862   if it exists.  */
3863
3864tree
3865vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3866                          tree type, tree *ops, vn_nary_op_t *vnresult)
3867{
3868 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3869                                 sizeof_vn_nary_op (length));
3870 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3871 return vn_nary_op_lookup_1 (vno1, vnresult);
3872}
3873
3874/* Lookup the rhs of STMT in the current hash table, and return the resulting
3875   value number if it exists in the hash table.  Return NULL_TREE if
3876   it does not exist in the hash table.  VNRESULT will contain the
3877   vn_nary_op_t from the hashtable if it exists.  */
3878
3879tree
3880vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3881{
3882 vn_nary_op_t vno1
3883   = XALLOCAVAR (struct vn_nary_op_s,
3884                 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3885 init_vn_nary_op_from_stmt (vno1, stmt);
3886 return vn_nary_op_lookup_1 (vno1, vnresult);
3887}
3888
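
The lookup functions above are one half of the classic value-numbering discipline: look the expression up first, and only allocate a fresh value number when nothing is known. A tiny end-to-end illustration of what that buys (hypothetical names; a std::map keyed by an opcode/operand tuple stands in for the nary table):

#include <cstdint>
#include <map>
#include <tuple>

using expr = std::tuple<int /*opcode*/, uint64_t, uint64_t>;

uint64_t
vn_lookup_or_add (std::map<expr, uint64_t> &tab, expr e, uint64_t &next_vn)
{
  auto [it, inserted] = tab.emplace (e, next_vn);
  if (inserted)
    ++next_vn;          // fresh value number for a new expression
  return it->second;    // reuse the existing one otherwise
}
// x = a + b and y = a + b get the same value number:
//   vn_lookup_or_add (tab, {PLUS, vn_a, vn_b}, n)
//   == vn_lookup_or_add (tab, {PLUS, vn_a, vn_b}, n)
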
3889/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
3890
3891static vn_nary_op_t
3892alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3893{
3894 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3895}
3896
3897/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3898   obstack.  */
3899
3900static vn_nary_op_t
3901alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3902{
3903 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3904
3905 vno1->value_id = value_id;
3906 vno1->length = length;
3907 vno1->predicated_values = 0;
3908 vno1->u.result = result;
3909
3910 return vno1;
3911}
3912
3913/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
3914   VNO->HASHCODE first.  */
3915
3916static vn_nary_op_t
3917vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3918                        bool compute_hash)
3919{
3920 vn_nary_op_s **slot;
3921
3922 if (compute_hash)
3923   {
3924     vno->hashcode = vn_nary_op_compute_hash (vno);
3925     gcc_assert (! vno->predicated_values
3926                 || (! vno->u.values->next
3927                     && vno->u.values->n == 1));
3928   }
3929
3930 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3931 vno->unwind_to = *slot;
3932 if (*slot)
3933   {
3934     /* Prefer non-predicated values.
3935        ??? Only if those are constant, otherwise, with constant predicated
3936        value, turn them into predicated values with entry-block validity
3937        (??? but we always find the first valid result currently).  */
3938     if ((*slot)->predicated_values
3939         && ! vno->predicated_values)
3940       {
3941         /* ??? We cannot remove *slot from the unwind stack list.
3942            For the moment we deal with this by skipping not found
3943            entries but this isn't ideal ...  */
3944         *slot = vno;
3945         /* ??? Maintain a stack of states we can unwind in
3946            vn_nary_op_s?  But how far do we unwind?  In reality
3947            we need to push change records somewhere... Or not
3948            unwind vn_nary_op_s and linking them but instead
3949            unwind the results "list", linking that, which also
3950            doesn't move on hashtable resize.  */
3951         /* We can also have a ->unwind_to recording *slot there.
3952            That way we can make u.values a fixed size array with
3953            recording the number of entries but of course we then
3954            have always N copies for each unwind_to-state.  Or we
3955            make sure to only ever append and each unwinding will
3956            pop off one entry (but how to deal with predicated
3957            replaced with non-predicated here?)  */
3958         vno->next = last_inserted_nary;
3959         last_inserted_nary = vno;
3960         return vno;
3961       }
3962     else if (vno->predicated_values
3963              && ! (*slot)->predicated_values)
3964       return *slot;
3965     else if (vno->predicated_values
3966              && (*slot)->predicated_values)
3967       {
3968         /* ??? Factor this all into an insert_single_predicated_value
3969            routine.  */
3970         gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3971         basic_block vno_bb
3972           = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3973         vn_pval *nval = vno->u.values;
3974         vn_pval **next = &vno->u.values;
3975         bool found = false;
3976         for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3977           {
3978             if (expressions_equal_p (val->result, vno->u.values->result))
3979               {
3980                 found = true;
3981                 for (unsigned i = 0; i < val->n; ++i)
3982                   {
3983                     basic_block val_bb
3984                       = BASIC_BLOCK_FOR_FN (cfun,
3985                                             val->valid_dominated_by_p[i]);
3986                     if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3987                       /* Value registered with more generic predicate.  */
3988                       return *slot;
3989                     else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3990                       /* Shouldn't happen, we insert in RPO order.  */
3991                       gcc_unreachable ();
3992                   }
3993                 /* Append value.  */
3994                 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3995                                                    sizeof (vn_pval)
3996                                                    + val->n * sizeof (int));
3997                 (*next)->next = NULL;
3998                 (*next)->result = val->result;
3999                 (*next)->n = val->n + 1;
4000                 memcpy ((*next)->valid_dominated_by_p,
4001                         val->valid_dominated_by_p,
4002                         val->n * sizeof (int));
4003                 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4004                 next = &(*next)->next;
4005                 if (dump_file && (dump_flags & TDF_DETAILS))
4006                   fprintf (dump_file, "Appending predicate to value.\n");
4007                 continue;
4008               }
4009             /* Copy other predicated values.  */
4010             *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4011                                                sizeof (vn_pval)
4012                                                + (val->n-1) * sizeof (int));
4013             memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4014             (*next)->next = NULL;
4015             next = &(*next)->next;
4016           }
4017         if (!found)
4018           *next = nval;
4019
4020         *slot = vno;
4021         vno->next = last_inserted_nary;
4022         last_inserted_nary = vno;
4023         return vno;
4024       }
4025
4026     /* While we do not want to insert things twice it's awkward to
4027        avoid it in the case where visit_nary_op pattern-matches stuff
4028        and ends up simplifying the replacement to itself.  We then
4029        get two inserts, one from visit_nary_op and one from
4030        vn_nary_build_or_lookup.
4031        So allow inserts with the same value number.  */
4032     if ((*slot)->u.result == vno->u.result)
4033       return *slot;
4034   }
4035
4036 /* ??? There's also optimistic vs. previous committed state merging
4037    that is problematic for the case of unwinding.  */
4038
4039 /* ??? We should return NULL if we do not use 'vno' and have the
4040    caller release it.  */
4041 gcc_assert (!*slot);
4042
4043 *slot = vno;
4044 vno->next = last_inserted_nary;
4045 last_inserted_nary = vno;
4046 return vno;
4047}
4048
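
vn_nary_op_insert_into above merges predicated values: each recorded result is tagged with the basic blocks on whose entry its predicate holds, and a result only applies in blocks dominated by such a tag. A minimal sketch of the retrieval side of that idea; dominance is faked with an immediate-dominator parent chain and all names are hypothetical.

#include <cstdint>
#include <vector>

struct block { int index; block *idom; };   // immediate dominator tree

static bool
dominated_by (const block *bb, const block *dom)
{
  for (; bb; bb = bb->idom)
    if (bb == dom)
      return true;
  return false;
}

struct pred_value { uint64_t result; block *valid_in; };

// Return the first recorded result whose validity block dominates BB,
// or 0 (standing in for NULL_TREE) if none applies there.
uint64_t
get_predicated_value (const std::vector<pred_value> &vals, const block *bb)
{
  for (const pred_value &v : vals)
    if (dominated_by (bb, v.valid_in))
      return v.result;
  return 0;
}
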
4049/* Insert an n-ary operation into the current hash table using its
4050   pieces.  Return the vn_nary_op_t structure we created and put in
4051   the hashtable.  */
4052
4053vn_nary_op_t
4054vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4055                          tree type, tree *ops,
4056                          tree result, unsigned int value_id)
4057{
4058 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4059 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4060 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4061}
4062
4063static vn_nary_op_t
4064vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4065                                     tree type, tree *ops,
4066                                     tree result, unsigned int value_id,
4067                                     edge pred_e)
4068{
4069 /* ??? Currently tracking BBs.  */
4070 if (! single_pred_p (pred_e->dest))
4071   {
4072     /* Never record for backedges.  */
4073     if (pred_e->flags & EDGE_DFS_BACK)
4074       return NULL;
4075     edge_iterator ei;
4076     edge e;
4077     int cnt = 0;
4078     /* Ignore backedges.  */
4079     FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4080       if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4081         cnt++;
4082     if (cnt != 1)
4083       return NULL;
4084   }
4085 if (dump_file && (dump_flags & TDF_DETAILS)
4086     /* ??? Fix dumping, but currently we only get comparisons.  */
4087     && TREE_CODE_CLASS (code) == tcc_comparison)
4088   {
4089     fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4090              pred_e->dest->index);
4091     print_generic_expr (dump_file, ops[0], TDF_SLIM);
4092     fprintf (dump_file, " %s ", get_tree_code_name (code));
4093     print_generic_expr (dump_file, ops[1], TDF_SLIM);
4094     fprintf (dump_file, " == %s\n",
4095              integer_zerop (result) ? "false" : "true");
4096   }
4097 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4098 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4099 vno1->predicated_values = 1;
4100 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4101                                             sizeof (vn_pval));
4102 vno1->u.values->next = NULL;
4103 vno1->u.values->result = result;
4104 vno1->u.values->n = 1;
4105 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4106 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4107}
4108
4109static bool
4110dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
4111
4112static tree
4113vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4114{
4115 if (! vno->predicated_values)
4116   return vno->u.result;
4117 for (vn_pval *val = vno->u.values; val; val = val->next)
4118   for (unsigned i = 0; i < val->n; ++i)
4119     if (dominated_by_p_w_unex (bb,
4120                                BASIC_BLOCK_FOR_FN
4121                                  (cfun, val->valid_dominated_by_p[i])))
4122       return val->result;
4123 return NULL_TREE;
4124}
4125
4126/* Insert the rhs of STMT into the current hash table with a value number of
4127   RESULT.  */
4128
4129static vn_nary_op_t
4130vn_nary_op_insert_stmt (gimple *stmt, tree result)
4131{
4132 vn_nary_op_t vno1
4133   = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4134                       result, VN_INFO (result)->value_id);
4135 init_vn_nary_op_from_stmt (vno1, stmt);
4136 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4137}
4138
4139/* Compute a hashcode for PHI operation VP1 and return it.  */
4140
4141static inline hashval_t
4142vn_phi_compute_hash (vn_phi_t vp1)
4143{
4144 inchash::hash hstate;
4145 tree phi1op;
4146 tree type;
4147 edge e;
4148 edge_iterator ei;
4149
4150 hstate.add_int (EDGE_COUNT (vp1->block->preds));
4151 switch (EDGE_COUNT (vp1->block->preds))
4152   {
4153   case 1:
4154     break;
4155   case 2:
4156     if (vp1->block->loop_father->header == vp1->block)
4157       ;
4158     else
4159       break;
4160     /* Fallthru.  */
4161   default:
4162     hstate.add_int (vp1->block->index);
4163   }
4164
4165 /* If all PHI arguments are constants we need to distinguish
4166    the PHI node via its type.  */
4167 type = vp1->type;
4168 hstate.merge_hash (vn_hash_type (type));
4169
4170 FOR_EACH_EDGE (e, ei, vp1->block->preds)
4171   {
4172     /* Don't hash backedge values; they need to be handled as VN_TOP
4173        for optimistic value-numbering.  */
4174     if (e->flags & EDGE_DFS_BACK)
4175       continue;
4176
4177     phi1op = vp1->phiargs[e->dest_idx];
4178     if (phi1op == VN_TOP)
4179       continue;
4180     inchash::add_expr (phi1op, hstate);
4181   }
4182
4183 return hstate.end ();
4184}
4185
4186
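
The hashing rule above deliberately skips backedge arguments (they act like VN_TOP during optimistic iteration), so a PHI's hash stays stable while its backedge value is still being refined. A hypothetical, heavily simplified sketch of that rule:

#include <cstdint>
#include <functional>
#include <vector>

struct phi_arg { uint64_t valnum; bool backedge; };

size_t
hash_phi (int block_index, const std::vector<phi_arg> &args)
{
  size_t h = std::hash<int> () (block_index);
  for (const phi_arg &a : args)
    if (!a.backedge)            // backedge values are VN_TOP-like: skip
      h = h * 1000003u ^ std::hash<uint64_t> () (a.valnum);
  return h;
}
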
4187/* Return true if COND1 and COND2 represent the same condition, set
4188   *INVERTED_P if one needs to be inverted to make it the same as
4189   the other.  */
4190
4191static bool
4192cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4193                    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4194{
4195 enum tree_code code1 = gimple_cond_code (cond1);
4196 enum tree_code code2 = gimple_cond_code (cond2);
4197
4198 *inverted_p = false;
4199 if (code1 == code2)
4200   ;
4201 else if (code1 == swap_tree_comparison (code2))
4202   std::swap (lhs2, rhs2);
4203 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4204   *inverted_p = true;
4205 else if (code1 == invert_tree_comparison
4206                     (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4207   {
4208     std::swap (lhs2, rhs2);
4209     *inverted_p = true;
4210   }
4211 else
4212   return false;
4213
4214 return ((expressions_equal_p (lhs1, lhs2)
4215          && expressions_equal_p (rhs1, rhs2))
4216         || (commutative_tree_code (code1)
4217             && expressions_equal_p (lhs1, rhs2)
4218             && expressions_equal_p (rhs1, lhs2)));
4219}
4220
4221/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
4222
4223static int
4224vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4225{
4226 if (vp1->hashcode != vp2->hashcode)
4227   return false;
4228
4229 if (vp1->block != vp2->block)
4230   {
4231     if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4232       return false;
4233
4234     switch (EDGE_COUNT (vp1->block->preds))
4235       {
4236       case 1:
4237         /* Single-arg PHIs are just copies.  */
4238         break;
4239
4240       case 2:
4241         {
4242           /* Rule out backedges into the PHI.  */
4243           if (vp1->block->loop_father->header == vp1->block
4244               || vp2->block->loop_father->header == vp2->block)
4245             return false;
4246
4247           /* If the PHI nodes do not have compatible types
4248              they are not the same.  */
4249           if (!types_compatible_p (vp1->type, vp2->type))
4250             return false;
4251
4252           basic_block idom1
4253             = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4254           basic_block idom2
4255             = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4256           /* If the immediate dominators end in switch stmts multiple
4257              values may end up in the same PHI arg via intermediate
4258              CFG merges.  */
4259           if (EDGE_COUNT (idom1->succs) != 2
4260               || EDGE_COUNT (idom2->succs) != 2)
4261             return false;
4262
4263           /* Verify the controlling stmt is the same.  */
4264           gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4265           gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4266           if (! last1 || ! last2)
4267             return false;
4268           bool inverted_p;
4269           if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4270                                     last2, vp2->cclhs, vp2->ccrhs,
4271                                     &inverted_p))
4272             return false;
4273
4274           /* Get at true/false controlled edges into the PHI.  */
4275           edge te1, te2, fe1, fe2;
4276           if (! extract_true_false_controlled_edges (idom1, vp1->block,
4277                                                      &te1, &fe1)
4278               || ! extract_true_false_controlled_edges (idom2, vp2->block,
4279                                                         &te2, &fe2))
4280             return false;
4281
4282           /* Swap edges if the second condition is the inverse of the
4283              first.  */
4284           if (inverted_p)
4285             std::swap (te2, fe2);
4286
4287           /* ??? Handle VN_TOP specially.  */
4288           if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4289                                      vp2->phiargs[te2->dest_idx])
4290               || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4291                                         vp2->phiargs[fe2->dest_idx]))
4292             return false;
4293
4294           return true;
4295         }
4296
4297       default:
4298         return false;
4299       }
4300   }
4301
4302 /* If the PHI nodes do not have compatible types
4303    they are not the same.  */
4304 if (!types_compatible_p (vp1->type, vp2->type))
4305   return false;
4306
4307 /* Any phi in the same block will have its arguments in the
4308    same edge order, because of how we store phi nodes.  */
4309 unsigned nargs = EDGE_COUNT (vp1->block->preds);
4310 for (unsigned i = 0; i < nargs; ++i)
4311   {
4312     tree phi1op = vp1->phiargs[i];
4313     tree phi2op = vp2->phiargs[i];
4314     if (phi1op == phi2op)
4315       continue;
4316     if (!expressions_equal_p (phi1op, phi2op))
4317       return false;
4318   }
4319
4320 return true;
4321}
4322
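cond_stmts_equal_p above matches a condition directly, with swapped operands (a < b vs. b > a), inverted, or both, and reports via *INVERTED_P whether the caller must swap the true/false edges. A self-contained sketch of that normalization for integer comparisons only, so none of the HONOR_NANS subtleties apply; all names are hypothetical.

#include <utility>

enum cmp { LT, GT, LE, GE, EQ, NE };

static cmp
swap_cmp (cmp c)    // mirror operands: a < b  <=>  b > a
{
  switch (c)
    {
    case LT: return GT; case GT: return LT;
    case LE: return GE; case GE: return LE;
    default: return c;  // EQ/NE are symmetric
    }
}

static cmp
invert_cmp (cmp c)  // logical negation: !(a < b)  <=>  a >= b
{
  switch (c)
    {
    case LT: return GE; case GE: return LT;
    case GT: return LE; case LE: return GT;
    case EQ: return NE; default: return EQ;
    }
}

bool
conds_equal_p (cmp c1, int l1, int r1, cmp c2, int l2, int r2,
               bool *inverted_p)
{
  *inverted_p = false;
  if (c1 == c2)
    ;
  else if (c1 == swap_cmp (c2))
    std::swap (l2, r2);
  else if (c1 == invert_cmp (c2))
    *inverted_p = true;
  else if (c1 == invert_cmp (swap_cmp (c2)))
    {
      std::swap (l2, r2);
      *inverted_p = true;
    }
  else
    return false;
  return l1 == l2 && r1 == r2;
}
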
4323/* Lookup PHI in the current hash table, and return the resulting
4324   value number if it exists in the hash table.  Return NULL_TREE if
4325   it does not exist in the hash table.  */
4326
4327static tree
4328vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4329{
4330 vn_phi_s **slot;
4331 struct vn_phi_s *vp1;
4332 edge e;
4333 edge_iterator ei;
4334
4335 vp1 = XALLOCAVAR (struct vn_phi_s,
4336                   sizeof (struct vn_phi_s)
4337                   + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4338
4339 /* Canonicalize the SSA_NAME's to their value number.  */
4340 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4341   {
4342     tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4343     if (TREE_CODE (def) == SSA_NAME
4344         && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4345       def = SSA_VAL (def);
4346     vp1->phiargs[e->dest_idx] = def;
4347   }
4348 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4349 vp1->block = gimple_bb (phi);
4350 /* Extract values of the controlling condition.  */
4351 vp1->cclhs = NULL_TREE;
4352 vp1->ccrhs = NULL_TREE;
4353 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4354 if (EDGE_COUNT (idom1->succs) == 2)
4355   if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4356     {
4357       /* ??? We want to use SSA_VAL here.  But possibly not
4358          allow VN_TOP.  */
4359       vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4360       vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4361     }
4362 vp1->hashcode = vn_phi_compute_hash (vp1);
4363 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4364 if (!slot)
4365   return NULL_TREE;
4366 return (*slot)->result;
4367}
4368
4369/* Insert PHI into the current hash table with a value number of
4370   RESULT.  */
4371
4372static vn_phi_t
4373vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4374{
4375 vn_phi_s **slot;
4376 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4377                                          sizeof (vn_phi_s)
4378                                          + ((gimple_phi_num_args (phi) - 1)
4379                                             * sizeof (tree)));
4380 edge e;
4381 edge_iterator ei;
4382
4383 /* Canonicalize the SSA_NAME's to their value number.  */
4384 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4385   {
4386     tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4387     if (TREE_CODE (def) == SSA_NAME
4388         && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4389       def = SSA_VAL (def);
4390     vp1->phiargs[e->dest_idx] = def;
4391   }
4392 vp1->value_id = VN_INFO (result)->value_id;
4393 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4394 vp1->block = gimple_bb (phi);
4395 /* Extract values of the controlling condition.  */
4396 vp1->cclhs = NULL_TREE;
4397 vp1->ccrhs = NULL_TREE;
4398 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4399 if (EDGE_COUNT (idom1->succs) == 2)
4400   if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4401     {
4402       /* ??? We want to use SSA_VAL here.  But possibly not
4403          allow VN_TOP.  */
4404       vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4405       vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4406     }
4407 vp1->result = result;
4408 vp1->hashcode = vn_phi_compute_hash (vp1);
4409
4410 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4411 gcc_assert (!*slot);
4412
4413 *slot = vp1;
4414 vp1->next = last_inserted_phi;
4415 last_inserted_phi = vp1;
4416 return vp1;
4417}
4418
4419
4441 if (e->flags & EDGE_EXECUTABLE) 4442 { 4443 if (prede) 4444 { 4445 prede = NULLnullptr; 4446 break; 4447 } 4448 prede = e; 4449 } 4450 if (prede) 4451 { 4452 bb1 = prede->src; 4453 4454 /* Re-do the dominance check with changed bb1. */ 4455 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2)) 4456 return true; 4457 } 4458 } 4459 4460 /* Iterate to the single executable bb2 successor. */ 4461 edge succe = NULLnullptr; 4462 FOR_EACH_EDGE (e, ei, bb2->succs)for ((ei) = ei_start_1 (&((bb2->succs))); ei_cond ((ei
), &(e)); ei_next (&(ei)))
4463 if (e->flags & EDGE_EXECUTABLE) 4464 { 4465 if (succe) 4466 { 4467 succe = NULLnullptr; 4468 break; 4469 } 4470 succe = e; 4471 } 4472 if (succe) 4473 { 4474 /* Verify the reached block is only reached through succe. 4475 If there is only one edge we can spare us the dominator 4476 check and iterate directly. */ 4477 if (EDGE_COUNT (succe->dest->preds)vec_safe_length (succe->dest->preds) > 1) 4478 { 4479 FOR_EACH_EDGE (e, ei, succe->dest->preds)for ((ei) = ei_start_1 (&((succe->dest->preds))); ei_cond
((ei), &(e)); ei_next (&(ei)))
	  if (e != succe
	      && (e->flags & EDGE_EXECUTABLE))
	    {
	      succe = NULL;
	      break;
	    }
	}
      if (succe)
	{
	  bb2 = succe->dest;

	  /* Re-do the dominance check with changed bb2.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;
  bool curr_undefined = false;
  bool curr_invariant = false;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ??? When iterating and visiting PHI <undef, backedge-value>
         for the first time we rightfully get VN_TOP and we need to
         preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
         With SCCVN we were simply lucky we iterated the other PHI
         cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
	goto set_and_exit;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
		       && ((TREE_CODE (to) == SSA_NAME
			    && (to == from || SSA_VAL (to) == to))
			   || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      curr_invariant = is_gimple_min_invariant (currval);
      curr_undefined = (TREE_CODE (currval) == SSA_NAME
			&& ssa_undefined_value_p (currval, false));
      if (currval != VN_TOP
	  && !curr_invariant
	  && !curr_undefined
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-constant) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (constant)\n");
	    }
	  to = from;
	}
      else if (currval != VN_TOP
	       && !curr_undefined
	       && TREE_CODE (to) == SSA_NAME
	       && ssa_undefined_value_p (to, false))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-undefined) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (undefined)\n");
	    }
	  to = from;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

set_and_exit:
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
	 PR82320 for a testcase where we'd otherwise not terminate iteration.  */
      && !(curr_undefined
	   && TREE_CODE (to) == SSA_NAME
	   && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types operand_equal_p
	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && known_eq (coff, toff)))
    {
      if (to != from
	  && currval != VN_TOP
	  && !curr_undefined
	  /* We do not want to allow lattice transitions from one value
	     to another since that may lead to not terminating iteration
	     (see PR95049).  Since there's no convenient way to check
	     for the allowed transition of VAL -> PHI (loop entry value,
	     same on two PHIs, to same PHI result) we restrict the check
	     to invariants.  */
	  && curr_invariant
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " forced VARYING");
	  to = from;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
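set_ssa_val_to above is the single choke point for lattice transitions, and its rules (the first real value wins, value-to-value changes force VARYING, distinct undefined names compare equal) are what guarantee the iteration terminates.  The following is a minimal standalone sketch of that discipline; the Kind and Value types and the set_value function are invented for illustration and are not GCC's API.

#include <cassert>

enum class Kind { Top, Constant, Varying };

struct Value
{
  Kind kind;
  long cst;   /* Meaningful only for Kind::Constant.  */
};

/* Transitions go monotonically downwards: TOP -> CONSTANT -> VARYING.
   Re-setting the same constant is a no-op; replacing one constant by a
   different one drops to VARYING so iteration terminates (compare the
   PR95049 comment in set_ssa_val_to above).  */
static bool
set_value (Value &lattice, Value to)
{
  if (lattice.kind == Kind::Top)
    {
      lattice = to;               /* First real value: changed.  */
      return true;
    }
  if (lattice.kind == Kind::Varying)
    return false;                 /* Already at the bottom.  */
  if (to.kind == Kind::Constant && lattice.cst == to.cst)
    return false;                 /* Same value: no change.  */
  lattice.kind = Kind::Varying;   /* Value-to-value change: pessimize.  */
  return true;
}

int
main ()
{
  Value v = { Kind::Top, 0 };
  assert (set_value (v, { Kind::Constant, 42 }));   /* TOP -> 42 */
  assert (!set_value (v, { Kind::Constant, 42 }));  /* stable */
  assert (set_value (v, { Kind::Constant, 7 }));    /* 42 -> VARYING */
  assert (v.kind == Kind::Varying);
}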
/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}

/* Lookup a value for OP in type WIDE_TYPE where the value in the type
   of OP is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
				       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (allow_truncate && TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
	{
	  tem = gimple_assign_rhs1 (def);
	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
	    {
	      if (TREE_CODE (tem) == SSA_NAME)
		tem = vn_valueize (tem);
	      return tem;
	    }
	}
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}
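valueized_wider_op feeds the CASE_CONVERT pattern in visit_nary_op below.  A hypothetical C input of the kind that pattern targets (the function name is invented, not taken from GCC's testsuite) looks like this:

/* The narrow addition 'a + b' is also available as a wide addition, so
   the widening conversion of 'narrow' can be value-numbered to the
   already-computed wide result masked with 0xffff (the zero-extension
   case), instead of keeping a second addition.  */
unsigned int
widened_redundancy (unsigned short a, unsigned short b)
{
  unsigned int wide = (unsigned int) a + (unsigned int) b;
  unsigned short narrow = a + b;
  return wide ^ (unsigned int) narrow;
}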
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
	 substitute the result from some earlier sign-changed or widened
	 operation.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  /* We only handle sign-changes, zero-extension -> & mask or
	     sign-extension if we know the inner operation doesn't
	     overflow.  */
	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
	{
	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (def
	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
		  || gimple_assign_rhs_code (def) == MINUS_EXPR
		  || gimple_assign_rhs_code (def) == MULT_EXPR))
	    {
	      tree ops[3] = {};
	      /* When requiring a sign-extension we cannot model a
		 previous truncation with a single op so don't bother.  */
	      bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
	      /* Either we have the op widened available.  */
	      ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
					   allow_truncate);
	      if (ops[0])
		ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
					     allow_truncate);
	      if (ops[0] && ops[1])
		{
		  ops[0] = vn_nary_op_lookup_pieces
		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
		  /* We have wider operation available.  */
		  if (ops[0]
		      /* If the leader is a wrapping operation we can
			 insert it for code hoisting w/o introducing
			 undefined overflow.  If it is not it has to
			 be available.  See PR86554.  */
		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
			  || (rpo_avail && vn_context_bb
			      && rpo_avail->eliminate_avail (vn_context_bb,
							     ops[0]))))
		    {
		      unsigned lhs_prec = TYPE_PRECISION (type);
		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
		      if (lhs_prec == rhs_prec
			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
			{
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    NOP_EXPR, type, ops[0]);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		      else
			{
			  tree mask = wide_int_to_tree
			      (type, wi::mask (rhs_prec, false, lhs_prec));
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    BIT_AND_EXPR,
						    TREE_TYPE (lhs),
						    ops[0], mask);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		    }
		}
	    }
	}
      break;
    case BIT_AND_EXPR:
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
	  && default_vn_walk_kind != VN_NOWALK
	  && CHAR_BIT == 8
	  && BITS_PER_UNIT == 8
	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	  && !integer_all_onesp (gimple_assign_rhs2 (stmt))
	  && !integer_zerop (gimple_assign_rhs2 (stmt)))
	{
	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (ass
	      && !gimple_has_volatile_ops (ass)
	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
	    {
	      tree last_vuse = gimple_vuse (ass);
	      tree op = gimple_assign_rhs1 (ass);
	      tree result = vn_reference_lookup (op, gimple_vuse (ass),
						 default_vn_walk_kind,
						 NULL, true, &last_vuse,
						 gimple_assign_rhs2 (stmt));
	      if (result
		  && useless_type_conversion_p (TREE_TYPE (result),
						TREE_TYPE (op)))
		return set_ssa_val_to (lhs, result);
	    }
	}
      break;
    case TRUNC_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* Fallthru.  */
    case RDIV_EXPR:
    case MULT_EXPR:
      /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tree rhs[2];
	  rhs[0] = rhs1;
	  rhs[1] = gimple_assign_rhs2 (stmt);
	  for (unsigned i = 0; i <= 1; ++i)
	    {
	      unsigned j = i == 0 ? 1 : 0;
	      tree ops[2];
	      gimple_match_op match_op (gimple_match_cond::UNCOND,
					NEGATE_EXPR, type, rhs[i]);
	      ops[i] = vn_nary_build_or_lookup_1 (&match_op, false);
	      ops[j] = rhs[j];
	      if (ops[i]
		  && (ops[0] = vn_nary_op_lookup_pieces (2, code,
							 type, ops, NULL)))
		{
		  gimple_match_op match_op (gimple_match_cond::UNCOND,
					    NEGATE_EXPR, type, ops[0]);
		  result = vn_nary_build_or_lookup (&match_op);
		  if (result)
		    {
		      bool changed = set_ssa_val_to (lhs, result);
		      vn_nary_op_insert_stmt (stmt, result);
		      return changed;
		    }
		}
	    }
	}
      break;
    default:
      break;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
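The MULT_EXPR/RDIV_EXPR arm above matches a negated operand against an existing product or quotient.  A hypothetical input it covers (assuming sign-dependent rounding is not honored for the type; the function name is invented):

/* Once 'v = a * b' has a value number, '(-a) * b' can be looked up as
   the negation of v instead of being kept as an independent
   multiplication, which later elimination can exploit.  */
double
negated_product (double a, double b)
{
  double v = a * b;
  double w = (-a) * b;   /* value-numbered as -(a * b), i.e. -v */
  return v + w;
}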
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
	/* If the call was discovered to be pure or const reflect
	   that as far as possible.  */
	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
	{
	  /* If we value numbered an indirect call's function to
	     one not clobbering memory, value number its VDEF to its
	     VUSE.  */
	  tree fn = gimple_call_fn (stmt);
	  if (fn && TREE_CODE (fn) == SSA_NAME)
	    {
	      fn = SSA_VAL (fn);
	      if (TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
		      & (ECF_CONST | ECF_PURE)))
		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
	    }
	  changed |= set_ssa_val_to (vdef, vdef_val);
	}
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
	 shared_lookup_references are still original so we can re-use
	 them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->punned = vr1.punned;
      vr2->set = vr1.set;
      vr2->base_set = vr1.base_set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      vr2->value_id = 0;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							  INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}
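visit_reference_op_call lets two calls that are pure or const, with the same operands and the same incoming memory state, share one value number.  A hypothetical input (the const attribute is GCC's; the function name is invented):

extern int lookup_const (int) __attribute__ ((const));

int
redundant_calls (int x)
{
  int a = lookup_const (x);
  int b = lookup_const (x);  /* same function, operands and vuse */
  return a - b;              /* b shares a's value number, so this
				can later fold to 0 */
}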
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;
  vn_reference_t res;

  last_vuse = gimple_vuse (stmt);
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, &res, true, &last_vuse);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* Avoid the type punning in case the result mode has padding where
	 the op we lookup has not.  */
      if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
		    GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
	result = NULL_TREE;
      else
	{
	  /* We will be setting the value number of lhs to the value number
	     of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	     So first simplify and lookup this expression to see if it
	     is already available.  */
	  gimple_match_op res_op (gimple_match_cond::UNCOND,
				  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
	  result = vn_nary_build_or_lookup (&res_op);
	  if (result
	      && TREE_CODE (result) == SSA_NAME
	      && VN_INFO (result)->needs_insertion)
	    /* Track whether this is the canonical expression for different
	       typed loads.  We use that as a stopgap measure for code
	       hoisting when dealing with floating point loads.  */
	    res->punned = true;
	}

      /* When building the conversion fails avoid inserting the reference
	 again.  */
      if (!result)
	return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
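The VIEW_CONVERT_EXPR handling above is what makes union type-punning value-numberable: the lookup is keyed on offset and size, and the type mismatch is papered over by a conversion.  A hypothetical input (the union and function names are invented):

union pun { float f; unsigned int i; };

/* The load of u.i reads the same bytes the store of x wrote; the
   value-numbering result carries float type, so a VIEW_CONVERT_EXPR
   to unsigned int is built, or the lookup is given up when the mode
   precisions would make the punning unsafe.  */
unsigned int
float_bits (float x)
{
  union pun u;
  u.f = x;
  return u.i;
}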
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
			   || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
	{
	  /* If the TBAA state isn't compatible for downstream reads
	     we cannot value-number the VDEFs the same.  */
	  ao_ref lhs_ref;
	  ao_ref_init (&lhs_ref, lhs);
	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
	  if ((vnresult->set != set
	       && ! alias_set_subset_of (set, vnresult->set))
	      || (vnresult->base_set != base_set
		  && ! alias_set_subset_of (base_set, vnresult->base_set)))
	    resultsame = false;
	}
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
	  if (vnresult)
	    {
	      VN_INFO (vdef)->visited = true;
	      return set_ssa_val_to (vdef, vnresult->result_vdef);
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_insert (assign, lhs, vuse, vdef);
	}
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
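visit_reference_op_store detects stores that do not change the memory state: if the location is already known to hold the stored value (with compatible TBAA state), the store's VDEF is value-numbered to its VUSE.  A hypothetical input (names invented):

int cell;

void
store_same_value_twice (int v)
{
  cell = v;
  cell = v;  /* matches the earlier store: the vdef gets SSA_VAL (vuse),
		marking the second store as not changing memory */
}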
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5175, __FUNCTION__, (SSA_NAME)))->base.asm_written_flag
) 5176 return set_ssa_val_to (PHI_RESULT (phi)get_def_from_ptr (gimple_phi_result_ptr (phi)), PHI_RESULT (phi)get_def_from_ptr (gimple_phi_result_ptr (phi))); 5177 5178 /* We track whether a PHI was CSEd to to avoid excessive iterations 5179 that would be necessary only because the PHI changed arguments 5180 but not value. */ 5181 if (!inserted) 5182 gimple_set_plf (phi, GF_PLF_1, false); 5183 5184 /* See if all non-TOP arguments have the same value. TOP is 5185 equivalent to everything, so we can ignore it. */ 5186 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)for ((ei) = ei_start_1 (&((gimple_bb (phi)->preds))); ei_cond
((ei), &(e)); ei_next (&(ei)))
5187 if (e->flags & EDGE_EXECUTABLE) 5188 { 5189 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e)gimple_phi_arg_def (((phi)), ((e)->dest_idx)); 5190 5191 ++n_executable; 5192 if (TREE_CODE (def)((enum tree_code) (def)->base.code) == SSA_NAME) 5193 { 5194 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)) 5195 def = SSA_VAL (def); 5196 if (e->flags & EDGE_DFS_BACK) 5197 backedge_val = def; 5198 } 5199 if (!(e->flags & EDGE_DFS_BACK)) 5200 seen_non_backedge = true; 5201 if (def == VN_TOP) 5202 ; 5203 /* Ignore undefined defs for sameval but record one. */ 5204 else if (TREE_CODE (def)((enum tree_code) (def)->base.code) == SSA_NAME 5205 && ! virtual_operand_p (def) 5206 && ssa_undefined_value_p (def, false)) 5207 seen_undef = def; 5208 else if (sameval == VN_TOP) 5209 sameval = def; 5210 else if (!expressions_equal_p (def, sameval)) 5211 { 5212 /* We know we're arriving only with invariant addresses here, 5213 try harder comparing them. We can do some caching here 5214 which we cannot do in expressions_equal_p. */ 5215 if (TREE_CODE (def)((enum tree_code) (def)->base.code) == ADDR_EXPR 5216 && TREE_CODE (sameval)((enum tree_code) (sameval)->base.code) == ADDR_EXPR 5217 && sameval_base != (void *)-1) 5218 { 5219 if (!sameval_base) 5220 sameval_base = get_addr_base_and_unit_offset 5221 (TREE_OPERAND (sameval, 0)(*((const_cast<tree*> (tree_operand_check ((sameval), (
0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5221, __FUNCTION__)))))
, &soff); 5222 if (!sameval_base) 5223 sameval_base = (tree)(void *)-1; 5224 else if ((get_addr_base_and_unit_offset 5225 (TREE_OPERAND (def, 0)(*((const_cast<tree*> (tree_operand_check ((def), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5225, __FUNCTION__)))))
, &doff) == sameval_base) 5226 && known_eq (soff, doff)(!maybe_ne (soff, doff))) 5227 continue; 5228 } 5229 sameval = NULL_TREE(tree) nullptr; 5230 break; 5231 } 5232 } 5233 5234 /* If the value we want to use is flowing over the backedge and we 5235 should take it as VARYING but it has a non-VARYING value drop to 5236 VARYING. 5237 If we value-number a virtual operand never value-number to the 5238 value from the backedge as that confuses the alias-walking code. 5239 See gcc.dg/torture/pr87176.c. If the value is the same on a 5240 non-backedge everything is OK though. */ 5241 bool visited_p; 5242 if ((backedge_val 5243 && !seen_non_backedge 5244 && TREE_CODE (backedge_val)((enum tree_code) (backedge_val)->base.code) == SSA_NAME 5245 && sameval == backedge_val 5246 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)(tree_check ((backedge_val), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5246, __FUNCTION__, (SSA_NAME)))->base.public_flag
5247 || SSA_VAL (backedge_val) != backedge_val)) 5248 /* Do not value-number a virtual operand to sth not visited though 5249 given that allows us to escape a region in alias walking. */ 5250 || (sameval 5251 && TREE_CODE (sameval)((enum tree_code) (sameval)->base.code) == SSA_NAME 5252 && !SSA_NAME_IS_DEFAULT_DEF (sameval)(tree_check ((sameval), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5252, __FUNCTION__, (SSA_NAME)))->base.default_def_flag
5253 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)(tree_check ((sameval), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5253, __FUNCTION__, (SSA_NAME)))->base.public_flag
5254 && (SSA_VAL (sameval, &visited_p), !visited_p))) 5255 /* Note this just drops to VARYING without inserting the PHI into 5256 the hashes. */ 5257 result = PHI_RESULT (phi)get_def_from_ptr (gimple_phi_result_ptr (phi)); 5258 /* If none of the edges was executable keep the value-number at VN_TOP, 5259 if only a single edge is exectuable use its value. */ 5260 else if (n_executable <= 1) 5261 result = seen_undef ? seen_undef : sameval; 5262 /* If we saw only undefined values and VN_TOP use one of the 5263 undefined values. */ 5264 else if (sameval == VN_TOP) 5265 result = seen_undef ? seen_undef : sameval; 5266 /* First see if it is equivalent to a phi node in this block. We prefer 5267 this as it allows IV elimination - see PRs 66502 and 67167. */ 5268 else if ((result = vn_phi_lookup (phi, backedges_varying_p))) 5269 { 5270 if (!inserted 5271 && TREE_CODE (result)((enum tree_code) (result)->base.code) == SSA_NAME 5272 && gimple_code (SSA_NAME_DEF_STMT (result)(tree_check ((result), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5272, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
) == GIMPLE_PHI) 5273 { 5274 gimple_set_plf (SSA_NAME_DEF_STMT (result)(tree_check ((result), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5274, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
, GF_PLF_1, true); 5275 if (dump_file && (dump_flags & TDF_DETAILS)) 5276 { 5277 fprintf (dump_file, "Marking CSEd to PHI node "); 5278 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result)(tree_check ((result), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-ssa-sccvn.c"
, 5278, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
, 5279 0, TDF_SLIM); 5280 fprintf (dump_file, "\n"); 5281 } 5282 } 5283 } 5284 /* If all values are the same use that, unless we've seen undefined 5285 values as well and the value isn't constant. 5286 CCP/copyprop have the same restriction to not remove uninit warnings. */ 5287 else if (sameval 5288 && (! seen_undef || is_gimple_min_invariant (sameval))) 5289 result = sameval; 5290 else 5291 { 5292 result = PHI_RESULT (phi)get_def_from_ptr (gimple_phi_result_ptr (phi)); 5293 /* Only insert PHIs that are varying, for constant value numbers 5294 we mess up equivalences otherwise as we are only comparing 5295 the immediate controlling predicates. */ 5296 vn_phi_insert (phi, result, backedges_varying_p); 5297 if (inserted) 5298 *inserted = true; 5299 } 5300 5301 return set_ssa_val_to (PHI_RESULT (phi)get_def_from_ptr (gimple_phi_result_ptr (phi)), result); 5302} 5303 5304/* Try to simplify RHS using equivalences and constant folding. */ 5305 5306static tree 5307try_to_simplify (gassign *stmt) 5308{ 5309 enum tree_code code = gimple_assign_rhs_code (stmt); 5310 tree tem; 5311 5312 /* For stores we can end up simplifying a SSA_NAME rhs. Just return 5313 in this case, there is no point in doing extra work. */ 5314 if (code == SSA_NAME) 5315 return NULL_TREE(tree) nullptr; 5316 5317 /* First try constant folding based on our current lattice. */ 5318 mprts_hook = vn_lookup_simplify_result; 5319 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize); 5320 mprts_hook = NULLnullptr; 5321 if (tem 5322 && (TREE_CODE (tem)((enum tree_code) (tem)->base.code) == SSA_NAME 5323 || is_gimple_min_invariant (tem))) 5324 return tem; 5325 5326 return NULL_TREE(tree) nullptr; 5327} 5328 5329/* Visit and value number STMT, return true if the value number 5330 changed. */ 5331 5332static bool 5333visit_stmt (gimple *stmt, bool backedges_varying_p = false) 5334{ 5335 bool changed = false; 5336 5337 if (dump_file && (dump_flags & TDF_DETAILS)) 5338 { 5339 fprintf (dump_file, "Value numbering stmt = "); 5340 print_gimple_stmt (dump_file, stmt, 0); 5341 } 5342 5343 if (gimple_code (stmt) == GIMPLE_PHI) 5344 changed = visit_phi (stmt, NULLnullptr, backedges_varying_p); 5345 else if (gimple_has_volatile_ops (stmt)) 5346 changed = defs_to_varying (stmt); 5347 else if (gassign *ass = dyn_cast <gassign *> (stmt)) 5348 { 5349 enum tree_code code = gimple_assign_rhs_code (ass); 5350 tree lhs = gimple_assign_lhs (ass); 5351 tree rhs1 = gimple_assign_rhs1 (ass); 5352 tree simplified; 5353 5354 /* Shortcut for copies. Simplifying copies is pointless, 5355 since we copy the expression and value they represent. */ 5356 if (code == SSA_NAME 5357 && TREE_CODE (lhs)((enum tree_code) (lhs)->base.code) == SSA_NAME) 5358 { 5359 changed = visit_copy (lhs, rhs1); 5360 goto done; 5361 } 5362 simplified = try_to_simplify (ass); 5363 if (simplified) 5364 { 5365 if (dump_file && (dump_flags & TDF_DETAILS)) 5366 { 5367 fprintf (dump_file, "RHS "); 5368 print_gimple_expr (dump_file, ass, 0); 5369 fprintf (dump_file, " simplified to "); 5370 print_generic_expr (dump_file, simplified); 5371 fprintf (dump_file, "\n"); 5372 } 5373 } 5374 /* Setting value numbers to constants will occasionally 5375 screw up phi congruence because constants are not 5376 uniquely associated with a single ssa name that can be 5377 looked up. 
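For visit_phi, the common payoff is a PHI whose executable arguments all carry one value: sameval survives the argument walk and the PHI result is value-numbered to it.  A hypothetical input (names invented):

int
join_same_value (int c, int x)
{
  int r;
  if (c)
    r = x;   /* first predecessor: argument x */
  else
    r = x;   /* second predecessor: argument x */
  /* r = PHI <x, x>: both arguments agree, so visit_phi sets the
     value number of r to x and the PHI becomes redundant.  */
  return r;
}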
/* Visit and value number STMT, return true if the value number
   changed.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
	 since we copy the expression and value they represent.  */
      if (code == SSA_NAME
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, rhs1);
	  goto done;
	}
      simplified = try_to_simplify (ass);
      if (simplified)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "RHS ");
	      print_gimple_expr (dump_file, ass, 0);
	      fprintf (dump_file, " simplified to ");
	      print_generic_expr (dump_file, simplified);
	      fprintf (dump_file, "\n");
	    }
	}
      /* Setting value numbers to constants will occasionally
	 screw up phi congruence because constants are not
	 uniquely associated with a single ssa name that can be
	 looked up.  */
      if (simplified
	  && is_gimple_min_invariant (simplified)
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = set_ssa_val_to (lhs, simplified);
	  goto done;
	}
      else if (simplified
	       && TREE_CODE (simplified) == SSA_NAME
	       && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, simplified);
	  goto done;
	}

      if ((TREE_CODE (lhs) == SSA_NAME
	   /* We can substitute SSA_NAMEs that are live over
	      abnormal edges with their constant value.  */
	   && !(gimple_assign_copy_p (ass)
		&& is_gimple_min_invariant (rhs1))
	   && !(simplified
		&& is_gimple_min_invariant (simplified))
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	  /* Stores or copies from SSA_NAMEs that are live over
	     abnormal edges are a problem.  */
	  || (code == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
	       || DECL_P (lhs))
	changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
	{
	  if ((gimple_assign_copy_p (ass)
	       && is_gimple_min_invariant (rhs1))
	      || (simplified
		  && is_gimple_min_invariant (simplified)))
	    {
	      if (simplified)
		changed = set_ssa_val_to (lhs, simplified);
	      else
		changed = set_ssa_val_to (lhs, rhs1);
	    }
	  else
	    {
	      /* Visit the original statement.  */
	      switch (vn_get_stmt_kind (ass))
		{
		case VN_NARY:
		  changed = visit_nary_op (lhs, ass);
		  break;
		case VN_REFERENCE:
		  changed = visit_reference_op_load (lhs, rhs1, ass);
		  break;
		default:
		  changed = defs_to_varying (ass);
		  break;
		}
	    }
	}
      else
	changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Try constant folding based on our current lattice.  */
	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
							    vn_valueize);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "call ");
		  print_gimple_expr (dump_file, call_stmt, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified);
		  fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified))
	    {
	      changed = set_ssa_val_to (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      changed = defs_to_varying (call_stmt);
	      goto done;
	    }
	}

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
	{
	  fn = SSA_VAL (fn);
	  if (TREE_CODE (fn) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
	}
      if (!gimple_call_internal_p (call_stmt)
	  && (/* Calls to the same function with the same vuse
		 and the same operands do not necessarily return the same
		 value, unless they're pure or const.  */
	      ((gimple_call_flags (call_stmt) | extra_fnflags)
	       & (ECF_PURE | ECF_CONST))
	      /* If calls have a vdef, subsequent calls won't have
		 the same incoming vuse.  So, if 2 calls with vdef have the
		 same vuse, we know they're not subsequent.
		 We can value number 2 calls to the same function with the
		 same vuse and the same operands which are not subsequent
		 the same, because there is no code in the program that can
		 compare the 2 values...  */
	      || (gimple_vdef (call_stmt)
		  /* ... unless the call returns a pointer which does
		     not alias with anything else.  In which case the
		     information that the values are distinct is encoded
		     in the IL.  */
		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
		  /* Only perform the following when being called from PRE
		     which embeds tail merging.  */
		  && default_vn_walk_kind == VN_WALK)))
	changed = visit_reference_op_call (lhs, call_stmt);
      else
	changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
done:
  return changed;
}

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}
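visit_stmt above triages statements in a fixed order: PHIs first, statements with volatile operands pessimized before anything else, then assignments and calls.  A standalone sketch of that order, with an invented StmtKind enum standing in for GCC's gimple class hierarchy:

#include <cstdio>

enum class StmtKind { Phi, HasVolatileOps, Assign, Call, Other };

/* Returns the name of the handler visit_stmt would route to; the
   Assign case further splits into copies, nary ops, loads and stores,
   and calls may still fall back to defs_to_varying, as shown above.  */
static const char *
dispatch (StmtKind kind)
{
  switch (kind)
    {
    case StmtKind::Phi:            return "visit_phi";
    case StmtKind::HasVolatileOps: return "defs_to_varying";
    case StmtKind::Assign:         return "visit_copy / visit_nary_op / "
					  "visit_reference_op_{load,store}";
    case StmtKind::Call:           return "visit_reference_op_call"
					  " or defs_to_varying";
    default:                       return "defs_to_varying";
    }
}

int
main ()
{
  std::printf ("%s\n", dispatch (StmtKind::HasVolatileOps));
}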
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the maximum constant value id we have ever seen.  */

unsigned int
get_max_constant_value_id (void)
{
  return -next_constant_value_id;
}
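The id allocators below hand out ids from opposite ends of the integer range: regular value ids count up from a positive start, constant value ids count down from -1, so the sign alone distinguishes the two and the maximum is recovered by negation.  A standalone model with invented variable names:

#include <cassert>

static unsigned int next_value_id_model = 1;
static int next_constant_value_id_model = -1;

static unsigned int next_id () { return next_value_id_model++; }
static int next_constant_id () { return next_constant_value_id_model--; }

int
main ()
{
  assert (next_id () == 1);
  assert (next_constant_id () == -1);
  assert (next_constant_id () == -2);
  /* Like get_max_constant_value_id above: negating the (negative)
     counter yields one past the last constant id handed out.  */
  assert (-next_constant_value_id_model == 3);
}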
/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  gcc_checking_assert ((int)next_value_id > 0);
  return next_value_id++;
}

/* Return the next unique value id for constants.  */

unsigned int
get_next_constant_value_id (void)
{
  gcc_checking_assert (next_constant_value_id < 0);
  return next_constant_value_id--;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)