Bug Summary

File: build/gcc/vec.h
Warning: line 815, column 10
Called C++ object pointer is null

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name ipa-sra.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-ncjKoB.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.c

1/* Interprocedural scalar replacement of aggregates
2 Copyright (C) 2008-2021 Free Software Foundation, Inc.
3
4 Contributed by Martin Jambor <mjambor@suse.cz>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22/* IPA-SRA is an interprocedural pass that removes unused function return
23 values (turning functions returning a value which is never used into void
24 functions) and removes unused function parameters. It can also replace an
25 aggregate parameter by a set of other parameters representing part of the
26 original, turning those passed by reference into new ones which pass the
27 value directly.
28
29 The pass is a true IPA one, which means that it works in three stages in
30 order to be able to take advantage of LTO. First, summaries about functions
31 and each call are generated. Function summaries (often called call graph
32 node summaries) contain mainly information about which parameters are
33 potential transformation candidates and which bits of candidates are
34 accessed. We differentiate between accesses done as a part of a call
35 statement (which might be not necessary if the callee is also transformed)
36 and others (which are mandatory). Call summaries (often called call graph
37 edge summaries) contain information about which function formal parameters
38 feed into which actual call arguments so that if two parameters are only
39 used in a sum which is then passed to another function which then however
40 does not use this parameter, all three parameters of the two functions can
41 be eliminated. Edge summaries also have flags whether the return value is
42 used or if it is only returned in the caller too. In LTO mode these
43 summaries are then streamed to the object file in the compilation phase and
44 streamed back in in the WPA analysis stage.
45
46 The interprocedural analysis phase traverses the graph in topological order
47 in two sweeps, one in each direction. First, from callees to callers for
48 parameter removal and splitting. Each strongly-connected component is
49 processed iteratively until the situation in it stabilizes. The pass from
50 callers to callees is then carried out to remove unused return values in a
51 very similar fashion.
52
53 Because parameter manipulation has big implications for call redirection
54 which is done only after all call graph nodes materialize, the
55 transformation phase is not part of this pass but is carried out by the
56 clone materialization and edge redirection itself, see comments in
57 ipa-param-manipulation.h for more details. */
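
/* [Editor's note] A minimal, hypothetical sketch of the transformations the
   comment above describes; the names are invented, real clones are created
   and named by the pass itself:

     struct S { int a, b, c; };
     static int get_a (struct S *p) { return p->a; }

   Only p->a is read, so IPA-SRA can replace the by-reference aggregate with
   a by-value scalar, roughly

     static int get_a_isra (int a) { return a; }

   and rewrite callers to load p->a and pass it directly.  Likewise, a
   function whose return value no caller reads can be turned into one
   returning void.  */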
58
59
60
61#include "config.h"
62#include "system.h"
63#include "coretypes.h"
64#include "backend.h"
65#include "tree.h"
66#include "gimple.h"
67#include "predict.h"
68#include "tree-pass.h"
69#include "ssa.h"
70#include "cgraph.h"
71#include "gimple-pretty-print.h"
72#include "alias.h"
73#include "tree-eh.h"
74#include "gimple-iterator.h"
75#include "gimple-walk.h"
76#include "tree-dfa.h"
77#include "tree-sra.h"
78#include "alloc-pool.h"
79#include "symbol-summary.h"
80#include "dbgcnt.h"
81#include "tree-inline.h"
82#include "ipa-utils.h"
83#include "builtins.h"
84#include "cfganal.h"
85#include "tree-streamer.h"
86#include "internal-fn.h"
87#include "symtab-clones.h"
88
89static void ipa_sra_summarize_function (cgraph_node *);
90
91/* Bits used to track size of an aggregate in bytes interprocedurally. */
92#define ISRA_ARG_SIZE_LIMIT_BITS 16
93#define ISRA_ARG_SIZE_LIMIT (1 << ISRA_ARG_SIZE_LIMIT_BITS)
94/* How many parameters can feed into a call actual argument and still be
95 tracked. */
96#define IPA_SRA_MAX_PARAM_FLOW_LEN 7
97
98/* Structure describing accesses to a specific portion of an aggregate
99 parameter, as given by the offset and size. Any smaller accesses that occur
100 within a function that fall within another access form a tree. The pass
101 cannot analyze parameters with only partially overlapping accesses. */
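
/* [Editor's note] A hedged sketch of the nesting rule above.  For a
   hypothetical parameter type

     struct outer { struct inner { int x; int y; } in; int z; };

   an access covering in (offset 0, size 8 bytes) can contain child accesses
   to in.x (offset 0, size 4) and in.y (offset 4, size 4), forming a tree.
   An access covering bytes 2..5 would only partially overlap in.x and in.y,
   which the pass cannot analyze, so the parameter would be disqualified.  */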
102
103struct GTY(()) param_access
104{
105 /* Type that a potential replacement should have. This field only has
106 meaning in the summary building and transformation phases, when it is
107 reconstructed from the body. Must not be touched in IPA analysis
108 stage. */
109 tree type;
110
111 /* Alias reference type to be used in MEM_REFs when adjusting caller
112 arguments. */
113 tree alias_ptr_type;
114
115 /* Values returned by get_ref_base_and_extent but converted to bytes and
116 stored as unsigned ints. */
117 unsigned unit_offset;
118 unsigned unit_size : ISRA_ARG_SIZE_LIMIT_BITS;
119
120 /* Set once we are sure that the access will really end up in a potentially
121 transformed function - initially not set for portions of formal parameters
122 that are only used as actual function arguments passed to callees. */
123 unsigned certain : 1;
124 /* Set if the access has a reversed scalar storage order. */
125 unsigned reverse : 1;
126};
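
/* [Editor's note] Unlike get_ref_base_and_extent, which works in bits, the
   unit_offset and unit_size fields above are in bytes.  For the hypothetical
   in.y access sketched earlier (bit offset 32, bit size 32), the stored
   values would be unit_offset == 4 and unit_size == 4.  */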
127
128/* This structure has the same purpose as the one above and additionally it
129 contains some fields that are only necessary in the summary generation
130 phase. */
131
132struct gensum_param_access
133{
134 /* Values returned by get_ref_base_and_extent. */
135 HOST_WIDE_INT offset;
136 HOST_WIDE_INT size;
137
138 /* If this access has any children (in terms of the definition above), this
139 points to the first one. */
140 struct gensum_param_access *first_child;
141 /* In intraprocedural SRA, pointer to the next sibling in the access tree as
142 described above. */
143 struct gensum_param_access *next_sibling;
144
145 /* Type that a potential replacement should have. This field only has
146 meaning in the summary building and transformation phases, when it is
147 reconstructed from the body. Must not be touched in IPA analysis
148 stage. */
149 tree type;
150 /* Alias reference type to be used in MEM_REFs when adjusting caller
151 arguments. */
152 tree alias_ptr_type;
153
154 /* Have there been writes to or reads from this exact location except for as
155 arguments to a function call that can be tracked. */
156 bool nonarg;
157
158 /* Set if the access has a reversed scalar storage order. */
159 bool reverse;
160};
161
162/* Summary describing a parameter in the IPA stages. */
163
164struct GTY(()) isra_param_desc
165{
166 /* List of access representatives to the parameters, sorted according to
167 their offset. */
168 vec <param_access *, va_gc> *accesses;
169
170 /* Unit size limit of total size of all replacements. */
171 unsigned param_size_limit : ISRA_ARG_SIZE_LIMIT_BITS;
172 /* Sum of unit sizes of all certain replacements. */
173 unsigned size_reached : ISRA_ARG_SIZE_LIMIT_BITS;
174
175 /* A parameter that is used only in call arguments and can be removed if all
176 concerned actual arguments are removed. */
177 unsigned locally_unused : 1;
178 /* An aggregate that is a candidate for breaking up or complete removal. */
179 unsigned split_candidate : 1;
180 /* Is this a parameter passing stuff by reference? */
181 unsigned by_ref : 1;
182};
183
184/* Structure used when generating summaries that describes a parameter. */
185
186struct gensum_param_desc
187{
188 /* Roots of param_accesses. */
189 gensum_param_access *accesses;
190 /* Number of accesses in the access tree rooted in field accesses. */
191 unsigned access_count;
192
193 /* If the below is non-zero, this is the number of uses as actual arguments. */
194 int call_uses;
195 /* Number of times this parameter has been directly passed to. */
196 unsigned ptr_pt_count;
197
198 /* Size limit of total size of all replacements. */
199 unsigned param_size_limit;
200 /* Sum of sizes of nonarg accesses. */
201 unsigned nonarg_acc_size;
202
203 /* A parameter that is used only in call arguments and can be removed if all
204 concerned actual arguments are removed. */
205 bool locally_unused;
206 /* An aggregate that is a candidate for breaking up or a pointer passing data
207 by reference that is a candidate for being converted to a set of
208 parameters passing those data by value. */
209 bool split_candidate;
210 /* Is this a parameter passing stuff by reference? */
211 bool by_ref;
212
213 /* The number of this parameter as they are ordered in function decl. */
214 int param_number;
215 /* For parameters passing data by reference, this is parameter index to
216 compute indices to bb_dereferences. */
217 int deref_index;
218};
219
220/* Properly deallocate accesses of DESC. TODO: Since this data structure is
221 not in GC memory, this is not necessary and we can consider removing the
222 function. */
223
224static void
225free_param_decl_accesses (isra_param_desc *desc)
226{
227 unsigned len = vec_safe_length (desc->accesses);
228 for (unsigned i = 0; i < len; ++i)
229 ggc_free ((*desc->accesses)[i]);
230 vec_free (desc->accesses);
231}
232
233/* Class used to convey information about functions from the
234 intra-procedural analysis stage to the inter-procedural one. */
235
236class GTY((for_user)) isra_func_summary
237{
238public:
239 /* Initialize the object. */
240
241 isra_func_summary ()
242 : m_parameters (NULL), m_candidate (false), m_returns_value (false),
243 m_return_ignored (false), m_queued (false)
244 {}
245
246 /* Destroy m_parameters. */
247
248 ~isra_func_summary ();
249
250 /* Mark the function as not a candidate for any IPA-SRA transformation.
251 Return true if it was a candidate until now. */
252
253 bool zap ();
254
255 /* Vector of parameter descriptors corresponding to the function being
256 analyzed. */
257 vec<isra_param_desc, va_gc> *m_parameters;
258
259 /* Whether the node is even a candidate for any IPA-SRA transformation at
260 all. */
261 unsigned m_candidate : 1;
262
263 /* Whether the original function returns any value. */
264 unsigned m_returns_value : 1;
265
266 /* Set to true if all call statements do not actually use the returned
267 value. */
268
269 unsigned m_return_ignored : 1;
270
271 /* Whether the node is already queued in IPA SRA stack during processing of
272 call graph SCCs. */
273
274 unsigned m_queued : 1;
275};
276
277/* Clean up and deallocate what isra_func_summary points to. TODO: Since this data
278 structure is not in GC memory, this is not necessary and we can consider
279 removing the destructor. */
280
281isra_func_summary::~isra_func_summary ()
282{
283 unsigned len = vec_safe_length (m_parameters);
284 for (unsigned i = 0; i < len; ++i)
285 free_param_decl_accesses (&(*m_parameters)[i]);
286 vec_free (m_parameters);
287}
288
289
290/* Mark the function as not a candidate for any IPA-SRA transformation. Return
291 true if it was a candidate until now. */
292
293bool
294isra_func_summary::zap ()
295{
296 bool ret = m_candidate;
297 m_candidate = false;
298
299 unsigned len = vec_safe_length (m_parameters);
300 for (unsigned i = 0; i < len; ++i)
301 free_param_decl_accesses (&(*m_parameters)[i]);
302 vec_free (m_parameters);
303
304 return ret;
305}
306
307/* Structure to describe which formal parameters feed into a particular actual
308 argument. */
309
310struct isra_param_flow
311{
312 /* Number of elements in array inputs that contain valid data. */
313 char length;
314 /* Indices of formal parameters that feed into the described actual argument.
315 If aggregate_pass_through or pointer_pass_through below are true, it must
316 contain exactly one element which is passed through from a formal
317 parameter of the given number. Otherwise, the array contains indices of
318 callee's formal parameters which are used to calculate value of this
319 actual argument. */
320 unsigned char inputs[IPA_SRA_MAX_PARAM_FLOW_LEN];
321
322 /* Offset within the formal parameter. */
323 unsigned unit_offset;
324 /* Size of the portion of the formal parameter that is being passed. */
325 unsigned unit_size : ISRA_ARG_SIZE_LIMIT_BITS;
326
327 /* True when the value of this actual copy is a portion of a formal
328 parameter. */
329 unsigned aggregate_pass_through : 1;
330 /* True when the value of this actual copy is a verbatim pass through of an
331 obtained pointer. */
332 unsigned pointer_pass_through : 1;
333 /* True when it is safe to copy access candidates here from the callee, which
334 would mean introducing dereferences into callers of the caller. */
335 unsigned safe_to_import_accesses : 1;
336};
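
/* [Editor's note] A hypothetical example of this encoding.  For

     void caller (int a, int b) { callee (a + b); }

   the isra_param_flow for callee's only actual argument would have
   length == 2 and inputs[] == {0, 1}, recording that caller's formal
   parameters 0 and 1 feed into it.  For callee (p) where p is passed on
   verbatim, pointer_pass_through would be set and inputs would hold exactly
   the index of p.  */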
337
338/* Structure used to convey information about calls from the intra-procedural
339 analysis stage to the inter-procedural one. */
340
341class isra_call_summary
342{
343public:
344 isra_call_summary ()
345 : m_arg_flow (), m_return_ignored (false), m_return_returned (false),
346 m_bit_aligned_arg (false)
347 {}
348
349 void init_inputs (unsigned arg_count);
350 void dump (FILE *f);
351
352 /* Information about what formal parameters of the caller are used to compute
353 individual actual arguments of this call. */
354 auto_vec <isra_param_flow> m_arg_flow;
355
356 /* Set to true if the call statement does not have a LHS. */
357 unsigned m_return_ignored : 1;
358
359 /* Set to true if the LHS of call statement is only used to construct the
360 return value of the caller. */
361 unsigned m_return_returned : 1;
362
363 /* Set when any of the call arguments are not byte-aligned. */
364 unsigned m_bit_aligned_arg : 1;
365};
366
367/* Class to manage function summaries. */
368
369class GTY((user)) ipa_sra_function_summaries
370 : public function_summary <isra_func_summary *>
371{
372public:
373 ipa_sra_function_summaries (symbol_table *table, bool ggc):
374 function_summary<isra_func_summary *> (table, ggc) { }
375
376 virtual void duplicate (cgraph_node *, cgraph_node *,
377 isra_func_summary *old_sum,
378 isra_func_summary *new_sum);
379 virtual void insert (cgraph_node *, isra_func_summary *);
380};
381
382/* Hook that is called by summary when a node is duplicated. */
383
384void
385ipa_sra_function_summaries::duplicate (cgraph_node *, cgraph_node *,
386 isra_func_summary *old_sum,
387 isra_func_summary *new_sum)
388{
389 /* TODO: Somehow stop copying when ISRA is doing the cloning, it is
390 useless. */
391 new_sum->m_candidate = old_sum->m_candidate;
392 new_sum->m_returns_value = old_sum->m_returns_value;
393 new_sum->m_return_ignored = old_sum->m_return_ignored;
394 gcc_assert (!old_sum->m_queued);
395 new_sum->m_queued = false;
396
397 unsigned param_count = vec_safe_length (old_sum->m_parameters);
398 if (!param_count)
399 return;
400 vec_safe_reserve_exact (new_sum->m_parameters, param_count);
401 new_sum->m_parameters->quick_grow_cleared (param_count);
402 for (unsigned i = 0; i < param_count; i++)
403 {
404 isra_param_desc *s = &(*old_sum->m_parameters)[i];
405 isra_param_desc *d = &(*new_sum->m_parameters)[i];
406
407 d->param_size_limit = s->param_size_limit;
408 d->size_reached = s->size_reached;
409 d->locally_unused = s->locally_unused;
410 d->split_candidate = s->split_candidate;
411 d->by_ref = s->by_ref;
412
413 unsigned acc_count = vec_safe_length (s->accesses);
414 vec_safe_reserve_exact (d->accesses, acc_count);
415 for (unsigned j = 0; j < acc_count; j++)
416 {
417 param_access *from = (*s->accesses)[j];
418 param_access *to = ggc_cleared_alloc<param_access> ();
419 to->type = from->type;
420 to->alias_ptr_type = from->alias_ptr_type;
421 to->unit_offset = from->unit_offset;
422 to->unit_size = from->unit_size;
423 to->certain = from->certain;
424 d->accesses->quick_push (to);
425 }
426 }
427}
428
429/* Pointer to the pass function summary holder. */
430
431static GTY(()) ipa_sra_function_summaries *func_sums;
432
433/* Hook that is called by summary when a new node appears. */
434
435void
436ipa_sra_function_summaries::insert (cgraph_node *node, isra_func_summary *)
437{
438 if (opt_for_fn (node->decl, flag_ipa_sra))
439 {
440 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
441 ipa_sra_summarize_function (node);
442 pop_cfun ();
443 }
444 else
445 func_sums->remove (node);
446}
447
448/* Class to manage call summaries. */
449
450class ipa_sra_call_summaries: public call_summary <isra_call_summary *>
451{
452public:
453 ipa_sra_call_summaries (symbol_table *table):
454 call_summary<isra_call_summary *> (table) { }
455
456 /* Duplicate info when an edge is cloned. */
457 virtual void duplicate (cgraph_edge *, cgraph_edge *,
458 isra_call_summary *old_sum,
459 isra_call_summary *new_sum);
460};
461
462static ipa_sra_call_summaries *call_sums;
463
464
465/* Initialize m_arg_flow of a particular instance of isra_call_summary.
466 ARG_COUNT is the number of actual arguments passed. */
467
468void
469isra_call_summary::init_inputs (unsigned arg_count)
470{
471 if (arg_count == 0)
472 {
473 gcc_checking_assert (m_arg_flow.length () == 0);
474 return;
475 }
476 if (m_arg_flow.length () == 0)
477 {
478 m_arg_flow.reserve_exact (arg_count);
479 m_arg_flow.quick_grow_cleared (arg_count);
480 }
481 else
482 gcc_checking_assert (arg_count == m_arg_flow.length ());
483}
484
485/* Dump all information in call summary to F. */
486
487void
488isra_call_summary::dump (FILE *f)
489{
490 if (m_return_ignored)
491 fprintf (f, " return value ignored\n");
492 if (m_return_returned)
493 fprintf (f, " return value used only to compute caller return value\n");
494 for (unsigned i = 0; i < m_arg_flow.length (); i++)
495 {
496 fprintf (f, " Parameter %u:\n", i);
497 isra_param_flow *ipf = &m_arg_flow[i];
498
499 if (ipf->length)
500 {
501 bool first = true;
502 fprintf (f, " Scalar param sources: ");
503 for (int j = 0; j < ipf->length; j++)
504 {
505 if (!first)
506 fprintf (f, ", ");
507 else
508 first = false;
509 fprintf (f, "%i", (int) ipf->inputs[j]);
510 }
511 fprintf (f, "\n");
512 }
513 if (ipf->aggregate_pass_through)
514 fprintf (f, " Aggregate pass through from the param given above, "
515 "unit offset: %u , unit size: %u\n",
516 ipf->unit_offset, ipf->unit_size);
517 if (ipf->pointer_pass_through)
518 fprintf (f, " Pointer pass through from the param given above, "
519 "safe_to_import_accesses: %u\n", ipf->safe_to_import_accesses);
520 }
521}
522
523/* Duplicate edge summary when an edge is cloned. */
524
525void
526ipa_sra_call_summaries::duplicate (cgraph_edge *, cgraph_edge *,
527 isra_call_summary *old_sum,
528 isra_call_summary *new_sum)
529{
530 unsigned arg_count = old_sum->m_arg_flow.length ();
531 new_sum->init_inputs (arg_count);
532 for (unsigned i = 0; i < arg_count; i++)
533 new_sum->m_arg_flow[i] = old_sum->m_arg_flow[i];
534
535 new_sum->m_return_ignored = old_sum->m_return_ignored;
536 new_sum->m_return_returned = old_sum->m_return_returned;
537 new_sum->m_bit_aligned_arg = old_sum->m_bit_aligned_arg;
538}
539
540
541/* With all GTY stuff done, we can move to anonymous namespace. */
542namespace {
543/* Quick mapping from a decl to its param descriptor. */
544
545hash_map<tree, gensum_param_desc *> *decl2desc;
546
547/* Countdown of allowed Alias analysis steps during summary building. */
548
549int aa_walking_limit;
550
551/* This is a table in which for each basic block and parameter there is a
552 distance (offset + size) in that parameter which is dereferenced and
553 accessed in that BB. */
554HOST_WIDE_INT *bb_dereferences = NULL;
555/* How many by-reference parameters there are in the current function. */
556int by_ref_count;
557
558/* Bitmap of BBs that can cause the function to "stop" progressing by
559 returning, throwing externally, looping infinitely or calling a function
560 which might abort etc.. */
561bitmap final_bbs;
562
563/* Obstack to allocate various small structures required only when generating
564 summary for a function. */
565struct obstack gensum_obstack;
566
567/* Return false if the function is apparently unsuitable for IPA-SRA based on its
568 attributes, return true otherwise. NODE is the cgraph node of the current
569 function. */
570
571static bool
572ipa_sra_preliminary_function_checks (cgraph_node *node)
573{
574 if (!node->can_change_signature)
575 {
576 if (dump_file)
577 fprintf (dump_file, "Function cannot change signature.\n");
578 return false;
579 }
580
581 if (!tree_versionable_function_p (node->decl))
582 {
583 if (dump_file)
584 fprintf (dump_file, "Function is not versionable.\n");
585 return false;
586 }
587
588 if (!opt_for_fn (node->decl, optimize)
589 || !opt_for_fn (node->decl, flag_ipa_sra))
590 {
591 if (dump_file)
592 fprintf (dump_file, "Not optimizing or IPA-SRA turned off for this "
593 "function.\n");
594 return false;
595 }
596
597 if (DECL_VIRTUAL_P (node->decl))
598 {
599 if (dump_file)
600 fprintf (dump_file, "Function is a virtual method.\n");
601 return false;
602 }
603
604 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
605 if (fun->stdarg)
606 {
607 if (dump_file)
608 fprintf (dump_file, "Function uses stdarg. \n");
609 return false;
610 }
611
612 if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
613 {
614 if (dump_file)
615 fprintf (dump_file, "Function type has attributes. \n");
616 return false;
617 }
618
619 if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
620 {
621 if (dump_file)
622 fprintf (dump_file, "Always inline function will be inlined "
623 "anyway. \n");
624 return false;
625 }
626
627 return true;
628}
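
/* [Editor's note] Two hedged examples of functions the checks above reject:
   a variadic function such as

     int sum_all (int n, ...);

   fails the fun->stdarg test, and a function whose type carries attributes
   (a calling-convention attribute, for instance) fails the TYPE_ATTRIBUTES
   check, because changing its signature could conflict with what the
   attribute guarantees.  */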
629
630/* Print access tree starting at ACCESS to F. */
631
632static void
633dump_gensum_access (FILE *f, gensum_param_access *access, unsigned indent)
634{
635 fprintf (f, " ");
636 for (unsigned i = 0; i < indent; i++)
637 fprintf (f, " ");
638 fprintf (f, " * Access to offset: " HOST_WIDE_INT_PRINT_DEC,
639 access->offset);
640 fprintf (f, ", size: " HOST_WIDE_INT_PRINT_DEC, access->size);
641 fprintf (f, ", type: ");
642 print_generic_expr (f, access->type);
643 fprintf (f, ", alias_ptr_type: ");
644 print_generic_expr (f, access->alias_ptr_type);
645 fprintf (f, ", nonarg: %u, reverse: %u\n", access->nonarg, access->reverse);
646 for (gensum_param_access *ch = access->first_child;
647 ch;
648 ch = ch->next_sibling)
649 dump_gensum_access (f, ch, indent + 2);
650}
651
652
653/* Print access tree starting at ACCESS to F. */
654
655static void
656dump_isra_access (FILE *f, param_access *access)
657{
658 fprintf (f, " * Access to unit offset: %u", access->unit_offset);
659 fprintf (f, ", unit size: %u", access->unit_size);
660 fprintf (f, ", type: ");
661 print_generic_expr (f, access->type);
662 fprintf (f, ", alias_ptr_type: ");
663 print_generic_expr (f, access->alias_ptr_type);
664 if (access->certain)
665 fprintf (f, ", certain");
666 else
667 fprintf (f, ", not-certain");
668 if (access->reverse)
669 fprintf (f, ", reverse");
670 fprintf (f, "\n");
671}
672
673/* Dump access tree starting at ACCESS to stderr. */
674
675DEBUG_FUNCTION void
676debug_isra_access (param_access *access)
677{
678 dump_isra_access (stderr, access);
679}
680
681/* Dump DESC to F. */
682
683static void
684dump_gensum_param_descriptor (FILE *f, gensum_param_desc *desc)
685{
686 if (desc->locally_unused)
687 fprintf (f, " unused with %i call_uses\n", desc->call_uses);
688 if (!desc->split_candidate)
689 {
690 fprintf (f, " not a candidate\n");
691 return;
692 }
693 if (desc->by_ref)
694 fprintf (f, " by_ref with %u pass throughs\n", desc->ptr_pt_count);
695
696 for (gensum_param_access *acc = desc->accesses; acc; acc = acc->next_sibling)
697 dump_gensum_access (f, acc, 2);
698}
699
700/* Dump all parameter descriptors in PARAM_DESCRIPTIONS, assuming they
701 describe FNDECL, to F. */
702
703static void
704dump_gensum_param_descriptors (FILE *f, tree fndecl,
705 vec<gensum_param_desc> *param_descriptions)
706{
707 tree parm = DECL_ARGUMENTS (fndecl);
708 for (unsigned i = 0;
709 i < param_descriptions->length ();
710 ++i, parm = DECL_CHAIN (parm))
712 fprintf (f, " Descriptor for parameter %i ", i);
713 print_generic_expr (f, parm, TDF_UID);
714 fprintf (f, "\n");
715 dump_gensum_param_descriptor (f, &(*param_descriptions)[i]);
716 }
717}
718
719
720/* Dump DESC to F. */
721
722static void
723dump_isra_param_descriptor (FILE *f, isra_param_desc *desc)
724{
725 if (desc->locally_unused)
726 {
727 fprintf (f, " (locally) unused\n");
728 }
729 if (!desc->split_candidate)
730 {
731 fprintf (f, " not a candidate for splitting\n");
732 return;
733 }
734 fprintf (f, " param_size_limit: %u, size_reached: %u%s\n",
735 desc->param_size_limit, desc->size_reached,
736 desc->by_ref ? ", by_ref" : "");
737
738 for (unsigned i = 0; i < vec_safe_length (desc->accesses); ++i)
739 {
740 param_access *access = (*desc->accesses)[i];
741 dump_isra_access (f, access);
742 }
743}
744
745/* Dump all parameter descriptors in IFS, assuming it describes FNDECL, to
746 F. */
747
748static void
749dump_isra_param_descriptors (FILE *f, tree fndecl,
750 isra_func_summary *ifs)
751{
752 tree parm = DECL_ARGUMENTS (fndecl);
753 if (!ifs->m_parameters)
754 {
755 fprintf (f, " parameter descriptors not available\n");
756 return;
757 }
758
759 for (unsigned i = 0;
760 i < ifs->m_parameters->length ();
761 ++i, parm = DECL_CHAIN (parm))
762 {
763 fprintf (f, " Descriptor for parameter %i ", i);
764 print_generic_expr (f, parm, TDF_UID);
765 fprintf (f, "\n");
766 dump_isra_param_descriptor (f, &(*ifs->m_parameters)[i]);
767 }
768}
769
770/* Add SRC to inputs of PARAM_FLOW, unless it would exceed storage. If the
771 function fails return false, otherwise return true. SRC must fit into an
772 unsigned char. Used for purposes of transitive unused parameter
773 removal. */
774
775static bool
776add_src_to_param_flow (isra_param_flow *param_flow, int src)
777{
778 gcc_checking_assert (src >= 0 && src <= UCHAR_MAX);
779 if (param_flow->length == IPA_SRA_MAX_PARAM_FLOW_LEN)
780 return false;
781
782 param_flow->inputs[(int) param_flow->length] = src;
783 param_flow->length++;
784 return true;
785}
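
/* [Editor's note] Callers treat a false return as "this argument depends on
   too many parameters to track" and give up on the argument, as in this
   hypothetical fragment mirroring isra_track_scalar_value_uses below:

     if (!add_src_to_param_flow (&csum->m_arg_flow[i], parm_num))
       simple_uses = -1;  */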
786
787/* Add a SRC to the inputs of PARAM_FLOW unless it is already there and assert
788 it is the only input. Used for purposes of transitive parameter
789 splitting. */
790
791static void
792set_single_param_flow_source (isra_param_flow *param_flow, int src)
793{
794 gcc_checking_assert (src >= 0 && src <= UCHAR_MAX);
795 if (param_flow->length == 0)
796 {
797 param_flow->inputs[0] = src;
798 param_flow->length = 1;
799 }
800 else if (param_flow->length == 1)
801 gcc_assert (param_flow->inputs[0] == src);
802 else
803 gcc_unreachable ();
804}
805
806/* Assert that there is only a single value in PARAM_FLOW's inputs and return
807 it. */
808
809static unsigned
810get_single_param_flow_source (const isra_param_flow *param_flow)
811{
812 gcc_assert (param_flow->length == 1);
813 return param_flow->inputs[0];
814}
815
816/* Inspect all uses of NAME and simple arithmetic calculations involving NAME
817 in FUN represented with NODE and return a negative number if any of them is
818 used for something else than either an actual call argument, simple
819 arithmetic operation or debug statement. If there are no such uses, return
820 the number of actual arguments that this parameter eventually feeds to (or
821 zero if there is none). For any such parameter, mark PARM_NUM as one of its
822 sources. ANALYZED is a bitmap that tracks which SSA names we have already
823 started investigating. */
824
825static int
826isra_track_scalar_value_uses (function *fun, cgraph_node *node, tree name,
827 int parm_num, bitmap analyzed)
828{
829 int res = 0;
830 imm_use_iterator imm_iter;
831 gimple *stmt;
832
833 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
834 {
835 if (is_gimple_debug (stmt))
836 continue;
837
838 /* TODO: We could handle at least const builtin functions like arithmetic
839 operations below. */
840 if (is_gimple_call (stmt))
841 {
842 int all_uses = 0;
843 use_operand_p use_p;
844 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
845 all_uses++;
846
847 gcall *call = as_a <gcall *> (stmt);
848 unsigned arg_count;
849 if (gimple_call_internal_p (call)
850 || (arg_count = gimple_call_num_args (call)) == 0)
851 {
852 res = -1;
853 break;
854 }
855
856 cgraph_edge *cs = node->get_edge (stmt);
857 gcc_checking_assert (cs);
858 isra_call_summary *csum = call_sums->get_create (cs);
859 csum->init_inputs (arg_count);
860
861 int simple_uses = 0;
862 for (unsigned i = 0; i < arg_count; i++)
863 if (gimple_call_arg (call, i) == name)
864 {
865 if (!add_src_to_param_flow (&csum->m_arg_flow[i], parm_num))
866 {
867 simple_uses = -1;
868 break;
869 }
870 simple_uses++;
871 }
872
873 if (simple_uses < 0
874 || all_uses != simple_uses)
875 {
876 res = -1;
877 break;
878 }
879 res += all_uses;
880 }
881 else if (!stmt_unremovable_because_of_non_call_eh_p (fun, stmt)
882 && ((is_gimple_assign (stmt) && !gimple_has_volatile_ops (stmt))
883 || gimple_code (stmt) == GIMPLE_PHI))
884 {
885 tree lhs;
886 if (gimple_code (stmt) == GIMPLE_PHI)
887 lhs = gimple_phi_result (stmt);
888 else
889 lhs = gimple_assign_lhs (stmt);
890
891 if (TREE_CODE (lhs) != SSA_NAME)
892 {
893 res = -1;
894 break;
895 }
896 gcc_assert (!gimple_vdef (stmt));
897 if (bitmap_set_bit (analyzed, SSA_NAME_VERSION (lhs)))
898 {
899 int tmp = isra_track_scalar_value_uses (fun, node, lhs, parm_num,
900 analyzed);
901 if (tmp < 0)
902 {
903 res = tmp;
904 break;
905 }
906 res += tmp;
907 }
908 }
909 else
910 {
911 res = -1;
912 break;
913 }
914 }
915 return res;
916}
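
/* [Editor's note] A hypothetical illustration of the walk above.  In

     void f (int a) { g (a); h (a + 1); }

   the default-def SSA name of a has one direct call use and one use in an
   addition whose result only feeds a call, so the function returns 2 and a
   is recorded as a source of both actual arguments.  By contrast, a store
   like *p = a has a non-SSA left-hand side, so the walk would return -1 and
   a could not be removed transitively.  */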
917
918/* Inspect all uses of PARM, which must be a gimple register, in FUN (which is
919 also described by NODE) and simple arithmetic calculations involving PARM
920 and return false if any of them is used for something else than either an
921 actual call argument, simple arithmetic operation or debug statement. If
922 there are no such uses, return true and store the number of actual arguments
923 that this parameter eventually feeds to (or zero if there is none) to
924 *CALL_USES_P. For any such parameter, mark PARM_NUM as one of its
925 sources.
926
927 This function is similar to ptr_parm_has_nonarg_uses but its results are
928 meant for unused parameter removal, as opposed to splitting of parameters
929 passed by reference or converting them to passed by value.
930 */
931
932static bool
933isra_track_scalar_param_local_uses (function *fun, cgraph_node *node, tree parm,
934 int parm_num, int *call_uses_p)
935{
936 gcc_checking_assert (is_gimple_reg (parm));
937
938 tree name = ssa_default_def (fun, parm);
939 if (!name || has_zero_uses (name))
940 {
941 *call_uses_p = 0;
942 return false;
943 }
944
945 /* Edge summaries can only handle callers with fewer than 256 parameters. */
946 if (parm_num > UCHAR_MAX)
947 return true;
948
949 bitmap analyzed = BITMAP_ALLOC (NULL);
950 int call_uses = isra_track_scalar_value_uses (fun, node, name, parm_num,
951 analyzed);
952 BITMAP_FREE (analyzed);
953 if (call_uses < 0)
954 return true;
955 *call_uses_p = call_uses;
956 return false;
957}
958
959/* Scan immediate uses of a default definition SSA name of a parameter PARM and
960 examine whether there are any nonarg uses that are not actual arguments or
961 otherwise infeasible uses. If so, return true, otherwise return false.
962 Create pass-through IPA flow records for any direct uses as argument calls
963 and if returning false, store their number into *PT_COUNT_P. NODE and FUN
964 must represent the function that is currently analyzed, PARM_NUM must be the
965 index of the analyzed parameter.
966
967 This function is similar to isra_track_scalar_param_local_uses but its
968 results are meant for splitting of parameters passed by reference or turning
969 them into bits passed by value, as opposed to generic unused parameter
970 removal.
971 */
972
973static bool
974ptr_parm_has_nonarg_uses (cgraph_node *node, function *fun, tree parm,
975 int parm_num, unsigned *pt_count_p)
976{
977 imm_use_iterator ui;
978 gimple *stmt;
979 tree name = ssa_default_def (fun, parm);
980 bool ret = false;
981 unsigned pt_count = 0;
982
983 if (!name || has_zero_uses (name))
984 return false;
985
986 /* Edge summaries can only handle callers with fewer than 256 parameters. */
987 if (parm_num > UCHAR_MAX)
988 return true;
989
990 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
991 {
992 unsigned uses_ok = 0;
993 use_operand_p use_p;
994
995 if (is_gimple_debug (stmt))
996 continue;
997
998 if (gimple_assign_single_p (stmt))
999 {
1000 tree rhs = gimple_assign_rhs1 (stmt);
1001 while (handled_component_p (rhs))
1002 rhs = TREE_OPERAND (rhs, 0);
1003 if (TREE_CODE (rhs) == MEM_REF
1004 && TREE_OPERAND (rhs, 0) == name
1005 && integer_zerop (TREE_OPERAND (rhs, 1))
1006 && types_compatible_p (TREE_TYPE (rhs),
1007 TREE_TYPE (TREE_TYPE (name)))
1008 && !TREE_THIS_VOLATILE (rhs))
1009 uses_ok++;
1010 }
1011 else if (is_gimple_call (stmt))
1012 {
1013 gcall *call = as_a <gcall *> (stmt);
1014 unsigned arg_count;
1015 if (gimple_call_internal_p (call)
1016 || (arg_count = gimple_call_num_args (call)) == 0)
1017 {
1018 ret = true;
1019 break;
1020 }
1021
1022 cgraph_edge *cs = node->get_edge (stmt);
1023 gcc_checking_assert (cs);
1024 isra_call_summary *csum = call_sums->get_create (cs);
1025 csum->init_inputs (arg_count);
1026
1027 for (unsigned i = 0; i < arg_count; ++i)
1028 {
1029 tree arg = gimple_call_arg (stmt, i);
1030
1031 if (arg == name)
1032 {
1033 /* TODO: Allow &MEM_REF[name + offset] here,
1034 ipa_param_body_adjustments::modify_call_stmt has to be
1035 adjusted too. */
1036 csum->m_arg_flow[i].pointer_pass_through = true;
1037 set_single_param_flow_source (&csum->m_arg_flow[i], parm_num);
1038 pt_count++;
1039 uses_ok++;
1040 continue;
1041 }
1042
1043 while (handled_component_p (arg))
1044 arg = TREE_OPERAND (arg, 0);
1045 if (TREE_CODE (arg) == MEM_REF
1046 && TREE_OPERAND (arg, 0) == name
1047 && integer_zerop (TREE_OPERAND (arg, 1))
1048 && types_compatible_p (TREE_TYPE (arg),
1049 TREE_TYPE (TREE_TYPE (name)))
1050 && !TREE_THIS_VOLATILE (arg))
1051 uses_ok++;
1052 }
1053 }
1054
1055 /* If the number of valid uses does not match the number of
1056 uses in this stmt there is an unhandled use. */
1057 unsigned all_uses = 0;
1058 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
1059 all_uses++;
1060
1061 gcc_checking_assert (uses_ok <= all_uses);
1062 if (uses_ok != all_uses)
1063 {
1064 ret = true;
1065 break;
1066 }
1067 }
1068
1069 *pt_count_p = pt_count;
1070 return ret;
1071}
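
/* [Editor's note] A hedged contrast of the two outcomes above.  Given

     void f (struct S *p) { use_val (*p); }
     void g (struct S *p) { global_ptr = p; }

   in f the only use is a MEM_REF of p at offset 0 passed as an argument, so
   the function returns false (p stays a split candidate), while in g the
   pointer itself escapes into memory, an unhandled use, so the function
   returns true and p is disqualified.  */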
1072
1073/* Initialize vector of parameter descriptors of NODE. Return true if there
1074 are any candidates for splitting or unused aggregate parameter removal (the
1075 function may return false if there are candidates for removal of register
1076 parameters) and function body must be scanned. */
1077
1078static bool
1079create_parameter_descriptors (cgraph_node *node,
1080 vec<gensum_param_desc> *param_descriptions)
1081{
1082 function *fun = DECL_STRUCT_FUNCTION (node->decl);
1083 bool ret = false;
1084
1085 int num = 0;
1086 for (tree parm = DECL_ARGUMENTS (node->decl);
1087 parm;
1088 parm = DECL_CHAIN (parm), num++)
1089 {
1090 const char *msg;
1091 gensum_param_desc *desc = &(*param_descriptions)[num];
1092 /* param_descriptions vector is grown cleared in the caller. */
1093 desc->param_number = num;
1094 decl2desc->put (parm, desc);
1095
1096 if (dump_file && (dump_flags & TDF_DETAILS))
1097 print_generic_expr (dump_file, parm, TDF_UID);
1098
1099 int scalar_call_uses;
1100 tree type = TREE_TYPE (parm);
1101 if (TREE_THIS_VOLATILE (parm))
1102 {
1103 if (dump_file && (dump_flags & TDF_DETAILS))
1104 fprintf (dump_file, " not a candidate, is volatile\n");
1105 continue;
1106 }
1107 if (!is_gimple_reg_type (type) && is_va_list_type (type))
1108 {
1109 if (dump_file && (dump_flags & TDF_DETAILS))
1110 fprintf (dump_file, " not a candidate, is a va_list type\n");
1111 continue;
1112 }
1113 if (TREE_ADDRESSABLE (parm))
1114 {
1115 if (dump_file && (dump_flags & TDF_DETAILS))
1116 fprintf (dump_file, " not a candidate, is addressable\n");
1117 continue;
1118 }
1119 if (TREE_ADDRESSABLE (type))
1120 {
1121 if (dump_file && (dump_flags & TDF_DETAILS))
1122 fprintf (dump_file, " not a candidate, type cannot be split\n");
1123 continue;
1124 }
1125
1126 if (is_gimple_reg (parm)
1127 && !isra_track_scalar_param_local_uses (fun, node, parm, num,
1128 &scalar_call_uses))
1129 {
1130 if (dump_file && (dump_flags & TDF_DETAILS))
1131 fprintf (dump_file, " is a scalar with only %i call uses\n",
1132 scalar_call_uses);
1133
1134 desc->locally_unused = true;
1135 desc->call_uses = scalar_call_uses;
1136 }
1137
1138 if (POINTER_TYPE_P (type))
1139 {
1140 type = TREE_TYPE (type);
1141
1142 if (TREE_CODE (type) == FUNCTION_TYPE
1143 || TREE_CODE (type) == METHOD_TYPE)
1144 {
1145 if (dump_file && (dump_flags & TDF_DETAILS))
1146 fprintf (dump_file, " not a candidate, reference to "
1147 "a function\n");
1148 continue;
1149 }
1150 if (TYPE_VOLATILE (type))
1151 {
1152 if (dump_file && (dump_flags & TDF_DETAILS))
1153 fprintf (dump_file, " not a candidate, reference to "
1154 "a volatile type\n");
1155 continue;
1156 }
1157 if (TREE_CODE (type) == ARRAY_TYPE
1158 && TYPE_NONALIASED_COMPONENT (type))
1159 {
1160 if (dump_file && (dump_flags & TDF_DETAILS))
1161 fprintf (dump_file, " not a candidate, reference to "
1162 "a nonaliased component array\n");
1163 continue;
1164 }
1165 if (!is_gimple_reg (parm))
1166 {
1167 if (dump_file && (dump_flags & TDF_DETAILS))
1168 fprintf (dump_file, " not a candidate, a reference which is "
1169 "not a gimple register (probably addressable)\n");
1170 continue;
1171 }
1172 if (is_va_list_type (type))
1173 {
1174 if (dump_file && (dump_flags & TDF_DETAILS))
1175 fprintf (dump_file, " not a candidate, reference to "
1176 "a va list\n");
1177 continue;
1178 }
1179 if (ptr_parm_has_nonarg_uses (node, fun, parm, num,
1180 &desc->ptr_pt_count))
1181 {
1182 if (dump_file && (dump_flags & TDF_DETAILS))
1183 fprintf (dump_file, " not a candidate, reference has "
1184 "nonarg uses\n");
1185 continue;
1186 }
1187 desc->by_ref = true;
1188 }
1189 else if (!AGGREGATE_TYPE_P (type))
1190 {
1191 /* This is in an else branch because scalars passed by reference are
1192 still candidates to be passed by value. */
1193 if (dump_file && (dump_flags & TDF_DETAILS))
1194 fprintf (dump_file, " not a candidate, not an aggregate\n");
1195 continue;
1196 }
1197
1198 if (!COMPLETE_TYPE_P (type))
1199 {
1200 if (dump_file && (dump_flags & TDF_DETAILS))
1201 fprintf (dump_file, " not a candidate, not a complete type\n");
1202 continue;
1203 }
1204 if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1205 {
1206 if (dump_file && (dump_flags & TDF_DETAILS))
1207 fprintf (dump_file, " not a candidate, size not representable\n");
1208 continue;
1209 }
1210 unsigned HOST_WIDE_INT type_size
1211 = tree_to_uhwi (TYPE_SIZE (type)) / BITS_PER_UNIT;
1212 if (type_size == 0
1213 || type_size >= ISRA_ARG_SIZE_LIMIT)
1214 {
1215 if (dump_file && (dump_flags & TDF_DETAILS))
1216 fprintf (dump_file, " not a candidate, has zero or huge size\n");
1217 continue;
1218 }
1219 if (type_internals_preclude_sra_p (type, &msg))
1220 {
1221 if (dump_file && (dump_flags & TDF_DETAILS))
1222 fprintf (dump_file, " not a candidate, %s\n", msg);
1223 continue;
1224 }
1225
1226 if (dump_file && (dump_flags & TDF_DETAILS))
1227 fprintf (dump_file, " is a candidate\n");
1228
1229 ret = true;
1230 desc->split_candidate = true;
1231 if (desc->by_ref)
1232 desc->deref_index = by_ref_count++;
1233 }
1234 return ret;
1235}
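
/* [Editor's note] Tying the checks above together with a hypothetical

     int f (struct big *p, int scalar);

   p can become a split candidate (a by-reference aggregate of reasonable
   size with only argument-like dereferences), whereas scalar can at best be
   marked locally_unused; as the comment before the function notes, register
   parameters alone do not make the body worth scanning, hence the possible
   false return value.  */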
1236
1237/* Return pointer to descriptor of parameter DECL or NULL if it cannot be
1238 found, which happens if DECL is for a static chain. */
1239
1240static gensum_param_desc *
1241get_gensum_param_desc (tree decl)
1242{
1243 gcc_checking_assert (TREE_CODE (decl) == PARM_DECL);
1244 gensum_param_desc **slot = decl2desc->get (decl);
1245 if (!slot)
1246 /* This can happen for static chains which we cannot handle so far. */
1247 return NULL;
1248 gcc_checking_assert (*slot);
1249 return *slot;
1250}
1251
1252
1253/* Remove parameter described by DESC from candidates for IPA-SRA splitting and
1254 write REASON to the dump file if there is one. */
1255
1256static void
1257disqualify_split_candidate (gensum_param_desc *desc, const char *reason)
1258{
1259 if (!desc->split_candidate)
1260 return;
1261
1262 if (dump_file && (dump_flags & TDF_DETAILS))
1263 fprintf (dump_file, "! Disqualifying parameter number %i - %s\n",
1264 desc->param_number, reason);
1265
1266 desc->split_candidate = false;
1267}
1268
1269/* Remove DECL from candidates for IPA-SRA and write REASON to the dump file if
1270 there is one. */
1271
1272static void
1273disqualify_split_candidate (tree decl, const char *reason)
1274{
1275 gensum_param_desc *desc = get_gensum_param_desc (decl);
1276 if (desc)
1277 disqualify_split_candidate (desc, reason);
1278}
1279
1280/* Allocate a new access to DESC and fill it in with OFFSET and SIZE. But
1281 first, check that there are not too many of them already. If so, do not
1282 allocate anything and return NULL. */
1283
1284static gensum_param_access *
1285allocate_access (gensum_param_desc *desc,
1286 HOST_WIDE_INT offset, HOST_WIDE_INT size)
1287{
1288 if (desc->access_count
1289 == (unsigned) param_ipa_sra_max_replacements)
1290 {
1291 disqualify_split_candidate (desc, "Too many replacement candidates");
1292 return NULL;
1293 }
1294
1295 gensum_param_access *access
1296 = (gensum_param_access *) obstack_alloc (&gensum_obstack,
1297 sizeof (gensum_param_access));
1298 memset (access, 0, sizeof (*access));
1299 access->offset = offset;
1300 access->size = size;
1301 return access;
1302}
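
The allocation above pairs a hard cap (param_ipa_sra_max_replacements) with obstack storage that is then zeroed by hand.  Below is a minimal standalone sketch of the same capped obstack pattern; the names my_obstack, struct item and MAX_ITEMS are invented for the illustration and are not part of ipa-sra.c.

#include <obstack.h>
#include <stdlib.h>
#include <string.h>

#define obstack_chunk_alloc malloc
#define obstack_chunk_free free

struct item { long offset; long size; };

static struct obstack my_obstack;
static unsigned item_count;
enum { MAX_ITEMS = 8 };   /* stand-in for param_ipa_sra_max_replacements */

/* Allocate a zeroed item from the obstack, or return NULL once the cap is
   reached, mirroring the guard in allocate_access above.  */
static struct item *
new_item (long offset, long size)
{
  if (item_count == MAX_ITEMS)
    return NULL;
  struct item *it = (struct item *) obstack_alloc (&my_obstack, sizeof (*it));
  memset (it, 0, sizeof (*it));   /* obstack memory is not zero-initialized */
  it->offset = offset;
  it->size = size;
  item_count++;
  return it;
}

int
main (void)
{
  obstack_init (&my_obstack);
  struct item *it = new_item (0, 8);
  return it ? 0 : 1;
}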
1303
1304/* In what context scan_expr_access has been called, whether it deals with a
1305 load, a function argument, or a store. Please note that in rare
1306 circumstances when it is not clear if the access is a load or store,
1307 ISRA_CTX_STORE is used too. */
1308
1309enum isra_scan_context {ISRA_CTX_LOAD, ISRA_CTX_ARG, ISRA_CTX_STORE};
1310
1311/* Return an access describing memory access to the variable described by DESC
1312   at OFFSET with SIZE in context CTX, starting at the pointer to the linked
1313   list at a certain tree level FIRST.  Attempt to create it and put it into the
1314   appropriate place in the access tree if it does not exist, but fail and return
1315 NULL if there are already too many accesses, if it would create a partially
1316 overlapping access or if an access would end up within a pre-existing
1317 non-call access. */
1318
1319static gensum_param_access *
1320get_access_1 (gensum_param_desc *desc, gensum_param_access **first,
1321              HOST_WIDE_INT offset, HOST_WIDE_INT size, isra_scan_context ctx)
1322{
1323 gensum_param_access *access = *first, **ptr = first;
1324
1325 if (!access)
1326 {
1327 /* No pre-existing access at this level, just create it. */
1328 gensum_param_access *a = allocate_access (desc, offset, size);
1329 if (!a)
1330        return NULL;
1331 *first = a;
1332 return *first;
1333 }
1334
1335 if (access->offset >= offset + size)
1336 {
1337 /* We want to squeeze it in front of the very first access, just do
1338 it. */
1339 gensum_param_access *r = allocate_access (desc, offset, size);
1340 if (!r)
1341        return NULL;
1342 r->next_sibling = access;
1343 *first = r;
1344 return r;
1345 }
1346
1347 /* Skip all accesses that have to come before us until the next sibling is
1348 already too far. */
1349 while (offset >= access->offset + access->size
1350 && access->next_sibling
1351 && access->next_sibling->offset < offset + size)
1352 {
1353 ptr = &access->next_sibling;
1354 access = access->next_sibling;
1355 }
1356
1357 /* At this point we know we do not belong before access. */
1358  gcc_assert (access->offset < offset + size);
1359
1360 if (access->offset == offset && access->size == size)
1361 /* We found what we were looking for. */
1362 return access;
1363
1364 if (access->offset <= offset
1365 && access->offset + access->size >= offset + size)
1366 {
1367 /* We fit into access which is larger than us. We need to find/create
1368 something below access. But we only allow nesting in call
1369 arguments. */
1370 if (access->nonarg)
1371        return NULL;
1372
1373 return get_access_1 (desc, &access->first_child, offset, size, ctx);
1374 }
1375
1376 if (offset <= access->offset
1377 && offset + size >= access->offset + access->size)
1378 /* We are actually bigger than access, which fully fits into us, take its
1379 place and make all accesses fitting into it its children. */
1380 {
1381 /* But first, we only allow nesting in call arguments so check if that is
1382 what we are trying to represent. */
1383 if (ctx != ISRA_CTX_ARG)
1384        return NULL;
1385
1386 gensum_param_access *r = allocate_access (desc, offset, size);
1387 if (!r)
1388        return NULL;
1389 r->first_child = access;
1390
1391 while (access->next_sibling
1392 && access->next_sibling->offset < offset + size)
1393 access = access->next_sibling;
1394 if (access->offset + access->size > offset + size)
1395 {
1396          /* This must be a different access; accesses are sorted, so the
1397 following must be true and this signals a partial overlap. */
1398          gcc_assert (access->offset > offset);
1399          return NULL;
1400 }
1401
1402 r->next_sibling = access->next_sibling;
1403      access->next_sibling = NULL;
1404 *ptr = r;
1405 return r;
1406 }
1407
1408 if (offset >= access->offset + access->size)
1409 {
1410 /* We belong after access. */
1411 gensum_param_access *r = allocate_access (desc, offset, size);
1412 if (!r)
1413        return NULL;
1414 r->next_sibling = access->next_sibling;
1415 access->next_sibling = r;
1416 return r;
1417 }
1418
1419 if (offset < access->offset)
1420 {
1421 /* We know the following, otherwise we would have created a
1422 super-access. */
1423      gcc_checking_assert (offset + size < access->offset + access->size);
1424      return NULL;
1425 }
1426
1427 if (offset + size > access->offset + access->size)
1428 {
1429 /* Likewise. */
1430      gcc_checking_assert (offset > access->offset);
1431      return NULL;
1432 }
1433
1434  gcc_unreachable ();
1435}
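
get_access_1 maintains, for each candidate parameter, a tree of sibling intervals sorted by offset, where a child is strictly contained in its parent and nesting is only created for call-argument accesses.  A hypothetical C illustration of the resulting tree follows; the types and callee () below are invented for the example (assuming 64-bit long and 32-bit int) and are not part of ipa-sra.c.

struct T { int b; int c; };
struct S { long a; struct T t; };

void callee (struct T t);

void
example (struct S s)
{
  long x = s.a;   /* nonarg access [0, 64) bits, a top-level sibling */
  int y = s.t.b;  /* nonarg access [64, 96) bits */
  callee (s.t);   /* call-arg access [64, 128) bits, becomes parent of [64, 96) */
  (void) x;
  (void) y;
}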
1436
1437/* Return an access describing memory access to the variable described by DESC
1438 at OFFSET with SIZE in context CTX, mark it as used in context CTX. Attempt
1439   to create it if it does not exist, but fail and return NULL if there are
1440 already too many accesses, if it would create a partially overlapping access
1441 or if an access would end up in a non-call access. */
1442
1443static gensum_param_access *
1444 get_access (gensum_param_desc *desc, HOST_WIDE_INT offset, HOST_WIDE_INT size,
1445 isra_scan_context ctx)
1446{
1447  gcc_checking_assert (desc->split_candidate);
1448
1449 gensum_param_access *access = get_access_1 (desc, &desc->accesses, offset,
1450 size, ctx);
1451 if (!access)
1452 {
1453 disqualify_split_candidate (desc,
1454 "Bad access overlap or too many accesses");
1455      return NULL;
1456 }
1457
1458 switch (ctx)
1459 {
1460 case ISRA_CTX_STORE:
1461      gcc_assert (!desc->by_ref);
1462 /* Fall-through */
1463 case ISRA_CTX_LOAD:
1464 access->nonarg = true;
1465 break;
1466 case ISRA_CTX_ARG:
1467 break;
1468 }
1469
1470 return access;
1471}
1472
1473/* Verify that parameter access tree starting with ACCESS is in good shape.
1474   PARENT_OFFSET and PARENT_SIZE are the corresponding fields of the parent of
1475 ACCESS or zero if there is none. */
1476
1477static bool
1478 verify_access_tree_1 (gensum_param_access *access, HOST_WIDE_INT parent_offset,
1479                       HOST_WIDE_INT parent_size)
1480{
1481 while (access)
1482 {
1483      gcc_assert (access->offset >= 0 && access->size >= 0);
1484
1485 if (parent_size != 0)
1486 {
1487 if (access->offset < parent_offset)
1488 {
1489 error ("Access offset before parent offset");
1490 return true;
1491 }
1492 if (access->size >= parent_size)
1493 {
1494 error ("Access size greater or equal to its parent size");
1495 return true;
1496 }
1497 if (access->offset + access->size > parent_offset + parent_size)
1498 {
1499 error ("Access terminates outside of its parent");
1500 return true;
1501 }
1502 }
1503
1504 if (verify_access_tree_1 (access->first_child, access->offset,
1505 access->size))
1506 return true;
1507
1508 if (access->next_sibling
1509 && (access->next_sibling->offset < access->offset + access->size))
1510 {
1511 error ("Access overlaps with its sibling");
1512 return true;
1513 }
1514
1515 access = access->next_sibling;
1516 }
1517 return false;
1518}
1519
1520/* Verify that parameter access tree starting with ACCESS is in good shape,
1521 halt compilation and dump the tree to stderr if not. */
1522
1523 DEBUG_FUNCTION void
1524isra_verify_access_tree (gensum_param_access *access)
1525{
1526 if (verify_access_tree_1 (access, 0, 0))
1527 {
1528 for (; access; access = access->next_sibling)
1529        dump_gensum_access (stderr, access, 2);
1530 internal_error ("IPA-SRA access verification failed");
1531 }
1532}
1533
1534
1535/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1536   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1537
1538static bool
1539asm_visit_addr (gimple *, tree op, tree, void *)
1540{
1541 op = get_base_address (op);
1542 if (op
1543      && TREE_CODE (op) == PARM_DECL)
1544 disqualify_split_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1545
1546 return false;
1547}
1548
1549/* Mark a dereference of parameter identified by DESC of distance DIST in a
1550   basic block BB, unless the BB has already been marked as potentially
1551 final. */
1552
1553static void
1554 mark_param_dereference (gensum_param_desc *desc, HOST_WIDE_INT dist,
1555 basic_block bb)
1556{
1557  gcc_assert (desc->by_ref);
1558  gcc_checking_assert (desc->split_candidate);
1559
1560 if (bitmap_bit_p (final_bbs, bb->index))
1561 return;
1562
1563 int idx = bb->index * by_ref_count + desc->deref_index;
1564 if (bb_dereferences[idx] < dist)
1565 bb_dereferences[idx] = dist;
1566}
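
bb_dereferences above is a flat array used as a row-major 2D table: one row per basic block, one column per by-reference candidate parameter.  A minimal sketch of the same indexing scheme follows; the names table, n_columns and record_max are hypothetical, not part of ipa-sra.c.

#include <stdlib.h>

static long *table;
static int n_columns;   /* stand-in for by_ref_count */

/* Keep the maximum known dereference distance per (block, parameter) cell,
   mirroring bb->index * by_ref_count + desc->deref_index above.  */
static void
record_max (int row, int col, long dist)
{
  int idx = row * n_columns + col;
  if (table[idx] < dist)
    table[idx] = dist;
}

int
main (void)
{
  n_columns = 3;
  table = calloc (10 * n_columns, sizeof (long));
  record_max (4, 1, 64);
  record_max (4, 1, 32);   /* a smaller distance does not overwrite */
  long ok = table[4 * n_columns + 1] == 64;
  free (table);
  return ok ? 0 : 1;
}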
1567
1568/* Return true if any potential replacements should use NEW_TYPE as opposed to
1569 previously recorded OLD_TYPE. */
1570
1571static bool
1572type_prevails_p (tree old_type, tree new_type)
1573{
1574 if (old_type == new_type)
1575 return false;
1576
1577 /* Non-aggregates are always better. */
1578 if (!is_gimple_reg_type (old_type)
1579 && is_gimple_reg_type (new_type))
1580 return true;
1581 if (is_gimple_reg_type (old_type)
1582 && !is_gimple_reg_type (new_type))
1583 return false;
1584
1585 /* Prefer any complex or vector type over any other scalar type. */
1586  if (TREE_CODE (old_type) != COMPLEX_TYPE
1587      && TREE_CODE (old_type) != VECTOR_TYPE
1588      && (TREE_CODE (new_type) == COMPLEX_TYPE
1589          || TREE_CODE (new_type) == VECTOR_TYPE))
1590    return true;
1591  if ((TREE_CODE (old_type) == COMPLEX_TYPE
1592       || TREE_CODE (old_type) == VECTOR_TYPE)
1593      && TREE_CODE (new_type) != COMPLEX_TYPE
1594      && TREE_CODE (new_type) != VECTOR_TYPE)
1595    return false;
1596
1597 /* Use the integral type with the bigger precision. */
1598  if (INTEGRAL_TYPE_P (old_type)
1599      && INTEGRAL_TYPE_P (new_type))
1600    return (TYPE_PRECISION (new_type) > TYPE_PRECISION (old_type));
1601
1602  /* Attempt to disregard any integral type with non-full precision. */
1603  if (INTEGRAL_TYPE_P (old_type)
1604      && (TREE_INT_CST_LOW (TYPE_SIZE (old_type))
1605          != TYPE_PRECISION (old_type)))
1606    return true;
1607  if (INTEGRAL_TYPE_P (new_type)
1608      && (TREE_INT_CST_LOW (TYPE_SIZE (new_type))
1609          != TYPE_PRECISION (new_type)))
1610    return false;
1611  /* Stabilize the selection. */
1612  return TYPE_UID (old_type) < TYPE_UID (new_type);
1613}
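
A few worked examples of the ordering above, written as a C comment; these are hypothetical illustrations assuming a typical 64-bit target, not text from ipa-sra.c, and exact outcomes depend on the target's type layout.

/* - int prevails over a struct: GIMPLE register types beat aggregates;
   - _Complex double prevails over long: complex/vector beats other scalars;
   - long (64-bit precision) prevails over int (32-bit): bigger precision wins;
   - a type like bool, whose TYPE_SIZE (8 bits) exceeds its precision (1 bit),
     is disregarded in favour of a full-precision integral type;
   - otherwise the lower TYPE_UID wins, purely to make the choice stable.  */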
1614
1615/* When scanning an expression which is a call argument, this structure
1616 specifies the call and the position of the argument. */
1617
1618struct scan_call_info
1619{
1620 /* Call graph edge representing the call. */
1621 cgraph_edge *cs;
1622 /* Total number of arguments in the call. */
1623 unsigned argument_count;
1624 /* Number of the actual argument being scanned. */
1625 unsigned arg_idx;
1626};
1627
1628/* Record use of ACCESS which belongs to a parameter described by DESC in a
1629 call argument described by CALL_INFO. */
1630
1631static void
1632record_nonregister_call_use (gensum_param_desc *desc,
1633 scan_call_info *call_info,
1634 unsigned unit_offset, unsigned unit_size)
1635{
1636 isra_call_summary *csum = call_sums->get_create (call_info->cs);
1637 csum->init_inputs (call_info->argument_count);
1638
1639 isra_param_flow *param_flow = &csum->m_arg_flow[call_info->arg_idx];
1640 param_flow->aggregate_pass_through = true;
1641 set_single_param_flow_source (param_flow, desc->param_number);
1642 param_flow->unit_offset = unit_offset;
1643 param_flow->unit_size = unit_size;
1644 desc->call_uses++;
1645}
1646
1647/* Callback of walk_aliased_vdefs, just mark that there was a possible
1648 modification. */
1649
1650static bool
1651mark_maybe_modified (ao_ref *, tree, void *data)
1652{
1653 bool *maybe_modified = (bool *) data;
1654 *maybe_modified = true;
1655 return true;
1656}
1657
1658/* Analyze expression EXPR from GIMPLE for accesses to parameters. CTX
1659   specifies whether EXPR is used in a load, a store or as a call argument.  BB
1660   must be the basic block in which EXPR resides.  If CTX specifies call
1661 argument context, CALL_INFO must describe that call and argument position,
1662 otherwise it is ignored. */
1663
1664static void
1665scan_expr_access (tree expr, gimple *stmt, isra_scan_context ctx,
1666                   basic_block bb, scan_call_info *call_info = NULL)
1667{
1668 poly_int64 poffset, psize, pmax_size;
1669  HOST_WIDE_INT offset, size, max_size;
1670 tree base;
1671 bool deref = false;
1672 bool reverse;
1673
1674  if (TREE_CODE (expr) == BIT_FIELD_REF
1675      || TREE_CODE (expr) == IMAGPART_EXPR
1676      || TREE_CODE (expr) == REALPART_EXPR)
1677    expr = TREE_OPERAND (expr, 0);
1678
1679 base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size, &reverse);
1680
1681  if (TREE_CODE (base) == MEM_REF)
1682    {
1683      tree op = TREE_OPERAND (base, 0);
1684      if (TREE_CODE (op) != SSA_NAME
1685          || !SSA_NAME_IS_DEFAULT_DEF (op))
1686        return;
1687      base = SSA_NAME_VAR (op);
1688 if (!base)
1689 return;
1690 deref = true;
1691 }
1692  if (TREE_CODE (base) != PARM_DECL)
1693 return;
1694
1695 gensum_param_desc *desc = get_gensum_param_desc (base);
1696 if (!desc || !desc->split_candidate)
1697 return;
1698
1699 if (!poffset.is_constant (&offset)
1700 || !psize.is_constant (&size)
1701 || !pmax_size.is_constant (&max_size))
1702 {
1703 disqualify_split_candidate (desc, "Encountered a polynomial-sized "
1704 "access.");
1705 return;
1706 }
1707 if (size < 0 || size != max_size)
1708 {
1709 disqualify_split_candidate (desc, "Encountered a variable sized access.");
1710 return;
1711 }
1712  if (TREE_CODE (expr) == COMPONENT_REF
1713      && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
1714 {
1715 disqualify_split_candidate (desc, "Encountered a bit-field access.");
1716 return;
1717 }
1718 if (offset < 0)
1719 {
1720 disqualify_split_candidate (desc, "Encountered an access at a "
1721 "negative offset.");
1722 return;
1723 }
1724  gcc_assert ((offset % BITS_PER_UNIT) == 0);
1725  gcc_assert ((size % BITS_PER_UNIT) == 0);
1726  if ((offset / BITS_PER_UNIT) >= (UINT_MAX - ISRA_ARG_SIZE_LIMIT)
1727      || (size / BITS_PER_UNIT) >= ISRA_ARG_SIZE_LIMIT)
1728 {
1729 disqualify_split_candidate (desc, "Encountered an access with too big "
1730 "offset or size");
1731 return;
1732 }
1733
1734  tree type = TREE_TYPE (expr);
1735 unsigned int exp_align = get_object_alignment (expr);
1736
1737  if (exp_align < TYPE_ALIGN (type))
1738 {
1739 disqualify_split_candidate (desc, "Underaligned access.");
1740 return;
1741 }
1742
1743 if (deref)
1744 {
1745 if (!desc->by_ref)
1746 {
1747 disqualify_split_candidate (desc, "Dereferencing a non-reference.");
1748 return;
1749 }
1750 else if (ctx == ISRA_CTX_STORE)
1751 {
1752 disqualify_split_candidate (desc, "Storing to data passed by "
1753 "reference.");
1754 return;
1755 }
1756
1757 if (!aa_walking_limit)
1758 {
1759 disqualify_split_candidate (desc, "Out of alias analysis step "
1760 "limit.");
1761 return;
1762 }
1763
1764      gcc_checking_assert (gimple_vuse (stmt));
1765 bool maybe_modified = false;
1766 ao_ref ar;
1767
1768 ao_ref_init (&ar, expr);
1769      bitmap visited = BITMAP_ALLOC (NULL);
1770 int walked = walk_aliased_vdefs (&ar, gimple_vuse (stmt),
1771 mark_maybe_modified, &maybe_modified,
1772                                       &visited, NULL, aa_walking_limit);
1773      BITMAP_FREE (visited);
1774 if (walked > 0)
1775 {
1776          gcc_assert (aa_walking_limit > walked);
1777 aa_walking_limit = aa_walking_limit - walked;
1778 }
1779 if (walked < 0)
1780 aa_walking_limit = 0;
1781 if (maybe_modified || walked < 0)
1782 {
1783 disqualify_split_candidate (desc, "Data passed by reference possibly "
1784 "modified through an alias.");
1785 return;
1786 }
1787 else
1788 mark_param_dereference (desc, offset + size, bb);
1789 }
1790 else
1791 /* Pointer parameters with direct uses should have been ruled out by
1792 analyzing SSA default def when looking at the parameters. */
1793    gcc_assert (!desc->by_ref);
1794
1795 gensum_param_access *access = get_access (desc, offset, size, ctx);
1796 if (!access)
1797 return;
1798
1799 if (ctx == ISRA_CTX_ARG)
1800 {
1801      gcc_checking_assert (call_info);
1802
1803 if (!deref)
1804        record_nonregister_call_use (desc, call_info, offset / BITS_PER_UNIT,
1805                                     size / BITS_PER_UNIT);
1806 else
1807 /* This is not a pass-through of a pointer, this is a use like any
1808 other. */
1809 access->nonarg = true;
1810 }
1811
1812 if (!access->type)
1813 {
1814 access->type = type;
1815 access->alias_ptr_type = reference_alias_ptr_type (expr);
1816 access->reverse = reverse;
1817 }
1818 else
1819 {
1820      if (exp_align < TYPE_ALIGN (access->type))
1821 {
1822 disqualify_split_candidate (desc, "Reference has lower alignment "
1823 "than a previous one.");
1824 return;
1825 }
1826 if (access->alias_ptr_type != reference_alias_ptr_type (expr))
1827 {
1828 disqualify_split_candidate (desc, "Multiple alias pointer types.");
1829 return;
1830 }
1831 if (access->reverse != reverse)
1832 {
1833 disqualify_split_candidate (desc, "Both normal and reverse "
1834 "scalar storage order.");
1835 return;
1836 }
1837      if (!deref
1838          && (AGGREGATE_TYPE_P (type) || AGGREGATE_TYPE_P (access->type))
1839          && (TYPE_MAIN_VARIANT (access->type) != TYPE_MAIN_VARIANT (type)))
1840 {
1841 /* We need the same aggregate type on all accesses to be able to
1842 distinguish transformation spots from pass-through arguments in
1843 the transformation phase. */
1844 disqualify_split_candidate (desc, "We do not support aggregate "
1845 "type punning.");
1846 return;
1847 }
1848
1849 if (type_prevails_p (access->type, type))
1850 access->type = type;
1851 }
1852}
1853
1854/* Scan the body of the function described by NODE and FUN and create access
1855   trees for its parameters.  */
1856
1857static void
1858scan_function (cgraph_node *node, struct function *fun)
1859{
1860 basic_block bb;
1861
1862  FOR_EACH_BB_FN (bb, fun)
1863 {
1864 gimple_stmt_iterator gsi;
1865 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1866 {
1867 gimple *stmt = gsi_stmt (gsi);
1868
1869 if (stmt_can_throw_external (fun, stmt))
1870 bitmap_set_bit (final_bbs, bb->index);
1871 switch (gimple_code (stmt))
1872 {
1873 case GIMPLE_RETURN:
1874 {
1875 tree t = gimple_return_retval (as_a <greturn *> (stmt));
1876            if (t != NULL_TREE)
1877 scan_expr_access (t, stmt, ISRA_CTX_LOAD, bb);
1878 bitmap_set_bit (final_bbs, bb->index);
1879 }
1880 break;
1881
1882 case GIMPLE_ASSIGN:
1883 if (gimple_assign_single_p (stmt)
1884 && !gimple_clobber_p (stmt))
1885 {
1886 tree rhs = gimple_assign_rhs1 (stmt);
1887 scan_expr_access (rhs, stmt, ISRA_CTX_LOAD, bb);
1888 tree lhs = gimple_assign_lhs (stmt);
1889 scan_expr_access (lhs, stmt, ISRA_CTX_STORE, bb);
1890 }
1891 break;
1892
1893 case GIMPLE_CALL:
1894 {
1895 unsigned argument_count = gimple_call_num_args (stmt);
1896 isra_scan_context ctx = ISRA_CTX_ARG;
1897 scan_call_info call_info, *call_info_p = &call_info;
1898 if (gimple_call_internal_p (stmt))
1899 {
1900                call_info_p = NULL;
1901 ctx = ISRA_CTX_LOAD;
1902 internal_fn ifn = gimple_call_internal_fn (stmt);
1903 if (internal_store_fn_p (ifn))
1904 ctx = ISRA_CTX_STORE;
1905 }
1906 else
1907 {
1908 call_info.cs = node->get_edge (stmt);
1909 call_info.argument_count = argument_count;
1910 }
1911
1912 for (unsigned i = 0; i < argument_count; i++)
1913 {
1914 call_info.arg_idx = i;
1915 scan_expr_access (gimple_call_arg (stmt, i), stmt,
1916 ctx, bb, call_info_p);
1917 }
1918
1919 tree lhs = gimple_call_lhs (stmt);
1920 if (lhs)
1921 scan_expr_access (lhs, stmt, ISRA_CTX_STORE, bb);
1922 int flags = gimple_call_flags (stmt);
1923            if ((flags & (ECF_CONST | ECF_PURE)) == 0)
1924 bitmap_set_bit (final_bbs, bb->index);
1925 }
1926 break;
1927
1928 case GIMPLE_ASM:
1929 {
1930 gasm *asm_stmt = as_a <gasm *> (stmt);
1931            walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1932 asm_visit_addr);
1933 bitmap_set_bit (final_bbs, bb->index);
1934
1935 for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1936 {
1937                tree t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1938 scan_expr_access (t, stmt, ISRA_CTX_LOAD, bb);
1939 }
1940 for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1941 {
1942                tree t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1943 scan_expr_access (t, stmt, ISRA_CTX_STORE, bb);
1944 }
1945 }
1946 break;
1947
1948 default:
1949 break;
1950 }
1951 }
1952 }
1953}
1954
1955/* Return true if SSA_NAME NAME is only used in return statements, or if
1956 results of any operations it is involved in are only used in return
1957 statements. ANALYZED is a bitmap that tracks which SSA names we have
1958 already started investigating. */
1959
1960static bool
1961ssa_name_only_returned_p (tree name, bitmap analyzed)
1962{
1963 bool res = true;
1964 imm_use_iterator imm_iter;
1965 gimple *stmt;
1966
1967  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1968 {
1969 if (is_gimple_debug (stmt))
1970 continue;
1971
1972 if (gimple_code (stmt) == GIMPLE_RETURN)
1973 {
1974 tree t = gimple_return_retval (as_a <greturn *> (stmt));
1975 if (t != name)
1976 {
1977 res = false;
1978 break;
1979 }
1980 }
1981 else if ((is_gimple_assign (stmt) && !gimple_has_volatile_ops (stmt))
1982 || gimple_code (stmt) == GIMPLE_PHI)
1983 {
1984 /* TODO: And perhaps for const function calls too? */
1985 tree lhs;
1986 if (gimple_code (stmt) == GIMPLE_PHI)
1987 lhs = gimple_phi_result (stmt);
1988 else
1989 lhs = gimple_assign_lhs (stmt);
1990
1991          if (TREE_CODE (lhs) != SSA_NAME)
1992 {
1993 res = false;
1994 break;
1995 }
1996          gcc_assert (!gimple_vdef (stmt));
1997          if (bitmap_set_bit (analyzed, SSA_NAME_VERSION (lhs))
1998 && !ssa_name_only_returned_p (lhs, analyzed))
1999 {
2000 res = false;
2001 break;
2002 }
2003 }
2004 else
2005 {
2006 res = false;
2007 break;
2008 }
2009 }
2010 return res;
2011}
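
A hypothetical C illustration of what this predicate distinguishes; wrap (), observe (), callee () and g below are invented for the example and are not part of ipa-sra.c.  In wrap () the call result only feeds, through the addition, into the function's own return value, so ssa_name_only_returned_p would hold for its SSA name; in observe () it would not, because the value also escapes through the global.

extern int callee (void);
int g;

int
wrap (void)
{
  int t = callee ();
  return t + 1;        /* t_1 used only by _2 = t_1 + 1; return _2 */
}

int
observe (void)
{
  int t = callee ();
  g = t;               /* a store, not a return -> not only-returned */
  return t;
}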
2012
2013/* Inspect the uses of the return value of the call associated with CS, and if
2014 it is not used or if it is only used to construct the return value of the
2015 caller, mark it as such in call or caller summary. Also check for
2016 misaligned arguments. */
2017
2018static void
2019isra_analyze_call (cgraph_edge *cs)
2020{
2021 gcall *call_stmt = cs->call_stmt;
2022 unsigned count = gimple_call_num_args (call_stmt);
2023 isra_call_summary *csum = call_sums->get_create (cs);
2024
2025 for (unsigned i = 0; i < count; i++)
2026 {
2027 tree arg = gimple_call_arg (call_stmt, i);
2028 if (is_gimple_reg (arg))
2029 continue;
2030
2031 tree offset;
2032 poly_int64 bitsize, bitpos;
2033 machine_mode mode;
2034 int unsignedp, reversep, volatilep = 0;
2035 get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
2036 &unsignedp, &reversep, &volatilep);
2037        if (!multiple_p (bitpos, BITS_PER_UNIT))
2038 {
2039 csum->m_bit_aligned_arg = true;
2040 break;
2041 }
2042 }
2043
2044 tree lhs = gimple_call_lhs (call_stmt);
2045 if (lhs)
2046 {
2047 /* TODO: Also detect aggregates on a LHS of a call that are only returned
2048 from this function (without being read anywhere). */
2049      if (TREE_CODE (lhs) == SSA_NAME)
2050 {
2051          bitmap analyzed = BITMAP_ALLOC (NULL);
2052 if (ssa_name_only_returned_p (lhs, analyzed))
2053 csum->m_return_returned = true;
2054          BITMAP_FREE (analyzed);
2055 }
2056 }
2057 else
2058 csum->m_return_ignored = true;
2059}
2060
2061/* Look at all calls going out of NODE and perform all analyses necessary for
2062   IPA-SRA that are not done at body scan time, or that are needed even when
2063   the body is not scanned because the function is not a candidate.  */
2064
2065static void
2066isra_analyze_all_outgoing_calls (cgraph_node *node)
2067{
2068 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2069 isra_analyze_call (cs);
2070 for (cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2071 isra_analyze_call (cs);
2072}
2073
2074/* Dump a dereferences table with heading STR to file F. */
2075
2076static void
2077dump_dereferences_table (FILE *f, struct function *fun, const char *str)
2078{
2079 basic_block bb;
2080
2081 fprintf (dump_file, "%s", str);
2082  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
2083                  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
2084 {
2085 fprintf (f, "%4i %i ", bb->index, bitmap_bit_p (final_bbs, bb->index));
2086      if (bb != EXIT_BLOCK_PTR_FOR_FN (fun))
2087 {
2088 int i;
2089 for (i = 0; i < by_ref_count; i++)
2090 {
2091 int idx = bb->index * by_ref_count + i;
2092              fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", bb_dereferences[idx]);
2093 }
2094 }
2095 fprintf (f, "\n");
2096 }
2097 fprintf (dump_file, "\n");
2098}
2099
2100/* Propagate distances in bb_dereferences in the opposite direction to the
2101   control flow edges, in each step storing the maximum of the current value
2102   and the minimum of all successors.  These steps are repeated until the
2103   table stabilizes.  Note that BBs which might terminate the function
2104   (according to the final_bbs bitmap) are never updated in this way.  */
2105
2106static void
2107propagate_dereference_distances (struct function *fun)
2108{
2109 basic_block bb;
2110
2111 if (dump_file && (dump_flags & TDF_DETAILS))
2112 dump_dereferences_table (dump_file, fun,
2113 "Dereference table before propagation:\n");
2114
2115  auto_vec<basic_block> queue (last_basic_block_for_fn (fun));
2116  queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (fun));
2117  FOR_EACH_BB_FN (bb, fun)
2118 {
2119 queue.quick_push (bb);
2120 bb->aux = bb;
2121 }
2122
2123 while (!queue.is_empty ())
2124 {
2125 edge_iterator ei;
2126 edge e;
2127 bool change = false;
2128 int i;
2129
2130 bb = queue.pop ();
2131      bb->aux = NULL;
2132
2133 if (bitmap_bit_p (final_bbs, bb->index))
2134 continue;
2135
2136 for (i = 0; i < by_ref_count; i++)
2137 {
2138 int idx = bb->index * by_ref_count + i;
2139 bool first = true;
2140          HOST_WIDE_INT inh = 0;
2141
2142          FOR_EACH_EDGE (e, ei, bb->succs)
2143 {
2144 int succ_idx = e->dest->index * by_ref_count + i;
2145
2146              if (e->dest == EXIT_BLOCK_PTR_FOR_FN (fun))
2147 continue;
2148
2149 if (first)
2150 {
2151 first = false;
2152 inh = bb_dereferences [succ_idx];
2153 }
2154 else if (bb_dereferences [succ_idx] < inh)
2155 inh = bb_dereferences [succ_idx];
2156 }
2157
2158 if (!first && bb_dereferences[idx] < inh)
2159 {
2160 bb_dereferences[idx] = inh;
2161 change = true;
2162 }
2163 }
2164
2165 if (change)
2166        FOR_EACH_EDGE (e, ei, bb->preds)
2167 {
2168 if (e->src->aux)
2169 continue;
2170
2171 e->src->aux = e->src;
2172 queue.quick_push (e->src);
2173 }
2174 }
2175
2176 if (dump_file && (dump_flags & TDF_DETAILS))
2177 dump_dereferences_table (dump_file, fun,
2178 "Dereference table after propagation:\n");
2179}
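
A toy, self-contained version of this propagation on a diamond CFG 0 -> {1, 2} -> 3 for a single by-reference parameter: each non-final block's value becomes the maximum of its current value and the minimum over its successors, iterated to a fixed point.  All numbers and names below are made up for illustration; this is not code from ipa-sra.c.

#include <stdio.h>

int
main (void)
{
  long dist[4] = { 0, 16, 64, 0 };
  int succ[4][2] = { {1, 2}, {3, -1}, {3, -1}, {-1, -1} };
  int is_final[4] = { 0, 0, 0, 1 };   /* block 3 may terminate the function */

  for (int changed = 1; changed; )
    {
      changed = 0;
      for (int bb = 0; bb < 4; bb++)
        {
          if (is_final[bb] || succ[bb][0] < 0)
            continue;
          long inh = dist[succ[bb][0]];               /* min over successors */
          if (succ[bb][1] >= 0 && dist[succ[bb][1]] < inh)
            inh = dist[succ[bb][1]];
          if (dist[bb] < inh)                         /* max with own value */
            {
              dist[bb] = inh;
              changed = 1;
            }
        }
    }
  /* Block 0 inherits min (16, 64) = 16: only the dereference distance
     guaranteed on every path may be assumed at the entry.  */
  printf ("entry distance: %ld\n", dist[0]);
  return 0;
}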
2180
2181/* Perform basic checks on ACCESS to PARM described by DESC and all its
2182   children, return true if the parameter cannot be split, otherwise return
2183   false and update *NONARG_ACC_SIZE and *ONLY_CALLS.  ENTRY_BB_INDEX must be
2184   the index of the entry BB in the function of PARM.  */
2185
2186static bool
2187check_gensum_access (tree parm, gensum_param_desc *desc,
2188 gensum_param_access *access,
2189                     HOST_WIDE_INT *nonarg_acc_size, bool *only_calls,
2190 int entry_bb_index)
2191{
2192 if (access->nonarg)
2193 {
2194 *only_calls = false;
2195 *nonarg_acc_size += access->size;
2196
2197 if (access->first_child)
2198 {
2199 disqualify_split_candidate (desc, "Overlapping non-call uses.");
2200 return true;
2201 }
2202 }
2203 /* Do not decompose a non-BLKmode param in a way that would create
2204 BLKmode params. Especially for by-reference passing (thus,
2205 pointer-type param) this is hardly worthwhile. */
2206  if (DECL_MODE (parm) != BLKmode
2207      && TYPE_MODE (access->type) == BLKmode)
2208 {
2209 disqualify_split_candidate (desc, "Would convert a non-BLK to a BLK.");
2210 return true;
2211 }
2212
2213 if (desc->by_ref)
2214 {
2215 int idx = (entry_bb_index * by_ref_count + desc->deref_index);
2216 if ((access->offset + access->size) > bb_dereferences[idx])
2217 {
2218 disqualify_split_candidate (desc, "Would create a possibly "
2219 "illegal dereference in a caller.");
2220 return true;
2221 }
2222 }
2223
2224 for (gensum_param_access *ch = access->first_child;
2225 ch;
2226 ch = ch->next_sibling)
2227 if (check_gensum_access (parm, desc, ch, nonarg_acc_size, only_calls,
2228 entry_bb_index))
2229 return true;
2230
2231 return false;
2232}
2233
2234/* Copy data from FROM and all of its children to a vector of accesses in IPA
2235 descriptor DESC. */
2236
2237static void
2238copy_accesses_to_ipa_desc (gensum_param_access *from, isra_param_desc *desc)
2239{
2240 param_access *to = ggc_cleared_alloc<param_access> ();
2241  gcc_checking_assert ((from->offset % BITS_PER_UNIT) == 0);
2242  gcc_checking_assert ((from->size % BITS_PER_UNIT) == 0);
2243  to->unit_offset = from->offset / BITS_PER_UNIT;
2244  to->unit_size = from->size / BITS_PER_UNIT;
2245 to->type = from->type;
2246 to->alias_ptr_type = from->alias_ptr_type;
2247 to->certain = from->nonarg;
2248 to->reverse = from->reverse;
2249 vec_safe_push (desc->accesses, to);
2250
2251 for (gensum_param_access *ch = from->first_child;
2252 ch;
2253 ch = ch->next_sibling)
2254 copy_accesses_to_ipa_desc (ch, desc);
2255}
2256
2257/* Analyze function body scan results stored in param_accesses and
2258   param_descriptions, detect possible transformations and store information of
2259 those in function summary. NODE, FUN and IFS are all various structures
2260 describing the currently analyzed function. */
2261
2262static void
2263process_scan_results (cgraph_node *node, struct function *fun,
2264 isra_func_summary *ifs,
2265 vec<gensum_param_desc> *param_descriptions)
2266{
2267 bool check_pass_throughs = false;
2268 bool dereferences_propagated = false;
2269  tree parm = DECL_ARGUMENTS (node->decl);
2270 unsigned param_count = param_descriptions->length();
2271
2272 for (unsigned desc_index = 0;
2273 desc_index < param_count;
2274       desc_index++, parm = DECL_CHAIN (parm))
2275 {
2276 gensum_param_desc *desc = &(*param_descriptions)[desc_index];
2277 if (!desc->split_candidate)
2278 continue;
2279
2280      if (flag_checking)
2281 isra_verify_access_tree (desc->accesses);
2282
2283 if (!dereferences_propagated
2284 && desc->by_ref
2285 && desc->accesses)
2286 {
2287 propagate_dereference_distances (fun);
2288 dereferences_propagated = true;
2289 }
2290
2291      HOST_WIDE_INT nonarg_acc_size = 0;
2292 bool only_calls = true;
2293 bool check_failed = false;
2294
2295      int entry_bb_index = ENTRY_BLOCK_PTR_FOR_FN (fun)->index;
2296 for (gensum_param_access *acc = desc->accesses;
2297 acc;
2298 acc = acc->next_sibling)
2299 if (check_gensum_access (parm, desc, acc, &nonarg_acc_size, &only_calls,
2300 entry_bb_index))
2301 {
2302 check_failed = true;
2303 break;
2304 }
2305 if (check_failed)
2306 continue;
2307
2308 if (only_calls)
2309 desc->locally_unused = true;
2310
2311      HOST_WIDE_INT cur_param_size
2312        = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
2313      HOST_WIDE_INT param_size_limit;
2314 if (!desc->by_ref || optimize_function_for_size_p (fun))
2315 param_size_limit = cur_param_size;
2316 else
2317        param_size_limit
2318          = opt_for_fn (node->decl,
2319                        param_ipa_sra_ptr_growth_factor) * cur_param_size;
2320 if (nonarg_acc_size > param_size_limit
2321 || (!desc->by_ref && nonarg_acc_size == param_size_limit))
2322 {
2323 disqualify_split_candidate (desc, "Would result into a too big set "
2324 "of replacements.");
2325 }
2326 else
2327 {
2328 /* create_parameter_descriptors makes sure unit sizes of all
2329 candidate parameters fit unsigned integers restricted to
2330 ISRA_ARG_SIZE_LIMIT. */
2331          desc->param_size_limit = param_size_limit / BITS_PER_UNIT;
2332          desc->nonarg_acc_size = nonarg_acc_size / BITS_PER_UNIT;
2333 if (desc->split_candidate && desc->ptr_pt_count)
2334 {
2335              gcc_assert (desc->by_ref);
2336 check_pass_throughs = true;
2337 }
2338 }
2339 }
2340
2341 /* When a pointer parameter is passed-through to a callee, in which it is
2342 only used to read only one or a few items, we can attempt to transform it
2343 to obtaining and passing through the items instead of the pointer. But we
2344 must take extra care that 1) we do not introduce any segfault by moving
2345 dereferences above control flow and that 2) the data is not modified
2346 through an alias in this function. The IPA analysis must not introduce
2347   any access candidates unless it can prove both.
2348
2349 The current solution is very crude as it consists of ensuring that the
2350 call postdominates entry BB and that the definition of VUSE of the call is
2351 default definition. TODO: For non-recursive callees in the same
2352 compilation unit we could do better by doing analysis in topological order
2354   and looking into access candidates of callees, using their alias_ptr_types
2354 to attempt real AA. We could also use the maximum known dereferenced
2355 offset in this function at IPA level.
2356
2357 TODO: Measure the overhead and the effect of just being pessimistic.
2358 Maybe this is only -O3 material?
2359 */
2360 bool pdoms_calculated = false;
2361 if (check_pass_throughs)
2362 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2363 {
2364 gcall *call_stmt = cs->call_stmt;
2365 tree vuse = gimple_vuse (call_stmt);
2366
2367 /* If the callee is a const function, we don't get a VUSE. In such
2368 case there will be no memory accesses in the called function (or the
2369 const attribute is wrong) and then we just don't care. */
2370      bool uses_memory_as_obtained = vuse && SSA_NAME_IS_DEFAULT_DEF (vuse);
2371
2372 unsigned count = gimple_call_num_args (call_stmt);
2373 isra_call_summary *csum = call_sums->get_create (cs);
2374 csum->init_inputs (count);
2375 for (unsigned argidx = 0; argidx < count; argidx++)
2376 {
2377 if (!csum->m_arg_flow[argidx].pointer_pass_through)
2378 continue;
2379 unsigned pidx
2380 = get_single_param_flow_source (&csum->m_arg_flow[argidx]);
2381 gensum_param_desc *desc = &(*param_descriptions)[pidx];
2382 if (!desc->split_candidate)
2383 {
2384 csum->m_arg_flow[argidx].pointer_pass_through = false;
2385 continue;
2386 }
2387 if (!uses_memory_as_obtained)
2388 continue;
2389
2390 /* Post-dominator check placed last, hoping that it usually won't
2391 be needed. */
2392 if (!pdoms_calculated)
2393 {
2394              gcc_checking_assert (cfun);
2395 add_noreturn_fake_exit_edges ();
2396 connect_infinite_loops_to_exit ();
2397 calculate_dominance_info (CDI_POST_DOMINATORS);
2398 pdoms_calculated = true;
2399 }
2400 if (dominated_by_p (CDI_POST_DOMINATORS,
2401 gimple_bb (call_stmt),
2402                              single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun))))
2403 csum->m_arg_flow[argidx].safe_to_import_accesses = true;
2404 }
2405
2406 }
2407 if (pdoms_calculated)
2408 {
2409 free_dominance_info (CDI_POST_DOMINATORS);
2410 remove_fake_exit_edges ();
2411 }
2412
2413 /* TODO: Add early exit if we disqualified everything. This also requires
2414 that we either relax the restriction that
2415 ipa_param_adjustments.m_always_copy_start must be the number of PARM_DECLs
2416 or store the number of parameters to IPA-SRA function summary and use that
2417 when just removing params. */
2418
2419 vec_safe_reserve_exact (ifs->m_parameters, param_count);
2420 ifs->m_parameters->quick_grow_cleared (param_count);
2421 for (unsigned desc_index = 0; desc_index < param_count; desc_index++)
2422 {
2423 gensum_param_desc *s = &(*param_descriptions)[desc_index];
2424 isra_param_desc *d = &(*ifs->m_parameters)[desc_index];
2425
2426 d->param_size_limit = s->param_size_limit;
2427 d->size_reached = s->nonarg_acc_size;
2428 d->locally_unused = s->locally_unused;
2429 d->split_candidate = s->split_candidate;
2430 d->by_ref = s->by_ref;
2431
2432 for (gensum_param_access *acc = s->accesses;
2433 acc;
2434 acc = acc->next_sibling)
2435 copy_accesses_to_ipa_desc (acc, d);
2436 }
2437
2438 if (dump_file)
2439 dump_isra_param_descriptors (dump_file, node->decl, ifs);
2440}
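
As a worked example of the size-limit logic in the loop above (with made-up numbers): for a by-reference parameter pointing to a 16-byte struct and a growth factor of 2, param_size_limit is 32 bytes, so 40 bytes of non-call accesses disqualify the candidate while 24 bytes would not.  A minimal sketch, not code from ipa-sra.c; the growth factor and sizes are invented.

#include <stdio.h>

int
main (void)
{
  long cur_param_size = 16 * 8;        /* parameter size in bits */
  int by_ref = 1;
  long growth_factor = 2;              /* cf. param_ipa_sra_ptr_growth_factor */

  long param_size_limit
    = by_ref ? growth_factor * cur_param_size : cur_param_size;

  long nonarg_acc_size = 40 * 8;       /* bits covered by non-call accesses */
  int disqualified = (nonarg_acc_size > param_size_limit
                      || (!by_ref && nonarg_acc_size == param_size_limit));
  printf ("disqualified: %d\n", disqualified);   /* prints 1 */
  return 0;
}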
2441
2442/* Return true if there are any overlaps among certain accesses of DESC. If
2443 non-NULL, set *CERTAIN_ACCESS_PRESENT_P upon encountering a certain access
2444 too. DESC is assumed to be a split candidate that is not locally
2445 unused. */
2446
2447static bool
2448overlapping_certain_accesses_p (isra_param_desc *desc,
2449 bool *certain_access_present_p)
2450{
2451 unsigned pclen = vec_safe_length (desc->accesses);
2452 for (unsigned i = 0; i < pclen; i++)
2453 {
2454 param_access *a1 = (*desc->accesses)[i];
2455
2456 if (!a1->certain)
2457 continue;
2458 if (certain_access_present_p)
2459 *certain_access_present_p = true;
2460 for (unsigned j = i + 1; j < pclen; j++)
2461 {
2462 param_access *a2 = (*desc->accesses)[j];
2463 if (a2->certain
2464 && a1->unit_offset < a2->unit_offset + a2->unit_size
2465 && a1->unit_offset + a1->unit_size > a2->unit_offset)
2466 return true;
2467 }
2468 }
2469 return false;
2470}
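
The inner test above is the standard half-open interval intersection check: [o1, o1+s1) and [o2, o2+s2) overlap iff o1 < o2+s2 and o1+s1 > o2.  A tiny standalone sketch (not code from ipa-sra.c):

#include <assert.h>

/* Half-open interval overlap, as used in overlapping_certain_accesses_p.  */
static int
overlaps (unsigned o1, unsigned s1, unsigned o2, unsigned s2)
{
  return o1 < o2 + s2 && o1 + s1 > o2;
}

int
main (void)
{
  assert (overlaps (0, 8, 4, 8));    /* [0,8) and [4,12) intersect */
  assert (!overlaps (0, 8, 8, 4));   /* [0,8) and [8,12) merely touch */
  return 0;
}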
2471
2472/* Check for any overlaps of certain param accesses among splitting candidates
2473 and signal an ICE if there are any. If CERTAIN_MUST_EXIST is set, also
2474 check that used splitting candidates have at least one certain access. */
2475
2476static void
2477verify_splitting_accesses (cgraph_node *node, bool certain_must_exist)
2478{
2479 isra_func_summary *ifs = func_sums->get (node);
2480 if (!ifs || !ifs->m_candidate)
2481 return;
2482 unsigned param_count = vec_safe_length (ifs->m_parameters);
2483 for (unsigned pidx = 0; pidx < param_count; pidx++)
2484 {
2485 isra_param_desc *desc = &(*ifs->m_parameters)[pidx];
2486 if (!desc->split_candidate || desc->locally_unused)
2487 continue;
2488
2489 bool certain_access_present = !certain_must_exist;
2490 if (overlapping_certain_accesses_p (desc, &certain_access_present))
2491 internal_error ("Function %qs, parameter %u, has IPA-SRA accesses "
2492 "which overlap", node->dump_name (), pidx);
2493 if (!certain_access_present)
2494 internal_error ("Function %s, parameter %u, is used but does not "
2495 "have any certain IPA-SRA access",
2496 node->dump_name (), pidx);
2497 }
2498}
2499
2500/* Intraprocedural part of IPA-SRA analysis. Scan bodies of all functions in
2501 this compilation unit and create summary structures describing IPA-SRA
2502 opportunities and constraints in them. */
2503
2504static void
2505ipa_sra_generate_summary (void)
2506{
2507 struct cgraph_node *node;
2508
2509 gcc_checking_assert (!func_sums)((void)(!(!func_sums) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.c"
, 2509, __FUNCTION__), 0 : 0))
;
2510 gcc_checking_assert (!call_sums)((void)(!(!call_sums) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-sra.c"
, 2510, __FUNCTION__), 0 : 0))
;
2511 func_sums
2512 = (new (ggc_alloc_no_dtor <ipa_sra_function_summaries> ())
2513 ipa_sra_function_summaries (symtab, true));
2514 call_sums = new ipa_sra_call_summaries (symtab);
2515
2516 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)for ((node) = symtab->first_function_with_gimple_body (); (
node); (node) = symtab->next_function_with_gimple_body (node
))
2517 ipa_sra_summarize_function (node);
2518 return;
2519}
2520
2521/* Write intraprocedural analysis information about edge E into a stream for
2522   LTO WPA.  */
2523
2524static void
2525isra_write_edge_summary (output_block *ob, cgraph_edge *e)
2526{
2527 isra_call_summary *csum = call_sums->get (e);
2528 unsigned input_count = csum->m_arg_flow.length ();
2529 streamer_write_uhwi (ob, input_count);
2530 for (unsigned i = 0; i < input_count; i++)
2531 {
2532 isra_param_flow *ipf = &csum->m_arg_flow[i];
2533 streamer_write_hwi (ob, ipf->length);
2534 bitpack_d bp = bitpack_create (ob->main_stream);
2535 for (int j = 0; j < ipf->length; j++)
2536 bp_pack_value (&bp, ipf->inputs[j], 8);
2537 bp_pack_value (&bp, ipf->aggregate_pass_through, 1);
2538 bp_pack_value (&bp, ipf->pointer_pass_through, 1);
2539 bp_pack_value (&bp, ipf->safe_to_import_accesses, 1);
2540 streamer_write_bitpack (&bp);
2541 streamer_write_uhwi (ob, ipf->unit_offset);
2542 streamer_write_uhwi (ob, ipf->unit_size);
2543 }
2544 bitpack_d bp = bitpack_create (ob->main_stream);
2545 bp_pack_value (&bp, csum->m_return_ignored, 1);
2546 bp_pack_value (&bp, csum->m_return_returned, 1);
2547 bp_pack_value (&bp, csum->m_bit_aligned_arg, 1);
2548 streamer_write_bitpack (&bp);
2549}
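
The bit-packed fields written here must be unpacked in exactly the same order and with the same widths by isra_read_edge_summary further below.  A generic sketch of that write/read symmetry follows; this toy packer is not GCC's bitpack_d, just the same idea, and the field names in the comments refer to the summary fields above.

#include <assert.h>
#include <stdint.h>

static uint64_t
pack (uint64_t word, unsigned *pos, uint64_t val, unsigned bits)
{
  word |= (val & ((1ULL << bits) - 1)) << *pos;
  *pos += bits;
  return word;
}

static uint64_t
unpack (uint64_t word, unsigned *pos, unsigned bits)
{
  uint64_t val = (word >> *pos) & ((1ULL << bits) - 1);
  *pos += bits;
  return val;
}

int
main (void)
{
  unsigned wpos = 0, rpos = 0;
  uint64_t w = 0;
  w = pack (w, &wpos, 42, 8);   /* e.g. one entry of ipf->inputs, 8 bits */
  w = pack (w, &wpos, 1, 1);    /* aggregate_pass_through */
  w = pack (w, &wpos, 0, 1);    /* pointer_pass_through */
  assert (unpack (w, &rpos, 8) == 42);  /* same order, same widths */
  assert (unpack (w, &rpos, 1) == 1);
  assert (unpack (w, &rpos, 1) == 0);
  return 0;
}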
2550
2551/* Write intraprocedural analysis information about NODE and all of its outgoing
2552 edges into a stream for LTO WPA. */
2553
2554static void
2555isra_write_node_summary (output_block *ob, cgraph_node *node)
2556{
2557 isra_func_summary *ifs = func_sums->get (node);
2558 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2559 int node_ref = lto_symtab_encoder_encode (encoder, node);
2560 streamer_write_uhwi (ob, node_ref);
2561
2562 unsigned param_desc_count = vec_safe_length (ifs->m_parameters);
2563 streamer_write_uhwi (ob, param_desc_count);
2564 for (unsigned i = 0; i < param_desc_count; i++)
2565 {
2566 isra_param_desc *desc = &(*ifs->m_parameters)[i];
2567 unsigned access_count = vec_safe_length (desc->accesses);
2568 streamer_write_uhwi (ob, access_count);
2569 for (unsigned j = 0; j < access_count; j++)
2570 {
2571 param_access *acc = (*desc->accesses)[j];
2572          stream_write_tree (ob, acc->type, true);
2573          stream_write_tree (ob, acc->alias_ptr_type, true);
2574 streamer_write_uhwi (ob, acc->unit_offset);
2575 streamer_write_uhwi (ob, acc->unit_size);
2576 bitpack_d bp = bitpack_create (ob->main_stream);
2577 bp_pack_value (&bp, acc->certain, 1);
2578 streamer_write_bitpack (&bp);
2579 }
2580 streamer_write_uhwi (ob, desc->param_size_limit);
2581 streamer_write_uhwi (ob, desc->size_reached);
2582 bitpack_d bp = bitpack_create (ob->main_stream);
2583 bp_pack_value (&bp, desc->locally_unused, 1);
2584 bp_pack_value (&bp, desc->split_candidate, 1);
2585 bp_pack_value (&bp, desc->by_ref, 1);
2586 streamer_write_bitpack (&bp);
2587 }
2588 bitpack_d bp = bitpack_create (ob->main_stream);
2589 bp_pack_value (&bp, ifs->m_candidate, 1);
2590 bp_pack_value (&bp, ifs->m_returns_value, 1);
2591 bp_pack_value (&bp, ifs->m_return_ignored, 1);
2592  gcc_assert (!ifs->m_queued);
2593 streamer_write_bitpack (&bp);
2594
2595 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2596 isra_write_edge_summary (ob, e);
2597 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2598 isra_write_edge_summary (ob, e);
2599}
2600
2601/* Write intraprocedural analysis information into a stream for LTO WPA. */
2602
2603static void
2604ipa_sra_write_summary (void)
2605{
2606 if (!func_sums || !call_sums)
2607 return;
2608
2609 struct output_block *ob = create_output_block (LTO_section_ipa_sra);
2610 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2611  ob->symbol = NULL;
2612
2613 unsigned int count = 0;
2614 lto_symtab_encoder_iterator lsei;
2615 for (lsei = lsei_start_function_in_partition (encoder);
2616 !lsei_end_p (lsei);
2617 lsei_next_function_in_partition (&lsei))
2618 {
2619 cgraph_node *node = lsei_cgraph_node (lsei);
2620 if (node->has_gimple_body_p ()
2621          && func_sums->get (node) != NULL)
2622 count++;
2623 }
2624 streamer_write_uhwi (ob, count);
2625
2626 /* Process all of the functions. */
2627 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
2628 lsei_next_function_in_partition (&lsei))
2629 {
2630 cgraph_node *node = lsei_cgraph_node (lsei);
2631 if (node->has_gimple_body_p ()
2632	  && func_sums->get (node) != NULL)
2633 isra_write_node_summary (ob, node);
2634 }
2635 streamer_write_char_stream (ob->main_stream, 0);
2636  produce_asm (ob, NULL);
2637 destroy_output_block (ob);
2638}
2639
2640/* Read intraprocedural analysis information about call edge CS from a stream
2641   for LTO WPA. */
2642
2643static void
2644isra_read_edge_summary (struct lto_input_block *ib, cgraph_edge *cs)
2645{
2646 isra_call_summary *csum = call_sums->get_create (cs);
2647 unsigned input_count = streamer_read_uhwi (ib);
2648 csum->init_inputs (input_count);
2649 for (unsigned i = 0; i < input_count; i++)
2650 {
2651 isra_param_flow *ipf = &csum->m_arg_flow[i];
2652 ipf->length = streamer_read_hwi (ib);
2653 bitpack_d bp = streamer_read_bitpack (ib);
2654 for (int j = 0; j < ipf->length; j++)
2655 ipf->inputs[j] = bp_unpack_value (&bp, 8);
2656 ipf->aggregate_pass_through = bp_unpack_value (&bp, 1);
2657 ipf->pointer_pass_through = bp_unpack_value (&bp, 1);
2658 ipf->safe_to_import_accesses = bp_unpack_value (&bp, 1);
2659 ipf->unit_offset = streamer_read_uhwi (ib);
2660 ipf->unit_size = streamer_read_uhwi (ib);
2661 }
2662 bitpack_d bp = streamer_read_bitpack (ib);
2663 csum->m_return_ignored = bp_unpack_value (&bp, 1);
2664 csum->m_return_returned = bp_unpack_value (&bp, 1);
2665 csum->m_bit_aligned_arg = bp_unpack_value (&bp, 1);
2666}
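
Note that the bp_unpack_value sequence above must mirror, field for field and
width for width, the bp_pack_value sequence in isra_write_edge_summary above:
a bitpack is a plain stream of bits with no self-description. The pairing,
condensed from the two functions (a minimal sketch, not a new code path):

    /* Writer side -- the packing order defines the format.  */
    bitpack_d bp = bitpack_create (ob->main_stream);
    bp_pack_value (&bp, csum->m_return_ignored, 1);
    bp_pack_value (&bp, csum->m_return_returned, 1);
    streamer_write_bitpack (&bp);

    /* Reader side -- must unpack in the identical order and widths.  */
    bitpack_d bp = streamer_read_bitpack (ib);
    csum->m_return_ignored = bp_unpack_value (&bp, 1);
    csum->m_return_returned = bp_unpack_value (&bp, 1);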
2667
2668/* Read intraprocedural analysis information about NODE and all of its outgoing
2669   edges from a stream for LTO WPA. */
2670
2671static void
2672isra_read_node_info (struct lto_input_block *ib, cgraph_node *node,
2673 struct data_in *data_in)
2674{
2675 isra_func_summary *ifs = func_sums->get_create (node);
2676 unsigned param_desc_count = streamer_read_uhwi (ib);
2677 if (param_desc_count > 0)
14: Assuming 'param_desc_count' is > 0
15: Taking true branch
2678 {
2679 vec_safe_reserve_exact (ifs->m_parameters, param_desc_count);
2680 ifs->m_parameters->quick_grow_cleared (param_desc_count);
2681 }
2682  for (unsigned i = 0; i < param_desc_count; i++)
15.1: 'i' is < 'param_desc_count'
16: Loop condition is true. Entering loop body
2683 {
2684 isra_param_desc *desc = &(*ifs->m_parameters)[i];
2685 unsigned access_count = streamer_read_uhwi (ib);
2686 for (unsigned j = 0; j < access_count; j++)
17: Assuming 'j' is < 'access_count'
18: Loop condition is true. Entering loop body
2687 {
2688 param_access *acc = ggc_cleared_alloc<param_access> ();
2689	  acc->type = stream_read_tree (ib, data_in);
2690	  acc->alias_ptr_type = stream_read_tree (ib, data_in);
2691 acc->unit_offset = streamer_read_uhwi (ib);
2692 acc->unit_size = streamer_read_uhwi (ib);
2693 bitpack_d bp = streamer_read_bitpack (ib);
2694 acc->certain = bp_unpack_value (&bp, 1);
2695 vec_safe_push (desc->accesses, acc);
19: Passing value via 1st parameter 'v'
20: Calling 'vec_safe_push<param_access *, va_gc>'
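
This is the point where the reported path leaves ipa-sra.c: the analyzer
follows the call into vec_safe_push in vec.h (the annotated header below) and
reports the null object pointer inside it. For orientation, the helper has
roughly this shape (a simplified sketch assuming the usual vec.h definition,
not a verbatim copy):

    template<typename T, typename A>
    inline T *
    vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj)
    {
      vec_safe_reserve (v, 1);	 /* Allocates the vector when V is NULL.  */
      return v->quick_push (obj); /* V is non-null here at run time, but the
				     analyzer evidently does not track that
				     guarantee, hence the report.  */
    }

Note that desc->accesses is indeed NULL on the first push here, because
quick_grow_cleared above zero-initialized the descriptors; that is a valid
input to vec_safe_push.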
2696 }
2697 desc->param_size_limit = streamer_read_uhwi (ib);
2698 desc->size_reached = streamer_read_uhwi (ib);
2699 bitpack_d bp = streamer_read_bitpack (ib);
2700 desc->locally_unused = bp_unpack_value (&bp, 1);
2701 desc->split_candidate = bp_unpack_value (&bp, 1);
2702 desc->by_ref = bp_unpack_value (&bp, 1);
2703 }
2704 bitpack_d bp = streamer_read_bitpack (ib);
2705 ifs->m_candidate = bp_unpack_value (&bp, 1);
2706 ifs->m_returns_value = bp_unpack_value (&bp, 1);
2707 ifs->m_return_ignored = bp_unpack_value (&bp, 1);
2708 ifs->m_queued = 0;
2709
2710 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2711 isra_read_edge_summary (ib, e);
2712 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2713 isra_read_edge_summary (ib, e);
2714}
2715
2716/* Read IPA-SRA summaries from a section in file FILE_DATA of length LEN with
2717   data DATA. TODO: This function was copied almost verbatim from ipa-prop.c;
2718 it should be possible to unify them somehow. */
2719
2720static void
2721isra_read_summary_section (struct lto_file_decl_data *file_data,
2722 const char *data, size_t len)
2723{
2724 const struct lto_function_header *header =
2725 (const struct lto_function_header *) data;
2726 const int cfg_offset = sizeof (struct lto_function_header);
2727 const int main_offset = cfg_offset + header->cfg_size;
2728 const int string_offset = main_offset + header->main_size;
2729 struct data_in *data_in;
2730 unsigned int i;
2731 unsigned int count;
2732
2733 lto_input_block ib_main ((const char *) data + main_offset,
2734 header->main_size, file_data->mode_table);
2735
2736 data_in =
2737 lto_data_in_create (file_data, (const char *) data + string_offset,
2738 header->string_size, vNULL);
2739 count = streamer_read_uhwi (&ib_main);
2740
2741 for (i = 0; i < count; i++)
9: Assuming 'i' is < 'count'
10: Loop condition is true. Entering loop body
2742 {
2743 unsigned int index;
2744 struct cgraph_node *node;
2745 lto_symtab_encoder_t encoder;
2746
2747 index = streamer_read_uhwi (&ib_main);
2748 encoder = file_data->symtab_node_encoder;
2749 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
2750 index));
2751      gcc_assert (node->definition);
11: Assuming field 'definition' is not equal to 0
12: '?' condition is false
2752 isra_read_node_info (&ib_main, node, data_in);
13: Calling 'isra_read_node_info'
2753 }
2754  lto_free_section_data (file_data, LTO_section_ipa_sra, NULL, data,
2755 len);
2756 lto_data_in_delete (data_in);
2757}
2758
2759/* Read intraprocedural analysis information from a stream for LTO WPA. */
2760
2761static void
2762ipa_sra_read_summary (void)
2763{
2764 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2765 struct lto_file_decl_data *file_data;
2766 unsigned int j = 0;
2767
2768  gcc_checking_assert (!func_sums);
1: Assuming 'func_sums' is null
2: '?' condition is false
2769  gcc_checking_assert (!call_sums);
3: Assuming 'call_sums' is null
4: '?' condition is false
2770 func_sums
2771 = (new (ggc_alloc_no_dtor <ipa_sra_function_summaries> ())
2772 ipa_sra_function_summaries (symtab, true));
2773 call_sums = new ipa_sra_call_summaries (symtab);
2774
2775 while ((file_data = file_data_vec[j++]))
5: Loop condition is true. Entering loop body
2776 {
2777 size_t len;
2778 const char *data
2779 = lto_get_summary_section_data (file_data, LTO_section_ipa_sra, &len);
2780 if (data)
6: Assuming 'data' is non-null
7: Taking true branch
2781 isra_read_summary_section (file_data, data, len);
8: Calling 'isra_read_summary_section'
2782 }
2783}
2784
2785/* Dump all IPA-SRA summary data for all cgraph nodes and edges to file F. */
2786
2787static void
2788ipa_sra_dump_all_summaries (FILE *f)
2789{
2790 cgraph_node *node;
2791  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
2792 {
2793 fprintf (f, "\nSummary for node %s:\n", node->dump_name ());
2794
2795 isra_func_summary *ifs = func_sums->get (node);
2796 if (!ifs)
2797 {
2798 fprintf (f, " Function does not have any associated IPA-SRA "
2799 "summary\n");
2800 continue;
2801 }
2802 if (!ifs->m_candidate)
2803 {
2804 fprintf (f, " Not a candidate function\n");
2805 continue;
2806 }
2807 if (ifs->m_returns_value)
2808 fprintf (f, " Returns value\n");
2809 if (vec_safe_is_empty (ifs->m_parameters))
2810 fprintf (f, " No parameter information. \n");
2811 else
2812 for (unsigned i = 0; i < ifs->m_parameters->length (); ++i)
2813 {
2814 fprintf (f, " Descriptor for parameter %i:\n", i);
2815 dump_isra_param_descriptor (f, &(*ifs->m_parameters)[i]);
2816 }
2817 fprintf (f, "\n");
2818
2819 struct cgraph_edge *cs;
2820 for (cs = node->callees; cs; cs = cs->next_callee)
2821 {
2822 fprintf (f, " Summary for edge %s->%s:\n", cs->caller->dump_name (),
2823 cs->callee->dump_name ());
2824 isra_call_summary *csum = call_sums->get (cs);
2825 if (csum)
2826 csum->dump (f);
2827 else
2828 fprintf (f, " Call summary is MISSING!\n");
2829 }
2830
2831 }
2832 fprintf (f, "\n\n");
2833}
2834
2835/* Perform function-scope viability tests that can only be made at IPA level
2836 and return false if the function is deemed unsuitable for IPA-SRA. */
2837
2838static bool
2839ipa_sra_ipa_function_checks (cgraph_node *node)
2840{
2841 if (!node->can_be_local_p ())
2842 {
2843 if (dump_file)
2844 fprintf (dump_file, "Function %s disqualified because it cannot be "
2845 "made local.\n", node->dump_name ());
2846 return false;
2847 }
2848 if (!node->can_change_signature)
2849 {
2850 if (dump_file)
2851 fprintf (dump_file, "Function can not change signature.\n");
2852 return false;
2853 }
2854
2855 return true;
2856}
2857
2858/* Issues found out by check_callers_for_issues. */
2859
2860struct caller_issues
2861{
2862 /* The candidate being considered. */
2863 cgraph_node *candidate;
2864 /* There is a thunk among callers. */
2865 bool thunk;
2866 /* Call site with no available information. */
2867 bool unknown_callsite;
2868  /* Call from outside the candidate's comdat group. */
2869 bool call_from_outside_comdat;
2870 /* There is a bit-aligned load into one of non-gimple-typed arguments. */
2871 bool bit_aligned_aggregate_argument;
2872};
2873
2874/* Worker for call_for_symbol_and_aliases, set any flags of passed caller_issues
2875 that apply. */
2876
2877static bool
2878check_for_caller_issues (struct cgraph_node *node, void *data)
2879{
2880 struct caller_issues *issues = (struct caller_issues *) data;
2881
2882 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
2883 {
2884 if (cs->caller->thunk)
2885 {
2886 issues->thunk = true;
2887 /* TODO: We should be able to process at least some types of
2888 thunks. */
2889 return true;
2890 }
2891 if (issues->candidate->calls_comdat_local
2892 && issues->candidate->same_comdat_group
2893 && !issues->candidate->in_same_comdat_group_p (cs->caller))
2894 {
2895 issues->call_from_outside_comdat = true;
2896 return true;
2897 }
2898
2899 isra_call_summary *csum = call_sums->get (cs);
2900 if (!csum)
2901 {
2902 issues->unknown_callsite = true;
2903 return true;
2904 }
2905
2906 if (csum->m_bit_aligned_arg)
2907 issues->bit_aligned_aggregate_argument = true;
2908 }
2909 return false;
2910}
2911
2912/* Look at all incoming edges to NODE, including aliases and thunks and look
2913 for problems. Return true if NODE type should not be modified at all. */
2914
2915static bool
2916check_all_callers_for_issues (cgraph_node *node)
2917{
2918 struct caller_issues issues;
2919 memset (&issues, 0, sizeof (issues));
2920 issues.candidate = node;
2921
2922 node->call_for_symbol_and_aliases (check_for_caller_issues, &issues, true);
2923 if (issues.unknown_callsite)
2924 {
2925 if (dump_file && (dump_flags & TDF_DETAILS))
2926 fprintf (dump_file, "A call of %s has not been analyzed. Disabling "
2927 "all modifications.\n", node->dump_name ());
2928 return true;
2929 }
2930 /* TODO: We should be able to process at least some types of thunks. */
2931 if (issues.thunk)
2932 {
2933 if (dump_file && (dump_flags & TDF_DETAILS))
2934 fprintf (dump_file, "A call of %s is through thunk, which are not"
2935 " handled yet. Disabling all modifications.\n",
2936 node->dump_name ());
2937 return true;
2938 }
2939 if (issues.call_from_outside_comdat)
2940 {
2941 if (dump_file)
2942 fprintf (dump_file, "Function would become private comdat called "
2943 "outside of its comdat group.\n");
2944 return true;
2945 }
2946
2947 if (issues.bit_aligned_aggregate_argument)
2948 {
2949 /* Let's only remove parameters/return values from such functions.
2950 TODO: We could only prevent splitting the problematic parameters if
2951 anybody thinks it is worth it. */
2952 if (dump_file && (dump_flags & TDF_DETAILS))
2953 fprintf (dump_file, "A call of %s has bit-aligned aggregate argument,"
2954 " disabling parameter splitting.\n", node->dump_name ());
2955
2956 isra_func_summary *ifs = func_sums->get (node);
2957      gcc_checking_assert (ifs);
2958 unsigned param_count = vec_safe_length (ifs->m_parameters);
2959 for (unsigned i = 0; i < param_count; i++)
2960 (*ifs->m_parameters)[i].split_candidate = false;
2961 }
2962 return false;
2963}
2964
2965/* Find the access with corresponding OFFSET and SIZE among accesses in
2966 PARAM_DESC and return it or NULL if such an access is not there. */
2967
2968static param_access *
2969find_param_access (isra_param_desc *param_desc, unsigned offset, unsigned size)
2970{
2971 unsigned pclen = vec_safe_length (param_desc->accesses);
2972
2973 /* The search is linear but the number of stored accesses is bound by
2974 PARAM_IPA_SRA_MAX_REPLACEMENTS, so most probably 8. */
2975
2976 for (unsigned i = 0; i < pclen; i++)
2977 if ((*param_desc->accesses)[i]->unit_offset == offset
2978 && (*param_desc->accesses)[i]->unit_size == size)
2979 return (*param_desc->accesses)[i];
2980
2981  return NULL;
2982}
2983
2984/* Return true iff the total size SIZE of definite replacements would violate
2985   the limit set for it in DESC. */
2986
2987static bool
2988size_would_violate_limit_p (isra_param_desc *desc, unsigned size)
2989{
2990 unsigned limit = desc->param_size_limit;
2991 if (size > limit
2992 || (!desc->by_ref && size == limit))
2993 return true;
2994 return false;
2995}
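
In other words, a by-reference parameter may use up its size limit exactly,
whereas a by-value aggregate must stay strictly below it. With, say,
desc->param_size_limit == 16 (value illustrative):

    size_would_violate_limit_p (desc, 16); /* false when desc->by_ref,
					      true otherwise.  */
    size_would_violate_limit_p (desc, 17); /* true either way.  */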
2996
2997/* Increase reached size of DESC by SIZE or disqualify it if it would violate
2998 the set limit. IDX is the parameter number which is dumped when
2999 disqualifying. */
3000
3001static void
3002bump_reached_size (isra_param_desc *desc, unsigned size, unsigned idx)
3003{
3004 unsigned after = desc->size_reached + size;
3005 if (size_would_violate_limit_p (desc, after))
3006 {
3007 if (dump_file && (dump_flags & TDF_DETAILS))
3008 fprintf (dump_file, " ...size limit reached, disqualifying "
3009 "candidate parameter %u\n", idx);
3010 desc->split_candidate = false;
3011 return;
3012 }
3013 desc->size_reached = after;
3014}
3015
3016/* Take all actions required to deal with an edge CS that represents a call to
3017 an unknown or un-analyzed function, for both parameter removal and
3018 splitting. */
3019
3020static void
3021process_edge_to_unknown_caller (cgraph_edge *cs)
3022{
3023 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3024  gcc_checking_assert (from_ifs);
3025 isra_call_summary *csum = call_sums->get (cs);
3026
3027 if (dump_file && (dump_flags & TDF_DETAILS))
3028 fprintf (dump_file, "Processing an edge to an unknown caller from %s:\n",
3029 cs->caller->dump_name ());
3030
3031 unsigned args_count = csum->m_arg_flow.length ();
3032 for (unsigned i = 0; i < args_count; i++)
3033 {
3034 isra_param_flow *ipf = &csum->m_arg_flow[i];
3035
3036 if (ipf->pointer_pass_through)
3037 {
3038 isra_param_desc *param_desc
3039 = &(*from_ifs->m_parameters)[get_single_param_flow_source (ipf)];
3040 param_desc->locally_unused = false;
3041 param_desc->split_candidate = false;
3042 continue;
3043 }
3044 if (ipf->aggregate_pass_through)
3045 {
3046 unsigned idx = get_single_param_flow_source (ipf);
3047 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3048
3049 param_desc->locally_unused = false;
3050 if (!param_desc->split_candidate)
3051 continue;
3052	  gcc_assert (!param_desc->by_ref);
3053 param_access *pacc = find_param_access (param_desc, ipf->unit_offset,
3054 ipf->unit_size);
3055	  gcc_checking_assert (pacc);
3056 pacc->certain = true;
3057	  if (overlapping_certain_accesses_p (param_desc, NULL))
3058 {
3059 if (dump_file && (dump_flags & TDF_DETAILS))
3060 fprintf (dump_file, " ...leading to overlap, "
3061 " disqualifying candidate parameter %u\n",
3062 idx);
3063 param_desc->split_candidate = false;
3064 }
3065 else
3066 bump_reached_size (param_desc, pacc->unit_size, idx);
3067 ipf->aggregate_pass_through = false;
3068 continue;
3069 }
3070
3071 for (int j = 0; j < ipf->length; j++)
3072 {
3073 int input_idx = ipf->inputs[j];
3074 (*from_ifs->m_parameters)[input_idx].locally_unused = false;
3075 }
3076 }
3077}
3078
3079/* Propagate parameter removal information through cross-SCC edge CS,
3080 i.e. decrease the use count in the caller parameter descriptor for each use
3081 in this call. */
3082
3083static void
3084param_removal_cross_scc_edge (cgraph_edge *cs)
3085{
3086 enum availability availability;
3087 cgraph_node *callee = cs->callee->function_symbol (&availability);
3088 isra_func_summary *to_ifs = func_sums->get (callee);
3089 if (!to_ifs || !to_ifs->m_candidate
3090 || (availability < AVAIL_AVAILABLE)
3091 || vec_safe_is_empty (to_ifs->m_parameters))
3092 {
3093 process_edge_to_unknown_caller (cs);
3094 return;
3095 }
3096 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3097  gcc_checking_assert (from_ifs);
3098
3099 isra_call_summary *csum = call_sums->get (cs);
3100 unsigned args_count = csum->m_arg_flow.length ();
3101 unsigned param_count = vec_safe_length (to_ifs->m_parameters);
3102
3103 for (unsigned i = 0; i < args_count; i++)
3104 {
3105 bool unused_in_callee;
3106 if (i < param_count)
3107 unused_in_callee = (*to_ifs->m_parameters)[i].locally_unused;
3108 else
3109 unused_in_callee = false;
3110
3111 if (!unused_in_callee)
3112 {
3113 isra_param_flow *ipf = &csum->m_arg_flow[i];
3114 for (int j = 0; j < ipf->length; j++)
3115 {
3116 int input_idx = ipf->inputs[j];
3117 (*from_ifs->m_parameters)[input_idx].locally_unused = false;
3118 }
3119 }
3120 }
3121}
3122
3123/* Unless it is already there, push NODE which is also described by IFS to
3124 STACK. */
3125
3126static void
3127isra_push_node_to_stack (cgraph_node *node, isra_func_summary *ifs,
3128 vec<cgraph_node *> *stack)
3129{
3130 if (!ifs->m_queued)
3131 {
3132 ifs->m_queued = true;
3133 stack->safe_push (node);
3134 }
3135}
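
The m_queued flag keeps STACK duplicate-free: a node can sit on it at most
once between pops. The matching pop side, as it appears in ipa_sra_analysis
further below, re-arms the flag:

    cgraph_node *v = stack.pop ();
    isra_func_summary *ifs = func_sums->get (v);
    gcc_checking_assert (ifs && ifs->m_queued);
    ifs->m_queued = false;	/* Allow the node to be queued again.  */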
3136
3137/* If parameter with index INPUT_IDX is marked as locally unused, mark it as
3138 used and push CALLER on STACK. */
3139
3140static void
3141isra_mark_caller_param_used (isra_func_summary *from_ifs, int input_idx,
3142 cgraph_node *caller, vec<cgraph_node *> *stack)
3143{
3144 if ((*from_ifs->m_parameters)[input_idx].locally_unused)
3145 {
3146 (*from_ifs->m_parameters)[input_idx].locally_unused = false;
3147 isra_push_node_to_stack (caller, from_ifs, stack);
3148 }
3149}
3150
3151
3152/* Propagate information that any parameter is not used only locally within a
3153 SCC across CS to the caller, which must be in the same SCC as the
3154 callee. Push any callers that need to be re-processed to STACK. */
3155
3156static void
3157propagate_used_across_scc_edge (cgraph_edge *cs, vec<cgraph_node *> *stack)
3158{
3159 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3160 if (!from_ifs || vec_safe_is_empty (from_ifs->m_parameters))
3161 return;
3162
3163 isra_call_summary *csum = call_sums->get (cs);
3164  gcc_checking_assert (csum);
3165 unsigned args_count = csum->m_arg_flow.length ();
3166 enum availability availability;
3167 cgraph_node *callee = cs->callee->function_symbol (&availability);
3168 isra_func_summary *to_ifs = func_sums->get (callee);
3169
3170 unsigned param_count
3171 = (to_ifs && (availability >= AVAIL_AVAILABLE))
3172 ? vec_safe_length (to_ifs->m_parameters) : 0;
3173 for (unsigned i = 0; i < args_count; i++)
3174 {
3175 if (i < param_count
3176 && (*to_ifs->m_parameters)[i].locally_unused)
3177 continue;
3178
3179      /* The argument is needed in the callee, so we must mark the parameter as
3180 used also in the caller and its callers within this SCC. */
3181 isra_param_flow *ipf = &csum->m_arg_flow[i];
3182 for (int j = 0; j < ipf->length; j++)
3183 {
3184 int input_idx = ipf->inputs[j];
3185 isra_mark_caller_param_used (from_ifs, input_idx, cs->caller, stack);
3186 }
3187 }
3188}
3189
3190/* Propagate information that any parameter is not used only locally within a
3191 SCC (i.e. is used also elsewhere) to all callers of NODE that are in the
3192 same SCC. Push any callers that need to be re-processed to STACK. */
3193
3194static bool
3195propagate_used_to_scc_callers (cgraph_node *node, void *data)
3196{
3197 vec<cgraph_node *> *stack = (vec<cgraph_node *> *) data;
3198 cgraph_edge *cs;
3199 for (cs = node->callers; cs; cs = cs->next_caller)
3200 if (ipa_edge_within_scc (cs))
3201 propagate_used_across_scc_edge (cs, stack);
3202 return false;
3203}
3204
3205/* Return true iff all certain accesses in ARG_DESC are also present as
3206 certain accesses in PARAM_DESC. */
3207
3208static bool
3209all_callee_accesses_present_p (isra_param_desc *param_desc,
3210 isra_param_desc *arg_desc)
3211{
3212 unsigned aclen = vec_safe_length (arg_desc->accesses);
3213 for (unsigned j = 0; j < aclen; j++)
3214 {
3215 param_access *argacc = (*arg_desc->accesses)[j];
3216 if (!argacc->certain)
3217 continue;
3218 param_access *pacc = find_param_access (param_desc, argacc->unit_offset,
3219 argacc->unit_size);
3220 if (!pacc
3221 || !pacc->certain
3222 || !types_compatible_p (argacc->type, pacc->type))
3223 return false;
3224 }
3225 return true;
3226}
3227
3228/* Type internal to function pull_accesses_from_callee. Unfortunately gcc 4.8
3229 does not allow instantiating an auto_vec with a type defined within a
3230 function so it is a global type. */
3231enum acc_prop_kind {ACC_PROP_DONT, ACC_PROP_COPY, ACC_PROP_CERTAIN};
3232
3233
3234/* Attempt to propagate all definite accesses from ARG_DESC to PARAM_DESC,
3235 (which belongs to CALLER) if they would not violate some constraint there.
3236 If successful, return NULL, otherwise return the string reason for failure
3237 (which can be written to the dump file). DELTA_OFFSET is the known offset
3238   of the actual argument within the formal parameter (so of ARG_DESCS within
3239 PARAM_DESCS), ARG_SIZE is the size of the actual argument or zero, if not
3240 known. In case of success, set *CHANGE_P to true if propagation actually
3241 changed anything. */
3242
3243static const char *
3244pull_accesses_from_callee (cgraph_node *caller, isra_param_desc *param_desc,
3245 isra_param_desc *arg_desc,
3246 unsigned delta_offset, unsigned arg_size,
3247 bool *change_p)
3248{
3249 unsigned pclen = vec_safe_length (param_desc->accesses);
3250 unsigned aclen = vec_safe_length (arg_desc->accesses);
3251 unsigned prop_count = 0;
3252 unsigned prop_size = 0;
3253 bool change = false;
3254
3255 auto_vec <enum acc_prop_kind, 8> prop_kinds (aclen);
3256 for (unsigned j = 0; j < aclen; j++)
3257 {
3258 param_access *argacc = (*arg_desc->accesses)[j];
3259 prop_kinds.safe_push (ACC_PROP_DONT);
3260
3261 if (arg_size > 0
3262 && argacc->unit_offset + argacc->unit_size > arg_size)
3263 return "callee access outsize size boundary";
3264
3265 if (!argacc->certain)
3266 continue;
3267
3268 unsigned offset = argacc->unit_offset + delta_offset;
3269 /* Given that accesses are initially stored according to increasing
3270 offset and decreasing size in case of equal offsets, the following
3271 searches could be written more efficiently if we kept the ordering
3272 when copying. But the number of accesses is capped at
3273 PARAM_IPA_SRA_MAX_REPLACEMENTS (so most likely 8) and the code gets
3274 messy quickly, so let's improve on that only if necessary. */
3275
3276 bool exact_match = false;
3277 for (unsigned i = 0; i < pclen; i++)
3278 {
3279 /* Check for overlaps. */
3280 param_access *pacc = (*param_desc->accesses)[i];
3281 if (pacc->unit_offset == offset
3282 && pacc->unit_size == argacc->unit_size)
3283 {
3284 if (argacc->alias_ptr_type != pacc->alias_ptr_type
3285 || !types_compatible_p (argacc->type, pacc->type))
3286 return "propagated access types would not match existing ones";
3287
3288 exact_match = true;
3289 if (!pacc->certain)
3290 {
3291 prop_kinds[j] = ACC_PROP_CERTAIN;
3292 prop_size += argacc->unit_size;
3293 change = true;
3294 }
3295 continue;
3296 }
3297
3298 if (offset < pacc->unit_offset + pacc->unit_size
3299 && offset + argacc->unit_size > pacc->unit_offset)
3300 {
3301 /* None permissible with load accesses, possible to fit into
3302 argument ones. */
3303 if (pacc->certain
3304 || offset < pacc->unit_offset
3305 || (offset + argacc->unit_size
3306 > pacc->unit_offset + pacc->unit_size))
3307 return "a propagated access would conflict in caller";
3308 }
3309 }
3310
3311 if (!exact_match)
3312 {
3313 prop_kinds[j] = ACC_PROP_COPY;
3314 prop_count++;
3315 prop_size += argacc->unit_size;
3316 change = true;
3317 }
3318 }
3319
3320 if (!change)
3321    return NULL;
3322
3323 if ((prop_count + pclen
3324       > (unsigned) opt_for_fn (caller->decl, param_ipa_sra_max_replacements))
3325 || size_would_violate_limit_p (param_desc,
3326 param_desc->size_reached + prop_size))
3327 return "propagating accesses would violate the count or size limit";
3328
3329 *change_p = true;
3330 for (unsigned j = 0; j < aclen; j++)
3331 {
3332 if (prop_kinds[j] == ACC_PROP_COPY)
3333 {
3334 param_access *argacc = (*arg_desc->accesses)[j];
3335
3336 param_access *copy = ggc_cleared_alloc<param_access> ();
3337 copy->unit_offset = argacc->unit_offset + delta_offset;
3338 copy->unit_size = argacc->unit_size;
3339 copy->type = argacc->type;
3340 copy->alias_ptr_type = argacc->alias_ptr_type;
3341 copy->certain = true;
3342 vec_safe_push (param_desc->accesses, copy);
3343 }
3344 else if (prop_kinds[j] == ACC_PROP_CERTAIN)
3345 {
3346 param_access *argacc = (*arg_desc->accesses)[j];
3347 param_access *csp
3348 = find_param_access (param_desc, argacc->unit_offset + delta_offset,
3349 argacc->unit_size);
3350 csp->certain = true;
3351 }
3352 }
3353
3354 param_desc->size_reached += prop_size;
3355
3356  return NULL;
3357}
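
Note the two-phase structure of pull_accesses_from_callee: the first loop only
records a verdict per callee access in prop_kinds and may return a failure
reason while the caller's descriptor is still untouched; the second loop
commits the recorded verdicts once the count and size limits are known to
hold. In outline (classify, limits_exceeded and apply are illustrative
stand-ins for the inline logic above, not real helpers):

    for (unsigned j = 0; j < aclen; j++)	/* Phase 1: no side effects.  */
      prop_kinds.safe_push (classify ((*arg_desc->accesses)[j]));
    if (limits_exceeded (prop_count, prop_size))	/* Bail out unmodified. */
      return "propagating accesses would violate the count or size limit";
    for (unsigned j = 0; j < aclen; j++)	/* Phase 2: commit.  */
      apply (prop_kinds[j]);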
3358
3359/* Propagate parameter splitting information through call graph edge CS.
3360 Return true if any changes that might need to be propagated within SCCs have
3361 been made. The function also clears the aggregate_pass_through and
3362 pointer_pass_through in call summaries which do not need to be processed
3363 again if this CS is revisited when iterating while changes are propagated
3364 within an SCC. */
3365
3366static bool
3367param_splitting_across_edge (cgraph_edge *cs)
3368{
3369 bool res = false;
3370 bool cross_scc = !ipa_edge_within_scc (cs);
3371 enum availability availability;
3372 cgraph_node *callee = cs->callee->function_symbol (&availability);
3373 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3374  gcc_checking_assert (from_ifs && from_ifs->m_parameters);
3375
3376 isra_call_summary *csum = call_sums->get (cs);
3377  gcc_checking_assert (csum);
3378 unsigned args_count = csum->m_arg_flow.length ();
3379 isra_func_summary *to_ifs = func_sums->get (callee);
3380 unsigned param_count
3381 = ((to_ifs && to_ifs->m_candidate && (availability >= AVAIL_AVAILABLE))
3382 ? vec_safe_length (to_ifs->m_parameters)
3383 : 0);
3384
3385 if (dump_file && (dump_flags & TDF_DETAILS))
3386 fprintf (dump_file, "Splitting across %s->%s:\n",
3387 cs->caller->dump_name (), callee->dump_name ());
3388
3389 unsigned i;
3390 for (i = 0; (i < args_count) && (i < param_count); i++)
3391 {
3392 isra_param_desc *arg_desc = &(*to_ifs->m_parameters)[i];
3393 isra_param_flow *ipf = &csum->m_arg_flow[i];
3394
3395 if (arg_desc->locally_unused)
3396 {
3397 if (dump_file && (dump_flags & TDF_DETAILS))
3398 fprintf (dump_file, " ->%u: unused in callee\n", i);
3399 ipf->pointer_pass_through = false;
3400 continue;
3401 }
3402
3403 if (ipf->pointer_pass_through)
3404 {
3405 int idx = get_single_param_flow_source (ipf);
3406 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3407 if (!param_desc->split_candidate)
3408 continue;
3409	  gcc_assert (param_desc->by_ref);
3410
3411 if (!arg_desc->split_candidate || !arg_desc->by_ref)
3412 {
3413 if (dump_file && (dump_flags & TDF_DETAILS))
3414 fprintf (dump_file, " %u->%u: not candidate or not by "
3415 "reference in callee\n", idx, i);
3416 param_desc->split_candidate = false;
3417 ipf->pointer_pass_through = false;
3418 res = true;
3419 }
3420 else if (!ipf->safe_to_import_accesses)
3421 {
3422 if (!all_callee_accesses_present_p (param_desc, arg_desc))
3423 {
3424 if (dump_file && (dump_flags & TDF_DETAILS))
3425 fprintf (dump_file, " %u->%u: cannot import accesses.\n",
3426 idx, i);
3427 param_desc->split_candidate = false;
3428 ipf->pointer_pass_through = false;
3429 res = true;
3430
3431 }
3432 else
3433 {
3434 if (dump_file && (dump_flags & TDF_DETAILS))
3435 fprintf (dump_file, " %u->%u: verified callee accesses "
3436 "present.\n", idx, i);
3437 if (cross_scc)
3438 ipf->pointer_pass_through = false;
3439 }
3440 }
3441 else
3442 {
3443 const char *pull_failure
3444 = pull_accesses_from_callee (cs->caller, param_desc, arg_desc,
3445 0, 0, &res);
3446 if (pull_failure)
3447 {
3448 if (dump_file && (dump_flags & TDF_DETAILS))
3449 fprintf (dump_file, " %u->%u: by_ref access pull "
3450 "failed: %s.\n", idx, i, pull_failure);
3451 param_desc->split_candidate = false;
3452 ipf->pointer_pass_through = false;
3453 res = true;
3454 }
3455 else
3456 {
3457 if (dump_file && (dump_flags & TDF_DETAILS))
3458 fprintf (dump_file, " %u->%u: by_ref access pull "
3459 "succeeded.\n", idx, i);
3460 if (cross_scc)
3461 ipf->pointer_pass_through = false;
3462 }
3463 }
3464 }
3465 else if (ipf->aggregate_pass_through)
3466 {
3467 int idx = get_single_param_flow_source (ipf);
3468 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3469 if (!param_desc->split_candidate)
3470 continue;
3471	  gcc_assert (!param_desc->by_ref);
3472 param_access *pacc = find_param_access (param_desc, ipf->unit_offset,
3473 ipf->unit_size);
3474	  gcc_checking_assert (pacc);
3475
3476 if (pacc->certain)
3477 {
3478 if (dump_file && (dump_flags & TDF_DETAILS))
3479 fprintf (dump_file, " %u->%u: already certain\n", idx, i);
3480 ipf->aggregate_pass_through = false;
3481 }
3482 else if (!arg_desc->split_candidate || arg_desc->by_ref)
3483 {
3484 if (dump_file && (dump_flags & TDF_DETAILS))
3485 fprintf (dump_file, " %u->%u: not candidate or by "
3486 "reference in callee\n", idx, i);
3487
3488 pacc->certain = true;
3489	      if (overlapping_certain_accesses_p (param_desc, NULL))
3490 {
3491 if (dump_file && (dump_flags & TDF_DETAILS))
3492 fprintf (dump_file, " ...leading to overlap, "
3493 " disqualifying candidate parameter %u\n",
3494 idx);
3495 param_desc->split_candidate = false;
3496 }
3497 else
3498 bump_reached_size (param_desc, pacc->unit_size, idx);
3499
3500 ipf->aggregate_pass_through = false;
3501 res = true;
3502 }
3503 else
3504 {
3505 const char *pull_failure
3506 = pull_accesses_from_callee (cs->caller, param_desc, arg_desc,
3507 ipf->unit_offset,
3508 ipf->unit_size, &res);
3509 if (pull_failure)
3510 {
3511 if (dump_file && (dump_flags & TDF_DETAILS))
3512 fprintf (dump_file, " %u->%u: arg access pull "
3513 "failed: %s.\n", idx, i, pull_failure);
3514
3515 ipf->aggregate_pass_through = false;
3516 pacc->certain = true;
3517
3518	      if (overlapping_certain_accesses_p (param_desc, NULL))
3519 {
3520 if (dump_file && (dump_flags & TDF_DETAILS))
3521 fprintf (dump_file, " ...leading to overlap, "
3522 " disqualifying candidate parameter %u\n",
3523 idx);
3524 param_desc->split_candidate = false;
3525 }
3526 else
3527 bump_reached_size (param_desc, pacc->unit_size, idx);
3528
3529 res = true;
3530 }
3531 else
3532 {
3533 if (dump_file && (dump_flags & TDF_DETAILS))
3534 fprintf (dump_file, " %u->%u: arg access pull "
3535 "succeeded.\n", idx, i);
3536 if (cross_scc)
3537 ipf->aggregate_pass_through = false;
3538 }
3539 }
3540 }
3541 }
3542
3543 /* Handle argument-parameter count mismatches. */
3544 for (; (i < args_count); i++)
3545 {
3546 isra_param_flow *ipf = &csum->m_arg_flow[i];
3547
3548 if (ipf->pointer_pass_through || ipf->aggregate_pass_through)
3549 {
3550 int idx = get_single_param_flow_source (ipf);
3551 ipf->pointer_pass_through = false;
3552 ipf->aggregate_pass_through = false;
3553 isra_param_desc *param_desc = &(*from_ifs->m_parameters)[idx];
3554 if (!param_desc->split_candidate)
3555 continue;
3556
3557 if (dump_file && (dump_flags & TDF_DETAILS))
3558 fprintf (dump_file, " %u->%u: no corresponding formal parameter\n",
3559 idx, i);
3560 param_desc->split_candidate = false;
3561 res = true;
3562 }
3563 }
3564 return res;
3565}
3566
3567/* Worker for call_for_symbol_and_aliases. Look at all callers; if every
3568   caller ignores the return value, or comes from the same SCC and uses the
3569   return value only to compute its own return value, return false. Otherwise
3570   return true. */
3571
3572static bool
3573retval_used_p (cgraph_node *node, void *)
3574{
3575 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
3576 {
3577 isra_call_summary *csum = call_sums->get (cs);
3578      gcc_checking_assert (csum);
3579 if (csum->m_return_ignored)
3580 continue;
3581 if (!csum->m_return_returned)
3582 return true;
3583
3584 isra_func_summary *from_ifs = func_sums->get (cs->caller);
3585 if (!from_ifs || !from_ifs->m_candidate)
3586 return true;
3587
3588 if (!ipa_edge_within_scc (cs)
3589 && !from_ifs->m_return_ignored)
3590 return true;
3591 }
3592
3593 return false;
3594}
3595
3596/* Push into NEW_PARAMS all required parameter adjustment entries to copy or
3597   modify the parameter which originally had index BASE_INDEX, had index
3598   PREV_CLONE_INDEX in the adjustment vector of the parent clone (if any), and
3599   was described by PREV_ADJUSTMENT. If the parent clone is the original
3600   function, PREV_ADJUSTMENT is NULL and PREV_CLONE_INDEX equals BASE_INDEX. */
3601
3602
3603static void
3604push_param_adjustments_for_index (isra_func_summary *ifs, unsigned base_index,
3605 unsigned prev_clone_index,
3606 ipa_adjusted_param *prev_adjustment,
3607 vec<ipa_adjusted_param, va_gc> **new_params)
3608{
3609 isra_param_desc *desc = &(*ifs->m_parameters)[base_index];
3610 if (desc->locally_unused)
3611 {
3612 if (dump_file)
3613 fprintf (dump_file, " Will remove parameter %u\n", base_index);
3614 return;
3615 }
3616
3617 if (!desc->split_candidate)
3618 {
3619 ipa_adjusted_param adj;
3620 if (prev_adjustment)
3621 {
3622 adj = *prev_adjustment;
3623 adj.prev_clone_adjustment = true;
3624 adj.prev_clone_index = prev_clone_index;
3625 }
3626 else
3627 {
3628 memset (&adj, 0, sizeof (adj));
3629 adj.op = IPA_PARAM_OP_COPY;
3630 adj.base_index = base_index;
3631 adj.prev_clone_index = prev_clone_index;
3632 }
3633 vec_safe_push ((*new_params), adj);
3634 return;
3635 }
3636
3637 if (dump_file)
3638 fprintf (dump_file, " Will split parameter %u\n", base_index);
3639
3640  gcc_assert (!prev_adjustment || prev_adjustment->op == IPA_PARAM_OP_COPY);
3641 unsigned aclen = vec_safe_length (desc->accesses);
3642 for (unsigned j = 0; j < aclen; j++)
3643 {
3644 param_access *pa = (*desc->accesses)[j];
3645 if (!pa->certain)
3646 continue;
3647 if (dump_file)
3648 fprintf (dump_file, " - component at byte offset %u, "
3649 "size %u\n", pa->unit_offset, pa->unit_size);
3650
3651 ipa_adjusted_param adj;
3652 memset (&adj, 0, sizeof (adj));
3653 adj.op = IPA_PARAM_OP_SPLIT;
3654 adj.base_index = base_index;
3655 adj.prev_clone_index = prev_clone_index;
3656 adj.param_prefix_index = IPA_PARAM_PREFIX_ISRA;
3657 adj.reverse = pa->reverse;
3658 adj.type = pa->type;
3659 adj.alias_ptr_type = pa->alias_ptr_type;
3660 adj.unit_offset = pa->unit_offset;
3661 vec_safe_push ((*new_params), adj);
3662 }
3663}
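
A split parameter thus contributes one IPA_PARAM_OP_SPLIT entry per certain
access. For instance, if the parameter at BASE_INDEX 0 has certain accesses
at unit offsets 0 and 8 (a hypothetical layout), NEW_PARAMS ends up with two
entries along the lines of:

    { .op = IPA_PARAM_OP_SPLIT, .base_index = 0, .unit_offset = 0, ... }
    { .op = IPA_PARAM_OP_SPLIT, .base_index = 0, .unit_offset = 8, ... }

each of which becomes a separate scalar formal parameter in the clone created
by process_isra_node_results below.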
3664
3665/* Worker for call_for_symbol_thunks_and_aliases. Set the calls_comdat_local
3666   flag of all callers of NODE. */
3667
3668static bool
3669mark_callers_calls_comdat_local (struct cgraph_node *node, void *)
3670{
3671 for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
3672 cs->caller->calls_comdat_local = true;
3673 return false;
3674}
3675
3676
3677/* Do final processing of results of IPA propagation regarding NODE, clone it
3678 if appropriate. */
3679
3680static void
3681process_isra_node_results (cgraph_node *node,
3682 hash_map<const char *, unsigned> *clone_num_suffixes)
3683{
3684 isra_func_summary *ifs = func_sums->get (node);
3685 if (!ifs || !ifs->m_candidate)
3686 return;
3687
3688 auto_vec<bool, 16> surviving_params;
3689 bool check_surviving = false;
3690 clone_info *cinfo = clone_info::get (node);
3691 if (cinfo && cinfo->param_adjustments)
3692 {
3693 check_surviving = true;
3694 cinfo->param_adjustments->get_surviving_params (&surviving_params);
3695 }
3696
3697 unsigned param_count = vec_safe_length (ifs->m_parameters);
3698 bool will_change_function = false;
3699 if (ifs->m_returns_value && ifs->m_return_ignored)
3700 will_change_function = true;
3701 else
3702 for (unsigned i = 0; i < param_count; i++)
3703 {
3704 isra_param_desc *desc = &(*ifs->m_parameters)[i];
3705 if ((desc->locally_unused || desc->split_candidate)
3706 /* Make sure we do not clone just to attempt to remove an already
3707 removed unused argument. */
3708 && (!check_surviving
3709 || (i < surviving_params.length ()
3710 && surviving_params[i])))
3711 {
3712 will_change_function = true;
3713 break;
3714 }
3715 }
3716 if (!will_change_function)
3717 return;
3718
3719 if (dump_file)
3720 {
3721 fprintf (dump_file, "\nEvaluating analysis results for %s\n",
3722 node->dump_name ());
3723 if (ifs->m_returns_value && ifs->m_return_ignored)
3724 fprintf (dump_file, " Will remove return value.\n");
3725 }
3726
3727  vec<ipa_adjusted_param, va_gc> *new_params = NULL;
3728 if (ipa_param_adjustments *old_adjustments
3729	= cinfo ? cinfo->param_adjustments : NULL)
3730 {
3731 unsigned old_adj_len = vec_safe_length (old_adjustments->m_adj_params);
3732 for (unsigned i = 0; i < old_adj_len; i++)
3733 {
3734 ipa_adjusted_param *old_adj = &(*old_adjustments->m_adj_params)[i];
3735 push_param_adjustments_for_index (ifs, old_adj->base_index, i,
3736 old_adj, &new_params);
3737 }
3738 }
3739 else
3740 for (unsigned i = 0; i < param_count; i++)
3741      push_param_adjustments_for_index (ifs, i, i, NULL, &new_params);
3742
3743 ipa_param_adjustments *new_adjustments
3744 = (new (ggc_alloc <ipa_param_adjustments> ())
3745 ipa_param_adjustments (new_params, param_count,
3746 ifs->m_returns_value && ifs->m_return_ignored));
3747
3748 if (dump_file && (dump_flags & TDF_DETAILS))
3749 {
3750 fprintf (dump_file, "\n Created adjustments:\n");
3751 new_adjustments->dump (dump_file);
3752 }
3753
3754  unsigned &suffix_counter = clone_num_suffixes->get_or_insert (
3755		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (
3756		   node->decl)));
3757 vec<cgraph_edge *> callers = node->collect_callers ();
3758 cgraph_node *new_node
3759 = node->create_virtual_clone (callers, NULLnullptr, new_adjustments, "isra",
3760 suffix_counter);
3761 suffix_counter++;
3762 if (node->calls_comdat_local && node->same_comdat_group)
3763 {
3764 new_node->add_to_same_comdat_group (node);
3765 new_node->call_for_symbol_and_aliases (mark_callers_calls_comdat_local,
3766					      NULL, true);
3767 }
3768 new_node->calls_comdat_local = node->calls_comdat_local;
3769
3770 if (dump_file)
3771 fprintf (dump_file, " Created new node %s\n", new_node->dump_name ());
3772 callers.release ();
3773}
3774
3775/* Check which parameters of NODE described by IFS have survived until IPA-SRA
3776   and disable transformations for those which have not, or which should not be
3777   transformed because the associated debug counter reached its limit. Return
3778 true if none survived or if there were no candidates to begin with. */
3779
3780static bool
3781disable_unavailable_parameters (cgraph_node *node, isra_func_summary *ifs)
3782{
3783 bool ret = true;
3784 unsigned len = vec_safe_length (ifs->m_parameters);
3785 if (!len)
3786 return true;
3787
3788 auto_vec<bool, 16> surviving_params;
3789 bool check_surviving = false;
3790 clone_info *cinfo = clone_info::get (node);
3791 if (cinfo && cinfo->param_adjustments)
3792 {
3793 check_surviving = true;
3794 cinfo->param_adjustments->get_surviving_params (&surviving_params);
3795 }
3796 bool dumped_first = false;
3797 for (unsigned i = 0; i < len; i++)
3798 {
3799 isra_param_desc *desc = &(*ifs->m_parameters)[i];
3800 if (!dbg_cnt (ipa_sra_params))
3801 {
3802 desc->locally_unused = false;
3803 desc->split_candidate = false;
3804 }
3805 else if (check_surviving
3806 && (i >= surviving_params.length ()
3807 || !surviving_params[i]))
3808 {
3809 /* Even if the parameter was removed by a previous IPA pass, we do
3810 not clear locally_unused because if it really is unused, this
3811 information might be useful in callers. */
3812 desc->split_candidate = false;
3813
3814 if (dump_file && (dump_flags & TDF_DETAILS))
3815 {
3816 if (!dumped_first)
3817 {
3818 fprintf (dump_file,
3819 "The following parameters of %s are dead on "
3820 "arrival:", node->dump_name ());
3821 dumped_first = true;
3822 }
3823 fprintf (dump_file, " %u", i);
3824 }
3825 }
3826 else if (desc->locally_unused || desc->split_candidate)
3827 ret = false;
3828 }
3829
3830 if (dumped_first)
3831 fprintf (dump_file, "\n");
3832
3833 return ret;
3834}
3835
3836
3837/* Run the interprocedural part of IPA-SRA. */
3838
3839static unsigned int
3840ipa_sra_analysis (void)
3841{
3842 if (dump_file)
3843 {
3844 fprintf (dump_file, "\n========== IPA-SRA IPA stage ==========\n");
3845 ipa_sra_dump_all_summaries (dump_file);
3846 }
3847
3848  gcc_checking_assert (func_sums);
3849  gcc_checking_assert (call_sums);
3850  cgraph_node **order = XCNEWVEC (cgraph_node *, symtab->cgraph_count);
3851 auto_vec <cgraph_node *, 16> stack;
3852  int node_scc_count = ipa_reduced_postorder (order, true, NULL);
3853
3854 /* One sweep from callees to callers for parameter removal and splitting. */
3855 for (int i = 0; i < node_scc_count; i++)
3856 {
3857 cgraph_node *scc_rep = order[i];
3858 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (scc_rep);
3859 unsigned j;
3860
3861 /* Preliminary IPA function level checks and first step of parameter
3862 removal. */
3863 cgraph_node *v;
3864      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3865 {
3866 isra_func_summary *ifs = func_sums->get (v);
3867 if (!ifs || !ifs->m_candidate)
3868 continue;
3869 if (!ipa_sra_ipa_function_checks (v)
3870 || check_all_callers_for_issues (v))
3871 {
3872 ifs->zap ();
3873 continue;
3874 }
3875 if (disable_unavailable_parameters (v, ifs))
3876 continue;
3877 for (cgraph_edge *cs = v->indirect_calls; cs; cs = cs->next_callee)
3878 process_edge_to_unknown_caller (cs);
3879 for (cgraph_edge *cs = v->callees; cs; cs = cs->next_callee)
3880 if (!ipa_edge_within_scc (cs))
3881 param_removal_cross_scc_edge (cs);
3882 }
3883
3884 /* Look at edges within the current SCC and propagate used-ness across
3885	 them, pushing onto the stack all nodes which might need to be
3886 revisited. */
3887      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3888 v->call_for_symbol_thunks_and_aliases (propagate_used_to_scc_callers,
3889 &stack, true);
3890
3891 /* Keep revisiting and pushing until nothing changes. */
3892 while (!stack.is_empty ())
3893 {
3894 cgraph_node *v = stack.pop ();
3895 isra_func_summary *ifs = func_sums->get (v);
3896	  gcc_checking_assert (ifs && ifs->m_queued);
3897 ifs->m_queued = false;
3898
3899 v->call_for_symbol_thunks_and_aliases (propagate_used_to_scc_callers,
3900 &stack, true);
3901 }
3902
3903 /* Parameter splitting. */
3904 bool repeat_scc_access_propagation;
3905 do
3906 {
3907 repeat_scc_access_propagation = false;
3908	  FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3909 {
3910 isra_func_summary *ifs = func_sums->get (v);
3911 if (!ifs
3912 || !ifs->m_candidate
3913 || vec_safe_is_empty (ifs->m_parameters))
3914 continue;
3915 for (cgraph_edge *cs = v->callees; cs; cs = cs->next_callee)
3916 if (param_splitting_across_edge (cs))
3917 repeat_scc_access_propagation = true;
3918 }
3919 }
3920 while (repeat_scc_access_propagation);
3921
3922      if (flag_checking)
3923	FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3924 verify_splitting_accesses (v, true);
3925
3926 cycle_nodes.release ();
3927 }
3928
3929 /* One sweep from caller to callees for result removal. */
3930 for (int i = node_scc_count - 1; i >= 0 ; i--)
3931 {
3932 cgraph_node *scc_rep = order[i];
3933 vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (scc_rep);
3934 unsigned j;
3935
3936 cgraph_node *v;
3937      FOR_EACH_VEC_ELT (cycle_nodes, j, v)
3938 {
3939 isra_func_summary *ifs = func_sums->get (v);
3940 if (!ifs || !ifs->m_candidate)
3941 continue;
3942
3943 bool return_needed
3944 = (ifs->m_returns_value
3945 && (!dbg_cnt (ipa_sra_retvalues)
3946 || v->call_for_symbol_and_aliases (retval_used_p,
3947						       NULL, true)));
3948 ifs->m_return_ignored = !return_needed;
3949 if (return_needed)
3950 isra_push_node_to_stack (v, ifs, &stack);
3951 }
3952
3953 while (!stack.is_empty ())
3954 {
3955 cgraph_node *node = stack.pop ();
3956 isra_func_summary *ifs = func_sums->get (node);
3957	  gcc_checking_assert (ifs && ifs->m_queued);
3958 ifs->m_queued = false;
3959
3960 for (cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3961 if (ipa_edge_within_scc (cs)
3962 && call_sums->get (cs)->m_return_returned)
3963 {
3964 enum availability av;
3965 cgraph_node *callee = cs->callee->function_symbol (&av);
3966 isra_func_summary *to_ifs = func_sums->get (callee);
3967 if (to_ifs && to_ifs->m_return_ignored)
3968 {
3969 to_ifs->m_return_ignored = false;
3970 isra_push_node_to_stack (callee, to_ifs, &stack);
3971 }
3972 }
3973 }
3974 cycle_nodes.release ();
3975 }
3976
3977 ipa_free_postorder_info ();
3978 free (order);
3979
3980 if (dump_file)
3981 {
3982 if (dump_flags & TDF_DETAILS)
3983 {
3984 fprintf (dump_file, "\n========== IPA-SRA propagation final state "
3985 " ==========\n");
3986 ipa_sra_dump_all_summaries (dump_file);
3987 }
3988 fprintf (dump_file, "\n========== IPA-SRA decisions ==========\n");
3989 }
3990
3991 hash_map<const char *, unsigned> *clone_num_suffixes
3992 = new hash_map<const char *, unsigned>;
3993
3994 cgraph_node *node;
3995  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
3996 process_isra_node_results (node, clone_num_suffixes);
3997
3998 delete clone_num_suffixes;
3999 ggc_delete (func_sums);
4000  func_sums = NULL;
4001 delete call_sums;
4002  call_sums = NULL;
4003
4004 if (dump_file)
4005 fprintf (dump_file, "\n========== IPA SRA IPA analysis done "
4006 "==========\n\n");
4007 return 0;
4008}
4009
4010
4011const pass_data pass_data_ipa_sra =
4012{
4013 IPA_PASS, /* type */
4014 "sra", /* name */
4015 OPTGROUP_NONE, /* optinfo_flags */
4016 TV_IPA_SRA, /* tv_id */
4017 0, /* properties_required */
4018 0, /* properties_provided */
4019 0, /* properties_destroyed */
4020 0, /* todo_flags_start */
4021 ( TODO_dump_symtab | TODO_remove_functions ), /* todo_flags_finish */
4022};
4023
4024class pass_ipa_sra : public ipa_opt_pass_d
4025{
4026public:
4027 pass_ipa_sra (gcc::context *ctxt)
4028 : ipa_opt_pass_d (pass_data_ipa_sra, ctxt,
4029 ipa_sra_generate_summary, /* generate_summary */
4030 ipa_sra_write_summary, /* write_summary */
4031 ipa_sra_read_summary, /* read_summary */
4032		      NULL, /* write_optimization_summary */
4033		      NULL, /* read_optimization_summary */
4034		      NULL, /* stmt_fixup */
4035		      0, /* function_transform_todo_flags_start */
4036		      NULL, /* function_transform */
4037		      NULL) /* variable_transform */
4038 {}
4039
4040 /* opt_pass methods: */
4041 virtual bool gate (function *)
4042 {
4043 /* TODO: We should remove the optimize check after we ensure we never run
4044 IPA passes when not optimizing. */
4045    return (flag_ipa_sra && optimize);
4046 }
4047
4048 virtual unsigned int execute (function *) { return ipa_sra_analysis (); }
4049
4050}; // class pass_ipa_sra
4051
4052} // anon namespace
4053
4054/* Intraprocedural part of IPA-SRA analysis. Scan function body of NODE and
4055 create a summary structure describing IPA-SRA opportunities and constraints
4056 in it. */
4057
4058static void
4059ipa_sra_summarize_function (cgraph_node *node)
4060{
4061 if (dump_file)
4062 fprintf (dump_file, "Creating summary for %s/%i:\n", node->name (),
4063 node->order);
4064 if (!ipa_sra_preliminary_function_checks (node))
4065 return;
4066  gcc_obstack_init (&gensum_obstack);
4067 isra_func_summary *ifs = func_sums->get_create (node);
4068 ifs->m_candidate = true;
4069  tree ret = TREE_TYPE (TREE_TYPE (node->decl));
4070  ifs->m_returns_value = (TREE_CODE (ret) != VOID_TYPE);
4071
4072 decl2desc = new hash_map<tree, gensum_param_desc *>;
4073 unsigned count = 0;
4074  for (tree parm = DECL_ARGUMENTS (node->decl); parm; parm = DECL_CHAIN (parm))
4076
4077 if (count > 0)
4078 {
4079 auto_vec<gensum_param_desc, 16> param_descriptions (count);
4080 param_descriptions.reserve_exact (count);
4081 param_descriptions.quick_grow_cleared (count);
4082
4083 bool cfun_pushed = false;
4084      struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
4085 if (create_parameter_descriptors (node, &param_descriptions))
4086 {
4087 push_cfun (fun);
4088 cfun_pushed = true;
4089	  final_bbs = BITMAP_ALLOC (NULL);
4090	  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4091				      by_ref_count
4092				      * last_basic_block_for_fn (fun));
4093	  aa_walking_limit = opt_for_fn (node->decl, param_ipa_max_aa_steps);
4094 scan_function (node, fun);
4095
4096 if (dump_file)
4097 {
4098 dump_gensum_param_descriptors (dump_file, node->decl,
4099 &param_descriptions);
4100 fprintf (dump_file, "----------------------------------------\n");
4101 }
4102 }
4103 process_scan_results (node, fun, ifs, &param_descriptions);
4104
4105 if (cfun_pushed)
4106 pop_cfun ();
4107 if (bb_dereferences)
4108 {
4109 free (bb_dereferences);
4110	  bb_dereferences = NULL;
4111	  BITMAP_FREE (final_bbs);
4112	  final_bbs = NULL;
4113 }
4114 }
4115 isra_analyze_all_outgoing_calls (node);
4116
4117 delete decl2desc;
4118  decl2desc = NULL;
4119  obstack_free (&gensum_obstack, NULL);
4120 if (dump_file)
4121 fprintf (dump_file, "\n\n");
4122  if (flag_checking)
4123 verify_splitting_accesses (node, false);
4124 return;
4125}
4126
4127ipa_opt_pass_d *
4128make_pass_ipa_sra (gcc::context *ctxt)
4129{
4130 return new pass_ipa_sra (ctxt);
4131}
4132
4133
4134#include "gt-ipa-sra.h"

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/vec.h

1/* Vector API for GNU compiler.
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 Contributed by Nathan Sidwell <nathan@codesourcery.com>
4 Re-implemented in C++ by Diego Novillo <dnovillo@google.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#ifndef GCC_VEC_H
23#define GCC_VEC_H
24
25/* Some gen* files have no ggc support as the header file gtype-desc.h is
26 missing. Provide these definitions in case ggc.h has not been included.
27 This is not a problem because any code that runs before gengtype is built
28 will never need to use GC vectors. */
29
30extern void ggc_free (void *);
31extern size_t ggc_round_alloc_size (size_t requested_size);
32extern void *ggc_realloc (void *, size_t MEM_STAT_DECL);
33
34/* Templated vector type and associated interfaces.
35
36 The interface functions are typesafe and use inline functions,
37 sometimes backed by out-of-line generic functions. The vectors are
38 designed to interoperate with the GTY machinery.
39
40 There are both 'index' and 'iterate' accessors. The index accessor
41 is implemented by operator[]. The iterator returns a boolean
42 iteration condition and updates the iteration variable passed by
43 reference. Because the iterator will be inlined, the address-of
44 can be optimized away.
45
46 Each operation that increases the number of active elements is
47 available in 'quick' and 'safe' variants. The former presumes that
48 there is sufficient allocated space for the operation to succeed
49 (it dies if there is not). The latter will reallocate the
50 vector, if needed. Reallocation causes an exponential increase in
51 vector size. If you know you will be adding N elements, it would
52 be more efficient to use the reserve operation before adding the
53 elements with the 'quick' operation. This will ensure there are at
54 least as many slots as you ask for; the allocation will grow
55 exponentially if there are too few spare slots. If you want to reserve a
56 specific number of slots, but do not want the exponential increase
57 (for instance, you know this is the last allocation), use the
58 reserve_exact operation. You can also create a vector of a
59 specific size from the get go.
60
61 You should prefer the push and pop operations, as they append and
62 remove from the end of the vector. If you need to remove several
63 items in one go, use the truncate operation. The insert and remove
64 operations allow you to change elements in the middle of the
65 vector. There are two remove operations, one which preserves the
66 element ordering 'ordered_remove', and one which does not
67 'unordered_remove'. The latter function copies the end element
68 into the removed slot, rather than invoking a memmove operation. The
69 'lower_bound' function will determine where an item should be inserted
70 so that the array's sorted order is maintained.
71
72 Vectors are template types with three arguments: the type of the
73 elements in the vector, the allocation strategy, and the physical
74 layout to use.
75
76 Three allocation strategies are supported:
77
78 - Heap: allocation is done using malloc/free. This is the
79 default allocation strategy.
80
81 - GC: allocation is done using ggc_alloc/ggc_free.
82
83 - GC atomic: same as GC with the exception that the elements
84 themselves are assumed to be of an atomic type that does
85 not need to be garbage collected. This means that marking
86 routines do not need to traverse the array marking the
87 individual elements. This increases the performance of
88 GC activities.
89
90 Two physical layouts are supported:
91
92 - Embedded: The vector is structured using the trailing array
93 idiom. The last member of the structure is an array of size
94 1. When the vector is initially allocated, a single memory
95 block is created to hold the vector's control data and the
96 array of elements. These vectors cannot grow without
97 reallocation (see discussion on embeddable vectors below).
98
99 - Space efficient: The vector is structured as a pointer to an
100 embedded vector. This is the default layout. It means that
101 vectors occupy a single word of storage before initial
102 allocation. Vectors are allowed to grow (the internal
103 pointer is reallocated but the main vector instance does not
104 need to relocate).
105
106 The type, allocation and layout are specified when the vector is
107 declared.
108
109 If you need to directly manipulate a vector, then the 'address'
110 accessor will return the address of the start of the vector. Also
111 the 'space' predicate will tell you whether there is spare capacity
112 in the vector. You will not normally need to use these two functions.
113
114 Notes on the different layout strategies
115
116 * Embeddable vectors (vec<T, A, vl_embed>)
117
118 These vectors are suitable to be embedded in other data
119 structures so that they can be pre-allocated in a contiguous
120 memory block.
121
122 Embeddable vectors are implemented using the trailing array
123 idiom, thus they are not resizeable without changing the address
124 of the vector object itself. This means you cannot have
125 variables or fields of embeddable vector type -- always use a
126 pointer to a vector. The one exception is the final field of a
127 structure, which could be a vector type.
128
129 You will have to use the embedded_size & embedded_init calls to
130 create such objects, and they will not be resizeable (so the
131 'safe' allocation variants are not available).
132
133 Properties of embeddable vectors:
134
135 - The whole vector and control data are allocated in a single
136 contiguous block. It uses the trailing-vector idiom, so
137 allocation must reserve enough space for all the elements
138 in the vector plus its control data.
139 - The vector cannot be re-allocated.
140 - The vector cannot grow nor shrink.
141 - No indirections needed for access/manipulation.
142 - It requires 2 words of storage (prior to vector allocation).
143
144
145 * Space efficient vector (vec<T, A, vl_ptr>)
146
147 These vectors can grow dynamically and are allocated together
148 with their control data. They are suited to be included in data
149 structures. Prior to initial allocation, they only take a single
150 word of storage.
151
152 These vectors are implemented as a pointer to embeddable vectors.
153 The semantics allow for this pointer to be NULL to represent
154 empty vectors. This way, empty vectors occupy minimal space in
155 the structure containing them.
156
157 Properties:
158
159 - The whole vector and control data are allocated in a single
160 contiguous block.
161 - The whole vector may be re-allocated.
162 - Vector data may grow and shrink.
163 - Access and manipulation requires a pointer test and
164 indirection.
165 - It requires 1 word of storage (prior to vector allocation).
166
167 An example of their use would be,
168
169 struct my_struct {
170 // A space-efficient vector of tree pointers in GC memory.
171 vec<tree, va_gc, vl_ptr> v;
172 };
173
174 struct my_struct *s;
175
176 if (s->v.length ()) { we have some contents }
177 s->v.safe_push (decl); // append some decl onto the end
178 for (ix = 0; s->v.iterate (ix, &elt); ix++)
179 { do something with elt }
180*/
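
To make the 'reserve then quick' idiom described above concrete, here is a
minimal sketch of caller code (hypothetical function and variable names,
assuming the default heap-allocated vec<T>):

  /* Append N known elements with at most one reallocation.  */
  void
  append_values (vec<int> &out, const int *src, unsigned n)
  {
    out.reserve (n);               /* Grow once; may over-allocate.  */
    for (unsigned i = 0; i < n; i++)
      out.quick_push (src[i]);     /* No per-push capacity check.  */
  }

Calling safe_push per element would also work, at the cost of a capacity
check, and potentially a reallocation, on every push.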
181
182/* Support function for statistics. */
183extern void dump_vec_loc_statistics (void);
184
185/* Hashtable mapping vec addresses to descriptors. */
186extern htab_t vec_mem_usage_hash;
187
188/* Control data for vectors. This contains the number of allocated
189 and used slots inside a vector. */
190
191struct vec_prefix
192{
193 /* FIXME - These fields should be private, but we need to cater to
194 compilers that have stricter notions of PODness for types. */
195
196 /* Memory allocation support routines in vec.c. */
197 void register_overhead (void *, size_t, size_t CXX_MEM_STAT_INFO);
198 void release_overhead (void *, size_t, size_t, bool CXX_MEM_STAT_INFO);
199 static unsigned calculate_allocation (vec_prefix *, unsigned, bool);
200 static unsigned calculate_allocation_1 (unsigned, unsigned);
201
202 /* Note that vec_prefix should be a base class for vec, but we use
203 offsetof() on vector fields of tree structures (e.g.,
204 tree_binfo::base_binfos), and offsetof only supports base types.
205
206 To compensate, we make vec_prefix a field inside vec and make
207 vec a friend class of vec_prefix so it can access its fields. */
208 template <typename, typename, typename> friend struct vec;
209
210 /* The allocator types also need access to our internals. */
211 friend struct va_gc;
212 friend struct va_gc_atomic;
213 friend struct va_heap;
214
215 unsigned m_alloc : 31;
216 unsigned m_using_auto_storage : 1;
217 unsigned m_num;
218};
219
220/* Calculate the number of slots to allocate for a vector, making sure
221 that RESERVE slots are free. If EXACT, grow exactly; otherwise grow
222 exponentially. PFX is the control data for the vector. */
223
224inline unsigned
225vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve,
226 bool exact)
227{
228 if (exact)
229 return (pfx ? pfx->m_num : 0) + reserve;
230 else if (!pfx)
231 return MAX (4, reserve);
232 return calculate_allocation_1 (pfx->m_alloc, pfx->m_num + reserve);
233}
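
For example, following the code above: with a null PFX, a non-exact request
for 3 slots returns MAX (4, 3) == 4, while an exact request returns
0 + 3 == 3; with PFX->m_num == 10, an exact request for 5 more slots returns
exactly 15. Only the non-exact case with a live prefix defers to
calculate_allocation_1 for exponential growth.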
234
235template<typename, typename, typename> struct vec;
236
237/* Valid vector layouts
238
239 vl_embed - Embeddable vector that uses the trailing array idiom.
240 vl_ptr - Space efficient vector that uses a pointer to an
241 embeddable vector. */
242struct vl_embed { };
243struct vl_ptr { };
244
245
246/* Types of supported allocations
247
248 va_heap - Allocation uses malloc/free.
249 va_gc - Allocation uses ggc_alloc.
250 va_gc_atomic - Same as GC, but individual elements of the array
251 do not need to be marked during collection. */
252
253/* Allocator type for heap vectors. */
254struct va_heap
255{
256 /* Heap vectors are frequently regular instances, so use the vl_ptr
257 layout for them. */
258 typedef vl_ptr default_layout;
259
260 template<typename T>
261 static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool
262 CXX_MEM_STAT_INFO);
263
264 template<typename T>
265 static void release (vec<T, va_heap, vl_embed> *&);
266};
267
268
269/* Allocator for heap memory. Ensure there are at least RESERVE free
270 slots in V. If EXACT is true, grow exactly, else grow
271 exponentially. As a special case, if the vector had not been
272 allocated and RESERVE is 0, no vector will be created. */
273
274template<typename T>
275inline void
276va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact
277 MEM_STAT_DECL)
278{
279 size_t elt_size = sizeof (T);
280 unsigned alloc
281 = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
282 gcc_checking_assert (alloc);
283
284 if (GATHER_STATISTICS && v)
285 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
286 v->allocated (), false);
287
288 size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc);
289 unsigned nelem = v ? v->length () : 0;
290 v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size));
291 v->embedded_init (alloc, nelem);
292
293 if (GATHER_STATISTICS)
294 v->m_vecpfx.register_overhead (v, alloc, elt_size PASS_MEM_STAT);
295}
296
297
298#if GCC_VERSION >= 4007
299#pragma GCC diagnostic push
300#pragma GCC diagnostic ignored "-Wfree-nonheap-object"
301#endif
302
303/* Free the heap space allocated for vector V. */
304
305template<typename T>
306void
307va_heap::release (vec<T, va_heap, vl_embed> *&v)
308{
309 size_t elt_size = sizeof (T);
310 if (v == NULL)
311 return;
312
313 if (GATHER_STATISTICS)
314 v->m_vecpfx.release_overhead (v, elt_size * v->allocated (),
315 v->allocated (), true);
316 ::free (v);
317 v = NULL;
318}
319
320#if GCC_VERSION >= 4007
321#pragma GCC diagnostic pop
322#endif
323
324/* Allocator type for GC vectors. Notice that we need the structure
325 declaration even if GC is not enabled. */
326
327struct va_gc
328{
329 /* Use vl_embed as the default layout for GC vectors. Due to GTY
330 limitations, GC vectors must always be pointers, so it is more
331 efficient to use a pointer to the vl_embed layout, rather than
332 using a pointer to a pointer as would be the case with vl_ptr. */
333 typedef vl_embed default_layout;
334
335 template<typename T, typename A>
336 static void reserve (vec<T, A, vl_embed> *&, unsigned, bool
337 CXX_MEM_STAT_INFO);
338
339 template<typename T, typename A>
340 static void release (vec<T, A, vl_embed> *&v);
341};
342
343
344/* Free GC memory used by V and reset V to NULL. */
345
346template<typename T, typename A>
347inline void
348va_gc::release (vec<T, A, vl_embed> *&v)
349{
350 if (v)
351 ::ggc_free (v);
352 v = NULL;
353}
354
355
356/* Allocator for GC memory. Ensure there are at least RESERVE free
357 slots in V. If EXACT is true, grow exactly, else grow
358 exponentially. As a special case, if the vector had not been
359 allocated and RESERVE is 0, no vector will be created. */
360
361template<typename T, typename A>
362void
363va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact
364 MEM_STAT_DECL)
365{
366 unsigned alloc
367 = vec_prefix::calculate_allocation (v ? &v->m_vecpfx : 0, reserve, exact);
    [24.1] 'v' is null
    [25] '?' condition is false
368 if (!alloc)
    [26] Assuming 'alloc' is 0
    [27] Taking true branch
369 {
370 ::ggc_free (v);
371 v = NULL;
    [28] Null pointer value stored to field 'accesses'
372 return;
373 }
374
375 /* Calculate the amount of space we want. */
376 size_t size = vec<T, A, vl_embed>::embedded_size (alloc);
377
378 /* Ask the allocator how much space it will really give us. */
379 size = ::ggc_round_alloc_size (size);
380
381 /* Adjust the number of slots accordingly. */
382 size_t vec_offset = sizeof (vec_prefix);
383 size_t elt_size = sizeof (T);
384 alloc = (size - vec_offset) / elt_size;
385
386 /* And finally, recalculate the amount of space we ask for. */
387 size = vec_offset + alloc * elt_size;
388
389 unsigned nelem = v ? v->length () : 0;
390 v = static_cast <vec<T, A, vl_embed> *> (::ggc_realloc (v, size
391 PASS_MEM_STAT));
392 v->embedded_init (alloc, nelem);
393}
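
Note the special case at lines 368-373: when the computed allocation is zero,
the vector is freed and V is set to NULL, instead of asserting as
va_heap::reserve does at line 282. A null GC vector can arise legitimately,
as documented for vec_alloc at lines 711-712; a minimal sketch (hypothetical
element type):

  vec<int, va_gc> *v = NULL;
  vec_alloc (v, 0);       /* RESERVE == 0: no vector is created.  */
  gcc_assert (v == NULL);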
394
395
396/* Allocator type for GC vectors. This is for vectors of types that
397 are atomic w.r.t. collection, so allocation and deallocation are
398 completely inherited from va_gc. */
399struct va_gc_atomic : va_gc
400{
401};
402
403
404/* Generic vector template. Default values for A and L indicate the
405 most commonly used strategies.
406
407 FIXME - Ideally, they would all be vl_ptr to encourage using regular
408 instances for vectors, but the existing GTY machinery is limited
409 in that it can only deal with GC objects that are pointers
410 themselves.
411
412 This means that vector operations that need to deal with
413 potentially NULL pointers, must be provided as free
414 functions (see the vec_safe_* functions below). */
415template<typename T,
416 typename A = va_heap,
417 typename L = typename A::default_layout>
418struct GTY((user)) vec
419{
420};
421
422/* Allow C++11 range-based 'for' to work directly on vec<T>*. */
423template<typename T, typename A, typename L>
424T* begin (vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
425template<typename T, typename A, typename L>
426T* end (vec<T,A,L> *v) { return v ? v->end () : nullptr; }
427template<typename T, typename A, typename L>
428const T* begin (const vec<T,A,L> *v) { return v ? v->begin () : nullptr; }
429template<typename T, typename A, typename L>
430const T* end (const vec<T,A,L> *v) { return v ? v->end () : nullptr; }
431
432/* Generic vec<> debug helpers.
433
434 These need to be instantiated for each vec<TYPE> used throughout
435 the compiler like this:
436
437 DEFINE_DEBUG_VEC (TYPE)
438
439 The reason we have a debug_helper() is because GDB can't
440 disambiguate a plain call to debug(some_vec), and it must be called
441 like debug<TYPE>(some_vec). */
442
443template<typename T>
444void
445debug_helper (vec<T> &ref)
446{
447 unsigned i;
448 for (i = 0; i < ref.length (); ++i)
449 {
450 fprintf (stderr, "[%d] = ", i);
451 debug_slim (ref[i]);
452 fputc ('\n', stderr);
453 }
454}
455
456/* We need a separate va_gc variant here because default template
457 arguments for functions cannot be used in C++98. Once this
458 restriction is removed, that variant should be folded into the
459 debug_helper above. */
460
461template<typename T>
462void
463debug_helper (vec<T, va_gc> &ref)
464{
465 unsigned i;
466 for (i = 0; i < ref.length (); ++i)
467 {
468 fprintf (stderr, "[%d] = ", i);
469 debug_slim (ref[i]);
470 fputc ('\n', stderr);
471 }
472}
473
474/* Macro to define debug(vec<T>) and debug(vec<T, va_gc>) helper
475 functions for a type T. */
476
477#define DEFINE_DEBUG_VEC(T) \
478 template void debug_helper (vec<T> &); \
479 template void debug_helper (vec<T, va_gc> &); \
480 /* Define the vec<T> debug functions. */ \
481 DEBUG_FUNCTION void \
482 debug (vec<T> &ref) \
483 { \
484 debug_helper <T> (ref); \
485 } \
486 DEBUG_FUNCTION void \
487 debug (vec<T> *ptr) \
488 { \
489 if (ptr) \
490 debug (*ptr); \
491 else \
492 fprintf (stderr, "<nil>\n"); \
493 } \
494 /* Define the vec<T, va_gc> debug functions. */ \
495 DEBUG_FUNCTION void \
496 debug (vec<T, va_gc> &ref) \
497 { \
498 debug_helper <T> (ref); \
499 } \
500 DEBUG_FUNCTION void \
501 debug (vec<T, va_gc> *ptr) \
502 { \
503 if (ptr) \
504 debug (*ptr); \
505 else \
506 fprintf (stderr, "<nil>\n"); \
507 }
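
As a usage sketch (hypothetical instantiation), a translation unit can
provide debuggable vectors of tree with:

  DEFINE_DEBUG_VEC (tree)

after which 'call debug (some_tree_vec)' from GDB resolves to the right
overload for both vec<tree> and vec<tree, va_gc> values and pointers.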
508
509/* Default-construct N elements in DST. */
510
511template <typename T>
512inline void
513vec_default_construct (T *dst, unsigned n)
514{
515#ifdef BROKEN_VALUE_INITIALIZATION
516 /* Versions of GCC before 4.4 sometimes leave certain objects
517 uninitialized when value initialized, though if the type has a
518 user-defined default ctor, that ctor is invoked. As a workaround,
519 perform clearing first and then the value initialization, which
520 fixes the case when value initialization doesn't initialize due to
521 the bugs but should initialize to all zeros, while still allowing
522 vectors of types with a user-defined default ctor that initializes
523 some or all elements to non-zero. If T has no user-defined
524 default ctor and some non-static data members have user-defined
525 default ctors that initialize to non-zero, the workaround will
526 still not work properly; in that case we just need to provide a
527 user-defined default ctor. */
528 memset (dst, '\0', sizeof (T) * n);
529#endif
530 for ( ; n; ++dst, --n)
531 ::new (static_cast<void*>(dst)) T ();
532}
533
534/* Copy-construct N elements in DST from *SRC. */
535
536template <typename T>
537inline void
538vec_copy_construct (T *dst, const T *src, unsigned n)
539{
540 for ( ; n; ++dst, ++src, --n)
541 ::new (static_cast<void*>(dst)) T (*src);
542}
543
544/* Type to provide NULL values for vec<T, A, L>. This is used to
545 provide nil initializers for vec instances. Since vec must be
546 a POD, we cannot have proper ctor/dtor for it. To initialize
547 a vec instance, you can assign it the value vNULL. This isn't
548 needed for file-scope and function-local static vectors, which
549 are zero-initialized by default. */
550struct vnull
551{
552 template <typename T, typename A, typename L>
553 CONSTEXPR operator vec<T, A, L> () const { return vec<T, A, L>(); }
554};
555extern vnull vNULL;
556
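A minimal sketch of the intended use (hypothetical structure and function
names):

  struct pass_state
  {
    vec<tree> worklist;     /* POD member: no constructor may run.  */
  };

  void
  init_state (pass_state *s)
  {
    s->worklist = vNULL;    /* Explicit nil initialization.  */
  }
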
557
558/* Embeddable vector. These vectors are suitable to be embedded
559 in other data structures so that they can be pre-allocated in a
560 contiguous memory block.
561
562 Embeddable vectors are implemented using the trailing array idiom,
563 thus they are not resizeable without changing the address of the
564 vector object itself. This means you cannot have variables or
565 fields of embeddable vector type -- always use a pointer to a
566 vector. The one exception is the final field of a structure, which
567 could be a vector type.
568
569 You will have to use the embedded_size & embedded_init calls to
570 create such objects, and they will not be resizeable (so the 'safe'
571 allocation variants are not available).
572
573 Properties:
574
575 - The whole vector and control data are allocated in a single
576 contiguous block. It uses the trailing-vector idiom, so
577 allocation must reserve enough space for all the elements
578 in the vector plus its control data.
579 - The vector cannot be re-allocated.
580 - The vector cannot grow nor shrink.
581 - No indirections needed for access/manipulation.
582 - It requires 2 words of storage (prior to vector allocation). */
583
584template<typename T, typename A>
585struct GTY((user)) vec<T, A, vl_embed>
586{
587public:
588 unsigned allocated (void) const { return m_vecpfx.m_alloc; }
589 unsigned length (void) const { return m_vecpfx.m_num; }
590 bool is_empty (void) const { return m_vecpfx.m_num == 0; }
591 T *address (void) { return m_vecdata; }
592 const T *address (void) const { return m_vecdata; }
593 T *begin () { return address (); }
594 const T *begin () const { return address (); }
595 T *end () { return address () + length (); }
596 const T *end () const { return address () + length (); }
597 const T &operator[] (unsigned) const;
598 T &operator[] (unsigned);
599 T &last (void);
600 bool space (unsigned) const;
601 bool iterate (unsigned, T *) const;
602 bool iterate (unsigned, T **) const;
603 vec *copy (ALONE_CXX_MEM_STAT_INFO) const;
604 void splice (const vec &);
605 void splice (const vec *src);
606 T *quick_push (const T &);
607 T &pop (void);
608 void truncate (unsigned);
609 void quick_insert (unsigned, const T &);
610 void ordered_remove (unsigned);
611 void unordered_remove (unsigned);
612 void block_remove (unsigned, unsigned);
613 void qsort (int (*) (const void *, const void *));
614 void sort (int (*) (const void *, const void *, void *), void *);
615 T *bsearch (const void *key, int (*compar)(const void *, const void *));
616 T *bsearch (const void *key,
617 int (*compar)(const void *, const void *, void *), void *);
618 unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
619 bool contains (const T &search) const;
620 static size_t embedded_size (unsigned);
621 void embedded_init (unsigned, unsigned = 0, unsigned = 0);
622 void quick_grow (unsigned len);
623 void quick_grow_cleared (unsigned len);
624
625 /* vec class can access our internal data and functions. */
626 template <typename, typename, typename> friend struct vec;
627
628 /* The allocator types also need access to our internals. */
629 friend struct va_gc;
630 friend struct va_gc_atomic;
631 friend struct va_heap;
632
633 /* FIXME - These fields should be private, but we need to cater to
634 compilers that have stricter notions of PODness for types. */
635 vec_prefix m_vecpfx;
636 T m_vecdata[1];
637};
638
639
640/* Convenience wrapper functions to use when dealing with pointers to
641 embedded vectors. Some functionality for these vectors must be
642 provided via free functions for these reasons:
643
644 1- The pointer may be NULL (e.g., before initial allocation).
645
646 2- When the vector needs to grow, it must be reallocated, so
647 the pointer will change its value.
648
649 Because of limitations with the current GC machinery, all vectors
650 in GC memory *must* be pointers. */
651
652
653/* If V contains no room for NELEMS elements, return false. Otherwise,
654 return true. */
655template<typename T, typename A>
656inline bool
657vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems)
658{
659 return v ? v->space (nelems) : nelems == 0;
660}
661
662
663/* If V is NULL, return 0. Otherwise, return V->length(). */
664template<typename T, typename A>
665inline unsigned
666vec_safe_length (const vec<T, A, vl_embed> *v)
667{
668 return v ? v->length () : 0;
669}
670
671
672/* If V is NULL, return NULL. Otherwise, return V->address(). */
673template<typename T, typename A>
674inline T *
675vec_safe_address (vec<T, A, vl_embed> *v)
676{
677 return v ? v->address () : NULL;
678}
679
680
681/* If V is NULL, return true. Otherwise, return V->is_empty(). */
682template<typename T, typename A>
683inline bool
684vec_safe_is_empty (vec<T, A, vl_embed> *v)
685{
686 return v ? v->is_empty () : true;
687}
688
689/* If V does not have space for NELEMS elements, call
690 V->reserve(NELEMS, EXACT). */
691template<typename T, typename A>
692inline bool
693vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false
694 CXX_MEM_STAT_INFO)
695{
696 bool extend = nelems ? !vec_safe_space (v, nelems) : false;
    [21.1] 'nelems' is 1
    [22] '?' condition is true
697 if (extend)
    [22.1] 'extend' is true
    [23] Taking true branch
698 A::reserve (v, nelems, exact PASS_MEM_STAT);
    [24] Calling 'va_gc::reserve'
    [29] Returning from 'va_gc::reserve'
699 return extend;
700}
701
702template<typename T, typename A>
703inline bool
704vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems
705 CXX_MEM_STAT_INFO)
706{
707 return vec_safe_reserve (v, nelems, true PASS_MEM_STAT);
708}
709
710
711/* Allocate GC memory for V with space for NELEMS slots. If NELEMS
712 is 0, V is initialized to NULL. */
713
714template<typename T, typename A>
715inline void
716vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems CXX_MEM_STAT_INFO)
717{
718 v = NULL;
719 vec_safe_reserve (v, nelems, false PASS_MEM_STAT);
720}
721
722
723/* Free the GC memory allocated by vector V and set it to NULL. */
724
725template<typename T, typename A>
726inline void
727vec_free (vec<T, A, vl_embed> *&v)
728{
729 A::release (v);
730}
731
732
733/* Grow V to length LEN. Allocate it, if necessary. */
734template<typename T, typename A>
735inline void
736vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len,
737 bool exact = false CXX_MEM_STAT_INFO)
738{
739 unsigned oldlen = vec_safe_length (v);
740 gcc_checking_assert (len >= oldlen);
741 vec_safe_reserve (v, len - oldlen, exact PASS_MEM_STAT);
742 v->quick_grow (len);
743}
744
745
746/* If V is NULL, allocate it. Call V->safe_grow_cleared(LEN). */
747template<typename T, typename A>
748inline void
749vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len,
750 bool exact = false CXX_MEM_STAT_INFO)
751{
752 unsigned oldlen = vec_safe_length (v);
753 vec_safe_grow (v, len, exact PASS_MEM_STAT);
754 vec_default_construct (v->address () + oldlen, len - oldlen);
755}
756
757
758/* Assume V is not NULL. */
759
760template<typename T>
761inline void
762vec_safe_grow_cleared (vec<T, va_heap, vl_ptr> *&v,
763 unsigned len, bool exact = false CXX_MEM_STAT_INFO)
764{
765 v->safe_grow_cleared (len, exact PASS_MEM_STAT);
766}
767
768/* If V does not have space for NELEMS elements, call
769 V->reserve(NELEMS, EXACT). */
770
771template<typename T>
772inline bool
773vec_safe_reserve (vec<T, va_heap, vl_ptr> *&v, unsigned nelems, bool exact = false
774 CXX_MEM_STAT_INFO)
775{
776 return v->reserve (nelems, exact);
777}
778
779
780/* If V is NULL return false, otherwise return V->iterate(IX, PTR). */
781template<typename T, typename A>
782inline bool
783vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr)
784{
785 if (v)
786 return v->iterate (ix, ptr);
787 else
788 {
789 *ptr = 0;
790 return false;
791 }
792}
793
794template<typename T, typename A>
795inline bool
796vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr)
797{
798 if (v)
799 return v->iterate (ix, ptr);
800 else
801 {
802 *ptr = 0;
803 return false;
804 }
805}
806
807
808/* If V has no room for one more element, reallocate it. Then call
809 V->quick_push(OBJ). */
810template<typename T, typename A>
811inline T *
812vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj CXX_MEM_STAT_INFO)
813{
814 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
    [21] Calling 'vec_safe_reserve<param_access *, va_gc>'
    [30] Returning from 'vec_safe_reserve<param_access *, va_gc>'
815 return v->quick_push (obj);
    [31] Called C++ object pointer is null
816}
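
Reading the full path: events 21-31 thread vec_safe_push (line 814) through
vec_safe_reserve (line 696) into va_gc::reserve (line 366), assume
calculate_allocation returned 0 (event 26), take the early return that stores
NULL to V (line 371), and then dereference V at line 815. For a null V and
RESERVE == 1, however, calculate_allocation (lines 224-233) returns either
0 + 1 or MAX (4, 1), never 0, so the assumption at event 26 appears
inconsistent with the code and the warning looks like a false positive of the
analyzer's interprocedural reasoning.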
817
818
819/* If V has no room for one more element, reallocate it. Then call
820 V->quick_insert(IX, OBJ). */
821template<typename T, typename A>
822inline void
823vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj
824 CXX_MEM_STAT_INFO)
825{
826 vec_safe_reserve (v, 1, false PASS_MEM_STAT);
827 v->quick_insert (ix, obj);
828}
829
830
831/* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */
832template<typename T, typename A>
833inline void
834vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size)
835{
836 if (v)
837 v->truncate (size);
838}
839
840
841/* If SRC is not NULL, return a pointer to a copy of it. */
842template<typename T, typename A>
843inline vec<T, A, vl_embed> *
844vec_safe_copy (vec<T, A, vl_embed> *src CXX_MEM_STAT_INFO)
845{
846 return src ? src->copy (ALONE_PASS_MEM_STAT) : NULLnullptr;
847}
848
849/* Copy the elements from SRC to the end of DST as if by memcpy.
850 Reallocate DST, if necessary. */
851template<typename T, typename A>
852inline void
853vec_safe_splice (vec<T, A, vl_embed> *&dst, const vec<T, A, vl_embed> *src
854 CXX_MEM_STAT_INFO)
855{
856 unsigned src_len = vec_safe_length (src);
857 if (src_len)
858 {
859 vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len
860 PASS_MEM_STAT);
861 dst->splice (*src);
862 }
863}
864
865/* Return true if SEARCH is an element of V. Note that this is O(N) in the
866 size of the vector and so should be used with care. */
867
868template<typename T, typename A>
869inline bool
870vec_safe_contains (vec<T, A, vl_embed> *v, const T &search)
871{
872 return v ? v->contains (search) : false;
873}
874
875/* Index into vector. Return the IX'th element. IX must be in the
876 domain of the vector. */
877
878template<typename T, typename A>
879inline const T &
880vec<T, A, vl_embed>::operator[] (unsigned ix) const
881{
882 gcc_checking_assert (ix < m_vecpfx.m_num);
883 return m_vecdata[ix];
884}
885
886template<typename T, typename A>
887inline T &
888vec<T, A, vl_embed>::operator[] (unsigned ix)
889{
890 gcc_checking_assert (ix < m_vecpfx.m_num);
891 return m_vecdata[ix];
892}
893
894
895/* Get the final element of the vector, which must not be empty. */
896
897template<typename T, typename A>
898inline T &
899vec<T, A, vl_embed>::last (void)
900{
901 gcc_checking_assert (m_vecpfx.m_num > 0);
902 return (*this)[m_vecpfx.m_num - 1];
903}
904
905
906/* If this vector has space for NELEMS additional entries, return
907 true. You usually only need to use this if you are doing your
908 own vector reallocation, for instance on an embedded vector. This
909 returns true in exactly the same circumstances that vec::reserve
910 will. */
911
912template<typename T, typename A>
913inline bool
914vec<T, A, vl_embed>::space (unsigned nelems) const
915{
916 return m_vecpfx.m_alloc - m_vecpfx.m_num >= nelems;
917}
918
919
920/* Return iteration condition and update PTR to point to the IX'th
921 element of this vector. Use this to iterate over the elements of a
922 vector as follows,
923
924 for (ix = 0; vec<T, A>::iterate (v, ix, &ptr); ix++)
925 continue; */
926
927template<typename T, typename A>
928inline bool
929vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const
930{
931 if (ix < m_vecpfx.m_num)
932 {
933 *ptr = m_vecdata[ix];
934 return true;
935 }
936 else
937 {
938 *ptr = 0;
939 return false;
940 }
941}
942
943
944/* Return iteration condition and update *PTR to point to the
945 IX'th element of this vector. Use this to iterate over the
946 elements of a vector as follows,
947
948 for (ix = 0; v->iterate (ix, &ptr); ix++)
949 continue;
950
951 This variant is for vectors of objects. */
952
953template<typename T, typename A>
954inline bool
955vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const
956{
957 if (ix < m_vecpfx.m_num)
958 {
959 *ptr = CONST_CAST (T *, &m_vecdata[ix]);
960 return true;
961 }
962 else
963 {
964 *ptr = 0;
965 return false;
966 }
967}
968
969
970/* Return a pointer to a copy of this vector. */
971
972template<typename T, typename A>
973inline vec<T, A, vl_embed> *
974vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECL) const
975{
976 vec<T, A, vl_embed> *new_vec = NULL;
977 unsigned len = length ();
978 if (len)
979 {
980 vec_alloc (new_vec, len PASS_MEM_STAT);
981 new_vec->embedded_init (len, len);
982 vec_copy_construct (new_vec->address (), m_vecdata, len);
983 }
984 return new_vec;
985}
986
987
988/* Copy the elements from SRC to the end of this vector as if by memcpy.
989 The vector must have sufficient headroom available. */
990
991template<typename T, typename A>
992inline void
993vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> &src)
994{
995 unsigned len = src.length ();
996 if (len)
997 {
998 gcc_checking_assert (space (len));
999 vec_copy_construct (end (), src.address (), len);
1000 m_vecpfx.m_num += len;
1001 }
1002}
1003
1004template<typename T, typename A>
1005inline void
1006vec<T, A, vl_embed>::splice (const vec<T, A, vl_embed> *src)
1007{
1008 if (src)
1009 splice (*src);
1010}
1011
1012
1013/* Push OBJ (a new element) onto the end of the vector. There must be
1014 sufficient space in the vector. Return a pointer to the slot
1015 where OBJ was inserted. */
1016
1017template<typename T, typename A>
1018inline T *
1019vec<T, A, vl_embed>::quick_push (const T &obj)
1020{
1021 gcc_checking_assert (space (1));
1022 T *slot = &m_vecdata[m_vecpfx.m_num++];
1023 *slot = obj;
1024 return slot;
1025}
1026
1027
1028/* Pop and return the last element off the end of the vector. */
1029
1030template<typename T, typename A>
1031inline T &
1032vec<T, A, vl_embed>::pop (void)
1033{
1034 gcc_checking_assert (length () > 0);
1035 return m_vecdata[--m_vecpfx.m_num];
1036}
1037
1038
1039/* Set the length of the vector to SIZE. The new length must be less
1040 than or equal to the current length. This is an O(1) operation. */
1041
1042template<typename T, typename A>
1043inline void
1044vec<T, A, vl_embed>::truncate (unsigned size)
1045{
1046 gcc_checking_assert (length () >= size);
1047 m_vecpfx.m_num = size;
1048}
1049
1050
1051/* Insert an element, OBJ, at the IXth position of this vector. There
1052 must be sufficient space. */
1053
1054template<typename T, typename A>
1055inline void
1056vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj)
1057{
1058 gcc_checking_assert (length () < allocated ());
1059 gcc_checking_assert (ix <= length ());
1060 T *slot = &m_vecdata[ix];
1061 memmove (slot + 1, slot, (m_vecpfx.m_num++ - ix) * sizeof (T));
1062 *slot = obj;
1063}
1064
1065
1066/* Remove an element from the IXth position of this vector. Ordering of
1067 remaining elements is preserved. This is an O(N) operation due to
1068 memmove. */
1069
1070template<typename T, typename A>
1071inline void
1072vec<T, A, vl_embed>::ordered_remove (unsigned ix)
1073{
1074 gcc_checking_assert (ix < length ());
1075 T *slot = &m_vecdata[ix];
1076 memmove (slot, slot + 1, (--m_vecpfx.m_num - ix) * sizeof (T));
1077}
1078
1079
1080/* Remove elements in [START, END) from VEC for which COND holds. Ordering of
1081 remaining elements is preserved. This is an O(N) operation. */
1082
1083#define VEC_ORDERED_REMOVE_IF_FROM_TO(vec, read_index, write_index, \
1084 elem_ptr, start, end, cond) \
1085 { \
1086 gcc_assert ((end) <= (vec).length ()); \
1087 for (read_index = write_index = (start); read_index < (end); \
1088 ++read_index) \
1089 { \
1090 elem_ptr = &(vec)[read_index]; \
1091 bool remove_p = (cond); \
1092 if (remove_p) \
1093 continue; \
1094 \
1095 if (read_index != write_index) \
1096 (vec)[write_index] = (vec)[read_index]; \
1097 \
1098 write_index++; \
1099 } \
1100 \
1101 if (read_index - write_index > 0) \
1102 (vec).block_remove (write_index, read_index - write_index); \
1103 }
1104
1105
1106/* Remove elements from VEC for which COND holds. Ordering of remaining
1107 elements is preserved. This is an O(N) operation. */
1108
1109#define VEC_ORDERED_REMOVE_IF(vec, read_index, write_index, elem_ptr, \
1110 cond) \
1111 VEC_ORDERED_REMOVE_IF_FROM_TO ((vec), read_index, write_index, \
1112 elem_ptr, 0, (vec).length (), (cond))
1113
1114/* Remove an element from the IXth position of this vector. Ordering of
1115 remaining elements is destroyed. This is an O(1) operation. */
1116
1117template<typename T, typename A>
1118inline void
1119vec<T, A, vl_embed>::unordered_remove (unsigned ix)
1120{
1121 gcc_checking_assert (ix < length ());
1122 m_vecdata[ix] = m_vecdata[--m_vecpfx.m_num];
1123}
1124
1125
1126/* Remove LEN elements starting at the IXth. Ordering is retained.
1127 This is an O(N) operation due to memmove. */
1128
1129template<typename T, typename A>
1130inline void
1131vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len)
1132{
1133 gcc_checking_assert (ix + len <= length ());
1134 T *slot = &m_vecdata[ix];
1135 m_vecpfx.m_num -= len;
1136 memmove (slot, slot + len, (m_vecpfx.m_num - ix) * sizeof (T));
1137}
1138
1139
1140/* Sort the contents of this vector with qsort. CMP is the comparison
1141 function to pass to qsort. */
1142
1143template<typename T, typename A>
1144inline void
1145vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *))
1146{
1147 if (length () > 1)
1148 gcc_qsort (address (), length (), sizeof (T), cmp);
1149}
1150
1151/* Sort the contents of this vector with qsort. CMP is the comparison
1152 function to pass to qsort. */
1153
1154template<typename T, typename A>
1155inline void
1156vec<T, A, vl_embed>::sort (int (*cmp) (const void *, const void *, void *),
1157 void *data)
1158{
1159 if (length () > 1)
1160 gcc_sort_r (address (), length (), sizeof (T), cmp, data);
1161}
1162
1163
1164/* Search the contents of the sorted vector with a binary search.
1165 CMP is the comparison function to pass to bsearch. */
1166
1167template<typename T, typename A>
1168inline T *
1169vec<T, A, vl_embed>::bsearch (const void *key,
1170 int (*compar) (const void *, const void *))
1171{
1172 const void *base = this->address ();
1173 size_t nmemb = this->length ();
1174 size_t size = sizeof (T);
1175 /* The following is a copy of glibc stdlib-bsearch.h. */
1176 size_t l, u, idx;
1177 const void *p;
1178 int comparison;
1179
1180 l = 0;
1181 u = nmemb;
1182 while (l < u)
1183 {
1184 idx = (l + u) / 2;
1185 p = (const void *) (((const char *) base) + (idx * size));
1186 comparison = (*compar) (key, p);
1187 if (comparison < 0)
1188 u = idx;
1189 else if (comparison > 0)
1190 l = idx + 1;
1191 else
1192 return (T *)const_cast<void *>(p);
1193 }
1194
1195 return NULL;
1196}
1197
1198/* Search the contents of the sorted vector with a binary search.
1199 CMP is the comparison function to pass to bsearch. */
1200
1201template<typename T, typename A>
1202inline T *
1203vec<T, A, vl_embed>::bsearch (const void *key,
1204 int (*compar) (const void *, const void *,
1205 void *), void *data)
1206{
1207 const void *base = this->address ();
1208 size_t nmemb = this->length ();
1209 size_t size = sizeof (T);
1210 /* The following is a copy of glibc stdlib-bsearch.h. */
1211 size_t l, u, idx;
1212 const void *p;
1213 int comparison;
1214
1215 l = 0;
1216 u = nmemb;
1217 while (l < u)
1218 {
1219 idx = (l + u) / 2;
1220 p = (const void *) (((const char *) base) + (idx * size));
1221 comparison = (*compar) (key, p, data);
1222 if (comparison < 0)
1223 u = idx;
1224 else if (comparison > 0)
1225 l = idx + 1;
1226 else
1227 return (T *)const_cast<void *>(p);
1228 }
1229
1230 return NULL;
1231}
1232
1233/* Return true if SEARCH is an element of V. Note that this is O(N) in the
1234 size of the vector and so should be used with care. */
1235
1236template<typename T, typename A>
1237inline bool
1238vec<T, A, vl_embed>::contains (const T &search) const
1239{
1240 unsigned int len = length ();
1241 for (unsigned int i = 0; i < len; i++)
1242 if ((*this)[i] == search)
1243 return true;
1244
1245 return false;
1246}
1247
1248/* Find and return the first position in which OBJ could be inserted
1249 without changing the ordering of this vector. LESSTHAN is a
1250 function that returns true if the first argument is strictly less
1251 than the second. */
1252
1253template<typename T, typename A>
1254unsigned
1255vec<T, A, vl_embed>::lower_bound (T obj, bool (*lessthan)(const T &, const T &))
1256 const
1257{
1258 unsigned int len = length ();
1259 unsigned int half, middle;
1260 unsigned int first = 0;
1261 while (len > 0)
1262 {
1263 half = len / 2;
1264 middle = first;
1265 middle += half;
1266 T middle_elem = (*this)[middle];
1267 if (lessthan (middle_elem, obj))
1268 {
1269 first = middle;
1270 ++first;
1271 len = len - half - 1;
1272 }
1273 else
1274 len = half;
1275 }
1276 return first;
1277}
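
A sketch of the sorted-insertion idiom this function supports (hypothetical
comparison function; vec_safe_insert is declared at line 823):

  static bool
  int_less (const int &a, const int &b)
  {
    return a < b;
  }

  /* Insert X into sorted GC vector V, keeping it sorted.  */
  void
  sorted_insert (vec<int, va_gc> *&v, int x)
  {
    unsigned pos = v ? v->lower_bound (x, int_less) : 0;
    vec_safe_insert (v, pos, x);
  }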
1278
1279
1280/* Return the number of bytes needed to embed an instance of an
1281 embeddable vec inside another data structure.
1282
1283 Use these methods to determine the required size and initialization
1284 of a vector V of type T embedded within another structure (as the
1285 final member):
1286
1287 size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc);
1288 void v->embedded_init (unsigned alloc, unsigned num);
1289
1290 These allow the caller to perform the memory allocation. */
1291
1292template<typename T, typename A>
1293inline size_t
1294vec<T, A, vl_embed>::embedded_size (unsigned alloc)
1295{
1296 struct alignas (T) U { char data[sizeof (T)]; };
1297 typedef vec<U, A, vl_embed> vec_embedded;
1298 typedef typename std::conditional<std::is_standard_layout<T>::value,
1299 vec, vec_embedded>::type vec_stdlayout;
1300 static_assert (sizeof (vec_stdlayout) == sizeof (vec), "");
1301 static_assert (alignof (vec_stdlayout) == alignof (vec), "");
1302 return offsetof (vec_stdlayout, m_vecdata) + alloc * sizeof (T);
1303}
1304
1305
1306/* Initialize the vector to contain room for ALLOC elements and
1307 NUM active elements. */
1308
1309template<typename T, typename A>
1310inline void
1311vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num, unsigned aut)
1312{
1313 m_vecpfx.m_alloc = alloc;
1314 m_vecpfx.m_using_auto_storage = aut;
1315 m_vecpfx.m_num = num;
1316}
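
Together, embedded_size and embedded_init support the caller-performed
allocation described at lines 1280-1290. A minimal heap-based sketch
(hypothetical element type; xmalloc is libiberty's checking malloc):

  size_t sz = vec<int, va_heap, vl_embed>::embedded_size (8);
  vec<int, va_heap, vl_embed> *v
    = static_cast <vec<int, va_heap, vl_embed> *> (xmalloc (sz));
  v->embedded_init (8);   /* Room for 8 elements, 0 active.  */
  v->quick_push (42);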
1317
1318
1319/* Grow the vector to a specific length. LEN must be as long or longer than
1320 the current length. The new elements are uninitialized. */
1321
1322template<typename T, typename A>
1323inline void
1324vec<T, A, vl_embed>::quick_grow (unsigned len)
1325{
1326 gcc_checking_assert (length () <= len && len <= m_vecpfx.m_alloc);
1327 m_vecpfx.m_num = len;
1328}
1329
1330
1331/* Grow the vector to a specific length. LEN must be as long or longer than
1332 the current length. The new elements are initialized to zero. */
1333
1334template<typename T, typename A>
1335inline void
1336vec<T, A, vl_embed>::quick_grow_cleared (unsigned len)
1337{
1338 unsigned oldlen = length ();
1339 size_t growby = len - oldlen;
1340 quick_grow (len);
1341 if (growby != 0)
1342 vec_default_construct (address () + oldlen, growby);
1343}
1344
1345/* Garbage collection support for vec<T, A, vl_embed>. */
1346
1347template<typename T>
1348void
1349gt_ggc_mx (vec<T, va_gc> *v)
1350{
1351 extern void gt_ggc_mx (T &);
1352 for (unsigned i = 0; i < v->length (); i++)
1353 gt_ggc_mx ((*v)[i]);
1354}
1355
1356template<typename T>
1357void
1358gt_ggc_mx (vec<T, va_gc_atomic, vl_embed> *v ATTRIBUTE_UNUSED)
1359{
1360 /* Nothing to do. Vectors of atomic types wrt GC do not need to
1361 be traversed. */
1362}
1363
1364
1365/* PCH support for vec<T, A, vl_embed>. */
1366
1367template<typename T, typename A>
1368void
1369gt_pch_nx (vec<T, A, vl_embed> *v)
1370{
1371 extern void gt_pch_nx (T &);
1372 for (unsigned i = 0; i < v->length (); i++)
1373 gt_pch_nx ((*v)[i]);
1374}
1375
1376template<typename T, typename A>
1377void
1378gt_pch_nx (vec<T *, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1379{
1380 for (unsigned i = 0; i < v->length (); i++)
1381 op (&((*v)[i]), cookie);
1382}
1383
1384template<typename T, typename A>
1385void
1386gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
1387{
1388 extern void gt_pch_nx (T *, gt_pointer_operator, void *);
1389 for (unsigned i = 0; i < v->length (); i++)
1390 gt_pch_nx (&((*v)[i]), op, cookie);
1391}
1392
1393
1394/* Space efficient vector. These vectors can grow dynamically and are
1395 allocated together with their control data. They are suited to be
1396 included in data structures. Prior to initial allocation, they
1397 only take a single word of storage.
1398
1399 These vectors are implemented as a pointer to an embeddable vector.
1400 The semantics allow for this pointer to be NULL to represent empty
1401 vectors. This way, empty vectors occupy minimal space in the
1402 structure containing them.
1403
1404 Properties:
1405
1406 - The whole vector and control data are allocated in a single
1407 contiguous block.
1408 - The whole vector may be re-allocated.
1409 - Vector data may grow and shrink.
1410 - Access and manipulation requires a pointer test and
1411 indirection.
1412 - It requires 1 word of storage (prior to vector allocation).
1413
1414
1415 Limitations:
1416
1417 These vectors must be PODs because they are stored in unions.
1418 (http://en.wikipedia.org/wiki/Plain_old_data_structures).
1419 As long as we use C++03, we cannot have constructors or
1420 destructors in classes that are stored in unions. */
1421
1422template<typename T>
1423struct vec<T, va_heap, vl_ptr>
1424{
1425public:
1426 /* Memory allocation and deallocation for the embedded vector.
1427 Needed because we cannot have proper ctors/dtors defined. */
1428 void create (unsigned nelems CXX_MEM_STAT_INFO);
1429 void release (void);
1430
1431 /* Vector operations. */
1432 bool exists (void) const
{ return m_vec != NULL; }
1434
1435 bool is_empty (void) const
1436 { return m_vec ? m_vec->is_empty () : true; }
1437
1438 unsigned length (void) const
1439 { return m_vec ? m_vec->length () : 0; }
1440
1441 T *address (void)
{ return m_vec ? m_vec->m_vecdata : NULL; }
1443
1444 const T *address (void) const
{ return m_vec ? m_vec->m_vecdata : NULL; }
1446
1447 T *begin () { return address (); }
1448 const T *begin () const { return address (); }
1449 T *end () { return begin () + length (); }
1450 const T *end () const { return begin () + length (); }
1451 const T &operator[] (unsigned ix) const
1452 { return (*m_vec)[ix]; }
1453
1454 bool operator!=(const vec &other) const
1455 { return !(*this == other); }
1456
1457 bool operator==(const vec &other) const
1458 { return address () == other.address (); }
1459
1460 T &operator[] (unsigned ix)
1461 { return (*m_vec)[ix]; }
1462
1463 T &last (void)
1464 { return m_vec->last (); }
1465
1466 bool space (int nelems) const
1467 { return m_vec ? m_vec->space (nelems) : nelems == 0; }
1468
1469 bool iterate (unsigned ix, T *p) const;
1470 bool iterate (unsigned ix, T **p) const;
1471 vec copy (ALONE_CXX_MEM_STAT_INFO) const;
1472 bool reserve (unsigned, bool = false CXX_MEM_STAT_INFO);
1473 bool reserve_exact (unsigned CXX_MEM_STAT_INFO);
1474 void splice (const vec &);
1475 void safe_splice (const vec & CXX_MEM_STAT_INFO);
1476 T *quick_push (const T &);
1477 T *safe_push (const T &CXX_MEM_STAT_INFO);
1478 T &pop (void);
1479 void truncate (unsigned);
1480 void safe_grow (unsigned, bool = false CXX_MEM_STAT_INFO);
1481 void safe_grow_cleared (unsigned, bool = false CXX_MEM_STAT_INFO);
1482 void quick_grow (unsigned);
1483 void quick_grow_cleared (unsigned);
1484 void quick_insert (unsigned, const T &);
1485 void safe_insert (unsigned, const T & CXX_MEM_STAT_INFO);
1486 void ordered_remove (unsigned);
1487 void unordered_remove (unsigned);
1488 void block_remove (unsigned, unsigned);
1489 void qsort (int (*) (const void *, const void *));
1490 void sort (int (*) (const void *, const void *, void *), void *);
1491 T *bsearch (const void *key, int (*compar)(const void *, const void *));
1492 T *bsearch (const void *key,
1493 int (*compar)(const void *, const void *, void *), void *);
1494 unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
1495 bool contains (const T &search) const;
1496 void reverse (void);
1497
1498 bool using_auto_storage () const;
1499
1500 /* FIXME - This field should be private, but we need to cater to
1501 compilers that have stricter notions of PODness for types. */
1502 vec<T, va_heap, vl_embed> *m_vec;
1503};
1504
1505
1506/* auto_vec is a subclass of vec that automatically manages creating and
1507 releasing the internal vector. If N is non-zero then it has N elements
1508 of internal storage. The default is no internal storage; you probably
1509 only want internal storage for vectors that live on the stack, because
1510 if the vector grows larger than the internal storage, that space is
1511 wasted. */
1512template<typename T, size_t N = 0>
1513class auto_vec : public vec<T, va_heap>
1514{
1515public:
1516 auto_vec ()
1517 {
1518 m_auto.embedded_init (MAX (N, 2), 0, 1);
1519 this->m_vec = &m_auto;
1520 }
1521
1522 auto_vec (size_t s)
1523 {
1524 if (s > N)
1525 {
1526 this->create (s);
1527 return;
1528 }
1529
1530 m_auto.embedded_init (MAX (N, 2), 0, 1);
1531 this->m_vec = &m_auto;
1532 }
1533
1534 ~auto_vec ()
1535 {
1536 this->release ();
1537 }
1538
1539private:
1540 vec<T, va_heap, vl_embed> m_auto;
1541 T m_data[MAX (N - 1, 1)];
1542};
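
A usage sketch (hypothetical values):

  auto_vec<int, 16> scratch;    /* 16 elements of inline storage.  */
  for (int i = 0; i < 10; i++)
    scratch.quick_push (i);     /* Fits in the inline storage.  */
  scratch.safe_push (99);       /* Would spill to the heap only past 16.  */

The storage is released automatically when 'scratch' goes out of scope.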
1543
1544/* auto_vec is a subclass of vec whose storage is released when it is
1545 destroyed. */
1546template<typename T>
1547class auto_vec<T, 0> : public vec<T, va_heap>
1548{
1549public:
1550 auto_vec () { this->m_vec = NULL; }
1551 auto_vec (size_t n) { this->create (n); }
1552 ~auto_vec () { this->release (); }
1553
1554 auto_vec (vec<T, va_heap>&& r)
1555 {
1556 gcc_assert (!r.using_auto_storage ());
1557 this->m_vec = r.m_vec;
1558 r.m_vec = NULL;
1559 }
1560 auto_vec& operator= (vec<T, va_heap>&& r)
1561 {
1562 gcc_assert (!r.using_auto_storage ());
1563 this->release ();
1564 this->m_vec = r.m_vec;
1565 r.m_vec = NULL;
1566 return *this;
1567 }
1568};
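
/* Sketch (illustrative only) of the move support above: a heap-allocated
   vec<T, va_heap> may donate its buffer to an auto_vec<T, 0>, which then
   owns it and releases it on destruction.  example_build and example_move
   are hypothetical.

static vec<int, va_heap>
example_build (void)
{
  vec<int, va_heap> r = vNULL;
  r.safe_push (1);
  return r;             // Returned by value; only the pointer is copied.
}

static void
example_move (void)
{
  auto_vec<int> v (example_build ());  // Steals the buffer; no copy.
}                                      // Released by ~auto_vec.
*/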
1569
1570
1571/* Allocate heap memory for pointer V and create the internal vector
1572 with space for NELEMS elements. If NELEMS is 0, the internal
1573 vector is initialized to empty. */
1574
1575template<typename T>
1576inline void
1577vec_alloc (vec<T> *&v, unsigned nelems CXX_MEM_STAT_INFO)
1578{
1579 v = new vec<T>;
1580 v->create (nelems PASS_MEM_STAT);
1581}
1582
1583
1584/* A subclass of auto_vec <char *> that frees all of its elements on
1585 deletion. */
1586
1587class auto_string_vec : public auto_vec <char *>
1588{
1589 public:
1590 ~auto_string_vec ();
1591};
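
/* Sketch (illustrative only): auto_string_vec owns malloc'ed strings, so
   the usual pattern is to push copies made with xstrdup; the dtor defined
   further below frees each string and then the buffer.  example_strings
   is hypothetical.

static void
example_strings (void)
{
  auto_string_vec names;
  names.safe_push (xstrdup ("foo"));
  names.safe_push (xstrdup ("bar"));
}  // Both strings and the vector storage are freed here.
*/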
1592
1593/* A subclass of auto_vec <T *> that deletes all of its elements on
1594 destruction.
1595
1596 This is a crude way for a vec to "own" the objects it points to
1597 and clean up automatically.
1598
1599 For example, no attempt is made to delete elements when an item
1600 within the vec is overwritten.
1601
1602 We can't rely on gnu::unique_ptr within a container,
1603 since we can't rely on move semantics in C++98. */
1604
1605template <typename T>
1606class auto_delete_vec : public auto_vec <T *>
1607{
1608 public:
1609 auto_delete_vec () {}
1610 auto_delete_vec (size_t s) : auto_vec <T *> (s) {}
1611
1612 ~auto_delete_vec ();
1613
1614private:
1615 DISABLE_COPY_AND_ASSIGN(auto_delete_vec);
1616};
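
/* Sketch (illustrative only): auto_delete_vec elements must be owned,
   heap-allocated objects; the dtor defined further below deletes each
   one.  `widget' and example_owning are hypothetical.

struct widget { int id; };

static void
example_owning (void)
{
  auto_delete_vec<widget> ws;
  ws.safe_push (new widget ());
  ws.safe_push (new widget ());
}  // Both widgets are deleted, then the buffer is freed.
*/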
1617
1618/* Conditionally allocate heap memory for VEC and its internal vector. */
1619
1620template<typename T>
1621inline void
1622vec_check_alloc (vec<T, va_heap> *&vec, unsigned nelems CXX_MEM_STAT_INFO)
1623{
1624 if (!vec)
1625 vec_alloc (vec, nelems PASS_MEM_STAT);
1626}
1627
1628
1629/* Free the heap memory allocated by vector V and set it to NULL. */
1630
1631template<typename T>
1632inline void
1633vec_free (vec<T> *&v)
1634{
1635 if (v == NULL)
1636 return;
1637
1638 v->release ();
1639 delete v;
1640 v = NULL;
1641}
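
/* Sketch (illustrative only) of the heap-pointer lifecycle using the
   helpers above; example_lifecycle is hypothetical.

static void
example_lifecycle (void)
{
  vec<int> *v = NULL;
  vec_check_alloc (v, 4);  // Allocates, since v is NULL.
  v->quick_push (42);      // Space for four was reserved.
  vec_free (v);            // Frees everything; v is NULL again.
}
*/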
1642
1643
1644/* Return iteration condition and update *PTR with a copy of the IX'th
1645 element of this vector. Use this to iterate over the elements of a
1646 vector as follows,
1647
1648 for (ix = 0; v.iterate (ix, &ptr); ix++)
1649 continue; */
1650
1651template<typename T>
1652inline bool
1653vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T *ptr) const
1654{
1655 if (m_vec)
1656 return m_vec->iterate (ix, ptr);
1657 else
1658 {
1659 *ptr = 0;
1660 return false;
1661 }
1662}
1663
1664
1665/* Return iteration condition and update *PTR to point to the
1666 IX'th element of this vector. Use this to iterate over the
1667 elements of a vector as follows,
1668
1669 for (ix = 0; v->iterate (ix, &ptr); ix++)
1670 continue;
1671
1672 This variant is for vectors of objects. */
1673
1674template<typename T>
1675inline bool
1676vec<T, va_heap, vl_ptr>::iterate (unsigned ix, T **ptr) const
1677{
1678 if (m_vec)
1679 return m_vec->iterate (ix, ptr);
1680 else
1681 {
1682 *ptr = 0;
1683 return false;
1684 }
1685}
1686
1687
1688/* Convenience macro for forward iteration. */
1689#define FOR_EACH_VEC_ELT(V, I, P) \
1690 for (I = 0; (V).iterate ((I), &(P)); ++(I))
1691
1692#define FOR_EACH_VEC_SAFE_ELT(V, I, P) \
1693 for (I = 0; vec_safe_iterate ((V), (I), &(P)); ++(I))
1694
1695/* Likewise, but start from FROM rather than 0. */
1696#define FOR_EACH_VEC_ELT_FROM(V, I, P, FROM) \
1697 for (I = (FROM); (V).iterate ((I), &(P)); ++(I))
1698
1699/* Convenience macro for reverse iteration. */
1700#define FOR_EACH_VEC_ELT_REVERSE(V, I, P) \
1701 for (I = (V).length () - 1; \
1702 (V).iterate ((I), &(P)); \
1703 (I)--)
1704
1705#define FOR_EACH_VEC_SAFE_ELT_REVERSE(V, I, P) \
1706 for (I = vec_safe_length (V) - 1; \
1707 vec_safe_iterate ((V), (I), &(P)); \
1708 (I)--)
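
/* Sketch (illustrative only): a typical traversal with the macros above;
   P receives a copy of each element for object vectors.  example_sum is
   hypothetical.

static int
example_sum (const vec<int> &v)
{
  unsigned i;
  int x, sum = 0;
  FOR_EACH_VEC_ELT (v, i, x)
    sum += x;
  return sum;
}
*/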
1709
1710/* auto_string_vec's dtor, freeing all contained strings, automatically
1711 chaining up to ~auto_vec <char *>, which frees the internal buffer. */
1712
1713inline
1714auto_string_vec::~auto_string_vec ()
1715{
1716 int i;
1717 char *str;
1718 FOR_EACH_VEC_ELT (*this, i, str)
1719 free (str);
1720}
1721
1722/* auto_delete_vec's dtor, deleting all contained items, automatically
1723 chaining up to ~auto_vec <T*>, which frees the internal buffer. */
1724
1725template <typename T>
1726inline
1727auto_delete_vec<T>::~auto_delete_vec ()
1728{
1729 int i;
1730 T *item;
1731 FOR_EACH_VEC_ELT (*this, i, item)
1732 delete item;
1733}
1734
1735
1736/* Return a copy of this vector. */
1737
1738template<typename T>
1739inline vec<T, va_heap, vl_ptr>
1740vec<T, va_heap, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const
1741{
1742 vec<T, va_heap, vl_ptr> new_vec = vNULL;
1743 if (length ())
1744 new_vec.m_vec = m_vec->copy (ALONE_PASS_MEM_STAT);
1745 return new_vec;
1746}
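
/* Sketch (illustrative only): copy returns an independent heap vector,
   so the caller owns it and must release it explicitly.  example_copy is
   hypothetical.

static void
example_copy (const vec<int> &v)
{
  vec<int> c = v.copy ();
  // ... use c independently of v ...
  c.release ();  // The copy is not freed automatically.
}
*/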
1747
1748
1749/* Ensure that the vector has at least NELEMS slots available (if
1750   EXACT is false), or exactly NELEMS slots available (if EXACT is
1751 true).
1752
1753 This may create additional headroom if EXACT is false.
1754
1755 Note that this can cause the embedded vector to be reallocated.
1756 Returns true iff reallocation actually occurred. */
1757
1758template<typename T>
1759inline bool
1760vec<T, va_heap, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL)
1761{
1762 if (space (nelems))
1763 return false;
1764
1765 /* For now play a game with va_heap::reserve to hide our auto storage,
1766 if any; this is necessary because va_heap::reserve cannot tell that
1767 the embedded vector is in auto storage, and so must not free it. */
1768 vec<T, va_heap, vl_embed> *oldvec = m_vec;
1769 unsigned int oldsize = 0;
1770 bool handle_auto_vec = m_vec && using_auto_storage ();
1771 if (handle_auto_vec)
1772 {
1773 m_vec = NULL;
1774 oldsize = oldvec->length ();
1775 nelems += oldsize;
1776 }
1777
1778 va_heap::reserve (m_vec, nelems, exact PASS_MEM_STAT);
1779 if (handle_auto_vec)
1780 {
1781 vec_copy_construct (m_vec->address (), oldvec->address (), oldsize);
1782 m_vec->m_vecpfx.m_num = oldsize;
1783 }
1784
1785 return true;
1786}
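
/* Sketch (illustrative only): reserving headroom once and then using the
   quick_* operations avoids a reallocation check per push.  example_fill
   is hypothetical.

static void
example_fill (vec<int> &v, unsigned n)
{
  v.reserve (n);             // At least n free slots afterwards.
  for (unsigned i = 0; i < n; i++)
    v.quick_push ((int) i);  // Guaranteed to have space.
}
*/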
1787
1788
1789/* Ensure that this vector has exactly NELEMS slots available. This
1790 will not create additional headroom. Note this can cause the
1791 embedded vector to be reallocated. Returns true iff reallocation
1792 actually occurred. */
1793
1794template<typename T>
1795inline bool
1796vec<T, va_heap, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL)
1797{
1798 return reserve (nelems, true PASS_MEM_STAT);
1799}
1800
1801
1802/* Create the internal vector and reserve NELEMS for it. This is
1803 exactly like vec::reserve, but the internal vector is
1804 unconditionally allocated from scratch. The old one, if it
1805 existed, is lost. */
1806
1807template<typename T>
1808inline void
1809vec<T, va_heap, vl_ptr>::create (unsigned nelems MEM_STAT_DECL)
1810{
1811 m_vec = NULL;
1812 if (nelems > 0)
1813 reserve_exact (nelems PASS_MEM_STAT);
1814}
1815
1816
1817/* Free the memory occupied by the embedded vector. */
1818
1819template<typename T>
1820inline void
1821vec<T, va_heap, vl_ptr>::release (void)
1822{
1823 if (!m_vec)
1824 return;
1825
1826 if (using_auto_storage ())
1827 {
1828 m_vec->m_vecpfx.m_num = 0;
1829 return;
1830 }
1831
1832 va_heap::release (m_vec);
1833}
1834
1835/* Copy the elements from SRC to the end of this vector as if by memcpy.
1836 SRC and this vector must be allocated with the same memory
1837 allocation mechanism. This vector is assumed to have sufficient
1838 headroom available. */
1839
1840template<typename T>
1841inline void
1842vec<T, va_heap, vl_ptr>::splice (const vec<T, va_heap, vl_ptr> &src)
1843{
1844 if (src.length ())
1845 m_vec->splice (*(src.m_vec));
1846}
1847
1848
1849/* Copy the elements in SRC to the end of this vector as if by memcpy.
1850 SRC and this vector must be allocated with the same mechanism.
1851 If there is not enough headroom in this vector, it will be reallocated
1852 as needed. */
1853
1854template<typename T>
1855inline void
1856vec<T, va_heap, vl_ptr>::safe_splice (const vec<T, va_heap, vl_ptr> &src
1857 MEM_STAT_DECL)
1858{
1859 if (src.length ())
1860 {
1861 reserve_exact (src.length ());
1862 splice (src);
1863 }
1864}
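
/* Sketch (illustrative only): safe_splice appends all of SRC and grows
   the destination as needed; plain splice requires existing headroom.
   example_append is hypothetical.

static void
example_append (vec<int> &dst, const vec<int> &src)
{
  dst.safe_splice (src);  // dst now ends with a copy of src.
}
*/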
1865
1866
1867/* Push OBJ (a new element) onto the end of the vector. There must be
1868 sufficient space in the vector. Return a pointer to the slot
1869 where OBJ was inserted. */
1870
1871template<typename T>
1872inline T *
1873vec<T, va_heap, vl_ptr>::quick_push (const T &obj)
1874{
1875 return m_vec->quick_push (obj);
1876}
1877
1878
1879/* Push a new element OBJ onto the end of this vector. Reallocates
1880 the embedded vector, if needed. Return a pointer to the slot where
1881 OBJ was inserted. */
1882
1883template<typename T>
1884inline T *
1885vec<T, va_heap, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL)
1886{
1887 reserve (1, false PASS_MEM_STAT);
1888 return quick_push (obj);
1889}
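
/* Sketch (illustrative only): safe_push is the general-purpose entry
   point; quick_push is valid only when space () is known to hold.
   example_push is hypothetical.

static void
example_push (vec<int> &v)
{
  v.safe_push (1);     // May reallocate.
  if (v.space (1))
    v.quick_push (2);  // Guarded, so no reallocation is needed.
}
*/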