Bug Summary

File:build/gcc/symbol-summary.h
Warning:line 701, column 35
Called C++ object pointer is null

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name ipa-profile.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. 
-I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-rYW302.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c

1/* Basic IPA optimizations based on profile.
2 Copyright (C) 2003-2021 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20/* ipa-profile pass implements the following analysis propagating profille
21 inter-procedurally.
22
23 - Count histogram construction. This is a histogram analyzing how much
24 time is spent executing statements with a given execution count read
25 from profile feedback. This histogram is complete only with LTO,
26 otherwise it contains information only about the current unit.
27
28 The information is used to set hot/cold thresholds.
29 - Next speculative indirect call resolution is performed: the local
30 profile pass assigns profile-id to each function and provide us with a
31 histogram specifying the most common target. We look up the callgraph
32 node corresponding to the target and produce a speculative call.
33
34 This call may or may not survive through IPA optimization based on decision
35 of inliner.
36 - Finally we propagate the following flags: unlikely executed, executed
37 once, executed at startup and executed at exit. These flags are used to
38 control code size/performance threshold and code placement (by producing
39 .text.unlikely/.text.hot/.text.startup/.text.exit subsections). */
40#include "config.h"
41#include "system.h"
42#include "coretypes.h"
43#include "backend.h"
44#include "tree.h"
45#include "gimple.h"
46#include "predict.h"
47#include "alloc-pool.h"
48#include "tree-pass.h"
49#include "cgraph.h"
50#include "data-streamer.h"
51#include "gimple-iterator.h"
52#include "ipa-utils.h"
53#include "profile.h"
54#include "value-prof.h"
55#include "tree-inline.h"
56#include "symbol-summary.h"
57#include "tree-vrp.h"
58#include "ipa-prop.h"
59#include "ipa-fnsummary.h"
60
61/* Entry in the histogram. */
62
63struct histogram_entry
64{
65 gcov_type count;
66 int time;
67 int size;
68};
69
70/* Histogram of profile values.
71 The histogram is represented as an ordered vector of entries allocated via
72 histogram_pool. During construction a separate hashtable is kept to lookup
73 duplicate entries. */
74
75vec<histogram_entry *> histogram;
76static object_allocator<histogram_entry> histogram_pool ("IPA histogram");
77
78/* Hashtable support for storing SSA names hashed by their SSA_NAME_VAR. */
79
80struct histogram_hash : nofree_ptr_hash <histogram_entry>
81{
82 static inline hashval_t hash (const histogram_entry *);
83 static inline int equal (const histogram_entry *, const histogram_entry *);
84};
85
86inline hashval_t
87histogram_hash::hash (const histogram_entry *val)
88{
89 return val->count;
90}
91
92inline int
93histogram_hash::equal (const histogram_entry *val, const histogram_entry *val2)
94{
95 return val->count == val2->count;
96}
97
98/* Account TIME and SIZE executed COUNT times into HISTOGRAM.
99 HASHTABLE is the on-side hash kept to avoid duplicates. */
100
101static void
102account_time_size (hash_table<histogram_hash> *hashtable,
103 vec<histogram_entry *> &histogram,
104 gcov_type count, int time, int size)
105{
106 histogram_entry key = {count, 0, 0};
107 histogram_entry **val = hashtable->find_slot (&key, INSERT);
108
109 if (!*val)
110 {
111 *val = histogram_pool.allocate ();
112 **val = key;
113 histogram.safe_push (*val);
114 }
115 (*val)->time += time;
116 (*val)->size += size;
117}
118
119int
120cmp_counts (const void *v1, const void *v2)
121{
122 const histogram_entry *h1 = *(const histogram_entry * const *)v1;
123 const histogram_entry *h2 = *(const histogram_entry * const *)v2;
124 if (h1->count < h2->count)
125 return 1;
126 if (h1->count > h2->count)
127 return -1;
128 return 0;
129}
130
131/* Dump HISTOGRAM to FILE. */
132
133static void
134dump_histogram (FILE *file, vec<histogram_entry *> histogram)
135{
136 unsigned int i;
137 gcov_type overall_time = 0, cumulated_time = 0, cumulated_size = 0,
138 overall_size = 0;
139
140 fprintf (dump_file, "Histogram:\n");
141 for (i = 0; i < histogram.length (); i++)
142 {
143 overall_time += histogram[i]->count * histogram[i]->time;
144 overall_size += histogram[i]->size;
145 }
146 if (!overall_time)
147 overall_time = 1;
148 if (!overall_size)
149 overall_size = 1;
150 for (i = 0; i < histogram.length (); i++)
151 {
152 cumulated_time += histogram[i]->count * histogram[i]->time;
153 cumulated_size += histogram[i]->size;
154 fprintf (file, " %" PRId64"l" "d"": time:%i (%2.2f) size:%i (%2.2f)\n",
155 (int64_t) histogram[i]->count,
156 histogram[i]->time,
157 cumulated_time * 100.0 / overall_time,
158 histogram[i]->size,
159 cumulated_size * 100.0 / overall_size);
160 }
161}
162
163/* Structure containing speculative target information from profile. */
164
165struct speculative_call_target
166{
167 speculative_call_target (unsigned int id = 0, int prob = 0)
168 : target_id (id), target_probability (prob)
169 {
170 }
171
172 /* Profile_id of target obtained from profile. */
173 unsigned int target_id;
174 /* Probability that call will land in function with target_id. */
175 unsigned int target_probability;
176};
177
178class speculative_call_summary
179{
180public:
181 speculative_call_summary () : speculative_call_targets ()
182 {}
183
184 auto_vec<speculative_call_target> speculative_call_targets;
185
186 void dump (FILE *f);
187
188};
189
190 /* Class to manage call summaries. */
191
192class ipa_profile_call_summaries
193 : public call_summary<speculative_call_summary *>
194{
195public:
196 ipa_profile_call_summaries (symbol_table *table)
197 : call_summary<speculative_call_summary *> (table)
198 {}
199
200 /* Duplicate info when an edge is cloned. */
201 virtual void duplicate (cgraph_edge *, cgraph_edge *,
202 speculative_call_summary *old_sum,
203 speculative_call_summary *new_sum);
204};
205
206static ipa_profile_call_summaries *call_sums = NULLnullptr;
207
208/* Dump all information in speculative call summary to F. */
209
210void
211speculative_call_summary::dump (FILE *f)
212{
213 cgraph_node *n2;
214
215 unsigned spec_count = speculative_call_targets.length ();
216 for (unsigned i = 0; i < spec_count; i++)
217 {
218 speculative_call_target item = speculative_call_targets[i];
219 n2 = find_func_by_profile_id (item.target_id);
220 if (n2)
221 fprintf (f, " The %i speculative target is %s with prob %3.2f\n", i,
222 n2->dump_name (),
223 item.target_probability / (float) REG_BR_PROB_BASE10000);
224 else
225 fprintf (f, " The %i speculative target is %u with prob %3.2f\n", i,
226 item.target_id,
227 item.target_probability / (float) REG_BR_PROB_BASE10000);
228 }
229}
230
231/* Duplicate info when an edge is cloned. */
232
233void
234ipa_profile_call_summaries::duplicate (cgraph_edge *, cgraph_edge *,
235 speculative_call_summary *old_sum,
236 speculative_call_summary *new_sum)
237{
238 if (!old_sum)
239 return;
240
241 unsigned old_count = old_sum->speculative_call_targets.length ();
242 if (!old_count)
243 return;
244
245 new_sum->speculative_call_targets.reserve_exact (old_count);
246 new_sum->speculative_call_targets.quick_grow_cleared (old_count);
247
248 for (unsigned i = 0; i < old_count; i++)
249 {
250 new_sum->speculative_call_targets[i]
251 = old_sum->speculative_call_targets[i];
252 }
253}
254
255/* Collect histogram and speculative target summaries from CFG profiles. */
256
257static void
258ipa_profile_generate_summary (void)
259{
260 struct cgraph_node *node;
261 gimple_stmt_iterator gsi;
262 basic_block bb;
263
264 hash_table<histogram_hash> hashtable (10);
265
266 gcc_checking_assert (!call_sums)((void)(!(!call_sums) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 266, __FUNCTION__), 0 : 0))
;
1
Assuming 'call_sums' is null
2
'?' condition is false
267 call_sums = new ipa_profile_call_summaries (symtab);
268
269 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)for ((node) = symtab->first_function_with_gimple_body (); (
node); (node) = symtab->next_function_with_gimple_body (node
))
3
Calling 'symbol_table::first_function_with_gimple_body'
48
Returning from 'symbol_table::first_function_with_gimple_body'
49
Loop condition is true. Entering loop body
270 if (ENTRY_BLOCK_PTR_FOR_FN((((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 271, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f))->
cfg->x_entry_block_ptr)
50
Calling 'profile_count::ipa_p'
52
Returning from 'profile_count::ipa_p'
53
Taking true branch
271 (DECL_STRUCT_FUNCTION (node->decl))((((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 271, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f))->
cfg->x_entry_block_ptr)
->count.ipa_p ()
)
272 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->decl))for (bb = (((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 272, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f))->
cfg->x_entry_block_ptr->next_bb; bb != (((tree_check ((
node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 272, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f))->
cfg->x_exit_block_ptr; bb = bb->next_bb)
54
Assuming 'bb' is not equal to field 'x_exit_block_ptr'
55
Loop condition is true. Entering loop body
273 {
274 int time = 0;
275 int size = 0;
276 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
56
Calling 'gsi_end_p'
59
Returning from 'gsi_end_p'
60
Loop condition is true. Entering loop body
277 {
278 gimple *stmt = gsi_stmt (gsi);
279 if (gimple_code (stmt) == GIMPLE_CALL
61
Assuming the condition is true
72
Taking true branch
280 && !gimple_call_fndecl (stmt))
62
Calling 'gimple_call_fndecl'
71
Returning from 'gimple_call_fndecl'
281 {
282 histogram_value h;
283 h = gimple_histogram_value_of_type
284 (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 284, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
,
285 stmt, HIST_TYPE_INDIR_CALL);
286 /* No need to do sanity check: gimple_ic_transform already
287 takes away bad histograms. */
288 if (h)
73
Assuming 'h' is non-null
74
Taking true branch
289 {
290 gcov_type val, count, all;
291 struct cgraph_edge *e = node->get_edge (stmt);
75
'e' initialized here
292 if (e && !e->indirect_unknown_callee)
76
Assuming 'e' is null
293 continue;
294
295 speculative_call_summary *csum
296 = call_sums->get_create (e);
77
Passing null pointer value via 1st parameter 'edge'
78
Calling 'call_summary::get_create'
297
298 for (unsigned j = 0; j < GCOV_TOPN_MAXIMUM_TRACKED_VALUES32;
299 j++)
300 {
301 if (!get_nth_most_common_value (NULLnullptr, "indirect call",
302 h, &val, &count, &all,
303 j))
304 continue;
305
306 if (val == 0 || count == 0)
307 continue;
308
309 if (count > all)
310 {
311 if (dump_file)
312 fprintf (dump_file,
313 "Probability capped to 1\n");
314 count = all;
315 }
316 speculative_call_target item (
317 val, GCOV_COMPUTE_SCALE (count, all)((all) ? ((((count) * 10000) + ((all)) / 2) / ((all))) : 10000
)
);
318 csum->speculative_call_targets.safe_push (item);
319 }
320
321 gimple_remove_histogram_value
322 (DECL_STRUCT_FUNCTION (node->decl)((tree_check ((node->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 322, __FUNCTION__, (FUNCTION_DECL)))->function_decl.f)
, stmt, h);
323 }
324 }
325 time += estimate_num_insns (stmt, &eni_time_weights);
326 size += estimate_num_insns (stmt, &eni_size_weights);
327 }
328 if (bb->count.ipa_p () && bb->count.initialized_p ())
329 account_time_size (&hashtable, histogram,
330 bb->count.ipa ().to_gcov_type (),
331 time, size);
332 }
333 histogram.qsort (cmp_counts)qsort (cmp_counts);
334}
335
336/* Serialize the speculative summary info for LTO. */
337
338static void
339ipa_profile_write_edge_summary (lto_simple_output_block *ob,
340 speculative_call_summary *csum)
341{
342 unsigned len = 0;
343
344 len = csum->speculative_call_targets.length ();
345
346 gcc_assert (len <= GCOV_TOPN_MAXIMUM_TRACKED_VALUES)((void)(!(len <= 32) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 346, __FUNCTION__), 0 : 0))
;
347
348 streamer_write_hwi_stream (ob->main_stream, len);
349
350 if (len)
351 {
352 unsigned spec_count = csum->speculative_call_targets.length ();
353 for (unsigned i = 0; i < spec_count; i++)
354 {
355 speculative_call_target item = csum->speculative_call_targets[i];
356 gcc_assert (item.target_id)((void)(!(item.target_id) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 356, __FUNCTION__), 0 : 0))
;
357 streamer_write_hwi_stream (ob->main_stream, item.target_id);
358 streamer_write_hwi_stream (ob->main_stream, item.target_probability);
359 }
360 }
361}
362
363/* Serialize the ipa info for lto. */
364
365static void
366ipa_profile_write_summary (void)
367{
368 struct lto_simple_output_block *ob
369 = lto_create_simple_output_block (LTO_section_ipa_profile);
370 unsigned int i;
371
372 streamer_write_uhwi_stream (ob->main_stream, histogram.length ());
373 for (i = 0; i < histogram.length (); i++)
374 {
375 streamer_write_gcov_count_stream (ob->main_stream, histogram[i]->count);
376 streamer_write_uhwi_stream (ob->main_stream, histogram[i]->time);
377 streamer_write_uhwi_stream (ob->main_stream, histogram[i]->size);
378 }
379
380 if (!call_sums)
381 return;
382
383 /* Serialize speculative targets information. */
384 unsigned int count = 0;
385 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
386 lto_symtab_encoder_iterator lsei;
387 cgraph_node *node;
388
389 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
390 lsei_next_function_in_partition (&lsei))
391 {
392 node = lsei_cgraph_node (lsei);
393 if (node->definition && node->has_gimple_body_p ()
394 && node->indirect_calls)
395 count++;
396 }
397
398 streamer_write_uhwi_stream (ob->main_stream, count);
399
400 /* Process all of the functions. */
401 for (lsei = lsei_start_function_in_partition (encoder);
402 !lsei_end_p (lsei) && count; lsei_next_function_in_partition (&lsei))
403 {
404 cgraph_node *node = lsei_cgraph_node (lsei);
405 if (node->definition && node->has_gimple_body_p ()
406 && node->indirect_calls)
407 {
408 int node_ref = lto_symtab_encoder_encode (encoder, node);
409 streamer_write_uhwi_stream (ob->main_stream, node_ref);
410
411 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
412 {
413 speculative_call_summary *csum = call_sums->get_create (e);
414 ipa_profile_write_edge_summary (ob, csum);
415 }
416 }
417 }
418
419 lto_destroy_simple_output_block (ob);
420}
421
422/* Dump all profile summary data for all cgraph nodes and edges to file F. */
423
424static void
425ipa_profile_dump_all_summaries (FILE *f)
426{
427 fprintf (dump_file,
428 "\n========== IPA-profile speculative targets: ==========\n");
429 cgraph_node *node;
430 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)for ((node) = symtab->first_function_with_gimple_body (); (
node); (node) = symtab->next_function_with_gimple_body (node
))
431 {
432 fprintf (f, "\nSummary for node %s:\n", node->dump_name ());
433 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
434 {
435 fprintf (f, " Summary for %s of indirect edge %d:\n",
436 e->caller->dump_name (), e->lto_stmt_uid);
437 speculative_call_summary *csum = call_sums->get_create (e);
438 csum->dump (f);
439 }
440 }
441 fprintf (f, "\n\n");
442}
443
444/* Read speculative targets information about edge for LTO WPA. */
445
446static void
447ipa_profile_read_edge_summary (class lto_input_block *ib, cgraph_edge *edge)
448{
449 unsigned i, len;
450
451 len = streamer_read_hwi (ib);
452 gcc_assert (len <= GCOV_TOPN_MAXIMUM_TRACKED_VALUES)((void)(!(len <= 32) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 452, __FUNCTION__), 0 : 0))
;
453 speculative_call_summary *csum = call_sums->get_create (edge);
454
455 for (i = 0; i < len; i++)
456 {
457 unsigned int target_id = streamer_read_hwi (ib);
458 int target_probability = streamer_read_hwi (ib);
459 speculative_call_target item (target_id, target_probability);
460 csum->speculative_call_targets.safe_push (item);
461 }
462}
463
464/* Read profile speculative targets section information for LTO WPA. */
465
466static void
467ipa_profile_read_summary_section (struct lto_file_decl_data *file_data,
468 class lto_input_block *ib)
469{
470 if (!ib)
471 return;
472
473 lto_symtab_encoder_t encoder = file_data->symtab_node_encoder;
474
475 unsigned int count = streamer_read_uhwi (ib);
476
477 unsigned int i;
478 unsigned int index;
479 cgraph_node * node;
480
481 for (i = 0; i < count; i++)
482 {
483 index = streamer_read_uhwi (ib);
484 encoder = file_data->symtab_node_encoder;
485 node
486 = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder, index));
487
488 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
489 ipa_profile_read_edge_summary (ib, e);
490 }
491}
492
493/* Deserialize the IPA histogram and speculative targets summary info for LTO.
494 */
495
496static void
497ipa_profile_read_summary (void)
498{
499 struct lto_file_decl_data ** file_data_vec
500 = lto_get_file_decl_data ();
501 struct lto_file_decl_data * file_data;
502 int j = 0;
503
504 hash_table<histogram_hash> hashtable (10);
505
506 gcc_checking_assert (!call_sums)((void)(!(!call_sums) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 506, __FUNCTION__), 0 : 0))
;
507 call_sums = new ipa_profile_call_summaries (symtab);
508
509 while ((file_data = file_data_vec[j++]))
510 {
511 const char *data;
512 size_t len;
513 class lto_input_block *ib
514 = lto_create_simple_input_block (file_data,
515 LTO_section_ipa_profile,
516 &data, &len);
517 if (ib)
518 {
519 unsigned int num = streamer_read_uhwi (ib);
520 unsigned int n;
521 for (n = 0; n < num; n++)
522 {
523 gcov_type count = streamer_read_gcov_count (ib);
524 int time = streamer_read_uhwi (ib);
525 int size = streamer_read_uhwi (ib);
526 account_time_size (&hashtable, histogram,
527 count, time, size);
528 }
529
530 ipa_profile_read_summary_section (file_data, ib);
531
532 lto_destroy_simple_input_block (file_data,
533 LTO_section_ipa_profile,
534 ib, data, len);
535 }
536 }
537 histogram.qsort (cmp_counts)qsort (cmp_counts);
538}
539
540/* Data used by ipa_propagate_frequency. */
541
542struct ipa_propagate_frequency_data
543{
544 cgraph_node *function_symbol;
545 bool maybe_unlikely_executed;
546 bool maybe_executed_once;
547 bool only_called_at_startup;
548 bool only_called_at_exit;
549};
550
551/* Worker for ipa_propagate_frequency_1. */
552
553static bool
554ipa_propagate_frequency_1 (struct cgraph_node *node, void *data)
555{
556 struct ipa_propagate_frequency_data *d;
557 struct cgraph_edge *edge;
558
559 d = (struct ipa_propagate_frequency_data *)data;
560 for (edge = node->callers;
561 edge && (d->maybe_unlikely_executed || d->maybe_executed_once
562 || d->only_called_at_startup || d->only_called_at_exit);
563 edge = edge->next_caller)
564 {
565 if (edge->caller != d->function_symbol)
566 {
567 d->only_called_at_startup &= edge->caller->only_called_at_startup;
568 /* It makes sense to put main() together with the static constructors.
569 It will be executed for sure, but rest of functions called from
570 main are definitely not at startup only. */
571 if (MAIN_NAME_P (DECL_NAME (edge->caller->decl))((tree_check ((((contains_struct_check ((edge->caller->
decl), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 571, __FUNCTION__))->decl_minimal.name)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 571, __FUNCTION__, (IDENTIFIER_NODE))) == global_trees[TI_MAIN_IDENTIFIER
])
)
572 d->only_called_at_startup = 0;
573 d->only_called_at_exit &= edge->caller->only_called_at_exit;
574 }
575
576 /* When profile feedback is available, do not try to propagate too hard;
577 counts are already good guide on function frequencies and roundoff
578 errors can make us to push function into unlikely section even when
579 it is executed by the train run. Transfer the function only if all
580 callers are unlikely executed. */
581 if (profile_info
582 && !(edge->callee->count.ipa () == profile_count::zero ())
583 && (edge->caller->frequency != NODE_FREQUENCY_UNLIKELY_EXECUTED
584 || (edge->caller->inlined_to
585 && edge->caller->inlined_to->frequency
586 != NODE_FREQUENCY_UNLIKELY_EXECUTED)))
587 d->maybe_unlikely_executed = false;
588 if (edge->count.ipa ().initialized_p ()
589 && !edge->count.ipa ().nonzero_p ())
590 continue;
591 switch (edge->caller->frequency)
592 {
593 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
594 break;
595 case NODE_FREQUENCY_EXECUTED_ONCE:
596 {
597 if (dump_file && (dump_flags & TDF_DETAILS))
598 fprintf (dump_file, " Called by %s that is executed once\n",
599 edge->caller->dump_name ());
600 d->maybe_unlikely_executed = false;
601 ipa_call_summary *s = ipa_call_summaries->get (edge);
602 if (s != NULLnullptr && s->loop_depth)
603 {
604 d->maybe_executed_once = false;
605 if (dump_file && (dump_flags & TDF_DETAILS))
606 fprintf (dump_file, " Called in loop\n");
607 }
608 break;
609 }
610 case NODE_FREQUENCY_HOT:
611 case NODE_FREQUENCY_NORMAL:
612 if (dump_file && (dump_flags & TDF_DETAILS))
613 fprintf (dump_file, " Called by %s that is normal or hot\n",
614 edge->caller->dump_name ());
615 d->maybe_unlikely_executed = false;
616 d->maybe_executed_once = false;
617 break;
618 }
619 }
620 return edge != NULLnullptr;
621}
622
623/* Return ture if NODE contains hot calls. */
624
625bool
626contains_hot_call_p (struct cgraph_node *node)
627{
628 struct cgraph_edge *e;
629 for (e = node->callees; e; e = e->next_callee)
630 if (e->maybe_hot_p ())
631 return true;
632 else if (!e->inline_failed
633 && contains_hot_call_p (e->callee))
634 return true;
635 for (e = node->indirect_calls; e; e = e->next_callee)
636 if (e->maybe_hot_p ())
637 return true;
638 return false;
639}
640
641/* See if the frequency of NODE can be updated based on frequencies of its
642 callers. */
643bool
644ipa_propagate_frequency (struct cgraph_node *node)
645{
646 struct ipa_propagate_frequency_data d = {node, true, true, true, true};
647 bool changed = false;
648
649 /* We cannot propagate anything useful about externally visible functions
650 nor about virtuals. */
651 if (!node->local
652 || node->alias
653 || (opt_for_fn (node->decl, flag_devirtualize)(opts_for_fn (node->decl)->x_flag_devirtualize)
654 && DECL_VIRTUAL_P (node->decl)((contains_struct_check ((node->decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 654, __FUNCTION__))->decl_common.virtual_flag)
))
655 return false;
656 gcc_assert (node->analyzed)((void)(!(node->analyzed) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 656, __FUNCTION__), 0 : 0))
;
657 if (dump_file && (dump_flags & TDF_DETAILS))
658 fprintf (dump_file, "Processing frequency %s\n", node->dump_name ());
659
660 node->call_for_symbol_and_aliases (ipa_propagate_frequency_1, &d,
661 true);
662
663 if ((d.only_called_at_startup && !d.only_called_at_exit)
664 && !node->only_called_at_startup)
665 {
666 node->only_called_at_startup = true;
667 if (dump_file)
668 fprintf (dump_file, "Node %s promoted to only called at startup.\n",
669 node->dump_name ());
670 changed = true;
671 }
672 if ((d.only_called_at_exit && !d.only_called_at_startup)
673 && !node->only_called_at_exit)
674 {
675 node->only_called_at_exit = true;
676 if (dump_file)
677 fprintf (dump_file, "Node %s promoted to only called at exit.\n",
678 node->dump_name ());
679 changed = true;
680 }
681
682 /* With profile we can decide on hot/normal based on count. */
683 if (node->count. ipa().initialized_p ())
684 {
685 bool hot = false;
686 if (!(node->count. ipa() == profile_count::zero ())
687 && node->count. ipa() >= get_hot_bb_threshold ())
688 hot = true;
689 if (!hot)
690 hot |= contains_hot_call_p (node);
691 if (hot)
692 {
693 if (node->frequency != NODE_FREQUENCY_HOT)
694 {
695 if (dump_file)
696 fprintf (dump_file, "Node %s promoted to hot.\n",
697 node->dump_name ());
698 node->frequency = NODE_FREQUENCY_HOT;
699 return true;
700 }
701 return false;
702 }
703 else if (node->frequency == NODE_FREQUENCY_HOT)
704 {
705 if (dump_file)
706 fprintf (dump_file, "Node %s reduced to normal.\n",
707 node->dump_name ());
708 node->frequency = NODE_FREQUENCY_NORMAL;
709 changed = true;
710 }
711 }
712 /* These come either from profile or user hints; never update them. */
713 if (node->frequency == NODE_FREQUENCY_HOT
714 || node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
715 return changed;
716 if (d.maybe_unlikely_executed)
717 {
718 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
719 if (dump_file)
720 fprintf (dump_file, "Node %s promoted to unlikely executed.\n",
721 node->dump_name ());
722 changed = true;
723 }
724 else if (d.maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
725 {
726 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
727 if (dump_file)
728 fprintf (dump_file, "Node %s promoted to executed once.\n",
729 node->dump_name ());
730 changed = true;
731 }
732 return changed;
733}
734
735/* Check that number of arguments of N agrees with E.
736 Be conservative when summaries are not present. */
737
738static bool
739check_argument_count (struct cgraph_node *n, struct cgraph_edge *e)
740{
741 if (!ipa_node_params_sum || !ipa_edge_args_sum)
742 return true;
743 class ipa_node_params *info = IPA_NODE_REF (n->function_symbol ())(ipa_node_params_sum->get (n->function_symbol ()));
744 if (!info)
745 return true;
746 ipa_edge_args *e_info = IPA_EDGE_REF (e)(ipa_edge_args_sum->get (e));
747 if (!e_info)
748 return true;
749 if (ipa_get_param_count (info) != ipa_get_cs_argument_count (e_info)
750 && (ipa_get_param_count (info) >= ipa_get_cs_argument_count (e_info)
751 || !stdarg_p (TREE_TYPE (n->decl)((contains_struct_check ((n->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 751, __FUNCTION__))->typed.type)
)))
752 return false;
753 return true;
754}
755
756/* Simple ipa profile pass propagating frequencies across the callgraph. */
757
758static unsigned int
759ipa_profile (void)
760{
 761 struct cgraph_node **order;
 762 struct cgraph_edge *e;
 763 int order_pos;
 764 bool something_changed = false;
 765 int i;
 766 gcov_type overall_time = 0, cutoff = 0, cumulated = 0, overall_size = 0;
 767 struct cgraph_node *n,*n2;
 768 int nindirect = 0, ncommon = 0, nunknown = 0, nuseless = 0, nconverted = 0;
 769 int nmismatch = 0, nimpossible = 0;
 770 bool node_map_initialized = false;
 771 gcov_type threshold;
 772
 /* Phase 1: sum profiled time/size over the global basic-block histogram
    and derive THRESHOLD, the minimal execution count covering the hottest
    param_hot_bb_count_ws_permille permille of overall time.  */
 773 if (dump_file)
 774 dump_histogram (dump_file, histogram);
 775 for (i = 0; i < (int)histogram.length (); i++)
 776 {
 777 overall_time += histogram[i]->count * histogram[i]->time;
 778 overall_size += histogram[i]->size;
 779 }
 780 threshold = 0;
 781 if (overall_time)
 782 {
 783 gcc_assert (overall_size)((void)(!(overall_size) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 783, __FUNCTION__), 0 : 0))
;
 784
 785 cutoff = (overall_time * param_hot_bb_count_ws_permilleglobal_options.x_param_hot_bb_count_ws_permille + 500) / 1000;
 /* Histogram is sorted hottest-first; walk until the cumulated time
    reaches the cutoff.  */
 786 for (i = 0; cumulated < cutoff; i++)
 787 {
 788 cumulated += histogram[i]->count * histogram[i]->time;
 789 threshold = histogram[i]->count;
 790 }
 791 if (!threshold)
 792 threshold = 1;
 793 if (dump_file)
 794 {
 795 gcov_type cumulated_time = 0, cumulated_size = 0;
 796
 797 for (i = 0;
 798 i < (int)histogram.length () && histogram[i]->count >= threshold;
 799 i++)
 800 {
 801 cumulated_time += histogram[i]->count * histogram[i]->time;
 802 cumulated_size += histogram[i]->size;
 803 }
 804 fprintf (dump_file, "Determined min count: %" PRId64"l" "d"
 805 " Time:%3.2f%% Size:%3.2f%%\n",
 806 (int64_t)threshold,
 807 cumulated_time * 100.0 / overall_time,
 808 cumulated_size * 100.0 / overall_size);
 809 }
 810
 811 if (in_lto_pglobal_options.x_in_lto_p)
 812 {
 813 if (dump_file)
 814 fprintf (dump_file, "Setting hotness threshold in LTO mode.\n");
 815 set_hot_bb_threshold (threshold);
 816 }
 817 }
 /* The histogram is fully consumed; release it and its allocation pool.  */
 818 histogram.release ();
 819 histogram_pool.release ();
 820
 821 /* Produce speculative calls: we saved common target from profiling into
 822 e->target_id. Now, at link time, we can look up corresponding
 823 function node and produce speculative call. */
 824
 /* NOTE(review): the static analyzer reports "Called C++ object pointer is
    null" for the call_sums->get_create call below (symbol-summary.h:701).
    gcc_checking_assert aborts only in checking builds and compiles away in
    release builds; presumably call_sums is always allocated by
    ipa_profile_generate_summary / ipa_profile_read_summary before this
    pass executes -- confirm against those hooks.  */
 825 gcc_checking_assert (call_sums)((void)(!(call_sums) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/ipa-profile.c"
, 825, __FUNCTION__), 0 : 0))
;
 826
 /* Dump all speculative-call summaries before mutating the callgraph.  */
 827 if (dump_file)
 828 {
 829 if (!node_map_initialized)
 830 init_node_map (false);
 831 node_map_initialized = true;
 832
 833 ipa_profile_dump_all_summaries (dump_file);
 834 }
 835
 /* Phase 2: for every defined function, turn profiled indirect-call
    targets into speculative direct calls when profitable.  */
 836 FOR_EACH_DEFINED_FUNCTION (n)for ((n) = symtab->first_defined_function (); (n); (n) = symtab
->next_defined_function ((n)))
 837 {
 838 bool update = false;
 839
 840 if (!opt_for_fn (n->decl, flag_ipa_profile)(opts_for_fn (n->decl)->x_flag_ipa_profile))
 841 continue;
 842
 843 for (e = n->indirect_calls; e; e = e->next_callee)
 844 {
 845 if (n->count.initialized_p ())
 846 nindirect++;
 847
 848 speculative_call_summary *csum = call_sums->get_create (e);
 849 unsigned spec_count = csum->speculative_call_targets.length ();
 850 if (spec_count)
 851 {
 852 if (!node_map_initialized)
 853 init_node_map (false);
 854 node_map_initialized = true;
 855 ncommon++;
 856
 857 if (in_lto_pglobal_options.x_in_lto_p)
 858 {
 859 if (dump_file)
 860 {
 861 fprintf (dump_file,
 862 "Updating hotness threshold in LTO mode.\n");
 863 fprintf (dump_file, "Updated min count: %" PRId64"l" "d" "\n",
 864 (int64_t) threshold / spec_count);
 865 }
 866 set_hot_bb_threshold (threshold / spec_count);
 867 }
 868
 /* ORIG is the edge's total count; each accepted target gets a share
    proportional to its recorded probability.  */
 869 unsigned speculative_id = 0;
 870 profile_count orig = e->count;
 871 for (unsigned i = 0; i < spec_count; i++)
 872 {
 873 speculative_call_target item
 874 = csum->speculative_call_targets[i];
 875 n2 = find_func_by_profile_id (item.target_id);
 876 if (n2)
 877 {
 878 if (dump_file)
 879 {
 880 fprintf (dump_file,
 881 "Indirect call -> direct call from"
 882 " other module %s => %s, prob %3.2f\n",
 883 n->dump_name (),
 884 n2->dump_name (),
 885 item.target_probability
 886 / (float) REG_BR_PROB_BASE10000);
 887 }
 888 if (item.target_probability < REG_BR_PROB_BASE10000 / 2)
 889 {
 890 nuseless++;
 891 if (dump_file)
 892 fprintf (dump_file,
 893 "Not speculating: "
 894 "probability is too low.\n");
 895 }
 896 else if (!e->maybe_hot_p ())
 897 {
 898 nuseless++;
 899 if (dump_file)
 900 fprintf (dump_file,
 901 "Not speculating: call is cold.\n");
 902 }
 903 else if (n2->get_availability () <= AVAIL_INTERPOSABLE
 904 && n2->can_be_discarded_p ())
 905 {
 906 nuseless++;
 907 if (dump_file)
 908 fprintf (dump_file,
 909 "Not speculating: target is overwritable "
 910 "and can be discarded.\n");
 911 }
 912 else if (!check_argument_count (n2, e))
 913 {
 914 nmismatch++;
 915 if (dump_file)
 916 fprintf (dump_file,
 917 "Not speculating: "
 918 "parameter count mismatch\n");
 919 }
 920 else if (e->indirect_info->polymorphic
 921 && !opt_for_fn (n->decl, flag_devirtualize)(opts_for_fn (n->decl)->x_flag_devirtualize)
 922 && !possible_polymorphic_call_target_p (e, n2))
 923 {
 924 nimpossible++;
 925 if (dump_file)
 926 fprintf (dump_file,
 927 "Not speculating: "
 928 "function is not in the polymorphic "
 929 "call target list\n");
 930 }
 931 else
 932 {
 933 /* Target may be overwritable, but profile says that
 934 control flow goes to this particular implementation
 935 of N2. Speculate on the local alias to allow
 936 inlining. */
 937 if (!n2->can_be_discarded_p ())
 938 {
 939 cgraph_node *alias;
 940 alias = dyn_cast<cgraph_node *>
 941 (n2->noninterposable_alias ());
 942 if (alias)
 943 n2 = alias;
 944 }
 945 nconverted++;
 946 profile_probability prob
 947 = profile_probability::from_reg_br_prob_base
 948 (item.target_probability).adjusted ();
 949 e->make_speculative (n2,
 950 orig.apply_probability (prob),
 951 speculative_id);
 952 update = true;
 953 speculative_id++;
 954 }
 955 }
 956 else
 957 {
 958 if (dump_file)
 959 fprintf (dump_file,
 960 "Function with profile-id %i not found.\n",
 961 item.target_id);
 962 nunknown++;
 963 }
 964 }
 965 }
 966 }
 967 if (update)
 968 ipa_update_overall_fn_summary (n);
 969 }
 970 if (node_map_initialized)
 971 del_node_map ();
 972 if (dump_file && nindirect)
 973 fprintf (dump_file,
 974 "%i indirect calls trained.\n"
 975 "%i (%3.2f%%) have common target.\n"
 976 "%i (%3.2f%%) targets was not found.\n"
 977 "%i (%3.2f%%) targets had parameter count mismatch.\n"
 978 "%i (%3.2f%%) targets was not in polymorphic call target list.\n"
 979 "%i (%3.2f%%) speculations seems useless.\n"
 980 "%i (%3.2f%%) speculations produced.\n",
 981 nindirect,
 982 ncommon, ncommon * 100.0 / nindirect,
 983 nunknown, nunknown * 100.0 / nindirect,
 984 nmismatch, nmismatch * 100.0 / nindirect,
 985 nimpossible, nimpossible * 100.0 / nindirect,
 986 nuseless, nuseless * 100.0 / nindirect,
 987 nconverted, nconverted * 100.0 / nindirect);
 988
 /* Phase 3: propagate frequencies across the callgraph.  Seed from local
    nodes in reverse postorder; AUX marks callees whose frequency may
    still change.  */
 989 order = XCNEWVEC (struct cgraph_node *, symtab->cgraph_count)((struct cgraph_node * *) xcalloc ((symtab->cgraph_count),
sizeof (struct cgraph_node *)))
;
 990 order_pos = ipa_reverse_postorder (order);
 991 for (i = order_pos - 1; i >= 0; i--)
 992 {
 993 if (order[i]->local
 994 && opt_for_fn (order[i]->decl, flag_ipa_profile)(opts_for_fn (order[i]->decl)->x_flag_ipa_profile)
 995 && ipa_propagate_frequency (order[i]))
 996 {
 997 for (e = order[i]->callees; e; e = e->next_callee)
 998 if (e->callee->local && !e->callee->aux)
 999 {
 1000 something_changed = true;
 1001 e->callee->aux = (void *)1;
 1002 }
 1003 }
 1004 order[i]->aux = NULLnullptr;
 1005 }
 1006
 /* Iterate to a fixed point: re-propagate any node whose frequency was
    marked as possibly changed in the previous sweep.  */
 1007 while (something_changed)
 1008 {
 1009 something_changed = false;
 1010 for (i = order_pos - 1; i >= 0; i--)
 1011 {
 1012 if (order[i]->aux
 1013 && opt_for_fn (order[i]->decl, flag_ipa_profile)(opts_for_fn (order[i]->decl)->x_flag_ipa_profile)
 1014 && ipa_propagate_frequency (order[i]))
 1015 {
 1016 for (e = order[i]->callees; e; e = e->next_callee)
 1017 if (e->callee->local && !e->callee->aux)
 1018 {
 1019 something_changed = true;
 1020 e->callee->aux = (void *)1;
 1021 }
 1022 }
 1023 order[i]->aux = NULLnullptr;
 1024 }
 1025 }
 1026 free (order);
 1027
 1028 if (dump_file && (dump_flags & TDF_DETAILS))
 1029 symtab->dump (dump_file);
 1030
 /* Speculative-call summaries are fully consumed by this pass; drop them.  */
 1031 delete call_sums;
 1032 call_sums = NULLnullptr;
 1033
 1034 return 0;
1035}
1036
1037namespace {

/* Metadata describing the "profile_estimate" IPA pass; consumed by the
   pass manager.  */
1039const pass_data pass_data_ipa_profile =
1040{
 1041 IPA_PASS, /* type */
 1042 "profile_estimate", /* name */
 1043 OPTGROUP_NONE, /* optinfo_flags */
 1044 TV_IPA_PROFILE, /* tv_id */
 1045 0, /* properties_required */
 1046 0, /* properties_provided */
 1047 0, /* properties_destroyed */
 1048 0, /* todo_flags_start */
 1049 0, /* todo_flags_finish */
1050};
1051
/* Pass object wiring the profile summary generate/write/read hooks to the
   ipa_profile () transform above.  */
1052class pass_ipa_profile : public ipa_opt_pass_d
1053{
1054public:
 1055 pass_ipa_profile (gcc::context *ctxt)
 1056 : ipa_opt_pass_d (pass_data_ipa_profile, ctxt,
 1057 ipa_profile_generate_summary, /* generate_summary */
 1058 ipa_profile_write_summary, /* write_summary */
 1059 ipa_profile_read_summary, /* read_summary */
 1060 NULLnullptr, /* write_optimization_summary */
 1061 NULLnullptr, /* read_optimization_summary */
 1062 NULLnullptr, /* stmt_fixup */
 1063 0, /* function_transform_todo_flags_start */
 1064 NULLnullptr, /* function_transform */
 1065 NULLnullptr) /* variable_transform */
 1066 {}
 1067
 1068 /* opt_pass methods: */
 /* Run when -fipa-profile is enabled, or unconditionally in LTO mode
    (presumably because summaries may arrive via the LTO streams even when
    the flag is off locally -- confirm).  */
 1069 virtual bool gate (function *) { return flag_ipa_profileglobal_options.x_flag_ipa_profile || in_lto_pglobal_options.x_in_lto_p; }
 1070 virtual unsigned int execute (function *) { return ipa_profile (); }
 1071
1072}; // class pass_ipa_profile
1073
1074} // anon namespace
1075
1076ipa_opt_pass_d *
1077make_pass_ipa_profile (gcc::context *ctxt)
1078{
1079 return new pass_ipa_profile (ctxt);
1080}

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h

1/* Callgraph handling code.
2 Copyright (C) 2003-2021 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#ifndef GCC_CGRAPH_H
22#define GCC_CGRAPH_H
23
24#include "profile-count.h"
25#include "ipa-ref.h"
26#include "plugin-api.h"
27#include "ipa-param-manipulation.h"
28
29extern void debuginfo_early_init (void);
30extern void debuginfo_init (void);
31extern void debuginfo_fini (void);
32extern void debuginfo_start (void);
33extern void debuginfo_stop (void);
34extern void debuginfo_early_start (void);
35extern void debuginfo_early_stop (void);
36
37class ipa_opt_pass_d;
38typedef ipa_opt_pass_d *ipa_opt_pass;
39
40/* Symbol table consists of functions and variables.
41 TODO: add labels and CONST_DECLs. */
42enum symtab_type
43{
 /* Base symbol with no function/variable specialization.  */
 44 SYMTAB_SYMBOL,
 /* Entry represents a function (the cgraph side of the table).  */
 45 SYMTAB_FUNCTION,
 /* Entry represents a variable (the varpool side of the table).  */
 46 SYMTAB_VARIABLE
47};
48
49/* Section names are stored as reference counted strings in GGC safe hashtable
50 (to make them survive through PCH). */
51
52struct GTY((for_user)) section_hash_entry
53{
 /* Number of users of this section name (see "reference counted" above).  */
 54 int ref_count;
 55 char *name; /* As long as this datastructure stays in GGC, we cannot put
 56 string at the tail of structure of GGC dies in horrible
 57 way */
58};
59
/* Hash traits for the section-name table: entries are section_hash_entry
   pointers, looked up by the raw C string of the section name.  */
60struct section_name_hasher : ggc_ptr_hash<section_hash_entry>
61{
 62 typedef const char *compare_type;
 63
 /* Hash an existing entry / compare an entry against a plain name.  */
 64 static hashval_t hash (section_hash_entry *);
 65 static bool equal (section_hash_entry *, const char *);
66};
67
/* How much of a symbol's definition the optimizers may rely on.  Ordered
   from weakest to strongest guarantee; client code compares values with
   relational operators (e.g. <= AVAIL_INTERPOSABLE).  */
68enum availability
69{
 70 /* Not yet set by cgraph_function_body_availability. */
 71 AVAIL_UNSET,
 72 /* Function body/variable initializer is unknown. */
 73 AVAIL_NOT_AVAILABLE,
 74 /* Function body/variable initializer is known but might be replaced
 75 by a different one from other compilation unit and thus needs to
 76 be dealt with a care. Like AVAIL_NOT_AVAILABLE it can have
 77 arbitrary side effects on escaping variables and functions, while
 78 like AVAILABLE it might access static variables. */
 79 AVAIL_INTERPOSABLE,
 80 /* Function body/variable initializer is known and will be used in final
 81 program. */
 82 AVAIL_AVAILABLE,
 83 /* Function body/variable initializer is known and all it's uses are
 84 explicitly visible within current unit (i.e. it's address is never taken
 85 and it is not exported to other units). Currently used only for
 86 functions. */
 87 AVAIL_LOCAL
88};
89
90/* Classification of symbols WRT partitioning. */
/* NOTE(review): "partitioning" here appears to mean LTRANS partitioning for
   parallel LTO compilation (cf. compute_ltrans_boundary referenced below) --
   confirm.  */
91enum symbol_partitioning_class
92{
 93 /* External declarations are ignored by partitioning algorithms and they are
 94 added into the boundary later via compute_ltrans_boundary. */
 95 SYMBOL_EXTERNAL,
 96 /* Partitioned symbols are put into one of partitions. */
 97 SYMBOL_PARTITION,
 98 /* Duplicated symbols (such as comdat or constant pool references) are
 99 copied into every node needing them via add_symbol_to_partition. */
 100 SYMBOL_DUPLICATE
101};
102
103/* Base of all entries in the symbol table.
104 The symtab_node is inherited by cgraph and varpol nodes. */
105struct GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
106 chain_next ("%h.next"), chain_prev ("%h.previous")))
107 symtab_node
108{
109public:
110 friend class symbol_table;
111
112 /* Constructor. */
113 explicit symtab_node (symtab_type t)
114 : type (t), resolution (LDPR_UNKNOWN), definition (false), alias (false),
115 transparent_alias (false), weakref (false), cpp_implicit_alias (false),
116 symver (false), analyzed (false), writeonly (false),
117 refuse_visibility_changes (false), externally_visible (false),
118 no_reorder (false), force_output (false), forced_by_abi (false),
119 unique_name (false), implicit_section (false), body_removed (false),
120 used_from_other_partition (false), in_other_partition (false),
121 address_taken (false), in_init_priority_hash (false),
122 need_lto_streaming (false), offloadable (false), ifunc_resolver (false),
123 order (false), next_sharing_asm_name (NULLnullptr),
124 previous_sharing_asm_name (NULLnullptr), same_comdat_group (NULLnullptr), ref_list (),
125 alias_target (NULLnullptr), lto_file_data (NULLnullptr), aux (NULLnullptr),
126 x_comdat_group (NULL_TREE(tree) nullptr), x_section (NULLnullptr)
127 {}
128
129 /* Return name. */
130 const char *name () const;
131
132 /* Return dump name. */
133 const char *dump_name () const;
134
135 /* Return asm name. */
136 const char *asm_name () const;
137
138 /* Return dump name with assembler name. */
139 const char *dump_asm_name () const;
140
141 /* Return visibility name. */
142 const char *get_visibility_string () const;
143
144 /* Return type_name name. */
145 const char *get_symtab_type_string () const;
146
147 /* Add node into symbol table. This function is not used directly, but via
148 cgraph/varpool node creation routines. */
149 void register_symbol (void);
150
151 /* Remove symbol from symbol table. */
152 void remove (void);
153
154 /* Dump symtab node to F. */
155 void dump (FILE *f);
156
157 /* Dump symtab callgraph in graphviz format. */
158 void dump_graphviz (FILE *f);
159
160 /* Dump symtab node to stderr. */
161 void DEBUG_FUNCTION__attribute__ ((__used__)) debug (void);
162
163 /* Verify consistency of node. */
164 void DEBUG_FUNCTION__attribute__ ((__used__)) verify (void);
165
166 /* Return ipa reference from this symtab_node to
167 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specify type
168 of the use and STMT the statement (if it exists). */
169 ipa_ref *create_reference (symtab_node *referred_node,
170 enum ipa_ref_use use_type);
171
172 /* Return ipa reference from this symtab_node to
173 REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specify type
174 of the use and STMT the statement (if it exists). */
175 ipa_ref *create_reference (symtab_node *referred_node,
176 enum ipa_ref_use use_type, gimple *stmt);
177
178 /* If VAL is a reference to a function or a variable, add a reference from
179 this symtab_node to the corresponding symbol table node. Return the new
180 reference or NULL if none was created. */
181 ipa_ref *maybe_create_reference (tree val, gimple *stmt);
182
183 /* Clone all references from symtab NODE to this symtab_node. */
184 void clone_references (symtab_node *node);
185
186 /* Remove all stmt references in non-speculative references.
187 Those are not maintained during inlining & clonning.
188 The exception are speculative references that are updated along
189 with callgraph edges associated with them. */
190 void clone_referring (symtab_node *node);
191
192 /* Clone reference REF to this symtab_node and set its stmt to STMT. */
193 ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt);
194
195 /* Find the structure describing a reference to REFERRED_NODE
196 and associated with statement STMT. */
197 ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt,
198 unsigned int lto_stmt_uid);
199
200 /* Remove all references that are associated with statement STMT. */
201 void remove_stmt_references (gimple *stmt);
202
203 /* Remove all stmt references in non-speculative references.
204 Those are not maintained during inlining & clonning.
205 The exception are speculative references that are updated along
206 with callgraph edges associated with them. */
207 void clear_stmts_in_references (void);
208
209 /* Remove all references in ref list. */
210 void remove_all_references (void);
211
212 /* Remove all referring items in ref list. */
213 void remove_all_referring (void);
214
215 /* Dump references in ref list to FILE. */
216 void dump_references (FILE *file);
217
218 /* Dump referring in list to FILE. */
219 void dump_referring (FILE *);
220
221 /* Get number of references for this node. */
222 inline unsigned num_references (void)
223 {
224 return ref_list.references.length ();
225 }
226
227 /* Iterates I-th reference in the list, REF is also set. */
228 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
229
230 /* Iterates I-th referring item in the list, REF is also set. */
231 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
232
233 /* Iterates I-th referring alias item in the list, REF is also set. */
234 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
235
236 /* Return true if symtab node and TARGET represents
237 semantically equivalent symbols. */
238 bool semantically_equivalent_p (symtab_node *target);
239
240 /* Classify symbol symtab node for partitioning. */
241 enum symbol_partitioning_class get_partitioning_class (void);
242
243 /* Return comdat group. */
244 tree get_comdat_group ()
245 {
246 return x_comdat_group;
247 }
248
249 /* Return comdat group as identifier_node. */
250 tree get_comdat_group_id ()
251 {
252 if (x_comdat_group && TREE_CODE (x_comdat_group)((enum tree_code) (x_comdat_group)->base.code) != IDENTIFIER_NODE)
253 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group)decl_assembler_name (x_comdat_group);
254 return x_comdat_group;
255 }
256
257 /* Set comdat group. */
258 void set_comdat_group (tree group)
259 {
260 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE((void)(!(!group || ((enum tree_code) (group)->base.code) ==
IDENTIFIER_NODE || (tree_code_type[(int) (((enum tree_code) (
group)->base.code))] == tcc_declaration)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 261, __FUNCTION__), 0 : 0))
261 || DECL_P (group))((void)(!(!group || ((enum tree_code) (group)->base.code) ==
IDENTIFIER_NODE || (tree_code_type[(int) (((enum tree_code) (
group)->base.code))] == tcc_declaration)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 261, __FUNCTION__), 0 : 0))
;
262 x_comdat_group = group;
263 }
264
265 /* Return section as string. */
266 const char * get_section () const
267 {
268 if (!x_section)
269 return NULLnullptr;
270 return x_section->name;
271 }
272
273 /* Remove node from same comdat group. */
274 void remove_from_same_comdat_group (void);
275
276 /* Add this symtab_node to the same comdat group that OLD is in. */
277 void add_to_same_comdat_group (symtab_node *old_node);
278
279 /* Dissolve the same_comdat_group list in which NODE resides. */
280 void dissolve_same_comdat_group_list (void);
281
282 /* Return true when symtab_node is known to be used from other (non-LTO)
283 object file. Known only when doing LTO via linker plugin. */
284 bool used_from_object_file_p (void);
285
286 /* Walk the alias chain to return the symbol NODE is alias of.
287 If NODE is not an alias, return NODE.
288 When AVAILABILITY is non-NULL, get minimal availability in the chain.
289 When REF is non-NULL, assume that reference happens in symbol REF
290 when determining the availability. */
291 symtab_node *ultimate_alias_target (enum availability *avail = NULLnullptr,
292 struct symtab_node *ref = NULLnullptr);
293
294 /* Return next reachable static symbol with initializer after NODE. */
295 inline symtab_node *next_defined_symbol (void);
296
297 /* Add reference recording that symtab node is alias of TARGET.
298 If TRANSPARENT is true make the alias to be transparent alias.
299 The function can fail in the case of aliasing cycles; in this case
300 it returns false. */
301 bool resolve_alias (symtab_node *target, bool transparent = false);
302
303 /* C++ FE sometimes change linkage flags after producing same
304 body aliases. */
305 void fixup_same_cpp_alias_visibility (symtab_node *target);
306
307 /* Call callback on symtab node and aliases associated to this node.
308 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
309 skipped. */
310 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
311 void *data,
312 bool include_overwrite);
313
314 /* If node cannot be interposable by static or dynamic linker to point to
315 different definition, return this symbol. Otherwise look for alias with
316 such property and if none exists, introduce new one. */
317 symtab_node *noninterposable_alias (void);
318
319 /* Return node that alias is aliasing. */
320 inline symtab_node *get_alias_target (void);
321
322 /* Set section for symbol and its aliases. */
323 void set_section (const char *section);
324
325 /* Like set_section, but copying the section name from another node. */
326 void set_section (const symtab_node &other);
327
328 /* Set section, do not recurse into aliases.
329 When one wants to change section of symbol and its aliases,
330 use set_section. */
331 void set_section_for_node (const char *section);
332
333 /* Like set_section_for_node, but copying the section name from another
334 node. */
335 void set_section_for_node (const symtab_node &other);
336
337 /* Set initialization priority to PRIORITY. */
338 void set_init_priority (priority_type priority);
339
340 /* Return the initialization priority. */
341 priority_type get_init_priority ();
342
343 /* Return availability of NODE when referenced from REF. */
344 enum availability get_availability (symtab_node *ref = NULLnullptr);
345
346 /* During LTO stream-in this predicate can be used to check whether node
347 in question prevails in the linking to save some memory usage. */
348 bool prevailing_p (void);
349
350 /* Return true if NODE binds to current definition in final executable
351 when referenced from REF. If REF is NULL return conservative value
352 for any reference. */
353 bool binds_to_current_def_p (symtab_node *ref = NULLnullptr);
354
355 /* Make DECL local. */
356 void make_decl_local (void);
357
358 /* Copy visibility from N. */
359 void copy_visibility_from (symtab_node *n);
360
361 /* Return desired alignment of the definition. This is NOT alignment useful
362 to access THIS, because THIS may be interposable and DECL_ALIGN should
363 be used instead. It however must be guaranteed when output definition
364 of THIS. */
365 unsigned int definition_alignment ();
366
367 /* Return true if alignment can be increased. */
368 bool can_increase_alignment_p ();
369
370 /* Increase alignment of symbol to ALIGN. */
371 void increase_alignment (unsigned int align);
372
373 /* Return true if list contains an alias. */
374 bool has_aliases_p (void);
375
376 /* Return true when the symbol is real symbol, i.e. it is not inline clone
377 or abstract function kept for debug info purposes only. */
378 bool real_symbol_p (void);
379
380 /* Return true when the symbol needs to be output to the LTO symbol table. */
381 bool output_to_lto_symbol_table_p (void);
382
383 /* Determine if symbol declaration is needed. That is, visible to something
384 either outside this translation unit, something magic in the system
385 configury. This function is used just during symbol creation. */
386 bool needed_p (void);
387
388 /* Return true if this symbol is a function from the C frontend specified
389 directly in RTL form (with "__RTL"). */
390 bool native_rtl_p () const;
391
392 /* Return true when there are references to the node. */
393 bool referred_to_p (bool include_self = true);
394
395 /* Return true if symbol can be discarded by linker from the binary.
396 Assume that symbol is used (so there is no need to take into account
397 garbage collecting linkers)
398
399 This can happen for comdats, commons and weaks when they are prevailed
400 by other definition at static linking time. */
401 inline bool
402 can_be_discarded_p (void)
403 {
404 return (DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 404, __FUNCTION__))->decl_common.decl_flag_1)
405 || ((get_comdat_group ()
406 || DECL_COMMON (decl)((contains_struct_check ((decl), (TS_DECL_WITH_VIS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 406, __FUNCTION__))->decl_with_vis.common_flag)
407 || (DECL_SECTION_NAME (decl)decl_section_name (decl) && DECL_WEAK (decl)((contains_struct_check ((decl), (TS_DECL_WITH_VIS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 407, __FUNCTION__))->decl_with_vis.weak_flag)
))
408 && ((resolution != LDPR_PREVAILING_DEF
409 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP)
410 || flag_incremental_linkglobal_options.x_flag_incremental_link)
411 && resolution != LDPR_PREVAILING_DEF_IRONLY));
412 }
413
414 /* Return true if NODE is local to a particular COMDAT group, and must not
415 be named from outside the COMDAT. This is used for C++ decloned
416 constructors. */
417 inline bool comdat_local_p (void)
418 {
419 return (same_comdat_group && !TREE_PUBLIC (decl)((decl)->base.public_flag));
420 }
421
422 /* Return true if ONE and TWO are part of the same COMDAT group. */
423 inline bool in_same_comdat_group_p (symtab_node *target);
424
425 /* Return true if symbol is known to be nonzero. */
426 bool nonzero_address ();
427
428 /* Return 0 if symbol is known to have different address than S2,
429 Return 1 if symbol is known to have same address as S2,
430 return 2 otherwise.
431
432 If MEMORY_ACCESSED is true, assume that both memory pointer to THIS
433 and S2 is going to be accessed. This eliminates the situations when
434 either THIS or S2 is NULL and is useful for comparing bases when deciding
435 about memory aliasing. */
436 int equal_address_to (symtab_node *s2, bool memory_accessed = false);
437
438 /* Return true if symbol's address may possibly be compared to other
439 symbol's address. */
440 bool address_matters_p ();
441
442 /* Return true if NODE's address can be compared. This use properties
443 of NODE only and does not look if the address is actually taken in
444 interesting way. For that use ADDRESS_MATTERS_P instead. */
445 bool address_can_be_compared_p (void);
446
447 /* Return symbol table node associated with DECL, if any,
448 and NULL otherwise. */
449 static inline symtab_node *get (const_tree decl)
450 {
451 /* Check that we are called for sane type of object - functions
452 and static or external variables. */
453 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL((void)(!(((enum tree_code) (decl)->base.code) == FUNCTION_DECL
|| (((enum tree_code) (decl)->base.code) == VAR_DECL &&
(((decl)->base.static_flag) || ((contains_struct_check ((
decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 455, __FUNCTION__))->decl_common.decl_flag_1) || global_options
.x_in_lto_p))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 456, __FUNCTION__), 0 : 0))
454 || (TREE_CODE (decl) == VAR_DECL((void)(!(((enum tree_code) (decl)->base.code) == FUNCTION_DECL
|| (((enum tree_code) (decl)->base.code) == VAR_DECL &&
(((decl)->base.static_flag) || ((contains_struct_check ((
decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 455, __FUNCTION__))->decl_common.decl_flag_1) || global_options
.x_in_lto_p))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 456, __FUNCTION__), 0 : 0))
455 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)((void)(!(((enum tree_code) (decl)->base.code) == FUNCTION_DECL
|| (((enum tree_code) (decl)->base.code) == VAR_DECL &&
(((decl)->base.static_flag) || ((contains_struct_check ((
decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 455, __FUNCTION__))->decl_common.decl_flag_1) || global_options
.x_in_lto_p))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 456, __FUNCTION__), 0 : 0))
456 || in_lto_p)))((void)(!(((enum tree_code) (decl)->base.code) == FUNCTION_DECL
|| (((enum tree_code) (decl)->base.code) == VAR_DECL &&
(((decl)->base.static_flag) || ((contains_struct_check ((
decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 455, __FUNCTION__))->decl_common.decl_flag_1) || global_options
.x_in_lto_p))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 456, __FUNCTION__), 0 : 0))
;
457 /* Check that the mapping is sane - perhaps this check can go away,
458 but at the moment frontends tends to corrupt the mapping by calling
459 memcpy/memset on the tree nodes. */
460 gcc_checking_assert (!decl->decl_with_vis.symtab_node((void)(!(!decl->decl_with_vis.symtab_node || decl->decl_with_vis
.symtab_node->decl == decl) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 461, __FUNCTION__), 0 : 0))
461 || decl->decl_with_vis.symtab_node->decl == decl)((void)(!(!decl->decl_with_vis.symtab_node || decl->decl_with_vis
.symtab_node->decl == decl) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 461, __FUNCTION__), 0 : 0))
;
462 return decl->decl_with_vis.symtab_node;
463 }
464
465 /* Try to find a symtab node for declaration DECL and if it does not
466 exist or if it corresponds to an inline clone, create a new one. */
467 static inline symtab_node * get_create (tree node);
468
469 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
470 Return NULL if there's no such node. */
471 static symtab_node *get_for_asmname (const_tree asmname);
472
473 /* Verify symbol table for internal consistency. */
474 static DEBUG_FUNCTION__attribute__ ((__used__)) void verify_symtab_nodes (void);
475
476 /* Perform internal consistency checks, if they are enabled. */
477 static inline void checking_verify_symtab_nodes (void);
478
479 /* Type of the symbol. */
480 ENUM_BITFIELD (symtab_type)enum symtab_type type : 8;
481
482 /* The symbols resolution. */
483 ENUM_BITFIELD (ld_plugin_symbol_resolution)enum ld_plugin_symbol_resolution resolution : 8;
484
485 /*** Flags representing the symbol type. ***/
486
487 /* True when symbol corresponds to a definition in current unit.
488 set via finalize_function or finalize_decl */
489 unsigned definition : 1;
490 /* True when symbol is an alias.
491 Set by assemble_alias. */
492 unsigned alias : 1;
493 /* When true the alias is translated into its target symbol either by GCC
494 or assembler (it also may just be a duplicate declaration of the same
495 linker name).
496
497 Currently transparent aliases come in three different flavors
498 - aliases having the same assembler name as their target (aka duplicated
499 declarations). In this case the assembler names compare via
500 assembler_names_equal_p and weakref is false
501 - aliases that are renamed at a time being output to final file
502 by varasm.c. For those DECL_ASSEMBLER_NAME have
503 IDENTIFIER_TRANSPARENT_ALIAS set and thus also their assembler
504 name must be unique.
505 Weakrefs belong to this category when we target assembler without
506 .weakref directive.
507 - weakrefs that are renamed by assembler via .weakref directive.
508 In this case the alias may or may not be definition (depending if
509 target declaration was seen by the compiler), weakref is set.
510 Unless we are before renaming statics, assembler names are different.
511
512 Given that we now support duplicate declarations, the second option is
513 redundant and will be removed. */
514 unsigned transparent_alias : 1;
515 /* True when alias is a weakref. */
516 unsigned weakref : 1;
517 /* C++ frontend produce same body aliases and extra name aliases for
518 virtual functions and vtables that are obviously equivalent.
519 Those aliases are bit special, especially because C++ frontend
520 visibility code is so ugly it cannot get them right at first time
521 and their visibility needs to be copied from their "masters" at
522 the end of parsing. */
523 unsigned cpp_implicit_alias : 1;
524 /* The alias is a symbol version. */
525 unsigned symver : 1;
526 /* Set once the definition was analyzed. The list of references and
527 other properties are built during analysis. */
528 unsigned analyzed : 1;
529 /* Set for write-only variables. */
530 unsigned writeonly : 1;
531 /* Visibility of symbol was used for further optimization; do not
532 permit further changes. */
533 unsigned refuse_visibility_changes : 1;
534
535 /*** Visibility and linkage flags. ***/
536
537 /* Set when function is visible by other units. */
538 unsigned externally_visible : 1;
539 /* Don't reorder to other symbols having this set. */
540 unsigned no_reorder : 1;
541 /* The symbol will be assumed to be used in an invisible way (like
542 by an toplevel asm statement). */
543 unsigned force_output : 1;
544 /* Like FORCE_OUTPUT, but in the case it is ABI requiring the symbol to be
545 exported. Unlike FORCE_OUTPUT this flag gets cleared to symbols promoted
546 to static and it does not inhibit optimization. */
547 unsigned forced_by_abi : 1;
548 /* True when the name is known to be unique and thus it does not need mangling. */
549 unsigned unique_name : 1;
550 /* Specify whether the section was set by user or by
551 compiler via -ffunction-sections. */
552 unsigned implicit_section : 1;
553 /* True when body and other characteristics have been removed by
554 symtab_remove_unreachable_nodes. */
555 unsigned body_removed : 1;
556
557 /*** WHOPR Partitioning flags.
558 These flags are used at ltrans stage when only part of the callgraph is
559 available. ***/
560
561 /* Set when variable is used from other LTRANS partition. */
562 unsigned used_from_other_partition : 1;
563 /* Set when function is available in the other LTRANS partition.
564 During WPA output it is used to mark nodes that are present in
565 multiple partitions. */
566 unsigned in_other_partition : 1;
567
568
569
570 /*** other flags. ***/
571
572 /* Set when symbol has address taken. */
573 unsigned address_taken : 1;
574 /* Set when init priority is set. */
575 unsigned in_init_priority_hash : 1;
576
577 /* Set when symbol needs to be streamed into LTO bytecode for LTO, or in case
578 of offloading, for separate compilation for a different target. */
579 unsigned need_lto_streaming : 1;
580
581 /* Set when symbol can be streamed into bytecode for offloading. */
582 unsigned offloadable : 1;
583
584 /* Set when symbol is an IFUNC resolver. */
585 unsigned ifunc_resolver : 1;
586
587
588 /* Ordering of all symtab entries. */
589 int order;
590
591 /* Declaration representing the symbol. */
592 tree decl;
593
594 /* Linked list of symbol table entries starting with symtab_nodes. */
595 symtab_node *next;
596 symtab_node *previous;
597
598 /* Linked list of symbols with the same asm name. There may be multiple
599 entries for single symbol name during LTO, because symbols are renamed
600 only after partitioning.
601
602 Because inline clones are kept in the assembler name has, they also produce
603 duplicate entries.
604
605 There are also several long standing bugs where frontends and builtin
606 code produce duplicated decls. */
607 symtab_node *next_sharing_asm_name;
608 symtab_node *previous_sharing_asm_name;
609
610 /* Circular list of nodes in the same comdat group if non-NULL. */
611 symtab_node *same_comdat_group;
612
613 /* Vectors of referring and referenced entities. */
614 ipa_ref_list GTY((skip)) ref_list;
615
616 /* Alias target. May be either DECL pointer or ASSEMBLER_NAME pointer
617 depending to what was known to frontend on the creation time.
618 Once alias is resolved, this pointer become NULL. */
619 tree alias_target;
620
621 /* File stream where this node is being written to. */
622 struct lto_file_decl_data * lto_file_data;
623
624 PTRvoid * GTY ((skip)) aux;
625
626 /* Comdat group the symbol is in. Can be private if GGC allowed that. */
627 tree x_comdat_group;
628
629 /* Section name. Again can be private, if allowed. */
630 section_hash_entry *x_section;
631
632protected:
633 /* Dump base fields of symtab nodes to F. Not to be used directly. */
634 void dump_base (FILE *);
635
636 /* Verify common part of symtab node. */
637 bool DEBUG_FUNCTION__attribute__ ((__used__)) verify_base (void);
638
639 /* Remove node from symbol table. This function is not used directly, but via
640 cgraph/varpool node removal routines. */
641 void unregister (struct clone_info *);
642
643 /* Return the initialization and finalization priority information for
644 DECL. If there is no previous priority information, a freshly
645 allocated structure is returned. */
646 struct symbol_priority_map *priority_info (void);
647
648 /* Worker for call_for_symbol_and_aliases_1. */
649 bool call_for_symbol_and_aliases_1 (bool (*callback) (symtab_node *, void *),
650 void *data,
651 bool include_overwrite);
652private:
653 /* Workers for set_section. */
654 static bool set_section_from_string (symtab_node *n, void *s);
655 static bool set_section_from_node (symtab_node *n, void *o);
656
657 /* Worker for symtab_resolve_alias. */
658 static bool set_implicit_section (symtab_node *n, void *);
659
660 /* Worker searching noninterposable alias. */
661 static bool noninterposable_alias (symtab_node *node, void *data);
662
663 /* Worker for ultimate_alias_target. */
664 symtab_node *ultimate_alias_target_1 (enum availability *avail = NULLnullptr,
665 symtab_node *ref = NULLnullptr);
666
667 /* Get dump name with normal or assembly name. */
668 const char *get_dump_name (bool asm_name_p) const;
669};
670
671inline void
672symtab_node::checking_verify_symtab_nodes (void)
673{
674 if (flag_checkingglobal_options.x_flag_checking)
675 symtab_node::verify_symtab_nodes ();
676}
677
678/* Walk all aliases for NODE. */
679#define FOR_EACH_ALIAS(NODE, ALIAS)for (unsigned ALIAS_iter_ = 0; (NODE)->iterate_direct_aliases
(ALIAS_iter_, ALIAS); ALIAS_iter_++)
\
680 for (unsigned ALIAS##_iter_ = 0; \
681 (NODE)->iterate_direct_aliases (ALIAS##_iter_, ALIAS); \
682 ALIAS##_iter_++)
683
684/* This is the information that is put into the cgraph local structure
685 to recover a function. */
686struct lto_file_decl_data;
687
688extern const char * const cgraph_availability_names[];
689extern const char * const ld_plugin_symbol_resolution_names[];
690extern const char * const tls_model_names[];
691
692/* Represent which DECL tree (or reference to such tree)
693 will be replaced by another tree while versioning. */
694struct GTY(()) ipa_replace_map
695{
696 /* The new (replacing) tree. */
697 tree new_tree;
698 /* Parameter number to replace, when old_tree is NULL. */
699 int parm_num;
700};
701
702enum cgraph_simd_clone_arg_type
703{
704 SIMD_CLONE_ARG_TYPE_VECTOR,
705 SIMD_CLONE_ARG_TYPE_UNIFORM,
706 /* These are only for integer/pointer arguments passed by value. */
707 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
708 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
709 /* These 6 are only for reference type arguments or arguments passed
710 by reference. */
711 SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP,
712 SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP,
713 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP,
714 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP,
715 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP,
716 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP,
717 SIMD_CLONE_ARG_TYPE_MASK
718};
719
720/* Function arguments in the original function of a SIMD clone.
721 Supplementary data for `struct simd_clone'. */
722
723struct GTY(()) cgraph_simd_clone_arg {
724 /* Original function argument as it originally existed in
725 DECL_ARGUMENTS. */
726 tree orig_arg;
727
728 /* orig_arg's function (or for extern functions type from
729 TYPE_ARG_TYPES). */
730 tree orig_type;
731
732 /* If argument is a vector, this holds the vector version of
733 orig_arg that after adjusting the argument types will live in
734 DECL_ARGUMENTS. Otherwise, this is NULL.
735
736 This basically holds:
737 vector(simdlen) __typeof__(orig_arg) new_arg. */
738 tree vector_arg;
739
740 /* vector_arg's type (or for extern functions new vector type. */
741 tree vector_type;
742
743 /* If argument is a vector, this holds the array where the simd
744 argument is held while executing the simd clone function. This
745 is a local variable in the cloned function. Its content is
746 copied from vector_arg upon entry to the clone.
747
748 This basically holds:
749 __typeof__(orig_arg) simd_array[simdlen]. */
750 tree simd_array;
751
752 /* A SIMD clone's argument can be either linear (constant or
753 variable), uniform, or vector. */
754 enum cgraph_simd_clone_arg_type arg_type;
755
756 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_*CONSTANT_STEP this is
757 the constant linear step, if arg_type is
758 SIMD_CLONE_ARG_TYPE_LINEAR_*VARIABLE_STEP, this is index of
759 the uniform argument holding the step, otherwise 0. */
760 HOST_WIDE_INTlong linear_step;
761
762 /* Variable alignment if available, otherwise 0. */
763 unsigned int alignment;
764};
765
766/* Specific data for a SIMD function clone. */
767
768struct GTY(()) cgraph_simd_clone {
769 /* Number of words in the SIMD lane associated with this clone. */
770 poly_uint64 simdlen;
771
772 /* Number of annotated function arguments in `args'. This is
773 usually the number of named arguments in FNDECL. */
774 unsigned int nargs;
775
776 /* Max hardware vector size in bits for integral vectors. */
777 poly_uint64 vecsize_int;
778
779 /* Max hardware vector size in bits for floating point vectors. */
780 poly_uint64 vecsize_float;
781
782 /* Machine mode of the mask argument(s), if they are to be passed
783 as bitmasks in integer argument(s). VOIDmode if masks are passed
784 as vectors of characteristic type. */
785 machine_mode mask_mode;
786
787 /* The mangling character for a given vector size. This is used
788 to determine the ISA mangling bit as specified in the Intel
789 Vector ABI. */
790 unsigned char vecsize_mangle;
791
792 /* True if this is the masked, in-branch version of the clone,
793 otherwise false. */
794 unsigned int inbranch : 1;
795
796 /* Doubly linked list of SIMD clones. */
797 cgraph_node *prev_clone, *next_clone;
798
799 /* Original cgraph node the SIMD clones were created for. */
800 cgraph_node *origin;
801
802 /* Annotated function arguments for the original function. */
803 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
804};
805
806/* Function Multiversioning info. */
807struct GTY((for_user)) cgraph_function_version_info {
808 /* The cgraph_node for which the function version info is stored. */
809 cgraph_node *this_node;
810 /* Chains all the semantically identical function versions. The
811 first function in this chain is the version_info node of the
812 default function. */
813 cgraph_function_version_info *prev;
814 /* If this version node corresponds to a dispatcher for function
815 versions, this points to the version info node of the default
816 function, the first node in the chain. */
817 cgraph_function_version_info *next;
818 /* If this node corresponds to a function version, this points
819 to the dispatcher function decl, which is the function that must
820 be called to execute the right function version at run-time.
821
822 If this cgraph node is a dispatcher (if dispatcher_function is
823 true, in the cgraph_node struct) for function versions, this
824 points to resolver function, which holds the function body of the
825 dispatcher. The dispatcher decl is an alias to the resolver
826 function decl. */
827 tree dispatcher_resolver;
828};
829
830#define DEFCIFCODE(code, type, string)CIF_code, CIF_ ## code,
831/* Reasons for inlining failures. */
832
833enum cgraph_inline_failed_t {
834#include "cif-code.def"
835 CIF_N_REASONS
836};
837
838enum cgraph_inline_failed_type_t
839{
840 CIF_FINAL_NORMAL = 0,
841 CIF_FINAL_ERROR
842};
843
844struct cgraph_edge;
845
846struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
847{
848 typedef gimple *compare_type;
849
850 static hashval_t hash (cgraph_edge *);
851 static hashval_t hash (gimple *);
852 static bool equal (cgraph_edge *, gimple *);
853};
854
855/* The cgraph data structure.
856 Each function decl has assigned cgraph_node listing callees and callers. */
857
858struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node
859{
860 friend class symbol_table;
861
862 /* Constructor. */
863 explicit cgraph_node (int uid)
864 : symtab_node (SYMTAB_FUNCTION), callees (NULLnullptr), callers (NULLnullptr),
865 indirect_calls (NULLnullptr),
866 next_sibling_clone (NULLnullptr), prev_sibling_clone (NULLnullptr), clones (NULLnullptr),
867 clone_of (NULLnullptr), call_site_hash (NULLnullptr), former_clone_of (NULLnullptr),
868 simdclone (NULLnullptr), simd_clones (NULLnullptr), ipa_transforms_to_apply (vNULL),
869 inlined_to (NULLnullptr), rtl (NULLnullptr),
870 count (profile_count::uninitialized ()),
871 count_materialization_scale (REG_BR_PROB_BASE10000), profile_id (0),
872 unit_id (0), tp_first_run (0), thunk (false),
873 used_as_abstract_origin (false),
874 lowered (false), process (false), frequency (NODE_FREQUENCY_NORMAL),
875 only_called_at_startup (false), only_called_at_exit (false),
876 tm_clone (false), dispatcher_function (false), calls_comdat_local (false),
877 icf_merged (false), nonfreeing_fn (false), merged_comdat (false),
878 merged_extern_inline (false), parallelized_function (false),
879 split_part (false), indirect_call_target (false), local (false),
880 versionable (false), can_change_signature (false),
881 redefined_extern_inline (false), tm_may_enter_irr (false),
882 ipcp_clone (false), declare_variant_alt (false),
883 calls_declare_variant_alt (false), m_uid (uid), m_summary_id (-1)
884 {}
885
886 /* Remove the node from cgraph and all inline clones inlined into it.
887 Skip however removal of FORBIDDEN_NODE and return true if it needs to be
888 removed. This allows to call the function from outer loop walking clone
889 tree. */
890 bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULLnullptr);
891
892 /* Record all references from cgraph_node that are taken
893 in statement STMT. */
894 void record_stmt_references (gimple *stmt);
895
896 /* Like cgraph_set_call_stmt but walk the clone tree and update all
897 clones sharing the same function body.
898 When WHOLE_SPECULATIVE_EDGES is true, all three components of
899 speculative edge gets updated. Otherwise we update only direct
900 call. */
901 void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
902 bool update_speculative = true);
903
904 /* Walk the alias chain to return the function cgraph_node is alias of.
905 Walk through thunk, too.
906 When AVAILABILITY is non-NULL, get minimal availability in the chain.
907 When REF is non-NULL, assume that reference happens in symbol REF
908 when determining the availability. */
909 cgraph_node *function_symbol (enum availability *avail = NULLnullptr,
910 struct symtab_node *ref = NULLnullptr);
911
912 /* Walk the alias chain to return the function cgraph_node is alias of.
913 Walk through non virtual thunks, too. Thus we return either a function
914 or a virtual thunk node.
915 When AVAILABILITY is non-NULL, get minimal availability in the chain.
916 When REF is non-NULL, assume that reference happens in symbol REF
917 when determining the availability. */
918 cgraph_node *function_or_virtual_thunk_symbol
919 (enum availability *avail = NULLnullptr,
920 struct symtab_node *ref = NULLnullptr);
921
922 /* Create node representing clone of N executed COUNT times. Decrease
923 the execution counts from original node too.
924 The new clone will have decl set to DECL that may or may not be the same
925 as decl of N.
926
927 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
928 function's profile to reflect the fact that part of execution is handled
929 by node.
930 When CALL_DUPLICATION_HOOK is true, the ipa passes are acknowledged about
931 the new clone. Otherwise the caller is responsible for doing so later.
932
933 If the new node is being inlined into another one, NEW_INLINED_TO should be
934 the outline function the new one is (even indirectly) inlined to.
935 All hooks will see this in node's inlined_to, when invoked.
936 Can be NULL if the node is not inlined. SUFFIX is string that is appended
937 to the original name. */
938 cgraph_node *create_clone (tree decl, profile_count count,
939 bool update_original,
940 vec<cgraph_edge *> redirect_callers,
941 bool call_duplication_hook,
942 cgraph_node *new_inlined_to,
943 ipa_param_adjustments *param_adjustments,
944 const char *suffix = NULLnullptr);
945
946 /* Create callgraph node clone with new declaration. The actual body will be
947 copied later at compilation stage. The name of the new clone will be
948 constructed from the name of the original node, SUFFIX and NUM_SUFFIX. */
949 cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
950 vec<ipa_replace_map *, va_gc> *tree_map,
951 ipa_param_adjustments *param_adjustments,
952 const char * suffix, unsigned num_suffix);
953
954 /* Remove the node from the tree of virtual and inline clones and make it a
955 standalone node - not a clone any more. */
956 void remove_from_clone_tree ();
957
958 /* cgraph node being removed from symbol table; see if its entry can be
959 replaced by other inline clone. */
960 cgraph_node *find_replacement (struct clone_info *);
961
962 /* Create a new cgraph node which is the new version of
963 callgraph node. REDIRECT_CALLERS holds the callers
964 edges which should be redirected to point to
965 NEW_VERSION. ALL the callees edges of the node
966 are cloned to the new version node. Return the new
967 version node.
968
969 If non-NULL BLOCK_TO_COPY determine what basic blocks
970 was copied to prevent duplications of calls that are dead
971 in the clone.
972
973 SUFFIX is string that is appended to the original name. */
974
975 cgraph_node *create_version_clone (tree new_decl,
976 vec<cgraph_edge *> redirect_callers,
977 bitmap bbs_to_copy,
978 const char *suffix = NULLnullptr);
979
980 /* Perform function versioning.
981 Function versioning includes copying of the tree and
982 a callgraph update (creating a new cgraph node and updating
983 its callees and callers).
984
985 REDIRECT_CALLERS varray includes the edges to be redirected
986 to the new version.
987
988 TREE_MAP is a mapping of tree nodes we want to replace with
989 new ones (according to results of prior analysis).
990
991 If non-NULL ARGS_TO_SKIP determine function parameters to remove
992 from new version.
993 If SKIP_RETURN is true, the new version will return void.
994 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
995 If non_NULL NEW_ENTRY determine new entry BB of the clone.
996
997 If TARGET_ATTRIBUTES is non-null, when creating a new declaration,
998 add the attributes to DECL_ATTRIBUTES. And call valid_attribute_p
999 that will promote value of the attribute DECL_FUNCTION_SPECIFIC_TARGET
1000 of the declaration.
1001
1002 Return the new version's cgraph node. */
1003 cgraph_node *create_version_clone_with_body
1004 (vec<cgraph_edge *> redirect_callers,
1005 vec<ipa_replace_map *, va_gc> *tree_map,
1006 ipa_param_adjustments *param_adjustments,
1007 bitmap bbs_to_copy, basic_block new_entry_block, const char *clone_name,
1008 tree target_attributes = NULL_TREE(tree) nullptr);
1009
1010 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
1011 corresponding to cgraph_node. */
1012 cgraph_function_version_info *insert_new_function_version (void);
1013
1014 /* Get the cgraph_function_version_info node corresponding to node. */
1015 cgraph_function_version_info *function_version (void);
1016
1017 /* Discover all functions and variables that are trivially needed, analyze
1018 them as well as all functions and variables referred by them */
1019 void analyze (void);
1020
1021 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
1022 aliases DECL with an adjustments made into the first parameter.
1023 See comments in struct symtab-thunks.h for detail on the parameters. */
1024 cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
1025 HOST_WIDE_INTlong fixed_offset,
1026 HOST_WIDE_INTlong virtual_value,
1027 HOST_WIDE_INTlong indirect_offset,
1028 tree virtual_offset,
1029 tree real_alias);
1030
1031
1032 /* Return node that alias is aliasing. */
1033 inline cgraph_node *get_alias_target (void);
1034
1035 /* Given function symbol, walk the alias chain to return the function node
1036 is alias of. Do not walk through thunks.
1037 When AVAILABILITY is non-NULL, get minimal availability in the chain.
1038 When REF is non-NULL, assume that reference happens in symbol REF
1039 when determining the availability. */
1040
1041 cgraph_node *ultimate_alias_target (availability *availability = NULLnullptr,
1042 symtab_node *ref = NULLnullptr);
1043
1044 /* Call expand_thunk on all callers that are thunks and analyze those
1045 nodes that were expanded. */
1046 void expand_all_artificial_thunks ();
1047
1048 /* Assemble thunks and aliases associated to node. */
1049 void assemble_thunks_and_aliases (void);
1050
1051 /* Expand function specified by node. */
1052 void expand (void);
1053
1054 /* As an GCC extension we allow redefinition of the function. The
1055 semantics when both copies of bodies differ is not well defined.
1056 We replace the old body with new body so in unit at a time mode
1057 we always use new body, while in normal mode we may end up with
1058 old body inlined into some functions and new body expanded and
1059 inlined in others. */
1060 void reset (void);
1061
1062 /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
1063 kind of wrapper method. */
1064 void create_wrapper (cgraph_node *target);
1065
1066 /* Verify cgraph nodes of the cgraph node. */
1067 void DEBUG_FUNCTION__attribute__ ((__used__)) verify_node (void);
1068
1069 /* Remove function from symbol table. */
1070 void remove (void);
1071
1072 /* Dump call graph node to file F. */
1073 void dump (FILE *f);
1074
1075 /* Dump call graph node to file F. */
1076 void dump_graphviz (FILE *f);
1077
1078 /* Dump call graph node to stderr. */
1079 void DEBUG_FUNCTION__attribute__ ((__used__)) debug (void);
1080
1081 /* When doing LTO, read cgraph_node's body from disk if it is not already
1082 present. */
1083 bool get_untransformed_body ();
1084
1085 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
1086 if it is not already present. When some IPA transformations are scheduled,
1087 apply them. */
1088 bool get_body ();
1089
1090 void materialize_clone (void);
1091
1092 /* Release memory used to represent body of function.
1093 Use this only for functions that are released before being translated to
1094 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1095 are free'd in final.c via free_after_compilation(). */
1096 void release_body (bool keep_arguments = false);
1097
1098 /* Return the DECL_STRUCT_FUNCTION of the function. */
1099 struct function *get_fun () const;
1100
1101 /* Bring cgraph node local. */
1102 void make_local (void);
1103
1104 /* Likewise indicate that a node is having address taken. */
1105 void mark_address_taken (void);
1106
1107 /* Set finalization priority to PRIORITY. */
1108 void set_fini_priority (priority_type priority);
1109
1110 /* Return the finalization priority. */
1111 priority_type get_fini_priority (void);
1112
1113 /* Create edge from a given function to CALLEE in the cgraph. */
1114 cgraph_edge *create_edge (cgraph_node *callee,
1115 gcall *call_stmt, profile_count count,
1116 bool cloning_p = false);
1117
1118 /* Create an indirect edge with a yet-undetermined callee where the call
1119 statement destination is a formal parameter of the caller with index
1120 PARAM_INDEX. */
1121 cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
1122 profile_count count,
1123 bool cloning_p = false);
1124
1125 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
1126 same function body. If clones already have edge for OLD_STMT; only
1127 update the edge same way as cgraph_set_call_stmt_including_clones does. */
1128 void create_edge_including_clones (cgraph_node *callee,
1129 gimple *old_stmt, gcall *stmt,
1130 profile_count count,
1131 cgraph_inline_failed_t reason);
1132
1133 /* Return the callgraph edge representing the GIMPLE_CALL statement
1134 CALL_STMT. */
1135 cgraph_edge *get_edge (gimple *call_stmt);
1136
1137 /* Collect all callers of cgraph_node and its aliases that are known to lead
1138 to NODE (i.e. are not overwritable) and that are not thunks. */
1139 vec<cgraph_edge *> collect_callers (void);
1140
1141 /* Remove all callers from the node. */
1142 void remove_callers (void);
1143
1144 /* Remove all callees from the node. */
1145 void remove_callees (void);
1146
1147 /* Return function availability. See cgraph.h for description of individual
1148 return values. */
1149 enum availability get_availability (symtab_node *ref = NULLnullptr);
1150
1151 /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
1152 if any to NOTHROW. */
1153 bool set_nothrow_flag (bool nothrow);
1154
1155 /* SET DECL_IS_MALLOC on cgraph_node's decl and on aliases of the node
1156 if any. */
1157 bool set_malloc_flag (bool malloc_p);
1158
1159 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
1160 If SET_CONST if false, clear the flag.
1161
1162 When setting the flag be careful about possible interposition and
1163 do not set the flag for functions that can be interposed and set pure
1164 flag for functions that can bind to other definition.
1165
1166 Return true if any change was done. */
1167
1168 bool set_const_flag (bool set_const, bool looping);
1169
1170 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1171 if any to PURE.
1172
1173 When setting the flag, be careful about possible interposition.
1174 Return true if any change was done. */
1175
1176 bool set_pure_flag (bool pure, bool looping);
1177
1178 /* Call callback on function and aliases associated to the function.
1179 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1180 skipped. */
1181
1182 bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1183 void *),
1184 void *data, bool include_overwritable);
1185
1186 /* Call callback on cgraph_node, thunks and aliases associated to NODE.
1187 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1188 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
1189 skipped. */
1190 bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1191 void *data),
1192 void *data,
1193 bool include_overwritable,
1194 bool exclude_virtual_thunks = false);
1195
1196 /* Likewise indicate that a node is needed, i.e. reachable via some
1197 external means. */
1198 inline void mark_force_output (void);
1199
1200 /* Return true when function can be marked local. */
1201 bool local_p (void);
1202
1203 /* Return true if cgraph_node can be made local for API change.
1204 Extern inline functions and C++ COMDAT functions can be made local
1205 at the expense of possible code size growth if function is used in multiple
1206 compilation units. */
1207 bool can_be_local_p (void);
1208
1209 /* Return true when cgraph_node cannot return or throw and thus
1210 it is safe to ignore its side effects for IPA analysis. */
1211 bool cannot_return_p (void);
1212
1213 /* Return true when function cgraph_node and all its aliases are only called
1214 directly.
1215 i.e. it is not externally visible, address was not taken and
1216 it is not used in any other non-standard way. */
1217 bool only_called_directly_p (void);
1218
1219 /* Return true when function is only called directly or it has alias.
1220 i.e. it is not externally visible, address was not taken and
1221 it is not used in any other non-standard way. */
1222 inline bool only_called_directly_or_aliased_p (void);
1223
1224 /* Return true when function cgraph_node can be expected to be removed
1225 from program when direct calls in this compilation unit are removed.
1226
1227 As a special case COMDAT functions are
1228 cgraph_can_remove_if_no_direct_calls_p while the are not
1229 cgraph_only_called_directly_p (it is possible they are called from other
1230 unit)
1231
1232 This function behaves as cgraph_only_called_directly_p because eliminating
1233 all uses of COMDAT function does not make it necessarily disappear from
1234 the program unless we are compiling whole program or we do LTO. In this
1235 case we know we win since dynamic linking will not really discard the
1236 linkonce section.
1237
1238 If WILL_INLINE is true, assume that function will be inlined into all the
1239 direct calls. */
1240 bool will_be_removed_from_program_if_no_direct_calls_p
1241 (bool will_inline = false);
1242
1243 /* Return true when function can be removed from callgraph
1244 if all direct calls and references are eliminated. The function does
1245 not take into account comdat groups. */
1246 bool can_remove_if_no_direct_calls_and_refs_p (void);
1247
1248 /* Return true when function cgraph_node and its aliases can be removed from
1249 callgraph if all direct calls are eliminated.
1250 If WILL_INLINE is true, assume that function will be inlined into all the
1251 direct calls. */
1252 bool can_remove_if_no_direct_calls_p (bool will_inline = false);
1253
1254 /* Return true when callgraph node is a function with Gimple body defined
1255 in current unit. Functions can also be define externally or they
1256 can be thunks with no Gimple representation.
1257
1258 Note that at WPA stage, the function body may not be present in memory. */
1259 inline bool has_gimple_body_p (void);
1260
1261 /* Return true if this node represents a former, i.e. an expanded, thunk. */
1262 bool former_thunk_p (void);
1263
1264 /* Check if function calls comdat local. This is used to recompute
1265 calls_comdat_local flag after function transformations. */
1266 bool check_calls_comdat_local_p ();
1267
1268 /* Return true if function should be optimized for size. */
1269 enum optimize_size_level optimize_for_size_p (void);
1270
1271 /* Dump the callgraph to file F. */
1272 static void dump_cgraph (FILE *f);
1273
1274 /* Dump the call graph to stderr. */
1275 static inline
1276 void debug_cgraph (void)
1277 {
1278 dump_cgraph (stderrstderr);
1279 }
1280
1281 /* Get unique identifier of the node. */
1282 inline int get_uid ()
1283 {
1284 return m_uid;
1285 }
1286
1287 /* Get summary id of the node. */
1288 inline int get_summary_id ()
1289 {
1290 return m_summary_id;
1291 }
1292
1293 /* Record that DECL1 and DECL2 are semantically identical function
1294 versions. */
1295 static void record_function_versions (tree decl1, tree decl2);
1296
1297 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
1298 DECL is a duplicate declaration. */
1299 static void delete_function_version_by_decl (tree decl);
1300
1301 /* Add the function FNDECL to the call graph.
1302 Unlike finalize_function, this function is intended to be used
1303 by middle end and allows insertion of new function at arbitrary point
1304 of compilation. The function can be either in high, low or SSA form
1305 GIMPLE.
1306
1307 The function is assumed to be reachable and have address taken (so no
1308 API breaking optimizations are performed on it).
1309
1310 Main work done by this function is to enqueue the function for later
1311 processing to avoid the need for passes to be re-entrant. */
1312 static void add_new_function (tree fndecl, bool lowered);
1313
1314 /* Return callgraph node for given symbol and check it is a function.
 NOTE(review): result may be NULL -- dyn_cast yields NULL when the
 symtab entry is not a cgraph_node; presumably also when DECL has no
 symtab entry at all. Verify against symtab_node::get. */
1315 static inline cgraph_node *get (const_tree decl)
1316 {
1317 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL)((void)(!(((enum tree_code) (decl)->base.code) == FUNCTION_DECL
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 1317, __FUNCTION__), 0 : 0))
;
1318 return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1319 }
1320
1321 /* DECL has been parsed. Take it, queue it, compile it at the whim of the
1322 logic in effect. If NO_COLLECT is true, then our caller cannot stand to
1323 have the garbage collector run at the moment. We would need to either
1324 create a new GC context, or just not compile right now. */
1325 static void finalize_function (tree, bool);
1326
1327 /* Return cgraph node assigned to DECL. Create new one when needed. */
1328 static cgraph_node * create (tree decl);
1329
1330 /* Try to find a call graph node for declaration DECL and if it does not
1331 exist or if it corresponds to an inline clone, create a new one. */
1332 static cgraph_node * get_create (tree);
1333
1334 /* Return local info for the compiled function. */
1335 static cgraph_node *local_info_node (tree decl);
1336
1337 /* Return RTL info for the compiled function. */
1338 static struct cgraph_rtl_info *rtl_info (const_tree);
1339
1340 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1341 Return NULL if there's no such node. */
1342 static cgraph_node *get_for_asmname (tree asmname);
1343
1344 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
1345 successful and NULL otherwise.
1346 Same body aliases are output whenever the body of DECL is output,
1347 and cgraph_node::get (ALIAS) transparently
1348 returns cgraph_node::get (DECL). */
1349 static cgraph_node * create_same_body_alias (tree alias, tree decl);
1350
1351 /* Verify whole cgraph structure. */
1352 static void DEBUG_FUNCTION__attribute__ ((__used__)) verify_cgraph_nodes (void);
1353
1354 /* Verify cgraph, if consistency checking is enabled. */
1355 static inline void checking_verify_cgraph_nodes (void);
1356
1357 /* Worker to bring NODE local. */
1358 static bool make_local (cgraph_node *node, void *);
1359
1360 /* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
1361 the function body is associated
1362 with (not necessarily cgraph_node (DECL)). */
1363 static cgraph_node *create_alias (tree alias, tree target);
1364
1365 /* Return true if NODE has thunk. */
1366 static bool has_thunk_p (cgraph_node *node, void *);
1367
 /* Head of the list of this function's outgoing call edges (chained via
 cgraph_edge::next_callee). */
1368 cgraph_edge *callees;
 /* Head of the list of incoming call edges (chained via
 cgraph_edge::next_caller). */
1369 cgraph_edge *callers;
1370 /* List of edges representing indirect calls with a yet undetermined
1371 callee. */
1372 cgraph_edge *indirect_calls;
 /* Links forming the tree of clones: sibling links, first child clone,
 and the node this one was cloned from. See also former_clone_of. */
1373 cgraph_node *next_sibling_clone;
1374 cgraph_node *prev_sibling_clone;
1375 cgraph_node *clones;
1376 cgraph_node *clone_of;
1377 /* For functions with many calls sites it holds map from call expression
1378 to the edge to speed up cgraph_edge function. */
1379 hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1380 /* Declaration node used to be clone of. */
1381 tree former_clone_of;
1382
1383 /* If this is a SIMD clone, this points to the SIMD specific
1384 information for it. */
1385 cgraph_simd_clone *simdclone;
1386 /* If this function has SIMD clones, this points to the first clone. */
1387 cgraph_node *simd_clones;
1388
1389 /* Interprocedural passes scheduled to have their transform functions
1390 applied next time we execute local pass on them. We maintain it
1391 per-function in order to allow IPA passes to introduce new functions. */
1392 vec<ipa_opt_pass, va_heap, vl_ptr> GTY((skip)) ipa_transforms_to_apply;
1393
1394 /* For inline clones this points to the function they will be
1395 inlined into. */
1396 cgraph_node *inlined_to;
1397
 /* RTL information for the function; see rtl_info (). */
1398 struct cgraph_rtl_info *rtl;
1399
1400 /* Expected number of executions: calculated in profile.c. */
1401 profile_count count;
1402 /* How to scale counts at materialization time; used to merge
1403 LTO units with different number of profile runs. */
1404 int count_materialization_scale;
1405 /* ID assigned by the profiling. */
1406 unsigned int profile_id;
1407 /* ID of the translation unit. */
1408 int unit_id;
1409 /* Time profiler: first run of function. */
1410 int tp_first_run;
1411
1412 /* True when symbol is a thunk. */
1413 unsigned thunk : 1;
1414 /* Set when decl is an abstract function pointed to by the
1415 ABSTRACT_DECL_ORIGIN of a reachable function. */
1416 unsigned used_as_abstract_origin : 1;
1417 /* Set once the function is lowered (i.e. its CFG is built). */
1418 unsigned lowered : 1;
1419 /* Set once the function has been instantiated and its callee
1420 lists created. */
1421 unsigned process : 1;
1422 /* How commonly executed the node is. Initialized during branch
1423 probabilities pass. */
1424 ENUM_BITFIELD (node_frequency)enum node_frequency frequency : 2;
1425 /* True when function can only be called at startup (from static ctor). */
1426 unsigned only_called_at_startup : 1;
1427 /* True when function can only be called at exit (from static dtor). */
1428 unsigned only_called_at_exit : 1;
1429 /* True when function is the transactional clone of a function which
1430 is called only from inside transactions. */
1431 /* ?? We should be able to remove this. We have enough bits in
1432 cgraph to calculate it. */
1433 unsigned tm_clone : 1;
1434 /* True if this decl is a dispatcher for function versions. */
1435 unsigned dispatcher_function : 1;
1436 /* True if this decl calls a COMDAT-local function. This is set up in
1437 compute_fn_summary and inline_call. */
1438 unsigned calls_comdat_local : 1;
1439 /* True if node has been created by merge operation in IPA-ICF. */
1440 unsigned icf_merged: 1;
1441 /* True if call to node can't result in a call to free, munmap or
1442 other operation that could make previously non-trapping memory
1443 accesses trapping. */
1444 unsigned nonfreeing_fn : 1;
1445 /* True if there were multiple COMDAT bodies merged by lto-symtab. */
1446 unsigned merged_comdat : 1;
1447 /* True if this def was merged with extern inlines. */
1448 unsigned merged_extern_inline : 1;
1449 /* True if function was created to be executed in parallel. */
1450 unsigned parallelized_function : 1;
1451 /* True if function is part split out by ipa-split. */
1452 unsigned split_part : 1;
1453 /* True if the function appears as possible target of indirect call. */
1454 unsigned indirect_call_target : 1;
1455 /* Set when function is visible in current compilation unit only and
1456 its address is never taken. */
1457 unsigned local : 1;
1458 /* False when there is something that makes versioning impossible. */
1459 unsigned versionable : 1;
1460 /* False when function calling convention and signature cannot be changed.
1461 This is the case when __builtin_apply_args is used. */
1462 unsigned can_change_signature : 1;
1463 /* True when the function has been originally extern inline, but it is
1464 redefined now. */
1465 unsigned redefined_extern_inline : 1;
1466 /* True if the function may enter serial irrevocable mode. */
1467 unsigned tm_may_enter_irr : 1;
1468 /* True if this was a clone created by ipa-cp. */
1469 unsigned ipcp_clone : 1;
1470 /* True if this is the deferred declare variant resolution artificial
1471 function. */
1472 unsigned declare_variant_alt : 1;
1473 /* True if the function calls declare_variant_alt functions. */
1474 unsigned calls_declare_variant_alt : 1;
1475
1476private:
1477 /* Unique id of the node. */
1478 int m_uid;
1479
1480 /* Summary id that is recycled. */
1481 int m_summary_id;
1482
1483 /* Worker for call_for_symbol_and_aliases. */
1484 bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
1485 void *),
1486 void *data, bool include_overwritable);
1487};
1488
1489/* A cgraph node set is a collection of cgraph nodes. A cgraph node
1490 can appear in multiple sets. */
1491struct cgraph_node_set_def
1492{
 /* Presumably maps a node to its position in NODES -- verify against
 the set's users. */
1493 hash_map<cgraph_node *, size_t> *map;
 /* The nodes in the set. */
1494 vec<cgraph_node *> nodes;
1495};
1496
1497typedef cgraph_node_set_def *cgraph_node_set;
1498typedef struct varpool_node_set_def *varpool_node_set;
1499
1500struct varpool_node;
1501
1502/* A varpool node set is a collection of varpool nodes. A varpool node
1503 can appear in multiple sets. */
1504struct varpool_node_set_def
1505{
 /* Presumably maps a node to its position in NODES -- verify against
 the set's users. */
1506 hash_map<varpool_node *, size_t> * map;
 /* The nodes in the set. */
1507 vec<varpool_node *> nodes;
1508};
1509
1510/* Iterator structure for cgraph node sets. */
1511struct cgraph_node_set_iterator
1512{
 /* Set being iterated over. */
1513 cgraph_node_set set;
 /* Current iteration position. */
1514 unsigned index;
1515};
1516
1517/* Iterator structure for varpool node sets. */
1518struct varpool_node_set_iterator
1519{
 /* Set being iterated over. */
1520 varpool_node_set set;
 /* Current iteration position. */
1521 unsigned index;
1522};
1523
1524/* Context of polymorphic call. It represents information about the type of
1525 instance that may reach the call. This is used by ipa-devirt walkers of the
1526 type inheritance graph. */
1527
1528class GTY(()) ipa_polymorphic_call_context {
1529public:
1530 /* The called object appears in an object of type OUTER_TYPE
1531 at offset OFFSET. When information is not 100% reliable, we
1532 use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1533 HOST_WIDE_INTlong offset;
1534 HOST_WIDE_INTlong speculative_offset;
1535 tree outer_type;
1536 tree speculative_outer_type;
1537 /* True if outer object may be in construction or destruction. */
1538 unsigned maybe_in_construction : 1;
1539 /* True if outer object may be of derived type. */
1540 unsigned maybe_derived_type : 1;
1541 /* True if speculative outer object may be of derived type. We always
1542 speculate that construction does not happen. */
1543 unsigned speculative_maybe_derived_type : 1;
1544 /* True if the context is invalid and all calls should be redirected
1545 to BUILTIN_UNREACHABLE. */
1546 unsigned invalid : 1;
1547 /* True if the outer type is dynamic. */
1548 unsigned dynamic : 1;
1549
1550 /* Build empty "I know nothing" context. */
1551 ipa_polymorphic_call_context ();
1552 /* Build polymorphic call context for indirect call E. */
1553 ipa_polymorphic_call_context (cgraph_edge *e);
1554 /* Build polymorphic call context for IP invariant CST.
1555 If specified, OTR_TYPE specify the type of polymorphic call
1556 that takes CST+OFFSET as a parameter. */
1557 ipa_polymorphic_call_context (tree cst, tree otr_type = NULLnullptr,
1558 HOST_WIDE_INTlong offset = 0);
1559 /* Build context for pointer REF contained in FNDECL at statement STMT.
1560 if INSTANCE is non-NULL, return pointer to the object described by
1561 the context. */
1562 ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
1563 tree *instance = NULLnullptr);
1564
1565 /* Look for vtable stores or constructor calls to work out dynamic type
1566 of memory location. */
1567 bool get_dynamic_type (tree, tree, tree, gimple *, unsigned *);
1568
1569 /* Make context non-speculative. */
1570 void clear_speculation ();
1571
1572 /* Produce context specifying all derived types of OTR_TYPE. If OTR_TYPE is
1573 NULL, the context is set to dummy "I know nothing" setting. */
1574 void clear_outer_type (tree otr_type = NULLnullptr);
1575
1576 /* Walk container types and modify context to point to actual class
1577 containing OTR_TYPE (if non-NULL) as base class.
1578 Return true if resulting context is valid.
1579
1580 When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1581 valid only via allocation of new polymorphic type inside by means
1582 of placement new.
1583
1584 When CONSIDER_BASES is false, only look for actual fields, not base types
1585 of TYPE. */
1586 bool restrict_to_inner_class (tree otr_type,
1587 bool consider_placement_new = true,
1588 bool consider_bases = true);
1589
1590 /* Adjust all offsets in contexts by given number of bits. */
1591 void offset_by (HOST_WIDE_INTlong);
1592 /* Use when we cannot track dynamic type change. This speculatively assume
1593 type change is not happening. */
1594 void possible_dynamic_type_change (bool, tree otr_type = NULLnullptr);
1595 /* Assume that both THIS and a given context is valid and strengthen THIS
1596 if possible. Return true if any strengthening was made.
1597 If actual type the context is being used in is known, OTR_TYPE should be
1598 set accordingly. This improves quality of combined result. */
1599 bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULLnullptr);
1600 bool meet_with (ipa_polymorphic_call_context, tree otr_type = NULLnullptr);
1601
1602 /* Return TRUE if context is fully useless. */
1603 bool useless_p () const;
1604 /* Return TRUE if this context conveys the same information as X. */
1605 bool equal_to (const ipa_polymorphic_call_context &x) const;
1606
1607 /* Dump human readable context to F. If NEWLINE is true, it will be
1608 terminated by a newline. */
1609 void dump (FILE *f, bool newline = true) const;
1610 void DEBUG_FUNCTION__attribute__ ((__used__)) debug () const;
1611
1612 /* LTO streaming. */
1613 void stream_out (struct output_block *) const;
1614 void stream_in (class lto_input_block *, class data_in *data_in);
1615
1616private:
1617 bool combine_speculation_with (tree, HOST_WIDE_INTlong, bool, tree);
1618 bool meet_speculation_with (tree, HOST_WIDE_INTlong, bool, tree);
1619 void set_by_decl (tree, HOST_WIDE_INTlong);
1620 bool set_by_invariant (tree, tree, HOST_WIDE_INTlong);
1621 bool speculation_consistent_p (tree, HOST_WIDE_INTlong, bool, tree) const;
1622 void make_speculative (tree otr_type = NULLnullptr);
1623};
1624
1625/* Structure containing additional information about an indirect call. */
1626
1627class GTY(()) cgraph_indirect_call_info
1628{
1629public:
1630 /* When agg_content is set, an offset where the call pointer is located
1631 within the aggregate. */
1632 HOST_WIDE_INTlong offset;
1633 /* Context of the polymorphic call; use only when POLYMORPHIC flag is set. */
1634 ipa_polymorphic_call_context context;
1635 /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set). */
1636 HOST_WIDE_INTlong otr_token;
1637 /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1638 tree otr_type;
1639 /* Index of the parameter that is called. */
1640 int param_index;
1641 /* ECF flags determined from the caller. */
1642 int ecf_flags;
1643
1644 /* Number of speculative call targets, it's less than GCOV_TOPN_VALUES. */
1645 unsigned num_speculative_call_targets : 16;
1646
1647 /* Set when the call is a virtual call with the parameter being the
1648 associated object pointer rather than a simple direct call. */
1649 unsigned polymorphic : 1;
1650 /* Set when the call is a call of a pointer loaded from contents of an
1651 aggregate at offset. */
1652 unsigned agg_contents : 1;
1653 /* Set when this is a call through a member pointer. */
1654 unsigned member_ptr : 1;
1655 /* When the agg_contents bit is set, this one determines whether the
1656 destination is loaded from a parameter passed by reference. */
1657 unsigned by_ref : 1;
1658 /* When the agg_contents bit is set, this one determines whether we can
1659 deduce from the function body that the loaded value from the reference is
1660 never modified between the invocation of the function and the load
1661 point. */
1662 unsigned guaranteed_unmodified : 1;
1663 /* For polymorphic calls this specify whether the virtual table pointer
1664 may have changed in between function entry and the call. */
1665 unsigned vptr_changed : 1;
1666};
1667
1668class GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1669 for_user)) cgraph_edge
1670{
1671public:
1672 friend struct cgraph_node;
1673 friend class symbol_table;
1674
1675 /* Remove EDGE from the cgraph. */
1676 static void remove (cgraph_edge *edge);
1677
1678 /* Change field call_stmt of edge E to NEW_STMT. If UPDATE_SPECULATIVE and E
1679 is any component of speculative edge, then update all components.
1680 Speculations can be resolved in the process and EDGE can be removed and
1681 deallocated. Return the edge that now represents the call. */
1682 static cgraph_edge *set_call_stmt (cgraph_edge *e, gcall *new_stmt,
1683 bool update_speculative = true);
1684
1685 /* Redirect callee of the edge to N. The function does not update underlying
1686 call expression. */
1687 void redirect_callee (cgraph_node *n);
1688
1689 /* If the edge does not lead to a thunk, simply redirect it to N. Otherwise
1690 create one or more equivalent thunks for N and redirect E to the first in
1691 the chain. Note that it is then necessary to call
1692 n->expand_all_artificial_thunks once all callers are redirected. */
1693 void redirect_callee_duplicating_thunks (cgraph_node *n);
1694
1695 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1696 CALLEE. Speculations can be resolved in the process and EDGE can be
1697 removed and deallocated. Return the edge that now represents the
1698 call. */
1699 static cgraph_edge *make_direct (cgraph_edge *edge, cgraph_node *callee);
1700
1701 /* Turn edge into speculative call calling N2. Update
1702 the profile so the direct call is taken COUNT times
1703 with FREQUENCY. speculative_id is used to link direct calls with their
1704 corresponding IPA_REF_ADDR references when representing speculative calls.
1705 */
1706 cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count,
1707 unsigned int speculative_id = 0);
1708
1709 /* Speculative call consists of an indirect edge and one or more
1710 direct edge+ref pairs. Speculative will expand to the following sequence:
1711
1712 if (call_dest == target1) // reference to target1
1713 target1 (); // direct call to target1
1714 else if (call_dest == target2) // reference to target2
1715 target2 (); // direct call to target2
1716 else
1717 call_dest (); // indirect call
1718
1719 Before the expansion we will have indirect call and the direct call+ref
1720 pairs all linked to single statement.
1721
1722 Note that ref may point to different symbol than the corresponding call
1723 because the speculated edge may have been optimized (redirected to
1724 a clone) or inlined.
1725
1726 Given an edge which is part of speculative call, return the first
1727 direct call edge in the speculative call sequence.
1728
1729 In the example above called on any cgraph edge in the sequence it will
1730 return direct call to target1. */
1731 cgraph_edge *first_speculative_call_target ();
1732
1733 /* Return next speculative call target or NULL if there is none.
1734 All targets are required to form an interval in the callee list.
1735
1736 In example above, if called on call to target1 it will return call to
1737 target2. */
1738 cgraph_edge *next_speculative_call_target ()
1739 {
1740 cgraph_edge *e = this;
1741 gcc_checking_assert (speculative && callee)((void)(!(speculative && callee) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 1741, __FUNCTION__), 0 : 0))
;
1742
1743 if (e->next_callee && e->next_callee->speculative
1744 && e->next_callee->call_stmt == e->call_stmt
1745 && e->next_callee->lto_stmt_uid == e->lto_stmt_uid)
1746 return e->next_callee;
1747 return NULLnullptr;
1748 }
1749
1750 /* When called on any edge in the speculative call return the (unique)
1751 indirect call edge in the speculative call sequence. */
1752 cgraph_edge *speculative_call_indirect_edge ()
1753 {
1754 gcc_checking_assert (speculative)((void)(!(speculative) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 1754, __FUNCTION__), 0 : 0))
;
1755 if (!callee)
1756 return this;
 /* NOTE(review): the loop below terminates only by returning a match.
 If no matching edge existed, E2 would walk past the end of the list
 and be dereferenced while NULL; correctness relies on the invariant
 that every speculative call sequence contains a matching indirect
 edge. Static analyzers flag this pattern as a possible null
 dereference. */
1757 for (cgraph_edge *e2 = caller->indirect_calls;
1758 true; e2 = e2->next_callee)
1759 if (e2->speculative
1760 && call_stmt == e2->call_stmt
1761 && lto_stmt_uid == e2->lto_stmt_uid)
1762 return e2;
1763 }
1764
1765 /* When called on any edge in speculative call and when given any target
1766 of ref which is speculated to it returns the corresponding direct call.
1767
1768 In example above if called on function target2 it will return call to
1769 target2. */
1770 cgraph_edge *speculative_call_for_target (cgraph_node *);
1771
1772 /* Return REF corresponding to direct call in the speculative call
1773 sequence. */
1774 ipa_ref *speculative_call_target_ref ()
1775 {
1776 ipa_ref *ref;
1777
1778 gcc_checking_assert (speculative)((void)(!(speculative) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 1778, __FUNCTION__), 0 : 0))
;
1779 for (unsigned int i = 0; caller->iterate_reference (i, ref); i++)
1780 if (ref->speculative && ref->speculative_id == speculative_id
1781 && ref->stmt == (gimple *)call_stmt
1782 && ref->lto_stmt_uid == lto_stmt_uid)
1783 return ref;
1784 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 1784, __FUNCTION__))
;
1785 }
1786
1787 /* Speculative call edge turned out to be direct call to CALLEE_DECL. Remove
1788 the speculative call sequence and return edge representing the call, the
1789 original EDGE can be removed and deallocated. It is up to caller to
1790 redirect the call as appropriate. Return the edge that now represents the
1791 call.
1792
1793 For "speculative" indirect call that contains multiple "speculative"
1794 targets (i.e. edge->indirect_info->num_speculative_call_targets > 1),
1795 decrease the count and only remove current direct edge.
1796
1797 If no speculative direct call left to the speculative indirect call, remove
1798 the speculative of both the indirect call and corresponding direct edge.
1799
1800 It is up to caller to iteratively resolve each "speculative" direct call
1801 and redirect the call as appropriate. */
1802 static cgraph_edge *resolve_speculation (cgraph_edge *edge,
1803 tree callee_decl = NULLnullptr);
1804
1805 /* If necessary, change the function declaration in the call statement
1806 associated with edge E so that it corresponds to the edge callee.
1807 Speculations can be resolved in the process and EDGE can be removed and
1808 deallocated.
1809
1810 The edge could be one of speculative direct call generated from speculative
1811 indirect call. In this circumstance, decrease the speculative targets
1812 count (i.e. num_speculative_call_targets) and redirect call stmt to the
1813 corresponding i-th target. If no speculative direct call left to the
1814 speculative indirect call, remove "speculative" of the indirect call and
1815 also redirect stmt to it's final direct target.
1816
1817 It is up to caller to iteratively transform each "speculative"
1818 direct call as appropriate. */
1819 static gimple *redirect_call_stmt_to_callee (cgraph_edge *e);
1820
1821 /* Create clone of edge in the node N represented
1822 by CALL_EXPR the callgraph. */
1823 cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
1824 profile_count num, profile_count den,
1825 bool update_original);
1826
1827 /* Verify edge count and frequency. */
1828 bool verify_count ();
1829
1830 /* Return true when call of edge cannot lead to return from caller
1831 and thus it is safe to ignore its side effects for IPA analysis
1832 when computing side effects of the caller. */
1833 bool cannot_lead_to_return_p (void);
1834
1835 /* Return true when the edge represents a direct recursion. */
1836 bool recursive_p (void);
1837
1838 /* Return true if the edge may be considered hot. */
1839 bool maybe_hot_p (void);
1840
1841 /* Get unique identifier of the edge; accessor for the private m_uid. */
1842 inline int get_uid ()
1843 {
1844 return m_uid;
1845 }
1846
1847 /* Get summary id of the edge; accessor for the private m_summary_id. */
1848 inline int get_summary_id ()
1849 {
1850 return m_summary_id;
1851 }
1852
1853 /* Rebuild cgraph edges for current function node. This needs to be run after
1854 passes that don't update the cgraph. */
1855 static unsigned int rebuild_edges (void);
1856
1857 /* Rebuild cgraph references for current function node. This needs to be run
1858 after passes that don't update the cgraph. */
1859 static void rebuild_references (void);
1860
1861 /* During LTO stream in this can be used to check whether call can possibly
1862 be internal to the current translation unit. */
1863 bool possibly_call_in_translation_unit_p (void);
1864
1865 /* Return num_speculative_targets of this edge. */
1866 int num_speculative_call_targets_p (void);
1867
1868 /* Expected number of executions: calculated in profile.c. */
1869 profile_count count;
 /* Endpoints of the edge and links chaining it into the caller's callee
 list and the callee's caller list. */
1870 cgraph_node *caller;
1871 cgraph_node *callee;
1872 cgraph_edge *prev_caller;
1873 cgraph_edge *next_caller;
1874 cgraph_edge *prev_callee;
1875 cgraph_edge *next_callee;
 /* The call statement this edge represents. */
1876 gcall *call_stmt;
1877 /* Additional information about an indirect call. Not cleared when an edge
1878 becomes direct. */
1879 cgraph_indirect_call_info *indirect_info;
1880 PTRvoid * GTY ((skip (""))) aux;
1881 /* When equal to CIF_OK, inline this call. Otherwise, points to the
1882 explanation why function was not inlined. */
1883 enum cgraph_inline_failed_t inline_failed;
1884 /* The stmt_uid of call_stmt. This is used by LTO to recover the call_stmt
1885 when the function is serialized in. */
1886 unsigned int lto_stmt_uid;
1887 /* speculative id is used to link direct calls with their corresponding
1888 IPA_REF_ADDR references when representing speculative calls. */
1889 unsigned int speculative_id : 16;
1890 /* Whether this edge was made direct by indirect inlining. */
1891 unsigned int indirect_inlining_edge : 1;
1892 /* Whether this edge describes an indirect call with an undetermined
1893 callee. */
1894 unsigned int indirect_unknown_callee : 1;
1895 /* Whether this edge is still a dangling */
1896 /* True if the corresponding CALL stmt cannot be inlined. */
1897 unsigned int call_stmt_cannot_inline_p : 1;
1898 /* Can this call throw externally? */
1899 unsigned int can_throw_external : 1;
1900 /* Edges with SPECULATIVE flag represents indirect calls that was
1901 speculatively turned into direct (i.e. by profile feedback).
1902 The final code sequence will have form:
1903
1904 if (call_target == expected_fn)
1905 expected_fn ();
1906 else
1907 call_target ();
1908
1909 Every speculative call is represented by three components attached
1910 to a same call statement:
1911 1) a direct call (to expected_fn)
1912 2) an indirect call (to call_target)
1913 3) a IPA_REF_ADDR reference to expected_fn.
1914
1915 Optimizers may later redirect direct call to clone, so 1) and 3)
1916 do not need to necessarily agree with destination. */
1917 unsigned int speculative : 1;
1918 /* Set to true when caller is a constructor or destructor of polymorphic
1919 type. */
1920 unsigned in_polymorphic_cdtor : 1;
1921
1922 /* Return true if call must bind to current definition. */
1923 bool binds_to_current_def_p ();
1924
1925 /* Expected frequency of executions within the function.
1926 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1927 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
1928 int frequency ();
1929
1930 /* Expected frequency of executions within the function. */
1931 sreal sreal_frequency ();
1932private:
1933 /* Unique id of the edge. */
1934 int m_uid;
1935
1936 /* Summary id that is recycled. */
1937 int m_summary_id;
1938
1939 /* Remove the edge from the list of the callers of the callee. */
1940 void remove_caller (void);
1941
1942 /* Remove the edge from the list of the callees of the caller. */
1943 void remove_callee (void);
1944
1945 /* Set callee N of call graph edge and add it to the corresponding set of
1946 callers. */
1947 void set_callee (cgraph_node *n);
1948
1949 /* Output flags of edge to a file F. */
1950 void dump_edge_flags (FILE *f);
1951
1952 /* Dump edge to stderr. */
1953 void DEBUG_FUNCTION__attribute__ ((__used__)) debug (void);
1954
1955 /* Verify that call graph edge corresponds to DECL from the associated
1956 statement. Return true if the verification should fail. */
1957 bool verify_corresponds_to_fndecl (tree decl);
1958};
1959
/* Frequency scale used by cgraph_edge::frequency: BASE means "expected to
 be called once per invocation of the caller"; MAX is the upper bound of
 the range (see the frequency () comment above). */
1960#define CGRAPH_FREQ_BASE1000 1000
1961#define CGRAPH_FREQ_MAX100000 100000
1962
1963/* The varpool data structure.
1964 Each static variable decl has assigned varpool_node. */
1965
1966struct GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node
1967{
1968 /* Constructor. */
1969 explicit varpool_node ()
1970 : symtab_node (SYMTAB_VARIABLE), output (0), dynamically_initialized (0),
1971 tls_model (TLS_MODEL_NONE), used_by_single_function (0)
1972 {}
1973
1974 /* Dump given varpool node to F. */
1975 void dump (FILE *f);
1976
1977 /* Dump given varpool node to stderr. */
1978 void DEBUG_FUNCTION__attribute__ ((__used__)) debug (void);
1979
1980 /* Remove variable from symbol table. */
1981 void remove (void);
1982
1983 /* Remove node initializer when it is no longer needed. */
1984 void remove_initializer (void);
1985
1986 void analyze (void);
1987
1988 /* Return variable availability. */
1989 availability get_availability (symtab_node *ref = NULLnullptr);
1990
1991 /* When doing LTO, read variable's constructor from disk if
1992 it is not already present. */
1993 tree get_constructor (void);
1994
1995 /* Return true if variable has constructor that can be used for folding. */
1996 bool ctor_useable_for_folding_p (void);
1997
1998 /* For given variable pool node, walk the alias chain to return the function
1999 the variable is alias of. Do not walk through thunks.
2000 When AVAILABILITY is non-NULL, get minimal availability in the chain.
2001 When REF is non-NULL, assume that reference happens in symbol REF
2002 when determining the availability. */
2003 inline varpool_node *ultimate_alias_target
2004 (availability *availability = NULLnullptr, symtab_node *ref = NULLnullptr);
2005
2006 /* Return node that alias is aliasing. */
2007 inline varpool_node *get_alias_target (void);
2008
2009 /* Output one variable, if necessary. Return whether we output it. */
2010 bool assemble_decl (void);
2011
2012 /* For variables in named sections make sure get_variable_section
2013 is called before we switch to those sections. Then section
2014 conflicts between read-only and read-only requiring relocations
2015 sections can be resolved. */
2016 void finalize_named_section_flags (void);
2017
2018 /* Call callback on varpool symbol and aliases associated to varpool symbol.
2019 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
2020 skipped. */
2021 bool call_for_symbol_and_aliases (bool (*callback) (varpool_node *, void *),
2022 void *data,
2023 bool include_overwritable);
2024
2025 /* Return true when variable should be considered externally visible. */
2026 bool externally_visible_p (void);
2027
2028 /* Return true when all references to variable must be visible
2029 in ipa_ref_list.
2030 i.e. if the variable is not externally visible or not used in some magic
2031 way (asm statement or such).
2032 The magic uses are all summarized in force_output flag. */
2033 inline bool all_refs_explicit_p ();
2034
2035 /* Return true when variable can be removed from variable pool
2036 if all direct calls are eliminated. */
2037 inline bool can_remove_if_no_refs_p (void);
2038
2039 /* Add the variable DECL to the varpool.
2040 Unlike finalize_decl function is intended to be used
2041 by middle end and allows insertion of new variable at arbitrary point
2042 of compilation. */
2043 static void add (tree decl);
2044
2045 /* Return varpool node for given symbol and check it is a function. */
2046 static inline varpool_node *get (const_tree decl);
2047
2048 /* Mark DECL as finalized. By finalizing the declaration, frontend instruct
2049 the middle end to output the variable to asm file, if needed or externally
2050 visible. */
2051 static void finalize_decl (tree decl);
2052
2053 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
2054 Extra name aliases are output whenever DECL is output. */
2055 static varpool_node * create_extra_name_alias (tree alias, tree decl);
2056
2057 /* Attempt to mark ALIAS as an alias to DECL. Return TRUE if successful.
2058 Extra name aliases are output whenever DECL is output. */
2059 static varpool_node * create_alias (tree, tree);
2060
2061 /* Dump the variable pool to F. */
2062 static void dump_varpool (FILE *f);
2063
2064 /* Dump the variable pool to stderr. */
2065 static void DEBUG_FUNCTION__attribute__ ((__used__)) debug_varpool (void);
2066
2067 /* Allocate new callgraph node and insert it into basic data structures. */
2068 static varpool_node *create_empty (void);
2069
2070 /* Return varpool node assigned to DECL. Create new one when needed. */
2071 static varpool_node *get_create (tree decl);
2072
2073 /* Given an assembler name, lookup node. */
2074 static varpool_node *get_for_asmname (tree asmname);
2075
2076 /* Set when variable is scheduled to be assembled. */
2077 unsigned output : 1;
2078
2079 /* Set if the variable is dynamically initialized, except for
2080 function local statics. */
2081 unsigned dynamically_initialized : 1;
2082
2083 ENUM_BITFIELD(tls_model)enum tls_model tls_model : 3;
2084
2085 /* Set if the variable is known to be used by single function only.
2086 This is computed by ipa_single_use pass and used by late optimizations
2087 in places where optimization would be valid for local static variable
2088 if we did not do any inter-procedural code movement. */
2089 unsigned used_by_single_function : 1;
2090
2091private:
2092 /* Assemble thunks and aliases associated to varpool node. */
2093 void assemble_aliases (void);
2094
2095 /* Worker for call_for_node_and_aliases. */
2096 bool call_for_symbol_and_aliases_1 (bool (*callback) (varpool_node *, void *),
2097 void *data,
2098 bool include_overwritable);
2099};
2100
2101/* Every top level asm statement is put into a asm_node. */
2102
2103struct GTY(()) asm_node {
2104 /* Next asm node. */
2105 asm_node *next;
2106 /* String for this asm node. */
2107 tree asm_str;
2108 /* Ordering of all cgraph nodes. */
2109 int order;
2110};
2111
2112/* Report whether or not THIS symtab node is a function, aka cgraph_node. */
2113
2114template <>
2115template <>
2116inline bool
2117is_a_helper <cgraph_node *>::test (symtab_node *p)
2118{
2119 return p
7.1
'p' is non-null
31.1
'p' is non-null
7.1
'p' is non-null
31.1
'p' is non-null
7.1
'p' is non-null
31.1
'p' is non-null
7.1
'p' is non-null
31.1
'p' is non-null
7.1
'p' is non-null
31.1
'p' is non-null
7.1
'p' is non-null
31.1
'p' is non-null
7.1
'p' is non-null
31.1
'p' is non-null
7.1
'p' is non-null
31.1
'p' is non-null
&& p->type == SYMTAB_FUNCTION
;
8
Assuming field 'type' is equal to SYMTAB_FUNCTION
9
Returning without writing to 'p->next', which participates in a condition later
10
Returning the value 1, which participates in a condition later
32
Assuming field 'type' is equal to SYMTAB_FUNCTION
33
Returning the value 1, which participates in a condition later
2120}
2121
2122/* Report whether or not THIS symtab node is a variable, aka varpool_node. */
2123
2124template <>
2125template <>
2126inline bool
2127is_a_helper <varpool_node *>::test (symtab_node *p)
2128{
2129 return p && p->type == SYMTAB_VARIABLE;
2130}
2131
2132typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
2133typedef void (*cgraph_node_hook)(cgraph_node *, void *);
2134typedef void (*varpool_node_hook)(varpool_node *, void *);
2135typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
2136typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
2137
2138struct cgraph_edge_hook_list;
2139struct cgraph_node_hook_list;
2140struct varpool_node_hook_list;
2141struct cgraph_2edge_hook_list;
2142struct cgraph_2node_hook_list;
2143
2144/* Map from a symbol to initialization/finalization priorities. */
2145struct GTY(()) symbol_priority_map {
2146 priority_type init;
2147 priority_type fini;
2148};
2149
/* Phases the symbol table (callgraph) passes through during a compilation.
   Order matters: later states imply the earlier ones are finished.  */
enum symtab_state
{
  /* Frontend is parsing and finalizing functions.  */
  PARSING,
  /* Callgraph is being constructed.  It is safe to add new functions.  */
  CONSTRUCTION,
  /* Callgraph is being streamed-in at LTO time.  */
  LTO_STREAMING,
  /* Callgraph is built and early IPA passes are being run.  */
  IPA,
  /* Callgraph is built and all functions are transformed to SSA form.  */
  IPA_SSA,
  /* All inline decisions are done; it is now possible to remove extern inline
     functions and virtual call targets.  */
  IPA_SSA_AFTER_INLINING,
  /* Functions are now ordered and being passed to RTL expanders.  */
  EXPANSION,
  /* All cgraph expansion is done.  */
  FINISHED
};
2170
2171struct asmname_hasher : ggc_ptr_hash <symtab_node>
2172{
2173 typedef const_tree compare_type;
2174
2175 static hashval_t hash (symtab_node *n);
2176 static bool equal (symtab_node *n, const_tree t);
2177};
2178
2179/* Core summaries maintained about symbols. */
2180
2181struct thunk_info;
2182template <class T> class function_summary;
2183typedef function_summary <thunk_info *> thunk_summary;
2184
2185struct clone_info;
2186template <class T> class function_summary;
2187typedef function_summary <clone_info *> clone_summary;
2188
2189class GTY((tag ("SYMTAB"))) symbol_table
2190{
2191public:
2192 friend struct symtab_node;
2193 friend struct cgraph_node;
2194 friend struct cgraph_edge;
2195
2196 symbol_table ():
2197 cgraph_count (0), cgraph_max_uid (1), cgraph_max_summary_id (0),
2198 edges_count (0), edges_max_uid (1), edges_max_summary_id (0),
2199 cgraph_released_summary_ids (), edge_released_summary_ids (),
2200 nodes (NULLnullptr), asmnodes (NULLnullptr), asm_last_node (NULLnullptr),
2201 order (0), max_unit (0), global_info_ready (false), state (PARSING),
2202 function_flags_ready (false), cpp_implicit_aliases_done (false),
2203 section_hash (NULLnullptr), assembler_name_hash (NULLnullptr), init_priority_hash (NULLnullptr),
2204 dump_file (NULLnullptr), ipa_clones_dump_file (NULLnullptr), cloned_nodes (),
2205 m_thunks (NULLnullptr), m_clones (NULLnullptr),
2206 m_first_edge_removal_hook (NULLnullptr), m_first_cgraph_removal_hook (NULLnullptr),
2207 m_first_edge_duplicated_hook (NULLnullptr), m_first_cgraph_duplicated_hook (NULLnullptr),
2208 m_first_cgraph_insertion_hook (NULLnullptr), m_first_varpool_insertion_hook (NULLnullptr),
2209 m_first_varpool_removal_hook (NULLnullptr)
2210 {
2211 }
2212
2213 /* Initialize callgraph dump file. */
2214 void initialize (void);
2215
2216 /* Register a top-level asm statement ASM_STR. */
2217 inline asm_node *finalize_toplevel_asm (tree asm_str);
2218
2219 /* Analyze the whole compilation unit once it is parsed completely. */
2220 void finalize_compilation_unit (void);
2221
2222 /* C++ frontend produce same body aliases all over the place, even before PCH
2223 gets streamed out. It relies on us linking the aliases with their function
2224 in order to do the fixups, but ipa-ref is not PCH safe. Consequently we
2225 first produce aliases without links, but once C++ FE is sure it won't
2226 stream PCH we build the links via this function. */
2227 void process_same_body_aliases (void);
2228
2229 /* Perform simple optimizations based on callgraph. */
2230 void compile (void);
2231
2232 /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
2233 functions into callgraph in a way so they look like ordinary reachable
2234 functions inserted into callgraph already at construction time. */
2235 void process_new_functions (void);
2236
2237 /* Register a symbol NODE. */
2238 inline void register_symbol (symtab_node *node);
2239
2240 inline void
2241 clear_asm_symbols (void)
2242 {
2243 asmnodes = NULLnullptr;
2244 asm_last_node = NULLnullptr;
2245 }
2246
2247 /* Perform reachability analysis and reclaim all unreachable nodes. */
2248 bool remove_unreachable_nodes (FILE *file);
2249
2250 /* Optimization of function bodies might've rendered some variables as
2251 unnecessary so we want to avoid these from being compiled. Re-do
2252 reachability starting from variables that are either externally visible
2253 or was referred from the asm output routines. */
2254 void remove_unreferenced_decls (void);
2255
2256 /* Unregister a symbol NODE. */
2257 inline void unregister (symtab_node *node);
2258
2259 /* Allocate new callgraph node and insert it into basic data structures. */
2260 cgraph_node *create_empty (void);
2261
2262 /* Release a callgraph NODE. */
2263 void release_symbol (cgraph_node *node);
2264
2265 /* Output all variables enqueued to be assembled. */
2266 bool output_variables (void);
2267
2268 /* Weakrefs may be associated to external decls and thus not output
2269 at expansion time. Emit all necessary aliases. */
2270 void output_weakrefs (void);
2271
2272 /* Return first static symbol with definition. */
2273 inline symtab_node *first_symbol (void);
2274
2275 /* Return first assembler symbol. */
2276 inline asm_node *
2277 first_asm_symbol (void)
2278 {
2279 return asmnodes;
2280 }
2281
2282 /* Return first static symbol with definition. */
2283 inline symtab_node *first_defined_symbol (void);
2284
2285 /* Return first variable. */
2286 inline varpool_node *first_variable (void);
2287
2288 /* Return next variable after NODE. */
2289 inline varpool_node *next_variable (varpool_node *node);
2290
2291 /* Return first static variable with initializer. */
2292 inline varpool_node *first_static_initializer (void);
2293
2294 /* Return next static variable with initializer after NODE. */
2295 inline varpool_node *next_static_initializer (varpool_node *node);
2296
2297 /* Return first static variable with definition. */
2298 inline varpool_node *first_defined_variable (void);
2299
2300 /* Return next static variable with definition after NODE. */
2301 inline varpool_node *next_defined_variable (varpool_node *node);
2302
2303 /* Return first function with body defined. */
2304 inline cgraph_node *first_defined_function (void);
2305
2306 /* Return next function with body defined after NODE. */
2307 inline cgraph_node *next_defined_function (cgraph_node *node);
2308
2309 /* Return first function. */
2310 inline cgraph_node *first_function (void);
2311
2312 /* Return next function. */
2313 inline cgraph_node *next_function (cgraph_node *node);
2314
2315 /* Return first function with body defined. */
2316 cgraph_node *first_function_with_gimple_body (void);
2317
2318 /* Return next reachable static variable with initializer after NODE. */
2319 inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
2320
2321 /* Register HOOK to be called with DATA on each removed edge. */
2322 cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
2323 void *data);
2324
2325 /* Remove ENTRY from the list of hooks called on removing edges. */
2326 void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
2327
2328 /* Register HOOK to be called with DATA on each removed node. */
2329 cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
2330 void *data);
2331
2332 /* Remove ENTRY from the list of hooks called on removing nodes. */
2333 void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
2334
2335 /* Register HOOK to be called with DATA on each removed node. */
2336 varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
2337 void *data);
2338
2339 /* Remove ENTRY from the list of hooks called on removing nodes. */
2340 void remove_varpool_removal_hook (varpool_node_hook_list *entry);
2341
2342 /* Register HOOK to be called with DATA on each inserted node. */
2343 cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
2344 void *data);
2345
2346 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2347 void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
2348
2349 /* Register HOOK to be called with DATA on each inserted node. */
2350 varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
2351 void *data);
2352
2353 /* Remove ENTRY from the list of hooks called on inserted nodes. */
2354 void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
2355
2356 /* Register HOOK to be called with DATA on each duplicated edge. */
2357 cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
2358 void *data);
2359 /* Remove ENTRY from the list of hooks called on duplicating edges. */
2360 void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
2361
2362 /* Register HOOK to be called with DATA on each duplicated node. */
2363 cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
2364 void *data);
2365
2366 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
2367 void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
2368
2369 /* Call all edge removal hooks. */
2370 void call_edge_removal_hooks (cgraph_edge *e);
2371
2372 /* Call all node insertion hooks. */
2373 void call_cgraph_insertion_hooks (cgraph_node *node);
2374
2375 /* Call all node removal hooks. */
2376 void call_cgraph_removal_hooks (cgraph_node *node);
2377
2378 /* Call all node duplication hooks. */
2379 void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
2380
2381 /* Call all edge duplication hooks. */
2382 void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
2383
2384 /* Call all node removal hooks. */
2385 void call_varpool_removal_hooks (varpool_node *node);
2386
2387 /* Call all node insertion hooks. */
2388 void call_varpool_insertion_hooks (varpool_node *node);
2389
2390 /* Arrange node to be first in its entry of assembler_name_hash. */
2391 void symtab_prevail_in_asm_name_hash (symtab_node *node);
2392
2393 /* Initialize asm name hash unless. */
2394 void symtab_initialize_asm_name_hash (void);
2395
2396 /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables. */
2397 void change_decl_assembler_name (tree decl, tree name);
2398
2399 /* Dump symbol table to F. */
2400 void dump (FILE *f);
2401
2402 /* Dump symbol table to F in graphviz format. */
2403 void dump_graphviz (FILE *f);
2404
2405 /* Dump symbol table to stderr. */
2406 void DEBUG_FUNCTION__attribute__ ((__used__)) debug (void);
2407
2408 /* Assign a new summary ID for the callgraph NODE. */
2409 inline int assign_summary_id (cgraph_node *node)
2410 {
2411 if (!cgraph_released_summary_ids.is_empty ())
2412 node->m_summary_id = cgraph_released_summary_ids.pop ();
2413 else
2414 node->m_summary_id = cgraph_max_summary_id++;
2415
2416 return node->m_summary_id;
2417 }
2418
2419 /* Assign a new summary ID for the callgraph EDGE. */
2420 inline int assign_summary_id (cgraph_edge *edge)
2421 {
2422 if (!edge_released_summary_ids.is_empty ())
2423 edge->m_summary_id = edge_released_summary_ids.pop ();
2424 else
2425 edge->m_summary_id = edges_max_summary_id++;
2426
2427 return edge->m_summary_id;
2428 }
2429
2430 /* Return true if assembler names NAME1 and NAME2 leads to the same symbol
2431 name. */
2432 static bool assembler_names_equal_p (const char *name1, const char *name2);
2433
2434 int cgraph_count;
2435 int cgraph_max_uid;
2436 int cgraph_max_summary_id;
2437
2438 int edges_count;
2439 int edges_max_uid;
2440 int edges_max_summary_id;
2441
2442 /* Vector of released summary IDS for cgraph nodes. */
2443 vec<int> GTY ((skip)) cgraph_released_summary_ids;
2444
2445 /* Vector of released summary IDS for cgraph nodes. */
2446 vec<int> GTY ((skip)) edge_released_summary_ids;
2447
2448 /* Return symbol used to separate symbol name from suffix. */
2449 static char symbol_suffix_separator ();
2450
2451 symtab_node* GTY(()) nodes;
2452 asm_node* GTY(()) asmnodes;
2453 asm_node* GTY(()) asm_last_node;
2454
2455 /* The order index of the next symtab node to be created. This is
2456 used so that we can sort the cgraph nodes in order by when we saw
2457 them, to support -fno-toplevel-reorder. */
2458 int order;
2459
2460 /* Maximal unit ID used. */
2461 int max_unit;
2462
2463 /* Set when whole unit has been analyzed so we can access global info. */
2464 bool global_info_ready;
2465 /* What state callgraph is in right now. */
2466 enum symtab_state state;
2467 /* Set when the cgraph is fully build and the basic flags are computed. */
2468 bool function_flags_ready;
2469
2470 bool cpp_implicit_aliases_done;
2471
2472 /* Hash table used to hold sections. */
2473 hash_table<section_name_hasher> *GTY(()) section_hash;
2474
2475 /* Hash table used to convert assembler names into nodes. */
2476 hash_table<asmname_hasher> *assembler_name_hash;
2477
2478 /* Hash table used to hold init priorities. */
2479 hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2480
2481 FILE* GTY ((skip)) dump_file;
2482
2483 FILE* GTY ((skip)) ipa_clones_dump_file;
2484
2485 hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;
2486
2487 /* Thunk annotations. */
2488 thunk_summary *m_thunks;
2489
2490 /* Virtual clone annotations. */
2491 clone_summary *m_clones;
2492
2493private:
2494 /* Allocate a cgraph_edge structure and fill it with data according to the
2495 parameters of which only CALLEE can be NULL (when creating an indirect
2496 call edge). CLONING_P should be set if properties that are copied from an
2497 original edge should not be calculated. */
2498 cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2499 gcall *call_stmt, profile_count count,
2500 bool indir_unknown_callee, bool cloning_p);
2501
2502 /* Put the edge onto the free list. */
2503 void free_edge (cgraph_edge *e);
2504
2505 /* Insert NODE to assembler name hash. */
2506 void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2507
2508 /* Remove NODE from assembler name hash. */
2509 void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2510
2511 /* Hash asmnames ignoring the user specified marks. */
2512 static hashval_t decl_assembler_name_hash (const_tree asmname);
2513
2514 /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL. */
2515 static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2516
2517 friend struct asmname_hasher;
2518
2519 /* List of hooks triggered when an edge is removed. */
2520 cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2521 /* List of hooks trigger_red when a cgraph node is removed. */
2522 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2523 /* List of hooks triggered when an edge is duplicated. */
2524 cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2525 /* List of hooks triggered when a node is duplicated. */
2526 cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2527 /* List of hooks triggered when an function is inserted. */
2528 cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2529 /* List of hooks triggered when an variable is inserted. */
2530 varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2531 /* List of hooks triggered when a node is removed. */
2532 varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2533};
2534
2535extern GTY(()) symbol_table *symtab;
2536
2537extern vec<cgraph_node *> cgraph_new_nodes;
2538
2539inline hashval_t
2540asmname_hasher::hash (symtab_node *n)
2541{
2542 return symbol_table::decl_assembler_name_hash
2543 (DECL_ASSEMBLER_NAME (n->decl)decl_assembler_name (n->decl));
2544}
2545
2546inline bool
2547asmname_hasher::equal (symtab_node *n, const_tree t)
2548{
2549 return symbol_table::decl_assembler_name_equal (n->decl, t);
2550}
2551
2552/* In cgraph.c */
2553void cgraph_c_finalize (void);
2554void release_function_body (tree);
2555cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2556
2557void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
2558bool cgraph_function_possibly_inlined_p (tree);
2559
2560const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2561cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2562
2563/* In cgraphunit.c */
2564void cgraphunit_c_finalize (void);
2565int tp_first_run_node_cmp (const void *pa, const void *pb);
2566
2567/* In symtab-thunks.cc */
2568void symtab_thunks_cc_finalize (void);
2569
2570/* Initialize datastructures so DECL is a function in lowered gimple form.
2571 IN_SSA is true if the gimple is in SSA. */
2572basic_block init_lowered_empty_function (tree, bool, profile_count);
2573
2574tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INTlong, tree,
2575 HOST_WIDE_INTlong);
2576/* In cgraphclones.c */
2577
2578tree clone_function_name_numbered (const char *name, const char *suffix);
2579tree clone_function_name_numbered (tree decl, const char *suffix);
2580tree clone_function_name (const char *name, const char *suffix,
2581 unsigned long number);
2582tree clone_function_name (tree decl, const char *suffix,
2583 unsigned long number);
2584tree clone_function_name (tree decl, const char *suffix);
2585
2586void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2587 ipa_param_adjustments *,
2588 bool, bitmap, basic_block);
2589
2590void dump_callgraph_transformation (const cgraph_node *original,
2591 const cgraph_node *clone,
2592 const char *suffix);
2593/* In cgraphbuild.c */
2594int compute_call_stmt_bb_frequency (tree, basic_block bb);
2595void record_references_in_initializer (tree, bool);
2596
2597/* In ipa.c */
2598void cgraph_build_static_cdtor (char which, tree body, int priority);
2599bool ipa_discover_variable_flags (void);
2600
2601/* In varpool.c */
2602tree ctor_for_folding (tree);
2603
2604/* In ipa-inline-analysis.c */
2605void initialize_inline_failed (struct cgraph_edge *);
2606bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
2607
2608/* Return true when the symbol is real symbol, i.e. it is not inline clone
2609 or abstract function kept for debug info purposes only. */
2610inline bool
2611symtab_node::real_symbol_p (void)
2612{
2613 cgraph_node *cnode;
2614
2615 if (DECL_ABSTRACT_P (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 2615, __FUNCTION__))->decl_common.abstract_flag)
)
2616 return false;
2617 if (transparent_alias && definition)
2618 return false;
2619 if (!is_a <cgraph_node *> (this))
2620 return true;
2621 cnode = dyn_cast <cgraph_node *> (this);
2622 if (cnode->inlined_to)
2623 return false;
2624 return true;
2625}
2626
2627/* Return true if DECL should have entry in symbol table if used.
2628 Those are functions and static & external variables. */
2629
2630static inline bool
2631decl_in_symtab_p (const_tree decl)
2632{
2633 return (TREE_CODE (decl)((enum tree_code) (decl)->base.code) == FUNCTION_DECL
2634 || (TREE_CODE (decl)((enum tree_code) (decl)->base.code) == VAR_DECL
2635 && (TREE_STATIC (decl)((decl)->base.static_flag) || DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 2635, __FUNCTION__))->decl_common.decl_flag_1)
)));
2636}
2637
2638inline bool
2639symtab_node::in_same_comdat_group_p (symtab_node *target)
2640{
2641 symtab_node *source = this;
2642
2643 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2644 {
2645 if (cn->inlined_to)
2646 source = cn->inlined_to;
2647 }
2648 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2649 {
2650 if (cn->inlined_to)
2651 target = cn->inlined_to;
2652 }
2653
2654 return source->get_comdat_group () == target->get_comdat_group ();
2655}
2656
2657/* Return node that alias is aliasing. */
2658
2659inline symtab_node *
2660symtab_node::get_alias_target (void)
2661{
2662 ipa_ref *ref = NULLnullptr;
2663 iterate_reference (0, ref);
2664 gcc_checking_assert (ref->use == IPA_REF_ALIAS)((void)(!(ref->use == IPA_REF_ALIAS) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 2664, __FUNCTION__), 0 : 0))
;
2665 return ref->referred;
2666}
2667
2668/* Return next reachable static symbol with initializer after the node. */
2669
2670inline symtab_node *
2671symtab_node::next_defined_symbol (void)
2672{
2673 symtab_node *node1 = next;
2674
2675 for (; node1; node1 = node1->next)
2676 if (node1->definition)
2677 return node1;
2678
2679 return NULLnullptr;
2680}
2681
2682/* Iterates I-th reference in the list, REF is also set. */
2683
2684inline ipa_ref *
2685symtab_node::iterate_reference (unsigned i, ipa_ref *&ref)
2686{
2687 ref_list.references.iterate (i, &ref);
2688
2689 return ref;
2690}
2691
2692/* Iterates I-th referring item in the list, REF is also set. */
2693
2694inline ipa_ref *
2695symtab_node::iterate_referring (unsigned i, ipa_ref *&ref)
2696{
2697 ref_list.referring.iterate (i, &ref);
2698
2699 return ref;
2700}
2701
2702/* Iterates I-th referring alias item in the list, REF is also set. */
2703
2704inline ipa_ref *
2705symtab_node::iterate_direct_aliases (unsigned i, ipa_ref *&ref)
2706{
2707 ref_list.referring.iterate (i, &ref);
2708
2709 if (ref && ref->use != IPA_REF_ALIAS)
2710 return NULLnullptr;
2711
2712 return ref;
2713}
2714
2715/* Return true if list contains an alias. */
2716
2717inline bool
2718symtab_node::has_aliases_p (void)
2719{
2720 ipa_ref *ref = NULLnullptr;
2721
2722 return (iterate_direct_aliases (0, ref) != NULLnullptr);
2723}
2724
2725/* Return true when RESOLUTION indicate that linker will use
2726 the symbol from non-LTO object files. */
2727
2728inline bool
2729resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2730{
2731 return (resolution == LDPR_PREVAILING_DEF
2732 || resolution == LDPR_PREEMPTED_REG
2733 || resolution == LDPR_RESOLVED_EXEC
2734 || resolution == LDPR_RESOLVED_DYN);
2735}
2736
2737/* Return true when symtab_node is known to be used from other (non-LTO)
2738 object file. Known only when doing LTO via linker plugin. */
2739
2740inline bool
2741symtab_node::used_from_object_file_p (void)
2742{
2743 if (!TREE_PUBLIC (decl)((decl)->base.public_flag) || DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 2743, __FUNCTION__))->decl_common.decl_flag_1)
)
2744 return false;
2745 if (resolution_used_from_other_file_p (resolution))
2746 return true;
2747 return false;
2748}
2749
2750/* Return varpool node for given symbol and check it is a function. */
2751
2752inline varpool_node *
2753varpool_node::get (const_tree decl)
2754{
2755 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL)((void)(!(((enum tree_code) (decl)->base.code) == VAR_DECL
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 2755, __FUNCTION__), 0 : 0))
;
2756 return dyn_cast<varpool_node *> (symtab_node::get (decl));
2757}
2758
2759/* Register a symbol NODE. */
2760
2761inline void
2762symbol_table::register_symbol (symtab_node *node)
2763{
2764 node->next = nodes;
2765 node->previous = NULLnullptr;
2766
2767 if (nodes)
2768 nodes->previous = node;
2769 nodes = node;
2770
2771 node->order = order++;
2772}
2773
2774/* Register a top-level asm statement ASM_STR. */
2775
2776asm_node *
2777symbol_table::finalize_toplevel_asm (tree asm_str)
2778{
2779 asm_node *node;
2780
2781 node = ggc_cleared_alloc<asm_node> ();
2782 node->asm_str = asm_str;
2783 node->order = order++;
2784 node->next = NULLnullptr;
2785
2786 if (asmnodes == NULLnullptr)
2787 asmnodes = node;
2788 else
2789 asm_last_node->next = node;
2790
2791 asm_last_node = node;
2792 return node;
2793}
2794
2795/* Unregister a symbol NODE. */
2796inline void
2797symbol_table::unregister (symtab_node *node)
2798{
2799 if (node->previous)
2800 node->previous->next = node->next;
2801 else
2802 nodes = node->next;
2803
2804 if (node->next)
2805 node->next->previous = node->previous;
2806
2807 node->next = NULLnullptr;
2808 node->previous = NULLnullptr;
2809}
2810
2811/* Release a callgraph NODE with UID and put in to the list of free nodes. */
2812
2813inline void
2814symbol_table::release_symbol (cgraph_node *node)
2815{
2816 cgraph_count--;
2817 if (node->m_summary_id != -1)
2818 cgraph_released_summary_ids.safe_push (node->m_summary_id);
2819 ggc_free (node);
2820}
2821
2822/* Return first static symbol with definition. */
2823inline symtab_node *
2824symbol_table::first_symbol (void)
2825{
2826 return nodes;
2827}
2828
2829/* Walk all symbols. */
2830#define FOR_EACH_SYMBOL(node)for ((node) = symtab->first_symbol (); (node); (node) = (node
)->next)
\
2831 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
2832
2833/* Return first static symbol with definition. */
2834inline symtab_node *
2835symbol_table::first_defined_symbol (void)
2836{
2837 symtab_node *node;
2838
2839 for (node = nodes; node; node = node->next)
2840 if (node->definition)
2841 return node;
2842
2843 return NULLnullptr;
2844}
2845
2846/* Walk all symbols with definitions in current unit. */
2847#define FOR_EACH_DEFINED_SYMBOL(node)for ((node) = symtab->first_defined_symbol (); (node); (node
) = node->next_defined_symbol ())
\
2848 for ((node) = symtab->first_defined_symbol (); (node); \
2849 (node) = node->next_defined_symbol ())
2850
2851/* Return first variable. */
2852inline varpool_node *
2853symbol_table::first_variable (void)
2854{
2855 symtab_node *node;
2856 for (node = nodes; node; node = node->next)
2857 if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2858 return vnode;
2859 return NULLnullptr;
2860}
2861
2862/* Return next variable after NODE. */
2863inline varpool_node *
2864symbol_table::next_variable (varpool_node *node)
2865{
2866 symtab_node *node1 = node->next;
2867 for (; node1; node1 = node1->next)
2868 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2869 return vnode1;
2870 return NULLnullptr;
2871}
2872/* Walk all variables. */
2873#define FOR_EACH_VARIABLE(node)for ((node) = symtab->first_variable (); (node); (node) = symtab
->next_variable ((node)))
\
2874 for ((node) = symtab->first_variable (); \
2875 (node); \
2876 (node) = symtab->next_variable ((node)))
2877
2878/* Return first static variable with initializer. */
2879inline varpool_node *
2880symbol_table::first_static_initializer (void)
2881{
2882 symtab_node *node;
2883 for (node = nodes; node; node = node->next)
2884 {
2885 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2886 if (vnode && DECL_INITIAL (node->decl)((contains_struct_check ((node->decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 2886, __FUNCTION__))->decl_common.initial)
)
2887 return vnode;
2888 }
2889 return NULLnullptr;
2890}
2891
2892/* Return next static variable with initializer after NODE. */
2893inline varpool_node *
2894symbol_table::next_static_initializer (varpool_node *node)
2895{
2896 symtab_node *node1 = node->next;
2897 for (; node1; node1 = node1->next)
2898 {
2899 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2900 if (vnode1 && DECL_INITIAL (node1->decl)((contains_struct_check ((node1->decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 2900, __FUNCTION__))->decl_common.initial)
)
2901 return vnode1;
2902 }
2903 return NULLnullptr;
2904}
2905
2906/* Walk all static variables with initializer set. */
2907#define FOR_EACH_STATIC_INITIALIZER(node)for ((node) = symtab->first_static_initializer (); (node);
(node) = symtab->next_static_initializer (node))
\
2908 for ((node) = symtab->first_static_initializer (); (node); \
2909 (node) = symtab->next_static_initializer (node))
2910
2911/* Return first static variable with definition. */
2912inline varpool_node *
2913symbol_table::first_defined_variable (void)
2914{
2915 symtab_node *node;
2916 for (node = nodes; node; node = node->next)
2917 {
2918 varpool_node *vnode = dyn_cast <varpool_node *> (node);
2919 if (vnode && vnode->definition)
2920 return vnode;
2921 }
2922 return NULLnullptr;
2923}
2924
2925/* Return next static variable with definition after NODE. */
2926inline varpool_node *
2927symbol_table::next_defined_variable (varpool_node *node)
2928{
2929 symtab_node *node1 = node->next;
2930 for (; node1; node1 = node1->next)
2931 {
2932 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2933 if (vnode1 && vnode1->definition)
2934 return vnode1;
2935 }
2936 return NULLnullptr;
2937}
2938/* Walk all variables with definitions in current unit. */
2939#define FOR_EACH_DEFINED_VARIABLE(node)for ((node) = symtab->first_defined_variable (); (node); (
node) = symtab->next_defined_variable (node))
\
2940 for ((node) = symtab->first_defined_variable (); (node); \
2941 (node) = symtab->next_defined_variable (node))
2942
2943/* Return first function with body defined. */
2944inline cgraph_node *
2945symbol_table::first_defined_function (void)
2946{
2947 symtab_node *node;
2948 for (node = nodes; node; node = node->next)
2949 {
2950 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2951 if (cn && cn->definition)
2952 return cn;
2953 }
2954 return NULLnullptr;
2955}
2956
2957/* Return next function with body defined after NODE. */
2958inline cgraph_node *
2959symbol_table::next_defined_function (cgraph_node *node)
2960{
2961 symtab_node *node1 = node->next;
2962 for (; node1; node1 = node1->next)
2963 {
2964 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2965 if (cn1 && cn1->definition)
2966 return cn1;
2967 }
2968 return NULLnullptr;
2969}
2970
2971/* Walk all functions with body defined. */
2972#define FOR_EACH_DEFINED_FUNCTION(node)for ((node) = symtab->first_defined_function (); (node); (
node) = symtab->next_defined_function ((node)))
\
2973 for ((node) = symtab->first_defined_function (); (node); \
2974 (node) = symtab->next_defined_function ((node)))
2975
2976/* Return first function. */
2977inline cgraph_node *
2978symbol_table::first_function (void)
2979{
2980 symtab_node *node;
2981 for (node = nodes; node; node = node->next)
2982 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2983 return cn;
2984 return NULLnullptr;
2985}
2986
2987/* Return next function. */
2988inline cgraph_node *
2989symbol_table::next_function (cgraph_node *node)
2990{
2991 symtab_node *node1 = node->next;
2992 for (; node1; node1 = node1->next)
2993 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2994 return cn1;
2995 return NULLnullptr;
2996}
2997
2998/* Return first function with body defined. */
2999inline cgraph_node *
3000symbol_table::first_function_with_gimple_body (void)
3001{
3002 symtab_node *node;
3003 for (node = nodes; node; node = node->next)
4
Loop condition is true. Entering loop body
28
Loop condition is true. Entering loop body
3004 {
3005 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
5
Calling 'dyn_cast<cgraph_node *, symtab_node>'
22
Returning from 'dyn_cast<cgraph_node *, symtab_node>'
29
Calling 'dyn_cast<cgraph_node *, symtab_node>'
39
Returning from 'dyn_cast<cgraph_node *, symtab_node>'
3006 if (cn
22.1
'cn' is non-null
39.1
'cn' is non-null
22.1
'cn' is non-null
39.1
'cn' is non-null
22.1
'cn' is non-null
39.1
'cn' is non-null
22.1
'cn' is non-null
39.1
'cn' is non-null
22.1
'cn' is non-null
39.1
'cn' is non-null
22.1
'cn' is non-null
39.1
'cn' is non-null
22.1
'cn' is non-null
39.1
'cn' is non-null
22.1
'cn' is non-null
39.1
'cn' is non-null
&& cn->has_gimple_body_p ())
23
Calling 'cgraph_node::has_gimple_body_p'
26
Returning from 'cgraph_node::has_gimple_body_p'
27
Taking false branch
40
Calling 'cgraph_node::has_gimple_body_p'
45
Returning from 'cgraph_node::has_gimple_body_p'
46
Taking true branch
3007 return cn;
47
Returning pointer (loaded from 'cn'), which participates in a condition later
3008 }
3009 return NULLnullptr;
3010}
3011
3012/* Return next reachable static variable with initializer after NODE. */
3013inline cgraph_node *
3014symbol_table::next_function_with_gimple_body (cgraph_node *node)
3015{
3016 symtab_node *node1 = node->next;
3017 for (; node1; node1 = node1->next)
3018 {
3019 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
3020 if (cn1 && cn1->has_gimple_body_p ())
3021 return cn1;
3022 }
3023 return NULLnullptr;
3024}
3025
3026/* Walk all functions. */
3027#define FOR_EACH_FUNCTION(node)for ((node) = symtab->first_function (); (node); (node) = symtab
->next_function ((node)))
\
3028 for ((node) = symtab->first_function (); (node); \
3029 (node) = symtab->next_function ((node)))
3030
3031/* Return true when callgraph node is a function with Gimple body defined
3032 in current unit. Functions can also be define externally or they
3033 can be thunks with no Gimple representation.
3034
3035 Note that at WPA stage, the function body may not be present in memory. */
3036
3037inline bool
3038cgraph_node::has_gimple_body_p (void)
3039{
3040 return definition && !thunk && !alias;
24
Assuming field 'definition' is 0
25
Returning zero, which participates in a condition later
41
Assuming field 'definition' is not equal to 0
42
Assuming field 'thunk' is 0
43
Assuming field 'alias' is 0
44
Returning the value 1, which participates in a condition later
3041}
3042
3043/* Walk all functions with body defined. */
3044#define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node)for ((node) = symtab->first_function_with_gimple_body (); (
node); (node) = symtab->next_function_with_gimple_body (node
))
\
3045 for ((node) = symtab->first_function_with_gimple_body (); (node); \
3046 (node) = symtab->next_function_with_gimple_body (node))
3047
3048/* Uniquize all constants that appear in memory.
3049 Each constant in memory thus far output is recorded
3050 in `const_desc_table'. */
3051
3052struct GTY((for_user)) constant_descriptor_tree {
3053 /* A MEM for the constant. */
3054 rtx rtl;
3055
3056 /* The value of the constant. */
3057 tree value;
3058
3059 /* Hash of value. Computing the hash from value each time
3060 hashfn is called can't work properly, as that means recursive
3061 use of the hash table during hash table expansion. */
3062 hashval_t hash;
3063};
3064
3065/* Return true when function is only called directly or it has alias.
3066 i.e. it is not externally visible, address was not taken and
3067 it is not used in any other non-standard way. */
3068
3069inline bool
3070cgraph_node::only_called_directly_or_aliased_p (void)
3071{
3072 gcc_assert (!inlined_to)((void)(!(!inlined_to) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3072, __FUNCTION__), 0 : 0))
;
3073 return (!force_output && !address_taken
3074 && !ifunc_resolver
3075 && !used_from_other_partition
3076 && !DECL_VIRTUAL_P (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3076, __FUNCTION__))->decl_common.virtual_flag)
3077 && !DECL_STATIC_CONSTRUCTOR (decl)((tree_check ((decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3077, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_ctor_flag
)
3078 && !DECL_STATIC_DESTRUCTOR (decl)((tree_check ((decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3078, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_dtor_flag
)
3079 && !used_from_object_file_p ()
3080 && !externally_visible);
3081}
3082
3083/* Return true when function can be removed from callgraph
3084 if all direct calls are eliminated. */
3085
3086inline bool
3087cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
3088{
3089 gcc_checking_assert (!inlined_to)((void)(!(!inlined_to) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3089, __FUNCTION__), 0 : 0))
;
3090 /* Extern inlines can always go, we will use the external definition. */
3091 if (DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3091, __FUNCTION__))->decl_common.decl_flag_1)
)
3092 return true;
3093 /* When function is needed, we cannot remove it. */
3094 if (force_output || used_from_other_partition)
3095 return false;
3096 if (DECL_STATIC_CONSTRUCTOR (decl)((tree_check ((decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3096, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_ctor_flag
)
3097 || DECL_STATIC_DESTRUCTOR (decl)((tree_check ((decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3097, __FUNCTION__, (FUNCTION_DECL)))->function_decl.static_dtor_flag
)
)
3098 return false;
3099 /* Only COMDAT functions can be removed if externally visible. */
3100 if (externally_visible
3101 && ((!DECL_COMDAT (decl)((contains_struct_check ((decl), (TS_DECL_WITH_VIS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3101, __FUNCTION__))->decl_with_vis.comdat_flag)
|| ifunc_resolver)
3102 || forced_by_abi
3103 || used_from_object_file_p ()))
3104 return false;
3105 return true;
3106}
3107
3108/* Verify cgraph, if consistency checking is enabled. */
3109
3110inline void
3111cgraph_node::checking_verify_cgraph_nodes (void)
3112{
3113 if (flag_checkingglobal_options.x_flag_checking)
3114 cgraph_node::verify_cgraph_nodes ();
3115}
3116
3117/* Return true when variable can be removed from variable pool
3118 if all direct calls are eliminated. */
3119
3120inline bool
3121varpool_node::can_remove_if_no_refs_p (void)
3122{
3123 if (DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3123, __FUNCTION__))->decl_common.decl_flag_1)
)
3124 return true;
3125 return (!force_output && !used_from_other_partition
3126 && ((DECL_COMDAT (decl)((contains_struct_check ((decl), (TS_DECL_WITH_VIS), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3126, __FUNCTION__))->decl_with_vis.comdat_flag)
3127 && !forced_by_abi
3128 && !used_from_object_file_p ())
3129 || !externally_visible
3130 || DECL_HAS_VALUE_EXPR_P (decl)((tree_check3 ((decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3130, __FUNCTION__, (VAR_DECL), (PARM_DECL), (RESULT_DECL))
) ->decl_common.decl_flag_2)
));
3131}
3132
3133/* Return true when all references to variable must be visible in ipa_ref_list.
3134 i.e. if the variable is not externally visible or not used in some magic
3135 way (asm statement or such).
3136 The magic uses are all summarized in force_output flag. */
3137
3138inline bool
3139varpool_node::all_refs_explicit_p ()
3140{
3141 return (definition
3142 && !externally_visible
3143 && !used_from_other_partition
3144 && !force_output);
3145}
3146
3147struct tree_descriptor_hasher : ggc_ptr_hash<constant_descriptor_tree>
3148{
3149 static hashval_t hash (constant_descriptor_tree *);
3150 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
3151};
3152
3153/* Constant pool accessor function. */
3154hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
3155
3156/* Return node that alias is aliasing. */
3157
3158inline cgraph_node *
3159cgraph_node::get_alias_target (void)
3160{
3161 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
3162}
3163
3164/* Return node that alias is aliasing. */
3165
3166inline varpool_node *
3167varpool_node::get_alias_target (void)
3168{
3169 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
3170}
3171
3172/* Walk the alias chain to return the symbol NODE is alias of.
3173 If NODE is not an alias, return NODE.
3174 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3175 When REF is non-NULL, assume that reference happens in symbol REF
3176 when determining the availability. */
3177
3178inline symtab_node *
3179symtab_node::ultimate_alias_target (enum availability *availability,
3180 symtab_node *ref)
3181{
3182 if (!alias)
3183 {
3184 if (availability)
3185 *availability = get_availability (ref);
3186 return this;
3187 }
3188
3189 return ultimate_alias_target_1 (availability, ref);
3190}
3191
3192/* Given function symbol, walk the alias chain to return the function node
3193 is alias of. Do not walk through thunks.
3194 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3195 When REF is non-NULL, assume that reference happens in symbol REF
3196 when determining the availability. */
3197
3198inline cgraph_node *
3199cgraph_node::ultimate_alias_target (enum availability *availability,
3200 symtab_node *ref)
3201{
3202 cgraph_node *n = dyn_cast <cgraph_node *>
3203 (symtab_node::ultimate_alias_target (availability, ref));
3204 if (!n && availability)
3205 *availability = AVAIL_NOT_AVAILABLE;
3206 return n;
3207}
3208
3209/* For given variable pool node, walk the alias chain to return the function
3210 the variable is alias of. Do not walk through thunks.
3211 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3212 When REF is non-NULL, assume that reference happens in symbol REF
3213 when determining the availability. */
3214
3215inline varpool_node *
3216varpool_node::ultimate_alias_target (availability *availability,
3217 symtab_node *ref)
3218{
3219 varpool_node *n = dyn_cast <varpool_node *>
3220 (symtab_node::ultimate_alias_target (availability, ref));
3221
3222 if (!n && availability)
3223 *availability = AVAIL_NOT_AVAILABLE;
3224 return n;
3225}
3226
3227/* Set callee N of call graph edge and add it to the corresponding set of
3228 callers. */
3229
3230inline void
3231cgraph_edge::set_callee (cgraph_node *n)
3232{
3233 prev_caller = NULLnullptr;
3234 if (n->callers)
3235 n->callers->prev_caller = this;
3236 next_caller = n->callers;
3237 n->callers = this;
3238 callee = n;
3239}
3240
3241/* Return true when the edge represents a direct recursion. */
3242
3243inline bool
3244cgraph_edge::recursive_p (void)
3245{
3246 cgraph_node *c = callee->ultimate_alias_target ();
3247 if (caller->inlined_to)
3248 return caller->inlined_to->decl == c->decl;
3249 else
3250 return caller->decl == c->decl;
3251}
3252
3253/* Remove the edge from the list of the callers of the callee. */
3254
3255inline void
3256cgraph_edge::remove_callee (void)
3257{
3258 gcc_assert (!indirect_unknown_callee)((void)(!(!indirect_unknown_callee) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3258, __FUNCTION__), 0 : 0))
;
3259 if (prev_caller)
3260 prev_caller->next_caller = next_caller;
3261 if (next_caller)
3262 next_caller->prev_caller = prev_caller;
3263 if (!prev_caller)
3264 callee->callers = next_caller;
3265}
3266
3267/* Return true if call must bind to current definition. */
3268
3269inline bool
3270cgraph_edge::binds_to_current_def_p ()
3271{
3272 if (callee)
3273 return callee->binds_to_current_def_p (caller);
3274 else
3275 return false;
3276}
3277
3278/* Expected frequency of executions within the function.
3279 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
3280 per function call. The range is 0 to CGRAPH_FREQ_MAX. */
3281
3282inline int
3283cgraph_edge::frequency ()
3284{
3285 return count.to_cgraph_frequency (caller->inlined_to
3286 ? caller->inlined_to->count
3287 : caller->count);
3288}
3289
3290
3291/* Return true if the TM_CLONE bit is set for a given FNDECL. */
3292static inline bool
3293decl_is_tm_clone (const_tree fndecl)
3294{
3295 cgraph_node *n = cgraph_node::get (fndecl);
3296 if (n)
3297 return n->tm_clone;
3298 return false;
3299}
3300
3301/* Likewise indicate that a node is needed, i.e. reachable via some
3302 external means. */
3303
3304inline void
3305cgraph_node::mark_force_output (void)
3306{
3307 force_output = 1;
3308 gcc_checking_assert (!inlined_to)((void)(!(!inlined_to) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3308, __FUNCTION__), 0 : 0))
;
3309}
3310
3311/* Return true if function should be optimized for size. */
3312
3313inline enum optimize_size_level
3314cgraph_node::optimize_for_size_p (void)
3315{
3316 if (opt_for_fn (decl, optimize_size)(opts_for_fn (decl)->x_optimize_size))
3317 return OPTIMIZE_SIZE_MAX;
3318 if (count == profile_count::zero ())
3319 return OPTIMIZE_SIZE_MAX;
3320 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
3321 return OPTIMIZE_SIZE_BALANCED;
3322 else
3323 return OPTIMIZE_SIZE_NO;
3324}
3325
3326/* Return symtab_node for NODE or create one if it is not present
3327 in symtab. */
3328
3329inline symtab_node *
3330symtab_node::get_create (tree node)
3331{
3332 if (TREE_CODE (node)((enum tree_code) (node)->base.code) == VAR_DECL)
3333 return varpool_node::get_create (node);
3334 else
3335 return cgraph_node::get_create (node);
3336}
3337
3338/* Return availability of NODE when referenced from REF. */
3339
3340inline enum availability
3341symtab_node::get_availability (symtab_node *ref)
3342{
3343 if (is_a <cgraph_node *> (this))
3344 return dyn_cast <cgraph_node *> (this)->get_availability (ref);
3345 else
3346 return dyn_cast <varpool_node *> (this)->get_availability (ref);
3347}
3348
3349/* Call callback on symtab node and aliases associated to this node.
3350 When INCLUDE_OVERWRITABLE is false, overwritable symbols are skipped. */
3351
3352inline bool
3353symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
3354 void *),
3355 void *data,
3356 bool include_overwritable)
3357{
3358 if (include_overwritable
3359 || get_availability () > AVAIL_INTERPOSABLE)
3360 {
3361 if (callback (this, data))
3362 return true;
3363 }
3364 if (has_aliases_p ())
3365 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3366 return false;
3367}
3368
3369/* Call callback on function and aliases associated to the function.
3370 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3371 skipped. */
3372
3373inline bool
3374cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
3375 void *),
3376 void *data,
3377 bool include_overwritable)
3378{
3379 if (include_overwritable
3380 || get_availability () > AVAIL_INTERPOSABLE)
3381 {
3382 if (callback (this, data))
3383 return true;
3384 }
3385 if (has_aliases_p ())
3386 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3387 return false;
3388}
3389
3390/* Call callback on varpool symbol and aliases associated to varpool symbol.
3391 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3392 skipped. */
3393
3394inline bool
3395varpool_node::call_for_symbol_and_aliases (bool (*callback) (varpool_node *,
3396 void *),
3397 void *data,
3398 bool include_overwritable)
3399{
3400 if (include_overwritable
3401 || get_availability () > AVAIL_INTERPOSABLE)
3402 {
3403 if (callback (this, data))
3404 return true;
3405 }
3406 if (has_aliases_p ())
3407 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3408 return false;
3409}
3410
3411/* Return true if reference may be used in address compare. */
3412
3413inline bool
3414ipa_ref::address_matters_p ()
3415{
3416 if (use != IPA_REF_ADDR)
3417 return false;
3418 /* Addresses taken from virtual tables are never compared. */
3419 if (is_a <varpool_node *> (referring)
3420 && DECL_VIRTUAL_P (referring->decl)((contains_struct_check ((referring->decl), (TS_DECL_COMMON
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3420, __FUNCTION__))->decl_common.virtual_flag)
)
3421 return false;
3422 return referred->address_can_be_compared_p ();
3423}
3424
3425/* Build polymorphic call context for indirect call E. */
3426
3427inline
3428ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
3429{
3430 gcc_checking_assert (e->indirect_info->polymorphic)((void)(!(e->indirect_info->polymorphic) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3430, __FUNCTION__), 0 : 0))
;
3431 *this = e->indirect_info->context;
3432}
3433
3434/* Build empty "I know nothing" context. */
3435
3436inline
3437ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
3438{
3439 clear_speculation ();
3440 clear_outer_type ();
3441 invalid = false;
3442}
3443
3444/* Make context non-speculative. */
3445
3446inline void
3447ipa_polymorphic_call_context::clear_speculation ()
3448{
3449 speculative_outer_type = NULLnullptr;
3450 speculative_offset = 0;
3451 speculative_maybe_derived_type = false;
3452}
3453
3454/* Produce context specifying all derived types of OTR_TYPE. If OTR_TYPE is
3455 NULL, the context is set to dummy "I know nothing" setting. */
3456
3457inline void
3458ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
3459{
3460 outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type)((tree_class_check ((otr_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3460, __FUNCTION__))->type_common.main_variant)
: NULLnullptr;
3461 offset = 0;
3462 maybe_derived_type = true;
3463 maybe_in_construction = true;
3464 dynamic = true;
3465}
3466
3467/* Adjust all offsets in contexts by OFF bits. */
3468
3469inline void
3470ipa_polymorphic_call_context::offset_by (HOST_WIDE_INTlong off)
3471{
3472 if (outer_type)
3473 offset += off;
3474 if (speculative_outer_type)
3475 speculative_offset += off;
3476}
3477
3478/* Return TRUE if context is fully useless. */
3479
3480inline bool
3481ipa_polymorphic_call_context::useless_p () const
3482{
3483 return (!outer_type && !speculative_outer_type);
3484}
3485
3486/* When using fprintf (or similar), problems can arise with
3487 transient generated strings. Many string-generation APIs
3488 only support one result being alive at once (e.g. by
3489 returning a pointer to a statically-allocated buffer).
3490
3491 If there is more than one generated string within one
3492 fprintf call: the first string gets evicted or overwritten
3493 by the second, before fprintf is fully evaluated.
3494 See e.g. PR/53136.
3495
3496 This function provides a workaround for this, by providing
3497 a simple way to create copies of these transient strings,
3498 without the need to have explicit cleanup:
3499
3500 fprintf (dumpfile, "string 1: %s string 2:%s\n",
3501 xstrdup_for_dump (EXPR_1),
3502 xstrdup_for_dump (EXPR_2));
3503
3504 This is actually a simple wrapper around ggc_strdup, but
3505 the name documents the intent. We require that no GC can occur
3506 within the fprintf call. */
3507
3508static inline const char *
3509xstrdup_for_dump (const char *transient_str)
3510{
3511 return ggc_strdup (transient_str)ggc_alloc_string ((transient_str), -1 );
3512}
3513
3514/* During LTO stream-in this predicate can be used to check whether node
3515 in question prevails in the linking to save some memory usage. */
3516inline bool
3517symtab_node::prevailing_p (void)
3518{
3519 return definition && ((!TREE_PUBLIC (decl)((decl)->base.public_flag) && !DECL_EXTERNAL (decl)((contains_struct_check ((decl), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/cgraph.h"
, 3519, __FUNCTION__))->decl_common.decl_flag_1)
)
3520 || previous_sharing_asm_name == NULLnullptr);
3521}
3522
3523extern GTY(()) symbol_table *saved_symtab;
3524
3525#if CHECKING_P1
3526
3527namespace selftest {
3528
3529/* An RAII-style class for use in selftests for temporarily using a different
3530 symbol_table, so that such tests can be isolated from each other. */
3531
3532class symbol_table_test
3533{
3534 public:
3535 /* Constructor. Override "symtab". */
3536 symbol_table_test ();
3537
3538 /* Destructor. Restore the saved_symtab. */
3539 ~symbol_table_test ();
3540};
3541
3542} // namespace selftest
3543
3544#endif /* CHECKING_P */
3545
3546#endif /* GCC_CGRAPH_H */

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/is-a.h

1/* Dynamic testing for abstract is-a relationships.
2 Copyright (C) 2012-2021 Free Software Foundation, Inc.
3 Contributed by Lawrence Crowl.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21
22/* This header generic type query and conversion functions.
23
24
25USING THE GENERIC TYPE FACILITY
26
27
28The user functions are:
29
30bool is_a <TYPE> (pointer)
31
32 Tests whether the pointer actually points to a more derived TYPE.
33
34 Suppose you have a symtab_node *ptr, AKA symtab_node *ptr. You can test
35 whether it points to a 'derived' cgraph_node as follows.
36
37 if (is_a <cgraph_node *> (ptr))
38 ....
39
40
41TYPE as_a <TYPE> (pointer)
42
43 Converts pointer to a TYPE.
44
45 You can just assume that it is such a node.
46
47 do_something_with (as_a <cgraph_node *> *ptr);
48
49TYPE safe_as_a <TYPE> (pointer)
50
51 Like as_a <TYPE> (pointer), but where pointer could be NULL. This
52 adds a check against NULL where the regular is_a_helper hook for TYPE
53 assumes non-NULL.
54
55 do_something_with (safe_as_a <cgraph_node *> *ptr);
56
57TYPE dyn_cast <TYPE> (pointer)
58
59 Converts pointer to TYPE if and only if "is_a <TYPE> pointer". Otherwise,
60 returns NULL. This function is essentially a checked down cast.
61
62 This functions reduce compile time and increase type safety when treating a
63 generic item as a more specific item.
64
65 You can test and obtain a pointer to the 'derived' type in one indivisible
66 operation.
67
68 if (cgraph_node *cptr = dyn_cast <cgraph_node *> (ptr))
69 ....
70
71 As an example, the code change is from
72
73 if (symtab_function_p (node))
74 {
75 struct cgraph_node *cnode = cgraph (node);
76 ....
77 }
78
79 to
80
81 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
82 {
83 ....
84 }
85
86 The necessary conditional test defines a variable that holds a known good
87 pointer to the specific item and avoids subsequent conversion calls and
88 the assertion checks that may come with them.
89
90 When, the property test is embedded within a larger condition, the
91 variable declaration gets pulled out of the condition. (This approach
92 leaves some room for using the variable inappropriately.)
93
94 if (symtab_variable_p (node) && varpool (node)->finalized)
95 varpool_analyze_node (varpool (node));
96
97 becomes
98
99 varpool_node *vnode = dyn_cast <varpool_node *> (node);
100 if (vnode && vnode->finalized)
101 varpool_analyze_node (vnode);
102
103 Note that we have converted two sets of assertions in the calls to varpool
104 into safe and efficient use of a variable.
105
106TYPE safe_dyn_cast <TYPE> (pointer)
107
108 Like dyn_cast <TYPE> (pointer), except that it accepts null pointers
109 and returns null results for them.
110
111
112If you use these functions and get a 'inline function not defined' or a
113'missing symbol' error message for 'is_a_helper<....>::test', it means that
114the connection between the types has not been made. See below.
115
116
117EXTENDING THE GENERIC TYPE FACILITY
118
119Method 1
120--------
121
122If DERIVED is derived from BASE, and if BASE contains enough information
123to determine whether an object is actually an instance of DERIVED,
124then you can make the above routines work for DERIVED by defining
125a specialization of is_a_helper such as:
126
127 template<>
128 struct is_a_helper<DERIVED *> : static_is_a_helper<DERIVED *>
129 {
130 static inline bool test (const BASE *p) { return ...; }
131 };
132
133This test function should return true if P is an instanced of DERIVED.
134This on its own is enough; the comments below for method 2 do not apply.
135
136Method 2
137--------
138
139Alternatively, if two types are connected in ways other than C++
140inheritance, each connection between them must be made by defining a
141specialization of the template member function 'test' of the template
142class 'is_a_helper'. For example,
143
144 template <>
145 template <>
146 inline bool
147 is_a_helper <cgraph_node *>::test (symtab_node *p)
148 {
149 return p->type == SYMTAB_FUNCTION;
150 }
151
152If a simple reinterpret_cast between the pointer types is incorrect, then you
153must also specialize the template member function 'cast'. Failure to do so
154when needed may result in a crash. For example,
155
156 template <>
157 template <>
158 inline bool
159 is_a_helper <cgraph_node *>::cast (symtab_node *p)
160 {
161 return &p->x_function;
162 }
163
164*/
165
166#ifndef GCC_IS_A_H
167#define GCC_IS_A_H
168
169/* A base class that specializations of is_a_helper can use if casting
170 U * to T is simply a reinterpret_cast. */
171
172template <typename T>
173struct reinterpret_is_a_helper
174{
175 template <typename U>
176 static inline T cast (U *p) { return reinterpret_cast <T> (p); }
17
Returning without writing to 'p->next', which participates in a condition later
18
Returning pointer (loaded from 'p'), which participates in a condition later
177};
178
179/* A base class that specializations of is_a_helper can use if casting
180 U * to T is simply a static_cast. This is more type-safe than
181 reinterpret_is_a_helper. */
182
183template <typename T>
184struct static_is_a_helper
185{
186 template <typename U>
187 static inline T cast (U *p) { return static_cast <T> (p); }
188};
189
190/* A generic type conversion internal helper class. */
191
192template <typename T>
193struct is_a_helper : reinterpret_is_a_helper<T>
194{
195 template <typename U>
196 static inline bool test (U *p);
197};
198
199/* Reuse the definition of is_a_helper<T *> to implement
200 is_a_helper<const T *>. */
201
202template <typename T>
203struct is_a_helper<const T *>
204{
205 template <typename U>
206 static inline const T *cast (const U *p)
207 {
208 return is_a_helper<T *>::cast (const_cast <U *> (p));
209 }
210 template <typename U>
211 static inline bool test (const U *p)
212 {
213 return is_a_helper<T *>::test (p);
214 }
215};
216
217/* Note that we deliberately do not define the 'test' member template. Not
218 doing so will result in a build-time error for type relationships that have
219 not been defined, rather than a run-time error. See the discussion above
220 for when to define this member. */
221
222/* The public interface. */
223
224/* A generic test for a type relationship. See the discussion above for when
225 to use this function. The question answered is "Is type T a derived type of
226 type U?". */
227
228template <typename T, typename U>
229inline bool
230is_a (U *p)
231{
232 return is_a_helper<T>::test (p);
7
Calling 'is_a_helper::test'
11
Returning from 'is_a_helper::test'
12
Returning without writing to 'p->next', which participates in a condition later
13
Returning the value 1, which participates in a condition later
31
Calling 'is_a_helper::test'
34
Returning from 'is_a_helper::test'
35
Returning the value 1, which participates in a condition later
233}
234
235/* A generic conversion from a base type U to a derived type T. See the
236 discussion above for when to use this function. */
237
238template <typename T, typename U>
239inline T
240as_a (U *p)
241{
242 gcc_checking_assert (is_a <T> (p))((void)(!(is_a <T> (p)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/is-a.h"
, 242, __FUNCTION__), 0 : 0))
;
243 return is_a_helper <T>::cast (p);
244}
245
246/* Similar to as_a<>, but where the pointer can be NULL, even if
247 is_a_helper<T> doesn't check for NULL. */
248
249template <typename T, typename U>
250inline T
251safe_as_a (U *p)
252{
253 if (p)
254 {
255 gcc_checking_assert (is_a <T> (p))((void)(!(is_a <T> (p)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/is-a.h"
, 255, __FUNCTION__), 0 : 0))
;
256 return is_a_helper <T>::cast (p);
257 }
258 else
259 return NULLnullptr;
260}
261
262/* A generic checked conversion from a base type U to a derived type T. See
263 the discussion above for when to use this function. */
264
265template <typename T, typename U>
266inline T
267dyn_cast (U *p)
268{
269 if (is_a <T> (p))
6
Calling 'is_a<cgraph_node *, symtab_node>'
14
Returning from 'is_a<cgraph_node *, symtab_node>'
15
Taking true branch
30
Calling 'is_a<cgraph_node *, symtab_node>'
36
Returning from 'is_a<cgraph_node *, symtab_node>'
37
Taking true branch
270 return is_a_helper <T>::cast (p);
16
Calling 'reinterpret_is_a_helper::cast'
19
Returning from 'reinterpret_is_a_helper::cast'
20
Returning without writing to 'p->next', which participates in a condition later
21
Returning pointer, which participates in a condition later
38
Returning pointer, which participates in a condition later
271 else
272 return static_cast <T> (0);
273}
274
275/* Similar to dyn_cast, except that the pointer may be null. */
276
277template <typename T, typename U>
278inline T
279safe_dyn_cast (U *p)
280{
281 return p ? dyn_cast <T> (p) : 0;
282}
283
284#endif /* GCC_IS_A_H */

/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h

1/* Profile counter container type.
2 Copyright (C) 2017-2021 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#ifndef GCC_PROFILE_COUNT_H
22#define GCC_PROFILE_COUNT_H
23
24struct function;
25struct profile_count;
26class sreal;
27
28/* Quality of the profile count. Because gengtype does not support enums
29 inside of classes, this is in global namespace. */
30enum profile_quality {
31 /* Uninitialized value. */
32 UNINITIALIZED_PROFILE,
33
34 /* Profile is based on static branch prediction heuristics and may
35 or may not match reality. It is local to function and cannot be compared
36 inter-procedurally. Never used by probabilities (they are always local).
37 */
38 GUESSED_LOCAL,
39
40 /* Profile was read by feedback and was 0, we used local heuristics to guess
41 better. This is the case of functions not run in profile feedback.
42 Never used by probabilities. */
43 GUESSED_GLOBAL0,
44
45 /* Same as GUESSED_GLOBAL0 but global count is adjusted 0. */
46 GUESSED_GLOBAL0_ADJUSTED,
47
48 /* Profile is based on static branch prediction heuristics. It may or may
49 not reflect the reality but it can be compared interprocedurally
50 (for example, we inlined function w/o profile feedback into function
51 with feedback and propagated from that).
52 Never used by probabilities. */
53 GUESSED,
54
55 /* Profile was determined by autofdo. */
56 AFDO,
57
58 /* Profile was originally based on feedback but it was adjusted
59 by code duplicating optimization. It may not precisely reflect the
60 particular code path. */
61 ADJUSTED,
62
63 /* Profile was read from profile feedback or determined by accurate static
64 method. */
65 PRECISE
66};
67
68extern const char *profile_quality_as_string (enum profile_quality);
69extern bool parse_profile_quality (const char *value,
70 profile_quality *quality);
71
72/* The base value for branch probability notes and edge probabilities. */
73#define REG_BR_PROB_BASE10000 10000
74
75#define RDIV(X,Y)(((X) + (Y) / 2) / (Y)) (((X) + (Y) / 2) / (Y))
76
77bool slow_safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res);
78
79/* Compute RES=(a*b + c/2)/c capping and return false if overflow happened. */
80
81inline bool
82safe_scale_64bit (uint64_t a, uint64_t b, uint64_t c, uint64_t *res)
83{
84#if (GCC_VERSION(4 * 1000 + 2) >= 5000)
85 uint64_t tmp;
86 if (!__builtin_mul_overflow (a, b, &tmp)
87 && !__builtin_add_overflow (tmp, c/2, &tmp))
88 {
89 *res = tmp / c;
90 return true;
91 }
92 if (c == 1)
93 {
94 *res = (uint64_t) -1;
95 return false;
96 }
97#else
98 if (a < ((uint64_t)1 << 31)
99 && b < ((uint64_t)1 << 31)
100 && c < ((uint64_t)1 << 31))
101 {
102 *res = (a * b + (c / 2)) / c;
103 return true;
104 }
105#endif
106 return slow_safe_scale_64bit (a, b, c, res);
107}
108
109/* Data type to hold probabilities. It implements fixed point arithmetics
110 with capping so probability is always in range [0,1] and scaling requiring
111 values greater than 1 needs to be represented otherwise.
112
113 In addition to actual value the quality of profile is tracked and propagated
114 through all operations. Special value UNINITIALIZED_PROFILE is used for probabilities
115 that has not been determined yet (for example because of
116 -fno-guess-branch-probability)
117
118 Typically probabilities are derived from profile feedback (via
119 probability_in_gcov_type), autoFDO or guessed statically and then propagated
120 thorough the compilation.
121
122 Named probabilities are available:
123 - never (0 probability)
124 - guessed_never
125 - very_unlikely (1/2000 probability)
126 - unlikely (1/5 probability)
127 - even (1/2 probability)
128 - likely (4/5 probability)
129 - very_likely (1999/2000 probability)
130 - guessed_always
131 - always
132
133 Named probabilities except for never/always are assumed to be statically
134 guessed and thus not necessarily accurate. The difference between never
135 and guessed_never is that the first one should be used only in case that
136 well behaving program will very likely not execute the "never" path.
137 For example if the path is going to abort () call or it exception handling.
138
139 Always and guessed_always probabilities are symmetric.
140
141 For legacy code we support conversion to/from REG_BR_PROB_BASE based fixpoint
142 integer arithmetics. Once the code is converted to branch probabilities,
143 these conversions will probably go away because they are lossy.
144*/
145
146class GTY((user)) profile_probability
147{
148 static const int n_bits = 29;
149 /* We can technically use ((uint32_t) 1 << (n_bits - 1)) - 2 but that
150 will lead to harder multiplication sequences. */
151 static const uint32_t max_probability = (uint32_t) 1 << (n_bits - 2);
152 static const uint32_t uninitialized_probability
153 = ((uint32_t) 1 << (n_bits - 1)) - 1;
154
155 uint32_t m_val : 29;
156 enum profile_quality m_quality : 3;
157
158 friend struct profile_count;
159public:
160 profile_probability (): m_val (uninitialized_probability),
161 m_quality (GUESSED)
162 {}
163
164 profile_probability (uint32_t val, profile_quality quality):
165 m_val (val), m_quality (quality)
166 {}
167
168 /* Named probabilities. */
169 static profile_probability never ()
170 {
171 profile_probability ret;
172 ret.m_val = 0;
173 ret.m_quality = PRECISE;
174 return ret;
175 }
176
177 static profile_probability guessed_never ()
178 {
179 profile_probability ret;
180 ret.m_val = 0;
181 ret.m_quality = GUESSED;
182 return ret;
183 }
184
185 static profile_probability very_unlikely ()
186 {
187 /* Be consistent with PROB_VERY_UNLIKELY in predict.h. */
188 profile_probability r = guessed_always ().apply_scale (1, 2000);
189 r.m_val--;
190 return r;
191 }
192
193 static profile_probability unlikely ()
194 {
195 /* Be consistent with PROB_VERY_LIKELY in predict.h. */
196 profile_probability r = guessed_always ().apply_scale (1, 5);
197 r.m_val--;
198 return r;
199 }
200
201 static profile_probability even ()
202 {
203 return guessed_always ().apply_scale (1, 2);
204 }
205
206 static profile_probability very_likely ()
207 {
208 return always () - very_unlikely ();
209 }
210
211 static profile_probability likely ()
212 {
213 return always () - unlikely ();
214 }
215
216 static profile_probability guessed_always ()
217 {
218 profile_probability ret;
219 ret.m_val = max_probability;
220 ret.m_quality = GUESSED;
221 return ret;
222 }
223
224 static profile_probability always ()
225 {
226 profile_probability ret;
227 ret.m_val = max_probability;
228 ret.m_quality = PRECISE;
229 return ret;
230 }
231
232 /* Probabilities which has not been initialized. Either because
233 initialization did not happen yet or because profile is unknown. */
234 static profile_probability uninitialized ()
235 {
236 profile_probability c;
237 c.m_val = uninitialized_probability;
238 c.m_quality = GUESSED;
239 return c;
240 }
241
242 /* Return true if value has been initialized. */
243 bool initialized_p () const
244 {
245 return m_val != uninitialized_probability;
246 }
247
248 /* Return true if value can be trusted. */
249 bool reliable_p () const
250 {
251 return m_quality >= ADJUSTED;
252 }
253
254 /* Conversion from and to REG_BR_PROB_BASE integer fixpoint arithmetics.
255 this is mostly to support legacy code and should go away. */
256 static profile_probability from_reg_br_prob_base (int v)
257 {
258 profile_probability ret;
259 gcc_checking_assert (v >= 0 && v <= REG_BR_PROB_BASE)((void)(!(v >= 0 && v <= 10000) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 259, __FUNCTION__), 0 : 0))
;
260 ret.m_val = RDIV (v * (uint64_t) max_probability, REG_BR_PROB_BASE)(((v * (uint64_t) max_probability) + (10000) / 2) / (10000));
261 ret.m_quality = GUESSED;
262 return ret;
263 }
264
265 /* Return THIS with quality set to ADJUSTED. */
266 profile_probability adjusted () const
267 {
268 profile_probability ret = *this;
269 if (!initialized_p ())
270 return *this;
271 ret.m_quality = ADJUSTED;
272 return ret;
273 }
274
275 int to_reg_br_prob_base () const
276 {
277 gcc_checking_assert (initialized_p ())((void)(!(initialized_p ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 277, __FUNCTION__), 0 : 0))
;
278 return RDIV (m_val * (uint64_t) REG_BR_PROB_BASE, max_probability)(((m_val * (uint64_t) 10000) + (max_probability) / 2) / (max_probability
))
;
279 }
280
281 /* Conversion to and from RTL representation of profile probabilities. */
282 static profile_probability from_reg_br_prob_note (int v)
283 {
284 profile_probability ret;
285 ret.m_val = ((unsigned int)v) / 8;
286 ret.m_quality = (enum profile_quality)(v & 7);
287 return ret;
288 }
289
290 int to_reg_br_prob_note () const
291 {
292 gcc_checking_assert (initialized_p ())((void)(!(initialized_p ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 292, __FUNCTION__), 0 : 0))
;
293 int ret = m_val * 8 + m_quality;
294 gcc_checking_assert (from_reg_br_prob_note (ret) == *this)((void)(!(from_reg_br_prob_note (ret) == *this) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 294, __FUNCTION__), 0 : 0))
;
295 return ret;
296 }
297
298 /* Return VAL1/VAL2. */
299 static profile_probability probability_in_gcov_type
300 (gcov_type val1, gcov_type val2)
301 {
302 profile_probability ret;
303 gcc_checking_assert (val1 >= 0 && val2 > 0)((void)(!(val1 >= 0 && val2 > 0) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 303, __FUNCTION__), 0 : 0))
;
304 if (val1 > val2)
305 ret.m_val = max_probability;
306 else
307 {
308 uint64_t tmp;
309 safe_scale_64bit (val1, max_probability, val2, &tmp);
310 gcc_checking_assert (tmp <= max_probability)((void)(!(tmp <= max_probability) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 310, __FUNCTION__), 0 : 0))
;
311 ret.m_val = tmp;
312 }
313 ret.m_quality = PRECISE;
314 return ret;
315 }
316
317 /* Basic operations. */
318 bool operator== (const profile_probability &other) const
319 {
320 return m_val == other.m_val && m_quality == other.m_quality;
321 }
322
323 profile_probability operator+ (const profile_probability &other) const
324 {
325 if (other == never ())
326 return *this;
327 if (*this == never ())
328 return other;
329 if (!initialized_p () || !other.initialized_p ())
330 return uninitialized ();
331
332 profile_probability ret;
333 ret.m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability)(((uint32_t)(m_val + other.m_val)) < (max_probability) ? (
(uint32_t)(m_val + other.m_val)) : (max_probability))
;
334 ret.m_quality = MIN (m_quality, other.m_quality)((m_quality) < (other.m_quality) ? (m_quality) : (other.m_quality
))
;
335 return ret;
336 }
337
338 profile_probability &operator+= (const profile_probability &other)
339 {
340 if (other == never ())
341 return *this;
342 if (*this == never ())
343 {
344 *this = other;
345 return *this;
346 }
347 if (!initialized_p () || !other.initialized_p ())
348 return *this = uninitialized ();
349 else
350 {
351 m_val = MIN ((uint32_t)(m_val + other.m_val), max_probability)(((uint32_t)(m_val + other.m_val)) < (max_probability) ? (
(uint32_t)(m_val + other.m_val)) : (max_probability))
;
352 m_quality = MIN (m_quality, other.m_quality)((m_quality) < (other.m_quality) ? (m_quality) : (other.m_quality
))
;
353 }
354 return *this;
355 }
356
357 profile_probability operator- (const profile_probability &other) const
358 {
359 if (*this == never ()
360 || other == never ())
361 return *this;
362 if (!initialized_p () || !other.initialized_p ())
363 return uninitialized ();
364 profile_probability ret;
365 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
366 ret.m_quality = MIN (m_quality, other.m_quality)((m_quality) < (other.m_quality) ? (m_quality) : (other.m_quality
))
;
367 return ret;
368 }
369
370 profile_probability &operator-= (const profile_probability &other)
371 {
372 if (*this == never ()
373 || other == never ())
374 return *this;
375 if (!initialized_p () || !other.initialized_p ())
376 return *this = uninitialized ();
377 else
378 {
379 m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
380 m_quality = MIN (m_quality, other.m_quality)((m_quality) < (other.m_quality) ? (m_quality) : (other.m_quality
))
;
381 }
382 return *this;
383 }
384
385 profile_probability operator* (const profile_probability &other) const
386 {
387 if (*this == never ()
388 || other == never ())
389 return never ();
390 if (!initialized_p () || !other.initialized_p ())
391 return uninitialized ();
392 profile_probability ret;
393 ret.m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability)((((uint64_t)m_val * other.m_val) + (max_probability) / 2) / (
max_probability))
;
394 ret.m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED)((((m_quality) < (other.m_quality) ? (m_quality) : (other.
m_quality))) < (ADJUSTED) ? (((m_quality) < (other.m_quality
) ? (m_quality) : (other.m_quality))) : (ADJUSTED))
;
395 return ret;
396 }
397
398 profile_probability &operator*= (const profile_probability &other)
399 {
400 if (*this == never ()
401 || other == never ())
402 return *this = never ();
403 if (!initialized_p () || !other.initialized_p ())
404 return *this = uninitialized ();
405 else
406 {
407 m_val = RDIV ((uint64_t)m_val * other.m_val, max_probability)((((uint64_t)m_val * other.m_val) + (max_probability) / 2) / (
max_probability))
;
408 m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED)((((m_quality) < (other.m_quality) ? (m_quality) : (other.
m_quality))) < (ADJUSTED) ? (((m_quality) < (other.m_quality
) ? (m_quality) : (other.m_quality))) : (ADJUSTED))
;
409 }
410 return *this;
411 }
412
413 profile_probability operator/ (const profile_probability &other) const
414 {
415 if (*this == never ())
416 return never ();
417 if (!initialized_p () || !other.initialized_p ())
418 return uninitialized ();
419 profile_probability ret;
420 /* If we get probability above 1, mark it as unreliable and return 1. */
421 if (m_val >= other.m_val)
422 {
423 ret.m_val = max_probability;
424 ret.m_quality = MIN (MIN (m_quality, other.m_quality),((((m_quality) < (other.m_quality) ? (m_quality) : (other.
m_quality))) < (GUESSED) ? (((m_quality) < (other.m_quality
) ? (m_quality) : (other.m_quality))) : (GUESSED))
425 GUESSED)((((m_quality) < (other.m_quality) ? (m_quality) : (other.
m_quality))) < (GUESSED) ? (((m_quality) < (other.m_quality
) ? (m_quality) : (other.m_quality))) : (GUESSED))
;
426 return ret;
427 }
428 else if (!m_val)
429 ret.m_val = 0;
430 else
431 {
432 gcc_checking_assert (other.m_val)((void)(!(other.m_val) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 432, __FUNCTION__), 0 : 0))
;
433 ret.m_val = MIN (RDIV ((uint64_t)m_val * max_probability,((((((uint64_t)m_val * max_probability) + (other.m_val) / 2) /
(other.m_val))) < (max_probability) ? (((((uint64_t)m_val
* max_probability) + (other.m_val) / 2) / (other.m_val))) : (
max_probability))
434 other.m_val),((((((uint64_t)m_val * max_probability) + (other.m_val) / 2) /
(other.m_val))) < (max_probability) ? (((((uint64_t)m_val
* max_probability) + (other.m_val) / 2) / (other.m_val))) : (
max_probability))
435 max_probability)((((((uint64_t)m_val * max_probability) + (other.m_val) / 2) /
(other.m_val))) < (max_probability) ? (((((uint64_t)m_val
* max_probability) + (other.m_val) / 2) / (other.m_val))) : (
max_probability))
;
436 }
437 ret.m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED)((((m_quality) < (other.m_quality) ? (m_quality) : (other.
m_quality))) < (ADJUSTED) ? (((m_quality) < (other.m_quality
) ? (m_quality) : (other.m_quality))) : (ADJUSTED))
;
438 return ret;
439 }
440
441 profile_probability &operator/= (const profile_probability &other)
442 {
443 if (*this == never ())
444 return *this = never ();
445 if (!initialized_p () || !other.initialized_p ())
446 return *this = uninitialized ();
447 else
448 {
449 /* If we get probability above 1, mark it as unreliable
450 and return 1. */
451 if (m_val > other.m_val)
452 {
453 m_val = max_probability;
454 m_quality = MIN (MIN (m_quality, other.m_quality),((((m_quality) < (other.m_quality) ? (m_quality) : (other.
m_quality))) < (GUESSED) ? (((m_quality) < (other.m_quality
) ? (m_quality) : (other.m_quality))) : (GUESSED))
455 GUESSED)((((m_quality) < (other.m_quality) ? (m_quality) : (other.
m_quality))) < (GUESSED) ? (((m_quality) < (other.m_quality
) ? (m_quality) : (other.m_quality))) : (GUESSED))
;
456 return *this;
457 }
458 else if (!m_val)
459 ;
460 else
461 {
462 gcc_checking_assert (other.m_val)((void)(!(other.m_val) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 462, __FUNCTION__), 0 : 0))
;
463 m_val = MIN (RDIV ((uint64_t)m_val * max_probability,((((((uint64_t)m_val * max_probability) + (other.m_val) / 2) /
(other.m_val))) < (max_probability) ? (((((uint64_t)m_val
* max_probability) + (other.m_val) / 2) / (other.m_val))) : (
max_probability))
464 other.m_val),((((((uint64_t)m_val * max_probability) + (other.m_val) / 2) /
(other.m_val))) < (max_probability) ? (((((uint64_t)m_val
* max_probability) + (other.m_val) / 2) / (other.m_val))) : (
max_probability))
465 max_probability)((((((uint64_t)m_val * max_probability) + (other.m_val) / 2) /
(other.m_val))) < (max_probability) ? (((((uint64_t)m_val
* max_probability) + (other.m_val) / 2) / (other.m_val))) : (
max_probability))
;
466 }
467 m_quality = MIN (MIN (m_quality, other.m_quality), ADJUSTED)((((m_quality) < (other.m_quality) ? (m_quality) : (other.
m_quality))) < (ADJUSTED) ? (((m_quality) < (other.m_quality
) ? (m_quality) : (other.m_quality))) : (ADJUSTED))
;
468 }
469 return *this;
470 }
471
472 /* Split *THIS (ORIG) probability into 2 probabilities, such that
473 the returned one (FIRST) is *THIS * CPROB and *THIS is
474 adjusted (SECOND) so that FIRST + FIRST.invert () * SECOND
475 == ORIG. This is useful e.g. when splitting a conditional
476 branch like:
477 if (cond)
478 goto lab; // ORIG probability
479 into
480 if (cond1)
481 goto lab; // FIRST = ORIG * CPROB probability
482 if (cond2)
483 goto lab; // SECOND probability
484 such that the overall probability of jumping to lab remains
485 the same. CPROB gives the relative probability between the
486 branches. */
487 profile_probability split (const profile_probability &cprob)
488 {
489 profile_probability ret = *this * cprob;
490 /* The following is equivalent to:
491 *this = cprob.invert () * *this / ret.invert ();
492 Avoid scaling when overall outcome is supposed to be always.
493 Without knowing that one is inverse of other, the result would be
494 conservative. */
495 if (!(*this == always ()))
496 *this = (*this - ret) / ret.invert ();
497 return ret;
498 }
499
500 gcov_type apply (gcov_type val) const
501 {
502 if (*this == uninitialized ())
503 return val / 2;
504 return RDIV (val * m_val, max_probability)(((val * m_val) + (max_probability) / 2) / (max_probability));
505 }
506
507 /* Return 1-*THIS. */
508 profile_probability invert () const
509 {
510 return always() - *this;
511 }
512
513 /* Return THIS with quality dropped to GUESSED. */
514 profile_probability guessed () const
515 {
516 profile_probability ret = *this;
517 ret.m_quality = GUESSED;
518 return ret;
519 }
520
521 /* Return THIS with quality dropped to AFDO. */
522 profile_probability afdo () const
523 {
524 profile_probability ret = *this;
525 ret.m_quality = AFDO;
526 return ret;
527 }
528
529 /* Return *THIS * NUM / DEN. */
530 profile_probability apply_scale (int64_t num, int64_t den) const
531 {
532 if (*this == never ())
533 return *this;
534 if (!initialized_p ())
535 return uninitialized ();
536 profile_probability ret;
537 uint64_t tmp;
538 safe_scale_64bit (m_val, num, den, &tmp);
539 ret.m_val = MIN (tmp, max_probability)((tmp) < (max_probability) ? (tmp) : (max_probability));
540 ret.m_quality = MIN (m_quality, ADJUSTED)((m_quality) < (ADJUSTED) ? (m_quality) : (ADJUSTED));
541 return ret;
542 }
543
544 /* Return true when the probability of edge is reliable.
545
546 The profile guessing code is good at predicting branch outcome (i.e.
547 taken/not taken), that is predicted right slightly over 75% of time.
548 It is however notoriously poor on predicting the probability itself.
549 In general the profile appear a lot flatter (with probabilities closer
550 to 50%) than the reality so it is bad idea to use it to drive optimization
551 such as those disabling dynamic branch prediction for well predictable
552 branches.
553
554 There are two exceptions - edges leading to noreturn edges and edges
555 predicted by number of iterations heuristics are predicted well. This macro
556 should be able to distinguish those, but at the moment it simply check for
557 noreturn heuristic that is only one giving probability over 99% or bellow
558 1%. In future we might want to propagate reliability information across the
559 CFG if we find this information useful on multiple places. */
560 bool probably_reliable_p () const
561 {
562 if (m_quality >= ADJUSTED)
563 return true;
564 if (!initialized_p ())
565 return false;
566 return m_val < max_probability / 100
567 || m_val > max_probability - max_probability / 100;
568 }
569
570 /* Return false if profile_probability is bogus. */
571 bool verify () const
572 {
573 gcc_checking_assert (m_quality != UNINITIALIZED_PROFILE)((void)(!(m_quality != UNINITIALIZED_PROFILE) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 573, __FUNCTION__), 0 : 0))
;
574 if (m_val == uninitialized_probability)
575 return m_quality == GUESSED;
576 else if (m_quality < GUESSED)
577 return false;
578 return m_val <= max_probability;
579 }
580
581 /* Comparisons are three-state and conservative. False is returned if
582 the inequality cannot be decided. */
583 bool operator< (const profile_probability &other) const
584 {
585 return initialized_p () && other.initialized_p () && m_val < other.m_val;
586 }
587
588 bool operator> (const profile_probability &other) const
589 {
590 return initialized_p () && other.initialized_p () && m_val > other.m_val;
591 }
592
593 bool operator<= (const profile_probability &other) const
594 {
595 return initialized_p () && other.initialized_p () && m_val <= other.m_val;
596 }
597
598 bool operator>= (const profile_probability &other) const
599 {
600 return initialized_p () && other.initialized_p () && m_val >= other.m_val;
601 }
602
603 /* Get the value of the count. */
604 uint32_t value () const { return m_val; }
605
606 /* Get the quality of the count. */
607 enum profile_quality quality () const { return m_quality; }
608
609 /* Output THIS to F. */
610 void dump (FILE *f) const;
611
612 /* Print THIS to stderr. */
613 void debug () const;
614
615 /* Return true if THIS is known to differ significantly from OTHER. */
616 bool differs_from_p (profile_probability other) const;
617
618 /* Return if difference is greater than 50%. */
619 bool differs_lot_from_p (profile_probability other) const;
620
621 /* COUNT1 times event happens with *THIS probability, COUNT2 times OTHER
622 happens with COUNT2 probability. Return probability that either *THIS or
623 OTHER happens. */
624 profile_probability combine_with_count (profile_count count1,
625 profile_probability other,
626 profile_count count2) const;
627
628 /* Return probability as sreal. */
629 sreal to_sreal () const;
630 /* LTO streaming support. */
631 static profile_probability stream_in (class lto_input_block *);
632 void stream_out (struct output_block *);
633 void stream_out (struct lto_output_stream *);
634};
635
636/* Main data type to hold profile counters in GCC. Profile counts originate
637 either from profile feedback, static profile estimation or both. We do not
638 perform whole program profile propagation and thus profile estimation
639 counters are often local to function, while counters from profile feedback
640 (or special cases of profile estimation) can be used inter-procedurally.
641
642 There are 3 basic types
643 1) local counters which are result of intra-procedural static profile
644 estimation.
645 2) ipa counters which are result of profile feedback or special case
646 of static profile estimation (such as in function main).
647 3) counters which counts as 0 inter-procedurally (because given function
648 was never run in train feedback) but they hold local static profile
649 estimate.
650
651 Counters of type 1 and 3 cannot be mixed with counters of different type
652 within operation (because whole function should use one type of counter)
653 with exception that global zero mix in most operations where outcome is
654 well defined.
655
656 To take local counter and use it inter-procedurally use ipa member function
657 which strips information irrelevant at the inter-procedural level.
658
659 Counters are 61bit integers representing number of executions during the
660 train run or normalized frequency within the function.
661
662 As the profile is maintained during the compilation, many adjustments are
663 made. Not all transformations can be made precisely, most importantly
664 when code is being duplicated. It also may happen that part of CFG has
665 profile counts known while other do not - for example when LTO optimizing
666 partly profiled program or when profile was lost due to COMDAT merging.
667
668 For this reason profile_count tracks more information than
669 just unsigned integer and it is also ready for profile mismatches.
670 The API of this data type represent operations that are natural
671 on profile counts - sum, difference and operation with scales and
672 probabilities. All operations are safe by never getting negative counts
673 and they do end up in uninitialized scale if any of the parameters is
674 uninitialized.
675
676 All comparisons that are three state and handling of probabilities. Thus
677 a < b is not equal to !(a >= b).
678
679 The following pre-defined counts are available:
680
681 profile_count::zero () for code that is known to execute zero times at
682 runtime (this can be detected statically i.e. for paths leading to
683 abort ();
684 profile_count::one () for code that is known to execute once (such as
685 main () function
686 profile_count::uninitialized () for unknown execution count.
687
688 */
689
690struct GTY(()) profile_count
691{
692public:
693 /* Use 62bit to hold basic block counters. Should be at least
694 64bit. Although a counter cannot be negative, we use a signed
695 type to hold various extra stages. */
696
697 static const int n_bits = 61;
698 static const uint64_t max_count = ((uint64_t) 1 << n_bits) - 2;
699private:
700 static const uint64_t uninitialized_count = ((uint64_t) 1 << n_bits) - 1;
701
702#if defined (__arm__) && (__GNUC__4 >= 6 && __GNUC__4 <= 8)
703 /* Work-around for PR88469. A bug in the gcc-6/7/8 PCS layout code
704 incorrectly detects the alignment of a structure where the only
705 64-bit aligned object is a bit-field. We force the alignment of
706 the entire field to mitigate this. */
707#define UINT64_BIT_FIELD_ALIGN __attribute__ ((aligned(8)))
708#else
709#define UINT64_BIT_FIELD_ALIGN
710#endif
711 uint64_t UINT64_BIT_FIELD_ALIGN m_val : n_bits;
712#undef UINT64_BIT_FIELD_ALIGN
713 enum profile_quality m_quality : 3;
714public:
715
716 /* Return true if both values can meaningfully appear in single function
717 body. We have either all counters in function local or global, otherwise
718 operations between them are not really defined well. */
719 bool compatible_p (const profile_count other) const
720 {
721 if (!initialized_p () || !other.initialized_p ())
722 return true;
723 if (*this == zero ()
724 || other == zero ())
725 return true;
726 /* Do not allow nonzero global profile together with local guesses
727 that are globally0. */
728 if (ipa ().nonzero_p ()
729 && !(other.ipa () == other))
730 return false;
731 if (other.ipa ().nonzero_p ()
732 && !(ipa () == *this))
733 return false;
734
735 return ipa_p () == other.ipa_p ();
736 }
737
738 /* Used for counters which are expected to be never executed. */
739 static profile_count zero ()
740 {
741 return from_gcov_type (0);
742 }
743
744 static profile_count adjusted_zero ()
745 {
746 profile_count c;
747 c.m_val = 0;
748 c.m_quality = ADJUSTED;
749 return c;
750 }
751
752 static profile_count guessed_zero ()
753 {
754 profile_count c;
755 c.m_val = 0;
756 c.m_quality = GUESSED;
757 return c;
758 }
759
760 static profile_count one ()
761 {
762 return from_gcov_type (1);
763 }
764
765 /* Value of counters which has not been initialized. Either because
766 initialization did not happen yet or because profile is unknown. */
767 static profile_count uninitialized ()
768 {
769 profile_count c;
770 c.m_val = uninitialized_count;
771 c.m_quality = GUESSED_LOCAL;
772 return c;
773 }
774
775 /* Conversion to gcov_type is lossy. */
776 gcov_type to_gcov_type () const
777 {
778 gcc_checking_assert (initialized_p ())((void)(!(initialized_p ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 778, __FUNCTION__), 0 : 0))
;
779 return m_val;
780 }
781
782 /* Return true if value has been initialized. */
783 bool initialized_p () const
784 {
785 return m_val != uninitialized_count;
786 }
787
788 /* Return true if value can be trusted. */
789 bool reliable_p () const
790 {
791 return m_quality >= ADJUSTED;
792 }
793
794 /* Return true if value can be operated inter-procedurally. */
795 bool ipa_p () const
796 {
797 return !initialized_p () || m_quality >= GUESSED_GLOBAL0;
51
Returning the value 1, which participates in a condition later
798 }
799
800 /* Return true if quality of profile is precise. */
801 bool precise_p () const
802 {
803 return m_quality == PRECISE;
804 }
805
806 /* Get the value of the count. */
807 uint32_t value () const { return m_val; }
808
809 /* Get the quality of the count. */
810 enum profile_quality quality () const { return m_quality; }
811
812 /* When merging basic blocks, the two different profile counts are unified.
813 Return true if this can be done without losing info about profile.
814 The only case we care about here is when first BB contains something
815 that makes it terminate in a way not visible in CFG. */
816 bool ok_for_merging (profile_count other) const
817 {
818 if (m_quality < ADJUSTED
819 || other.m_quality < ADJUSTED)
820 return true;
821 return !(other < *this);
822 }
823
824 /* When merging two BBs with different counts, pick common count that looks
825 most representative. */
826 profile_count merge (profile_count other) const
827 {
828 if (*this == other || !other.initialized_p ()
829 || m_quality > other.m_quality)
830 return *this;
831 if (other.m_quality > m_quality
832 || other > *this)
833 return other;
834 return *this;
835 }
836
837 /* Basic operations. */
838 bool operator== (const profile_count &other) const
839 {
840 return m_val == other.m_val && m_quality == other.m_quality;
841 }
842
843 profile_count operator+ (const profile_count &other) const
844 {
845 if (other == zero ())
846 return *this;
847 if (*this == zero ())
848 return other;
849 if (!initialized_p () || !other.initialized_p ())
850 return uninitialized ();
851
852 profile_count ret;
853 gcc_checking_assert (compatible_p (other))((void)(!(compatible_p (other)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 853, __FUNCTION__), 0 : 0))
;
854 ret.m_val = m_val + other.m_val;
855 ret.m_quality = MIN (m_quality, other.m_quality)((m_quality) < (other.m_quality) ? (m_quality) : (other.m_quality
))
;
856 return ret;
857 }
858
859 profile_count &operator+= (const profile_count &other)
860 {
861 if (other == zero ())
862 return *this;
863 if (*this == zero ())
864 {
865 *this = other;
866 return *this;
867 }
868 if (!initialized_p () || !other.initialized_p ())
869 return *this = uninitialized ();
870 else
871 {
872 gcc_checking_assert (compatible_p (other))((void)(!(compatible_p (other)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 872, __FUNCTION__), 0 : 0))
;
873 m_val += other.m_val;
874 m_quality = MIN (m_quality, other.m_quality)((m_quality) < (other.m_quality) ? (m_quality) : (other.m_quality
))
;
875 }
876 return *this;
877 }
878
879 profile_count operator- (const profile_count &other) const
880 {
881 if (*this == zero () || other == zero ())
882 return *this;
883 if (!initialized_p () || !other.initialized_p ())
884 return uninitialized ();
885 gcc_checking_assert (compatible_p (other))((void)(!(compatible_p (other)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 885, __FUNCTION__), 0 : 0))
;
886 profile_count ret;
887 ret.m_val = m_val >= other.m_val ? m_val - other.m_val : 0;
888 ret.m_quality = MIN (m_quality, other.m_quality)((m_quality) < (other.m_quality) ? (m_quality) : (other.m_quality
))
;
889 return ret;
890 }
891
892 profile_count &operator-= (const profile_count &other)
893 {
894 if (*this == zero () || other == zero ())
895 return *this;
896 if (!initialized_p () || !other.initialized_p ())
897 return *this = uninitialized ();
898 else
899 {
900 gcc_checking_assert (compatible_p (other))((void)(!(compatible_p (other)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 900, __FUNCTION__), 0 : 0))
;
901 m_val = m_val >= other.m_val ? m_val - other.m_val: 0;
902 m_quality = MIN (m_quality, other.m_quality)((m_quality) < (other.m_quality) ? (m_quality) : (other.m_quality
))
;
903 }
904 return *this;
905 }
906
907 /* Return false if profile_count is bogus. */
908 bool verify () const
909 {
910 gcc_checking_assert (m_quality != UNINITIALIZED_PROFILE)((void)(!(m_quality != UNINITIALIZED_PROFILE) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 910, __FUNCTION__), 0 : 0))
;
911 return m_val != uninitialized_count || m_quality == GUESSED_LOCAL;
912 }
913
914 /* Comparisons are three-state and conservative. False is returned if
915 the inequality cannot be decided. */
916 bool operator< (const profile_count &other) const
917 {
918 if (!initialized_p () || !other.initialized_p ())
919 return false;
920 if (*this == zero ())
921 return !(other == zero ());
922 if (other == zero ())
923 return false;
924 gcc_checking_assert (compatible_p (other))((void)(!(compatible_p (other)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 924, __FUNCTION__), 0 : 0))
;
925 return m_val < other.m_val;
926 }
927
928 bool operator> (const profile_count &other) const
929 {
930 if (!initialized_p () || !other.initialized_p ())
931 return false;
932 if (*this == zero ())
933 return false;
934 if (other == zero ())
935 return !(*this == zero ());
936 gcc_checking_assert (compatible_p (other))((void)(!(compatible_p (other)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 936, __FUNCTION__), 0 : 0))
;
937 return initialized_p () && other.initialized_p () && m_val > other.m_val;
938 }
939
940 bool operator< (const gcov_type other) const
941 {
942 gcc_checking_assert (ipa_p ())((void)(!(ipa_p ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 942, __FUNCTION__), 0 : 0))
;
943 gcc_checking_assert (other >= 0)((void)(!(other >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 943, __FUNCTION__), 0 : 0))
;
944 return ipa ().initialized_p () && ipa ().m_val < (uint64_t) other;
945 }
946
947 bool operator> (const gcov_type other) const
948 {
949 gcc_checking_assert (ipa_p ())((void)(!(ipa_p ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 949, __FUNCTION__), 0 : 0))
;
950 gcc_checking_assert (other >= 0)((void)(!(other >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 950, __FUNCTION__), 0 : 0))
;
951 return ipa ().initialized_p () && ipa ().m_val > (uint64_t) other;
952 }
953
954 bool operator<= (const profile_count &other) const
955 {
956 if (!initialized_p () || !other.initialized_p ())
957 return false;
958 if (*this == zero ())
959 return true;
960 if (other == zero ())
961 return (*this == zero ());
962 gcc_checking_assert (compatible_p (other))((void)(!(compatible_p (other)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 962, __FUNCTION__), 0 : 0))
;
963 return m_val <= other.m_val;
964 }
965
966 bool operator>= (const profile_count &other) const
967 {
968 if (!initialized_p () || !other.initialized_p ())
969 return false;
970 if (other == zero ())
971 return true;
972 if (*this == zero ())
973 return (other == zero ());
974 gcc_checking_assert (compatible_p (other))((void)(!(compatible_p (other)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 974, __FUNCTION__), 0 : 0))
;
975 return m_val >= other.m_val;
976 }
977
978 bool operator<= (const gcov_type other) const
979 {
980 gcc_checking_assert (ipa_p ())((void)(!(ipa_p ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 980, __FUNCTION__), 0 : 0))
;
981 gcc_checking_assert (other >= 0)((void)(!(other >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 981, __FUNCTION__), 0 : 0))
;
982 return ipa ().initialized_p () && ipa ().m_val <= (uint64_t) other;
983 }
984
985 bool operator>= (const gcov_type other) const
986 {
987 gcc_checking_assert (ipa_p ())((void)(!(ipa_p ()) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 987, __FUNCTION__), 0 : 0))
;
988 gcc_checking_assert (other >= 0)((void)(!(other >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/profile-count.h"
, 988, __FUNCTION__), 0 : 0))
;
989 return ipa ().initialized_p () && ipa ().m_val >= (uint64_t) other;
990 }
991
992 /* Return true when value is not zero and can be used for scaling.
993 This is different from *this > 0 because that requires counter to
994 be IPA. */
995 bool nonzero_p () const
996 {
997 return initialized_p () && m_val != 0;
998 }
999
1000 /* Make counter forcibly nonzero. */
1001 profile_count force_nonzero () const
1002 {
1003 if (!initialized_p ())
1004 return *this;
1005 profile_count ret = *this;
1006 if (ret.m_val == 0)
1007 {
1008 ret.m_val = 1;
1009 ret.m_quality = MIN (m_quality, ADJUSTED)((m_quality) < (ADJUSTED) ? (m_quality) : (ADJUSTED));
1010 }
1011 return ret;
1012 }
1013
1014 profile_count max (profile_count other) const
1015 {
1016 profile_count val = *this;
1017
1018 /* Always prefer nonzero IPA counts over local counts. */
1019 if (ipa ().nonzero_p () || other.ipa ().nonzero_p ())
1020 {
1021 val = ipa ();