Bug Summary

File: build/gcc/reload1.c
Warning: line 3472, column 31
Assigned value is garbage or undefined

Annotated Source Code


clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name reload1.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-EOVSTg.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c
/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "ira.h"
#include "recog.h"

#include "rtl-error.h"
#include "expr.h"
#include "addresses.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "reload.h"
#include "except.h"
#include "dumpfile.h"
#include "rtl-iter.h"
#include "function-abi.h"

/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   Reload regs are allocated locally for every instruction that needs
   reloads.  When there are pseudos which are allocated to a register that
   has been chosen as a reload reg, such pseudos must be ``spilled''.
   This means that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
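
/* As a purely illustrative sketch (not taken from this file or any
   particular target): suppose pseudo 100 received no hard register and
   lives in a stack slot.  An insn such as

       (set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 1)))

   is fixed up by copying the value through a reload register, say hard
   register 0:

       (set (reg:SI 0) (mem:SI (plus:SI (reg:SI 6) (const_int -4))))
       (set (reg:SI 0) (plus:SI (reg:SI 0) (const_int 1)))
       (set (mem:SI (plus:SI (reg:SI 6) (const_int -4))) (reg:SI 0))

   where the first and last insns are the input and output reloads this
   pass emits, and the stack address is hypothetical.  */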

struct target_reload default_target_reload;
#if SWITCHABLE_TARGET
struct target_reload *this_target_reload = &default_target_reload;
#endif

#define spill_indirect_levels \
  (this_target_reload->x_spill_indirect_levels)

/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static regset_head reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Widest mode in which each pseudo reg is referred to (via subreg).  */
static machine_mode *reg_max_ref_mode;

/* Vector to remember old contents of reg_renumber before spilling.  */
static short *reg_old_renumber;

/* During reload_as_needed, element N contains the last pseudo regno reloaded
   into hard register N.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
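/* (An "inheritance" reuses a value still sitting in a reload register
   from an earlier insn instead of reloading it again; see
   choose_reload_regs, declared below.)  */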

/* During reload_as_needed, element N contains the insn for which
   hard register N was last used.  Its contents are significant only
   when reg_reloaded_valid is set for this register.  */
static rtx_insn *reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
static HARD_REG_SET reg_reloaded_valid;
/* Indicate if the register was dead at the end of the reload.
   This is only valid if reg_reloaded_contents is set and valid.  */
static HARD_REG_SET reg_reloaded_dead;

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx_insn *spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This is the register that was stored with spill_reg_store.  This is a
   copy of reload_out / reload_out_reg when the value was stored; if
   reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg.  */
static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.

   ?!?  This is no longer accurate.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that can't be used as spill registers for
   the currently processed insn.  These are the hard registers which are live
   during the insn, but not allocated to pseudos, as well as fixed
   registers.  */
static HARD_REG_SET bad_spill_regs;

/* These are the hard registers that can't be used as spill register for any
   insn.  This includes registers used for user variables and registers that
   we can't eliminate.  A register that appears in this set also can't be used
   to retry register allocation.  */
static HARD_REG_SET bad_spill_regs_global;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `n_spills' is the number of
   elements that are actually valid; new ones are added at the end.

   Both spill_regs and spill_reg_order are used on two occasions:
   once during find_reload_regs, where they keep track of the spill registers
   for a single insn, but also during reload_as_needed where they show all
   the registers ever used by reload.  For the latter case, the information
   is calculated during finish_spills.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* This vector of reg sets indicates, for each pseudo, which hard registers
   may not be used for retrying global allocation because the register was
   formerly spilled from one of them.  If we allowed reallocating a pseudo to
   a register that it was already allocated to, reload might not
   terminate.  */
static HARD_REG_SET *pseudo_previous_regs;

/* This vector of reg sets indicates, for each pseudo, which hard
   registers may not be used for retrying global allocation because they
   are used as spill registers during one of the insns in which the
   pseudo is live.  */
static HARD_REG_SET *pseudo_forbidden_regs;

/* All hard regs that have been used as spill registers for any insn are
   marked in this set.  */
static HARD_REG_SET used_spill_regs;

/* Index of last register assigned as a spill register.  We allocate in
   a round-robin fashion.  */
static int last_spill_reg;

/* Record the stack slot for each spilled hard register.  */
static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */
static poly_uint64_pod spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Record which pseudos needed to be spilled.  */
static regset_head spilled_pseudos;

/* Record which pseudos changed their allocation in finish_spills.  */
static regset_head changed_allocation_pseudos;

/* Used for communication between order_regs_for_reload and count_pseudo.
   Used to avoid counting one pseudo twice.  */
static regset_head pseudos_counted;

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */
int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */
int reload_in_progress = 0;

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */
static struct obstack reload_obstack;

/* Points to the beginning of the reload_obstack.  All insn_chain structures
   are allocated first.  */
static char *reload_startobj;

/* The point after all insn_chain structures.  Used to quickly deallocate
   memory allocated in copy_reloads during calculate_needs_all_insns.  */
static char *reload_firstobj;

/* This points before all local rtl generated by register elimination.
   Used to quickly free all memory after processing one insn.  */
static char *reload_insn_firstobj;

/* List of insn_chain instructions, one for every insn that reload needs to
   examine.  */
class insn_chain *reload_insn_chain;

/* TRUE if we potentially left dead insns in the insn stream and want to
   run DCE immediately after reload, FALSE otherwise.  */
static bool need_dce;

/* List of all insns needing reloads.  */
static class insn_chain *insns_need_reload;

/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  poly_int64_pod initial_offset; /* Initial difference between values.  */
  int can_eliminate;		/* Nonzero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value returned by TARGET_CAN_ELIMINATE
				   target hook in previous scan over insns
				   made by reload.  */
  poly_int64_pod offset;	/* Current offset between the two regs.  */
  poly_int64_pod previous_offset; /* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
};

static struct elim_table *reg_eliminate = 0;

/* This is an intermediate structure to initialize the table.  It has
   exactly the members provided by ELIMINABLE_REGS.  */
static const struct elim_table_1
{
  const int from;
  const int to;
} reg_eliminate_1[] =

  ELIMINABLE_REGS;

#define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
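/* On the x86-64 configuration this report was generated for,
   ELIMINABLE_REGS expands to {{ 16, 7}, { 16, 6}, { 19, 7}, { 19, 6}}:
   the arg pointer (16) and the soft frame pointer (19) may each be
   eliminated in favor of the stack pointer (7) or the hard frame
   pointer (6), making NUM_ELIMINABLE_REGS 4.  */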

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If nonzero, we use a new copy of each
   replacement result in any insns encountered.  */
int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;
/* And the number of registers that are equivalent to a constant that
   can be eliminated to frame_pointer / arg_pointer + constant.  */
static int num_eliminable_invariants;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the difference of the
   number of the label and the first label number.  We can't offset the
   pointer itself as this can cause problems on machines with segmented
   memory.  The first table is an array of flags that records whether we
   have yet encountered a label and the second table is an array of arrays,
   one entry in the latter array for each elimination.  */

static int first_label_num;
static char *offsets_known_at;
static poly_int64_pod (*offsets_at)[NUM_ELIMINABLE_REGS];
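/* For example, the offset recorded for elimination I at label L is
   offsets_at[CODE_LABEL_NUMBER (L) - first_label_num][I], and it is
   meaningful only once the flag
   offsets_known_at[CODE_LABEL_NUMBER (L) - first_label_num] is set.  */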

vec<reg_equivs_t, va_gc> *reg_equivs;

/* Stack of addresses where an rtx has been changed.  We can undo the
   changes by popping items off the stack and restoring the original
   value at each location.

   We use this simplistic undo capability rather than copy_rtx as copy_rtx
   will not make a deep copy of a normally sharable rtx, such as
   (const (plus (symbol_ref) (const_int))).  If such an expression appears
   as R1 in gen_reload_chain_without_interm_reg_p, then a shared
   rtx expression would be changed.  See PR 42431.  */

typedef rtx *rtx_p;
static vec<rtx_p> substitute_stack;
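/* A minimal sketch of the undo, assuming the caller still knows the
   original value it substituted away (original_value here is a
   hypothetical stand-in):

       while (!substitute_stack.is_empty ())
	 {
	   rtx *where = substitute_stack.pop ();
	   *where = original_value;
	 }
 */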

/* Number of labels in the current function.  */

static int num_labels;

static void replace_pseudos_in (rtx *, machine_mode, rtx);
static void maybe_fix_stack_asms (void);
static void copy_reloads (class insn_chain *);
static void calculate_needs_all_insns (int);
static int find_reg (class insn_chain *, int);
static void find_reload_regs (class insn_chain *);
static void select_reload_regs (void);
static void delete_caller_save_insns (void);

static void spill_failure (rtx_insn *, enum reg_class);
static void count_spilled_pseudo (int, int, int);
static void delete_dead_insn (rtx_insn *);
static void alter_reg (int, int, bool);
static void set_label_offsets (rtx, rtx_insn *, int);
static void check_eliminable_occurrences (rtx);
static void elimination_effects (rtx, machine_mode);
static rtx eliminate_regs_1 (rtx, machine_mode, rtx, bool, bool);
static int eliminate_regs_in_insn (rtx_insn *, int);
static void update_eliminable_offsets (void);
static void mark_not_eliminable (rtx, const_rtx, void *);
static void set_initial_elim_offsets (void);
static bool verify_initial_elim_offsets (void);
static void set_initial_label_offsets (void);
static void set_offsets_for_label (rtx_insn *);
static void init_eliminable_invariants (rtx_insn *, bool);
static void init_elim_table (void);
static void free_reg_equiv (void);
static void update_eliminables (HARD_REG_SET *);
static bool update_eliminables_and_spill (void);
static void elimination_costs_in_insn (rtx_insn *);
static void spill_hard_reg (unsigned int, int);
static int finish_spills (int);
static void scan_paradoxical_subregs (rtx);
static void count_pseudo (int);
static void order_regs_for_reload (class insn_chain *);
static void reload_as_needed (int);
static void forget_old_reloads_1 (rtx, const_rtx, void *);
static void forget_marked_reloads (regset);
static int reload_reg_class_lower (const void *, const void *);
static void mark_reload_reg_in_use (unsigned int, int, enum reload_type,
				    machine_mode);
static void clear_reload_reg_in_use (unsigned int, int, enum reload_type,
				     machine_mode);
static int reload_reg_free_p (unsigned int, int, enum reload_type);
static int reload_reg_free_for_value_p (int, int, int, enum reload_type,
					rtx, rtx, int, int);
static int free_for_value_p (int, machine_mode, int, enum reload_type,
			     rtx, rtx, int, int);
static int allocate_reload_reg (class insn_chain *, int, int);
static int conflicts_with_override (rtx);
static void failed_reload (rtx_insn *, int);
static int set_reload_reg (int, int);
static void choose_reload_regs_init (class insn_chain *, rtx *);
static void choose_reload_regs (class insn_chain *);
static void emit_input_reload_insns (class insn_chain *, struct reload *,
				     rtx, int);
static void emit_output_reload_insns (class insn_chain *, struct reload *,
				      int);
static void do_input_reload (class insn_chain *, struct reload *, int);
static void do_output_reload (class insn_chain *, struct reload *, int);
static void emit_reload_insns (class insn_chain *);
static void delete_output_reload (rtx_insn *, int, int, rtx);
static void delete_address_reloads (rtx_insn *, rtx_insn *);
static void delete_address_reloads_1 (rtx_insn *, rtx, rtx_insn *);
static void inc_for_reload (rtx, rtx, rtx, poly_int64);
static void substitute (rtx *, const_rtx, rtx);
static bool gen_reload_chain_without_interm_reg_p (int, int);
static int reloads_conflict (int, int);
static rtx_insn *gen_reload (rtx, rtx, int, enum reload_type);
static rtx_insn *emit_insn_if_valid_for_reload (rtx);

/* Initialize the reload pass.  This is called at the beginning of compilation
   and may be called again if the target is reinitialized.  */

void
init_reload (void)
{
  int i;

  /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
     Set spill_indirect_levels to the number of levels such addressing is
     permitted, zero if it is not permitted at all.  */

  rtx tem
    = gen_rtx_MEM (Pmode,
		   gen_rtx_PLUS (Pmode,
				 gen_rtx_REG (Pmode,
					      LAST_VIRTUAL_REGISTER + 1),
				 gen_int_mode (4, Pmode)));
  spill_indirect_levels = 0;

  while (memory_address_p (QImode, tem))
    {
      spill_indirect_levels++;
      tem = gen_rtx_MEM (Pmode, tem);
    }
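  /* At this point spill_indirect_levels counts the levels of
     (mem (mem ... (reg))) addressing the target accepts, and TEM is
     nested one level deeper than the deepest accepted address.  */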

  /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)).  */

  tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
  indirect_symref_ok = memory_address_p (QImode, tem);

  /* See if reg+reg is a valid (and offsettable) address.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      tem = gen_rtx_PLUS (Pmode,
			  gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
			  gen_rtx_REG (Pmode, i));

      /* This way, we make sure that reg+reg is an offsettable address.  */
      tem = plus_constant (Pmode, tem, 4);

      for (int mode = 0; mode < MAX_MACHINE_MODE; mode++)
	if (!double_reg_address_ok[mode]
	    && memory_address_p ((enum machine_mode)mode, tem))
	  double_reg_address_ok[mode] = 1;
    }

  /* Initialize obstack for our rtl allocation.  */
  if (reload_startobj == NULL)
    {
      gcc_obstack_init (&reload_obstack);
      reload_startobj = XOBNEWVAR (&reload_obstack, char, 0);
    }

  INIT_REG_SET (&spilled_pseudos);
  INIT_REG_SET (&changed_allocation_pseudos);
  INIT_REG_SET (&pseudos_counted);
}

/* List of insn chains that are currently unused.  */
static class insn_chain *unused_insn_chains = 0;

/* Allocate an empty insn_chain structure.  */
class insn_chain *
new_insn_chain (void)
{
  class insn_chain *c;

  if (unused_insn_chains == 0)
    {
      c = XOBNEW (&reload_obstack, class insn_chain);
      INIT_REG_SET (&c->live_throughout);
      INIT_REG_SET (&c->dead_or_set);
    }
  else
    {
      c = unused_insn_chains;
      unused_insn_chains = c->next;
    }
  c->is_caller_save_insn = 0;
  c->need_operand_change = 0;
  c->need_reload = 0;
  c->need_elim = 0;
  return c;
}

/* Small utility function to set all regs in hard reg set TO which are
   allocated to pseudos in regset FROM.  */

void
compute_use_by_pseudos (HARD_REG_SET *to, regset from)
{
  unsigned int regno;
  reg_set_iterator rsi;

  EXECUTE_IF_SET_IN_REG_SET (from, FIRST_PSEUDO_REGISTER, regno, rsi)
    {
      int r = reg_renumber[regno];

      if (r < 0)
	{
	  /* reload_combine uses the information from DF_LIVE_IN,
	     which might still contain registers that have not
	     actually been allocated since they have an
	     equivalence.  */
	  gcc_assert (ira_conflicts_p || reload_completed);
	}
      else
	add_to_hard_reg_set (to, PSEUDO_REGNO_MODE (regno), r);
    }
}
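/* For example (as order_regs_for_reload does), the hard registers
   backing every pseudo live through CHAIN can be excluded from spilling
   roughly like so:

       compute_use_by_pseudos (&bad_spill_regs, &chain->live_throughout);
 */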

/* Replace all pseudos found in LOC with their corresponding
   equivalences.  */

static void
replace_pseudos_in (rtx *loc, machine_mode mem_mode, rtx usage)
{
  rtx x = *loc;
  enum rtx_code code;
  const char *fmt;
  int i, j;

  if (! x)
    return;

  code = GET_CODE (x);
  if (code == REG)
    {
      unsigned int regno = REGNO (x);

      if (regno < FIRST_PSEUDO_REGISTER)
	return;

      x = eliminate_regs_1 (x, mem_mode, usage, true, false);
      if (x != *loc)
	{
	  *loc = x;
	  replace_pseudos_in (loc, mem_mode, usage);
	  return;
	}

      if (reg_equiv_constant (regno))
	*loc = reg_equiv_constant (regno);
      else if (reg_equiv_invariant (regno))
	*loc = reg_equiv_invariant (regno);
      else if (reg_equiv_mem (regno))
	*loc = reg_equiv_mem (regno);
      else if (reg_equiv_address (regno))
	*loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address (regno));
      else
	{
	  gcc_assert (!REG_P (regno_reg_rtx[regno])
		      || REGNO (regno_reg_rtx[regno]) != regno);
	  *loc = regno_reg_rtx[regno];
	}

      return;
    }
  else if (code == MEM)
    {
      replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
      return;
    }

  /* Process each of our operands recursively.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    if (*fmt == 'e')
      replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
    else if (*fmt == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
}

/* Determine if the current function has an exception receiver block
   that reaches the exit block via non-exceptional edges  */

static bool
has_nonexceptional_receiver (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  /* If we're not optimizing, then just err on the safe side.  */
  if (!optimize)
    return true;

  /* First determine which blocks can reach exit via normal paths.  */
  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);

  FOR_EACH_BB_FN (bb, cfun)
    bb->flags &= ~BB_REACHABLE;

  /* Place the exit block on our worklist.  */
  EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_REACHABLE;
  *tos++ = EXIT_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate: find everything reachable from what we've already seen.  */
  while (tos != worklist)
    {
      bb = *--tos;

      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!(e->flags & EDGE_ABNORMAL))
	  {
	    basic_block src = e->src;

	    if (!(src->flags & BB_REACHABLE))
	      {
		src->flags |= BB_REACHABLE;
		*tos++ = src;
	      }
	  }
    }
  free (worklist);

  /* Now see if there's a reachable block with an exceptional incoming
     edge.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (bb->flags & BB_REACHABLE && bb_has_abnormal_pred (bb))
      return true;

  /* No exceptional block reached exit unexceptionally.  */
  return false;
}

/* Grow (or allocate) the REG_EQUIVS array from its current size (which may be
   zero elements) to MAX_REG_NUM elements.

   Initialize all new fields to NULL and update REG_EQUIVS_SIZE.  */
void
grow_reg_equivs (void)
{
  int old_size = vec_safe_length (reg_equivs);
  int max_regno = max_reg_num ();
  int i;
  reg_equivs_t ze;

  memset (&ze, 0, sizeof (reg_equivs_t));
  vec_safe_reserve (reg_equivs, max_regno);
  for (i = old_size; i < max_regno; i++)
    reg_equivs->quick_insert (i, ze);
}


/* Global variables used by reload and its subroutines.  */

/* The current basic block while in calculate_elim_costs_all_insns.  */
static basic_block elim_bb;

/* Set during calculate_needs if an insn needs register elimination.  */
static int something_needs_elimination;
/* Set during calculate_needs if an insn needs an operand changed.  */
static int something_needs_operands_changed;
/* Set by alter_regs if we spilled a register to the stack.  */
static bool something_was_spilled;

/* Nonzero means we couldn't get enough spill regs.  */
static int failure;

/* Temporary array of pseudo-register number.  */
static int *temp_pseudo_reg_arr;

/* If a pseudo has no hard reg, delete the insns that made the equivalence.
   If that insn didn't set the register (i.e., it copied the register to
   memory), just delete that insn instead of the equivalencing insn plus
   anything now dead.  If we call delete_dead_insn on that insn, we may
   delete the insn that actually sets the register if the register dies
   there and that is incorrect.  */
static void
remove_init_insns ()
{
  for (int i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      if (reg_renumber[i] < 0 && reg_equiv_init (i) != 0)
	{
	  rtx list;
	  for (list = reg_equiv_init (i); list; list = XEXP (list, 1))
	    {
	      rtx_insn *equiv_insn = as_a <rtx_insn *> (XEXP (list, 0));

	      /* If we already deleted the insn or if it may trap, we can't
		 delete it.  The latter case shouldn't happen, but can
		 if an insn has a variable address, gets a REG_EH_REGION
		 note added to it, and then gets converted into a load
		 from a constant address.  */
	      if (NOTE_P (equiv_insn)
		  || can_throw_internal (equiv_insn))
		;
	      else if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
		delete_dead_insn (equiv_insn);
	      else
		SET_INSN_DELETED (equiv_insn);
	    }
	}
    }
}

/* Return true if remove_init_insns will delete INSN.  */
static bool
will_delete_init_insn_p (rtx_insn *insn)
{
  rtx set = single_set (insn);
  if (!set || !REG_P (SET_DEST (set)))
    return false;
  unsigned regno = REGNO (SET_DEST (set));

  if (can_throw_internal (insn))
    return false;

  if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
    return false;

  for (rtx list = reg_equiv_init (regno); list; list = XEXP (list, 1))
    {
      rtx equiv_insn = XEXP (list, 0);
      if (equiv_insn == insn)
	return true;
    }
  return false;
}

/* Main entry point for the reload pass.

   FIRST is the first insn of the function being compiled.

   GLOBAL nonzero means we were called from global_alloc
   and should attempt to reallocate any pseudoregs that we
   displace from hard regs we will use for reloads.
   If GLOBAL is zero, we do not have enough information to do that,
   so any pseudo reg that is spilled must go to the stack.

   Return value is TRUE if reload likely left dead insns in the
   stream and a DCE pass should be run to eliminate them.  Else the
   return value is FALSE.  */

bool
reload (rtx_insn *first, int global)
{
  int i, n;
  rtx_insn *insn;
  struct elim_table *ep;
  basic_block bb;
  bool inserted;

  /* Make sure even insns with volatile mem refs are recognizable.  */
  init_recog ();

  failure = 0;

  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);

  /* Make sure that the last insn in the chain
     is not something that needs reloading.  */
  emit_note (NOTE_INSN_DELETED);

  /* Enable find_equiv_reg to distinguish insns made by reload.  */
  reload_first_uid = get_max_uid ();

  /* Initialize the secondary memory table.  */
  clear_secondary_mem ();

  /* We don't have a stack slot for any spill reg yet.  */
  memset (spill_stack_slot, 0, sizeof spill_stack_slot);
  memset (spill_stack_slot_width, 0, sizeof spill_stack_slot_width);

  /* Initialize the save area information for caller-save, in case some
     are needed.  */
  init_save_areas ();

  /* Compute which hard registers are now in use
     as homes for pseudo registers.
     This is done here rather than (eg) in global_alloc
     because this point is reached even if not optimizing.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    mark_home_live (i);

  /* A function that has a nonlocal label that can reach the exit
     block via non-exceptional paths must save all call-saved
     registers.  */
  if (cfun->has_nonlocal_label
      && has_nonexceptional_receiver ())
    crtl->saves_all_registers = 1;

  if (crtl->saves_all_registers)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (! crtl->abi->clobbers_full_reg_p (i)
	  && ! fixed_regs[i]
	  && ! LOCAL_REGNO (i))
	df_set_regs_ever_live (i, true);

  /* Find all the pseudo registers that didn't get hard regs
     but do have known equivalent constants or memory slots.
     These include parameters (known equivalent to parameter slots)
     and cse'd or loop-moved constant memory addresses.

     Record constant equivalents in reg_equiv_constant
     so they will be substituted by find_reloads.
     Record memory equivalents in reg_mem_equiv so they can
     be substituted eventually by altering the REG-rtx's.  */

  grow_reg_equivs ();
  reg_old_renumber = XCNEWVEC (short, max_regno);
  memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
  pseudo_forbidden_regs = XNEWVEC (HARD_REG_SET, max_regno);
  pseudo_previous_regs = XCNEWVEC (HARD_REG_SET, max_regno);

  CLEAR_HARD_REG_SET (bad_spill_regs_global);

  init_eliminable_invariants (first, true);
  init_elim_table ();

  /* Alter each pseudo-reg rtx to contain its hard reg number.  Assign
     stack slots to the pseudos that lack hard regs or equivalents.
     Do not touch virtual registers.  */

  temp_pseudo_reg_arr = XNEWVEC (int, max_regno - LAST_VIRTUAL_REGISTER - 1);
  for (n = 0, i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
    temp_pseudo_reg_arr[n++] = i;

  if (ira_conflicts_p)
    /* Ask IRA to order pseudo-registers for better stack slot
       sharing.  */
    ira_sort_regnos_for_alter_reg (temp_pseudo_reg_arr, n, reg_max_ref_mode);

  for (i = 0; i < n; i++)
    alter_reg (temp_pseudo_reg_arr[i], -1, false);

  /* If we have some registers we think can be eliminated, scan all insns to
     see if there is an insn that sets one of these registers to something
     other than itself plus a constant.  If so, the register cannot be
     eliminated.  Doing this scan here eliminates an extra pass through the
     main reload loop in the most common case where register elimination
     cannot be done.  */
  for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_pattern_stores (PATTERN (insn), mark_not_eliminable, NULL);

  maybe_fix_stack_asms ();

  insns_need_reload = 0;
  something_needs_elimination = 0;

  /* Initialize to -1, which means take the first spill register.  */
  last_spill_reg = -1;

  /* Spill any hard regs that we know we can't eliminate.  */
  CLEAR_HARD_REG_SET (used_spill_regs);
  /* There can be multiple ways to eliminate a register;
     they should be listed adjacently.
     Elimination for any register fails only if all possible ways fail.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; )
    {
      int from = ep->from;
      int can_eliminate = 0;
      do
	{
	  can_eliminate |= ep->can_eliminate;
	  ep++;
	}
      while (ep < &reg_eliminate[NUM_ELIMINABLE_REGS] && ep->from == from);
      if (! can_eliminate)
	spill_hard_reg (from, 1);
    }

  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER && frame_pointer_needed)
    spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);

  finish_spills (global);

  /* From now on, we may need to generate moves differently.  We may also
     allow modifications of insns which cause them to not be recognized.
     Any such modifications will be cleaned up during reload itself.  */
  reload_in_progress = 1;

  /* This loop scans the entire function each go-round
     and repeats until one repetition spills no additional hard regs.  */
  for (;;)
    {
      int something_changed;
      poly_int64 starting_frame_size;

      starting_frame_size = get_frame_size ();
      something_was_spilled = false;

      set_initial_elim_offsets ();
      set_initial_label_offsets ();

      /* For each pseudo register that has an equivalent location defined,
	 try to eliminate any eliminable registers (such as the frame pointer)
	 assuming initial offsets for the replacement register, which
	 is the normal case.

	 If the resulting location is directly addressable, substitute
	 the MEM we just got directly for the old REG.

	 If it is not addressable but is a constant or the sum of a hard reg
	 and constant, it is probably not addressable because the constant is
	 out of range, in that case record the address; we will generate
	 hairy code to compute the address in a register each time it is
	 needed.  Similarly if it is a hard register, but one that is not
	 valid as an address register.

	 If the location is not addressable, but does not have one of the
	 above forms, assign a stack slot.  We have to do this to avoid the
	 potential of producing lots of reloads if, e.g., a location involves
	 a pseudo that didn't get a hard register and has an equivalent memory
	 location that also involves a pseudo that didn't get a hard register.

	 Perhaps at some point we will improve reload_when_needed handling
	 so this problem goes away.  But that's very hairy.  */

      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_renumber[i] < 0 && reg_equiv_memory_loc (i))
	  {
	    rtx x = eliminate_regs (reg_equiv_memory_loc (i), VOIDmode,
				    NULL_RTX);

	    if (strict_memory_address_addr_space_p
		  (GET_MODE (regno_reg_rtx[i]), XEXP (x, 0),
		   MEM_ADDR_SPACE (x)))
	      reg_equiv_mem (i) = x, reg_equiv_address (i) = 0;
	    else if (CONSTANT_P (XEXP (x, 0))
		     || (REG_P (XEXP (x, 0))
			 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
		     || (GET_CODE (XEXP (x, 0)) == PLUS
			 && REG_P (XEXP (XEXP (x, 0), 0))
			 && (REGNO (XEXP (XEXP (x, 0), 0))
			     < FIRST_PSEUDO_REGISTER)
			 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
	      reg_equiv_address (i) = XEXP (x, 0), reg_equiv_mem (i) = 0;
	    else
	      {
		/* Make a new stack slot.  Then indicate that something
		   changed so we go back and recompute offsets for
		   eliminable registers because the allocation of memory
		   below might change some offset.  reg_equiv_{mem,address}
		   will be set up for this pseudo on the next pass around
		   the loop.  */
		reg_equiv_memory_loc (i) = 0;
		reg_equiv_init (i) = 0;
		alter_reg (i, -1, true);
	      }
	  }

      if (caller_save_needed)
	setup_save_areas ();

      if (maybe_ne (starting_frame_size, 0) && crtl->stack_alignment_needed)
	{
	  /* If we have a stack frame, we must align it now.  The
	     stack size may be a part of the offset computation for
	     register elimination.  So if this changes the stack size,
	     then repeat the elimination bookkeeping.  We don't
	     realign when there is no stack, as that will cause a
	     stack frame when none is needed should
	     TARGET_STARTING_FRAME_OFFSET not be already aligned to
	     STACK_BOUNDARY.  */
	  assign_stack_local (BLKmode, 0, crtl->stack_alignment_needed);
	}
      /* If we allocated another stack slot, redo elimination bookkeeping.  */
      if (something_was_spilled
	  || maybe_ne (starting_frame_size, get_frame_size ()))
	{
	  if (update_eliminables_and_spill ())
	    finish_spills (0);
	  continue;
	}

      if (caller_save_needed)
	{
	  save_call_clobbered_regs ();
	  /* That might have allocated new insn_chain structures.  */
	  reload_firstobj = XOBNEWVAR (&reload_obstack, char, 0);
	}

      calculate_needs_all_insns (global);

      if (! ira_conflicts_p)
	/* Don't do it for IRA.  We need this info because we don't
	   change live_throughout and dead_or_set for chains when IRA
	   is used.  */
	CLEAR_REG_SET (&spilled_pseudos);

      something_changed = 0;

      /* If we allocated any new memory locations, make another pass
	 since it might have changed elimination offsets.  */
      if (something_was_spilled
	  || maybe_ne (starting_frame_size, get_frame_size ()))
	something_changed = 1;

      /* Even if the frame size remained the same, we might still have
	 changed elimination offsets, e.g. if find_reloads called
	 force_const_mem requiring the back end to allocate a constant
	 pool base register that needs to be saved on the stack.  */
      else if (!verify_initial_elim_offsets ())
	something_changed = 1;

      if (update_eliminables_and_spill ())
	{
	  finish_spills (0);
	  something_changed = 1;
	}
      else
	{
	  select_reload_regs ();
	  if (failure)
	    goto failed;
	  if (insns_need_reload)
	    something_changed |= finish_spills (global);
	}

      if (! something_changed)
	break;

      if (caller_save_needed)
	delete_caller_save_insns ();

      obstack_free (&reload_obstack, reload_firstobj);
    }

  /* If global-alloc was run, notify it of any register eliminations we have
     done.  */
  if (global)
    for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
      if (ep->can_eliminate)
	mark_elimination (ep->from, ep->to);

  remove_init_insns ();

  /* Use the reload registers where necessary
     by generating move instructions to move the must-be-register
     values into or out of the reload registers.  */

  if (insns_need_reload != 0 || something_needs_elimination
      || something_needs_operands_changed)
    {
      poly_int64 old_frame_size = get_frame_size ();

      reload_as_needed (global);

      gcc_assert (known_eq (old_frame_size, get_frame_size ()));

      gcc_assert (verify_initial_elim_offsets ());
    }

  /* If we were able to eliminate the frame pointer, show that it is no
     longer live at the start of any basic block.  If it is live by
     virtue of being in a pseudo, that pseudo will be marked live
     and hence the frame pointer will be known to be live via that
     pseudo.  */

  if (! frame_pointer_needed)
    FOR_EACH_BB_FN (bb, cfun)
      bitmap_clear_bit (df_get_live_in (bb), HARD_FRAME_POINTER_REGNUM);

  /* Come here (with failure set nonzero) if we can't get enough spill
     regs.  */
 failed:

  CLEAR_REG_SET (&changed_allocation_pseudos);
  CLEAR_REG_SET (&spilled_pseudos);
  reload_in_progress = 0;

  /* Now eliminate all pseudo regs by modifying them into
     their equivalent memory references.
     The REG-rtx's for the pseudos are modified in place,
     so all insns that used to refer to them now refer to memory.

     For a reg that has a reg_equiv_address, all those insns
     were changed by reloading so that no insns refer to it any longer;
     but the DECL_RTL of a variable decl may refer to it,
     and if so this causes the debugging info to mention the variable.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      rtx addr = 0;

      if (reg_equiv_mem (i))
	addr = XEXP (reg_equiv_mem (i), 0);

      if (reg_equiv_address (i))
	addr = reg_equiv_address (i);

      if (addr)
	{
	  if (reg_renumber[i] < 0)
	    {
	      rtx reg = regno_reg_rtx[i];

	      REG_USERVAR_P (reg) = 0;
	      PUT_CODE (reg, MEM);
	      XEXP (reg, 0) = addr;
	      if (reg_equiv_memory_loc (i))
		MEM_COPY_ATTRIBUTES (reg, reg_equiv_memory_loc (i));
->code) != MEM) rtl_check_failed_flag ("MEM_READONLY_P", _rtx
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1102, __FUNCTION__); _rtx; })->unchanging), (__extension__
({ __typeof ((reg)) const _rtx = ((reg)); if (((enum rtx_code
) (_rtx)->code) != MEM) rtl_check_failed_flag ("MEM_KEEP_ALIAS_SET_P"
, _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1102, __FUNCTION__); _rtx; })->jump) = (__extension__ ({
__typeof (((*reg_equivs)[(i)].memory_loc)) const _rtx = (((*
reg_equivs)[(i)].memory_loc)); if (((enum rtx_code) (_rtx)->
code) != MEM) rtl_check_failed_flag ("MEM_KEEP_ALIAS_SET_P", _rtx
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1102, __FUNCTION__); _rtx; })->jump), (__extension__ ({ __typeof
((reg)) const _rtx = ((reg)); if (((enum rtx_code) (_rtx)->
code) != MEM) rtl_check_failed_flag ("MEM_POINTER", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1102, __FUNCTION__); _rtx; })->frame_related) = (__extension__
({ __typeof (((*reg_equivs)[(i)].memory_loc)) const _rtx = (
((*reg_equivs)[(i)].memory_loc)); if (((enum rtx_code) (_rtx)
->code) != MEM) rtl_check_failed_flag ("MEM_POINTER", _rtx
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1102, __FUNCTION__); _rtx; })->frame_related), (((reg)->
u.fld[1]).rt_mem) = ((((*reg_equivs)[(i)].memory_loc)->u.fld
[1]).rt_mem))
;
1103 else
1104 MEM_ATTRS (reg)(((reg)->u.fld[1]).rt_mem) = 0;
1105 MEM_NOTRAP_P (reg)(__extension__ ({ __typeof ((reg)) const _rtx = ((reg)); if (
((enum rtx_code) (_rtx)->code) != MEM) rtl_check_failed_flag
("MEM_NOTRAP_P", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1105, __FUNCTION__); _rtx; })->call)
= 1;
1106 }
1107 else if (reg_equiv_mem (i)(*reg_equivs)[(i)].mem)
1108 XEXP (reg_equiv_mem (i), 0)((((*reg_equivs)[(i)].mem)->u.fld[0]).rt_rtx) = addr;
1109 }
1110
1111 /* We don't want complex addressing modes in debug insns
1112 if simpler ones will do, so delegitimize equivalences
1113 in debug insns. */
1114 if (MAY_HAVE_DEBUG_BIND_INSNSglobal_options.x_flag_var_tracking_assignments && reg_renumber[i] < 0)
1115 {
1116 rtx reg = regno_reg_rtx[i];
1117 rtx equiv = 0;
1118 df_ref use, next;
1119
1120 if (reg_equiv_constant (i)(*reg_equivs)[(i)].constant)
1121 equiv = reg_equiv_constant (i)(*reg_equivs)[(i)].constant;
1122 else if (reg_equiv_invariant (i)(*reg_equivs)[(i)].invariant)
1123 equiv = reg_equiv_invariant (i)(*reg_equivs)[(i)].invariant;
1124 else if (reg && MEM_P (reg)(((enum rtx_code) (reg)->code) == MEM))
1125 equiv = targetm.delegitimize_address (reg);
1126 else if (reg && REG_P (reg)(((enum rtx_code) (reg)->code) == REG) && (int)REGNO (reg)(rhs_regno(reg)) != i)
1127 equiv = reg;
1128
1129 if (equiv == reg)
1130 continue;
1131
1132 for (use = DF_REG_USE_CHAIN (i)(df->use_regs[(i)]->reg_chain); use; use = next)
1133 {
1134 insn = DF_REF_INSN (use)((use)->base.insn_info->insn);
1135
1136 /* Make sure the next ref is for a different instruction,
1137 so that we're not affected by the rescan. */
1138 next = DF_REF_NEXT_REG (use)((use)->base.next_reg);
1139 while (next && DF_REF_INSN (next)((next)->base.insn_info->insn) == insn)
1140 next = DF_REF_NEXT_REG (next)((next)->base.next_reg);
1141
1142 if (DEBUG_BIND_INSN_P (insn)((((enum rtx_code) (insn)->code) == DEBUG_INSN) &&
(((enum rtx_code) (PATTERN (insn))->code) == VAR_LOCATION
))
)
1143 {
1144 if (!equiv)
1145 {
1146 INSN_VAR_LOCATION_LOC (insn)((((((__extension__ ({ __typeof (PATTERN (insn)) const _rtx =
(PATTERN (insn)); if (((enum rtx_code) (_rtx)->code) != VAR_LOCATION
) rtl_check_failed_flag ("INSN_VAR_LOCATION", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1146, __FUNCTION__); _rtx; }))))->u.fld[1]).rt_rtx))
= gen_rtx_UNKNOWN_VAR_LOC ()(gen_rtx_fmt_e_stat ((CLOBBER), ((((void) 0, E_VOIDmode))), (
((const_int_rtx[64]))) ))
;
1147 df_insn_rescan_debug_internal (insn);
1148 }
1149 else
1150 INSN_VAR_LOCATION_LOC (insn)((((((__extension__ ({ __typeof (PATTERN (insn)) const _rtx =
(PATTERN (insn)); if (((enum rtx_code) (_rtx)->code) != VAR_LOCATION
) rtl_check_failed_flag ("INSN_VAR_LOCATION", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1150, __FUNCTION__); _rtx; }))))->u.fld[1]).rt_rtx))
1151 = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn)((((((__extension__ ({ __typeof (PATTERN (insn)) const _rtx =
(PATTERN (insn)); if (((enum rtx_code) (_rtx)->code) != VAR_LOCATION
) rtl_check_failed_flag ("INSN_VAR_LOCATION", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1151, __FUNCTION__); _rtx; }))))->u.fld[1]).rt_rtx))
,
1152 reg, equiv);
1153 }
1154 }
1155 }
1156 }
1157
1158 /* We must set reload_completed now since the cleanup_subreg_operands call
1159 below will re-recognize each insn and reload may have generated insns
1160 which are only valid during and after reload. */
1161 reload_completed = 1;
1162
1163 /* Make a pass over all the insns and delete all USEs which we inserted
1164 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1165 notes. Delete all CLOBBER insns, except those that refer to the return
1166 value and the special mem:BLK CLOBBERs added to prevent the scheduler
1167 from misarranging variable-array code, and simplify (subreg (reg))
1168 operands. Strip and regenerate REG_INC notes that may have been moved
1169 around. */
1170
1171 for (insn = first; insn; insn = NEXT_INSN (insn))
1172 if (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
)
1173 {
1174 rtx *pnote;
1175
1176 if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN))
1177 replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn)(((insn)->u.fld[7]).rt_rtx),
1178 VOIDmode((void) 0, E_VOIDmode), CALL_INSN_FUNCTION_USAGE (insn)(((insn)->u.fld[7]).rt_rtx));
1179
1180 if ((GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == USE
1181 /* We mark with QImode USEs introduced by reload itself. */
1182 && (GET_MODE (insn)((machine_mode) (insn)->mode) == QImode(scalar_int_mode ((scalar_int_mode::from_int) E_QImode))
1183 || find_reg_note (insn, REG_EQUAL, NULL_RTX(rtx) 0)))
1184 || (GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == CLOBBER
1185 && (!MEM_P (XEXP (PATTERN (insn), 0))(((enum rtx_code) ((((PATTERN (insn))->u.fld[0]).rt_rtx))->
code) == MEM)
1186 || GET_MODE (XEXP (PATTERN (insn), 0))((machine_mode) ((((PATTERN (insn))->u.fld[0]).rt_rtx))->
mode)
!= BLKmode((void) 0, E_BLKmode)
1187 || (GET_CODE (XEXP (XEXP (PATTERN (insn), 0), 0))((enum rtx_code) (((((((PATTERN (insn))->u.fld[0]).rt_rtx)
)->u.fld[0]).rt_rtx))->code)
!= SCRATCH
1188 && XEXP (XEXP (PATTERN (insn), 0), 0)((((((PATTERN (insn))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx
)
1189 != stack_pointer_rtx((this_target_rtl->x_global_rtl)[GR_STACK_POINTER])))
1190 && (!REG_P (XEXP (PATTERN (insn), 0))(((enum rtx_code) ((((PATTERN (insn))->u.fld[0]).rt_rtx))->
code) == REG)
1191 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0))(__extension__ ({ __typeof (((((PATTERN (insn))->u.fld[0])
.rt_rtx))) const _rtx = (((((PATTERN (insn))->u.fld[0]).rt_rtx
))); if (((enum rtx_code) (_rtx)->code) != REG && (
(enum rtx_code) (_rtx)->code) != PARALLEL) rtl_check_failed_flag
("REG_FUNCTION_VALUE_P",_rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1191, __FUNCTION__); _rtx; })->return_val)
)))
1192 {
1193 delete_insn (insn);
1194 continue;
1195 }
1196
1197 /* Some CLOBBERs may survive until here and still reference unassigned
1198 pseudos with const equivalent, which may in turn cause ICE in later
1199 passes if the reference remains in place. */
1200 if (GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == CLOBBER)
1201 replace_pseudos_in (& XEXP (PATTERN (insn), 0)(((PATTERN (insn))->u.fld[0]).rt_rtx),
1202 VOIDmode((void) 0, E_VOIDmode), PATTERN (insn));
1203
1204 /* Discard obvious no-ops, even without -O. This optimization
1205 is fast and doesn't interfere with debugging. */
1206 if (NONJUMP_INSN_P (insn)(((enum rtx_code) (insn)->code) == INSN)
1207 && GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == SET
1208 && REG_P (SET_SRC (PATTERN (insn)))(((enum rtx_code) ((((PATTERN (insn))->u.fld[1]).rt_rtx))->
code) == REG)
1209 && REG_P (SET_DEST (PATTERN (insn)))(((enum rtx_code) ((((PATTERN (insn))->u.fld[0]).rt_rtx))->
code) == REG)
1210 && (REGNO (SET_SRC (PATTERN (insn)))(rhs_regno((((PATTERN (insn))->u.fld[1]).rt_rtx)))
1211 == REGNO (SET_DEST (PATTERN (insn)))(rhs_regno((((PATTERN (insn))->u.fld[0]).rt_rtx)))))
1212 {
1213 delete_insn (insn);
1214 continue;
1215 }
1216
1217 pnote = &REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx);
1218 while (*pnote != 0)
1219 {
1220 if (REG_NOTE_KIND (*pnote)((enum reg_note) ((machine_mode) (*pnote)->mode)) == REG_DEAD
1221 || REG_NOTE_KIND (*pnote)((enum reg_note) ((machine_mode) (*pnote)->mode)) == REG_UNUSED
1222 || REG_NOTE_KIND (*pnote)((enum reg_note) ((machine_mode) (*pnote)->mode)) == REG_INC)
1223 *pnote = XEXP (*pnote, 1)(((*pnote)->u.fld[1]).rt_rtx);
1224 else
1225 pnote = &XEXP (*pnote, 1)(((*pnote)->u.fld[1]).rt_rtx);
1226 }
1227
1228 if (AUTO_INC_DEC0)
1229 add_auto_inc_notes (insn, PATTERN (insn));
1230
1231 /* Simplify (subreg (reg)) if it appears as an operand. */
1232 cleanup_subreg_operands (insn);
1233
1234 /* Clean up invalid ASMs so that they don't confuse later passes.
1235 See PR 21299. */
1236 if (asm_noperands (PATTERN (insn)) >= 0)
1237 {
1238 extract_insn (insn);
1239 if (!constrain_operands (1, get_enabled_alternatives (insn)))
1240 {
1241 error_for_asm (insn,
1242 "%<asm%> operand has impossible constraints");
1243 delete_insn (insn);
1244 continue;
1245 }
1246 }
1247 }
1248
1249 free (temp_pseudo_reg_arr);
1250
1251 /* Indicate that we no longer have known memory locations or constants. */
1252 free_reg_equiv ();
1253
1254 free (reg_max_ref_mode);
1255 free (reg_old_renumber);
1256 free (pseudo_previous_regs);
1257 free (pseudo_forbidden_regs);
1258
1259 CLEAR_HARD_REG_SET (used_spill_regs);
1260 for (i = 0; i < n_spills; i++)
1261 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1262
1263 /* Free all the insn_chain structures at once. */
1264 obstack_free (&reload_obstack, reload_startobj)__extension__ ({ struct obstack *__o = (&reload_obstack);
void *__obj = (void *) (reload_startobj); if (__obj > (void
*) __o->chunk && __obj < (void *) __o->chunk_limit
) __o->next_free = __o->object_base = (char *) __obj; else
_obstack_free (__o, __obj); })
;
1265 unused_insn_chains = 0;
1266
1267 inserted = fixup_abnormal_edges ();
1268
1269 /* We've possibly turned a single trapping insn into multiple ones.  */
1270 if (cfun(cfun + 0)->can_throw_non_call_exceptions)
1271 {
1272 auto_sbitmap blocks (last_basic_block_for_fn (cfun)(((cfun + 0))->cfg->x_last_basic_block));
1273 bitmap_ones (blocks);
1274 find_many_sub_basic_blocks (blocks);
1275 }
1276
1277 if (inserted)
1278 commit_edge_insertions ();
1279
1280 /* Replacing pseudos with their memory equivalents might have
1281 created shared rtx. Subsequent passes would get confused
1282 by this, so unshare everything here. */
1283 unshare_all_rtl_again (first);
1284
1285#ifdef STACK_BOUNDARY((((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) && ix86_cfun_abi () == MS_ABI) ? 128 : ((8) * (((
global_options.x_ix86_isa_flags & (1UL << 1)) != 0)
? 8 : 4)))
1286 /* init_emit has set the alignment of the hard frame pointer
1287 to STACK_BOUNDARY. It is very likely no longer valid if
1288 the hard frame pointer was used for register allocation. */
1289 if (!frame_pointer_needed((&x_rtl)->frame_pointer_needed))
1290 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM)((&x_rtl)->emit.regno_pointer_align[6]) = BITS_PER_UNIT(8);
1291#endif
1292
1293 substitute_stack.release ();
1294
1295 gcc_assert (bitmap_empty_p (&spilled_pseudos))((void)(!(bitmap_empty_p (&spilled_pseudos)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1295, __FUNCTION__), 0 : 0))
;
1296
1297 reload_completed = !failure;
1298
1299 return need_dce;
1300}
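
/* A minimal sketch restating the "discard obvious no-ops" test above as a
   named predicate.  The helper name is hypothetical and not part of
   reload1.c; it assumes GCC's rtl.h accessors, the same ones the
   surrounding code uses, so it compiles only inside GCC.  */

static bool
noop_reg_move_p (rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  return (NONJUMP_INSN_P (insn)
	  && GET_CODE (pat) == SET
	  && REG_P (SET_SRC (pat))
	  && REG_P (SET_DEST (pat))
	  && REGNO (SET_SRC (pat)) == REGNO (SET_DEST (pat)));
}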
1301
1302/* Yet another special case. Unfortunately, reg-stack forces people to
1303 write incorrect clobbers in asm statements. These clobbers must not
1304 cause the register to appear in bad_spill_regs, otherwise we'll call
1305 fatal_insn later. We clear the corresponding regnos in the live
1306 register sets to avoid this.
1307 The whole thing is rather sick, I'm afraid. */
1308
1309static void
1310maybe_fix_stack_asms (void)
1311{
1312#ifdef STACK_REGS
1313 const char *constraints[MAX_RECOG_OPERANDS30];
1314 machine_mode operand_mode[MAX_RECOG_OPERANDS30];
1315 class insn_chain *chain;
1316
1317 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1318 {
1319 int i, noperands;
1320 HARD_REG_SET clobbered, allowed;
1321 rtx pat;
1322
1323 if (! INSN_P (chain->insn)(((((enum rtx_code) (chain->insn)->code) == INSN) || ((
(enum rtx_code) (chain->insn)->code) == JUMP_INSN) || (
((enum rtx_code) (chain->insn)->code) == CALL_INSN)) ||
(((enum rtx_code) (chain->insn)->code) == DEBUG_INSN))
1324 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1325 continue;
1326 pat = PATTERN (chain->insn);
1327 if (GET_CODE (pat)((enum rtx_code) (pat)->code) != PARALLEL)
1328 continue;
1329
1330 CLEAR_HARD_REG_SET (clobbered);
1331 CLEAR_HARD_REG_SET (allowed);
1332
1333 /* First, make a mask of all stack regs that are clobbered. */
1334 for (i = 0; i < XVECLEN (pat, 0)(((((pat)->u.fld[0]).rt_rtvec))->num_elem); i++)
1335 {
1336 rtx t = XVECEXP (pat, 0, i)(((((pat)->u.fld[0]).rt_rtvec))->elem[i]);
1337 if (GET_CODE (t)((enum rtx_code) (t)->code) == CLOBBER && STACK_REG_P (XEXP (t, 0))((((enum rtx_code) ((((t)->u.fld[0]).rt_rtx))->code) ==
REG) && ((unsigned long) (((rhs_regno((((t)->u.fld
[0]).rt_rtx))))) - (unsigned long) (8) <= (unsigned long) (
15) - (unsigned long) (8)))
)
1338 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0))(rhs_regno((((t)->u.fld[0]).rt_rtx))));
1339 }
1340
1341 /* Get the operand values and constraints out of the insn. */
1342 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1343 constraints, operand_mode, NULLnullptr);
1344
1345 /* For every operand, see what registers are allowed. */
1346 for (i = 0; i < noperands; i++)
1347 {
1348 const char *p = constraints[i];
1349 /* For every alternative, we compute the class of registers allowed
1350 for reloading in CLS, and merge its contents into the reg set
1351 ALLOWED. */
1352 int cls = (int) NO_REGS;
1353
1354 for (;;)
1355 {
1356 char c = *p;
1357
1358 if (c == '\0' || c == ',' || c == '#')
1359 {
1360 /* End of one alternative - mark the regs in the current
1361 class, and reset the class. */
1362 allowed |= reg_class_contents(this_target_hard_regs->x_reg_class_contents)[cls];
1363 cls = NO_REGS;
1364 p++;
1365 if (c == '#')
1366 do {
1367 c = *p++;
1368 } while (c != '\0' && c != ',');
1369 if (c == '\0')
1370 break;
1371 continue;
1372 }
1373
1374 switch (c)
1375 {
1376 case 'g':
1377 cls = (int) reg_class_subunion(this_target_hard_regs->x_reg_class_subunion)[cls][(int) GENERAL_REGS];
1378 break;
1379
1380 default:
1381 enum constraint_num cn = lookup_constraint (p);
1382 if (insn_extra_address_constraint (cn))
1383 cls = (int) reg_class_subunion(this_target_hard_regs->x_reg_class_subunion)[cls]
1384 [(int) base_reg_class (VOIDmode((void) 0, E_VOIDmode), ADDR_SPACE_GENERIC0,
1385 ADDRESS, SCRATCH)];
1386 else
1387 cls = (int) reg_class_subunion(this_target_hard_regs->x_reg_class_subunion)[cls]
1388 [reg_class_for_constraint (cn)];
1389 break;
1390 }
1391 p += CONSTRAINT_LEN (c, p)insn_constraint_len (c,p);
1392 }
1393 }
1394 /* Those of the registers which are clobbered, but allowed by the
1395 constraints, must be usable as reload registers. So clear them
1396 out of the life information. */
1397 allowed &= clobbered;
1398 for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++)
1399 if (TEST_HARD_REG_BIT (allowed, i))
1400 {
1401 CLEAR_REGNO_REG_SET (&chain->live_throughout, i)bitmap_clear_bit (&chain->live_throughout, i);
1402 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i)bitmap_clear_bit (&chain->dead_or_set, i);
1403 }
1404 }
1405
1406#endif
1407}
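
/* A stand-alone sketch of the alternative-scanning pattern above, assuming
   single-letter constraints (real constraints may be longer; see
   CONSTRAINT_LEN): ',' terminates an alternative, and '#' makes the rest of
   the current alternative irrelevant.  The function and its name are
   illustrative only.  */

static int
count_scanned_alternatives (const char *p)
{
  int n = 0;
  for (;;)
    {
      char c = *p;
      if (c == '\0' || c == ',' || c == '#')
	{
	  n++;			/* finished accumulating one alternative */
	  p++;
	  if (c == '#')
	    do
	      c = *p++;
	    while (c != '\0' && c != ',');
	  if (c == '\0')
	    break;
	  continue;
	}
      /* A constraint letter: a real consumer would merge the register
	 class it allows, as the loop above does.  */
      p++;
    }
  return n;
}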
1408
1409/* Copy the global variables n_reloads and rld into the corresponding elts
1410 of CHAIN. */
1411static void
1412copy_reloads (class insn_chain *chain)
1413{
1414 chain->n_reloads = n_reloads;
1415 chain->rld = XOBNEWVEC (&reload_obstack, struct reload, n_reloads)((struct reload *) __extension__ ({ struct obstack *__h = ((&
reload_obstack)); __extension__ ({ struct obstack *__o = (__h
); size_t __len = ((sizeof (struct reload) * (n_reloads))); if
(__extension__ ({ struct obstack const *__o1 = (__o); (size_t
) (__o1->chunk_limit - __o1->next_free); }) < __len)
_obstack_newchunk (__o, __len); ((void) ((__o)->next_free
+= (__len))); }); __extension__ ({ struct obstack *__o1 = (__h
); void *__value = (void *) __o1->object_base; if (__o1->
next_free == __value) __o1->maybe_empty_object = 1; __o1->
next_free = ((sizeof (ptrdiff_t) < sizeof (void *) ? (__o1
->object_base) : (char *) 0) + (((__o1->next_free) - (sizeof
(ptrdiff_t) < sizeof (void *) ? (__o1->object_base) : (
char *) 0) + (__o1->alignment_mask)) & ~(__o1->alignment_mask
))); if ((size_t) (__o1->next_free - (char *) __o1->chunk
) > (size_t) (__o1->chunk_limit - (char *) __o1->chunk
)) __o1->next_free = __o1->chunk_limit; __o1->object_base
= __o1->next_free; __value; }); }))
;
1416 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1417 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0)((char *) __extension__ ({ struct obstack *__h = ((&reload_obstack
)); __extension__ ({ struct obstack *__o = (__h); size_t __len
= (((0))); if (__extension__ ({ struct obstack const *__o1 =
(__o); (size_t) (__o1->chunk_limit - __o1->next_free);
}) < __len) _obstack_newchunk (__o, __len); ((void) ((__o
)->next_free += (__len))); }); __extension__ ({ struct obstack
*__o1 = (__h); void *__value = (void *) __o1->object_base
; if (__o1->next_free == __value) __o1->maybe_empty_object
= 1; __o1->next_free = ((sizeof (ptrdiff_t) < sizeof (
void *) ? (__o1->object_base) : (char *) 0) + (((__o1->
next_free) - (sizeof (ptrdiff_t) < sizeof (void *) ? (__o1
->object_base) : (char *) 0) + (__o1->alignment_mask)) &
~(__o1->alignment_mask))); if ((size_t) (__o1->next_free
- (char *) __o1->chunk) > (size_t) (__o1->chunk_limit
- (char *) __o1->chunk)) __o1->next_free = __o1->chunk_limit
; __o1->object_base = __o1->next_free; __value; }); }))
;
1418}
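
/* A self-contained sketch of the obstack checkpoint idiom that
   reload_insn_firstobj relies on, assuming only the documented <obstack.h>
   API: a zero-length allocation marks a position, and obstack_free back to
   that marker releases everything allocated after it in one call.  All
   names below are illustrative.  */

#include <obstack.h>
#include <stdlib.h>

#define obstack_chunk_alloc chunk_alloc
#define obstack_chunk_free free

static void *
chunk_alloc (size_t n)
{
  void *p = malloc (n);
  if (p == NULL)
    abort ();		/* obstacks require a non-failing allocator */
  return p;
}

int
main (void)
{
  struct obstack ob;
  obstack_init (&ob);

  /* Record a checkpoint, like reload_insn_firstobj above.  */
  char *firstobj = (char *) obstack_alloc (&ob, 0);

  /* Per-iteration scratch allocations...  */
  int *scratch = (int *) obstack_alloc (&ob, 16 * sizeof (int));
  scratch[0] = 0;

  /* ...are all released at once, like the obstack_free calls above.  */
  obstack_free (&ob, firstobj);

  obstack_free (&ob, NULL);	/* tear down the whole obstack */
  return 0;
}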
1419
1420/* Walk the chain of insns, and determine for each whether it needs reloads
1421 and/or eliminations. Build the corresponding insns_need_reload list, and
1422 set something_needs_elimination as appropriate. */
1423static void
1424calculate_needs_all_insns (int global)
1425{
1426 class insn_chain **pprev_reload = &insns_need_reload;
1427 class insn_chain *chain, *next = 0;
1428
1429 something_needs_elimination = 0;
1430
1431 reload_insn_firstobj = XOBNEWVAR (&reload_obstack, char, 0)((char *) __extension__ ({ struct obstack *__h = ((&reload_obstack
)); __extension__ ({ struct obstack *__o = (__h); size_t __len
= (((0))); if (__extension__ ({ struct obstack const *__o1 =
(__o); (size_t) (__o1->chunk_limit - __o1->next_free);
}) < __len) _obstack_newchunk (__o, __len); ((void) ((__o
)->next_free += (__len))); }); __extension__ ({ struct obstack
*__o1 = (__h); void *__value = (void *) __o1->object_base
; if (__o1->next_free == __value) __o1->maybe_empty_object
= 1; __o1->next_free = ((sizeof (ptrdiff_t) < sizeof (
void *) ? (__o1->object_base) : (char *) 0) + (((__o1->
next_free) - (sizeof (ptrdiff_t) < sizeof (void *) ? (__o1
->object_base) : (char *) 0) + (__o1->alignment_mask)) &
~(__o1->alignment_mask))); if ((size_t) (__o1->next_free
- (char *) __o1->chunk) > (size_t) (__o1->chunk_limit
- (char *) __o1->chunk)) __o1->next_free = __o1->chunk_limit
; __o1->object_base = __o1->next_free; __value; }); }))
;
1432 for (chain = reload_insn_chain; chain != 0; chain = next)
1433 {
1434 rtx_insn *insn = chain->insn;
1435
1436 next = chain->next;
1437
1438 /* Clear out the shortcuts. */
1439 chain->n_reloads = 0;
1440 chain->need_elim = 0;
1441 chain->need_reload = 0;
1442 chain->need_operand_change = 0;
1443
1444 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1445 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1446 what effects this has on the known offsets at labels. */
1447
1448 if (LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL) || JUMP_P (insn)(((enum rtx_code) (insn)->code) == JUMP_INSN) || JUMP_TABLE_DATA_P (insn)(((enum rtx_code) (insn)->code) == JUMP_TABLE_DATA)
1449 || (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
&& REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx) != 0))
1450 set_label_offsets (insn, insn, 0);
1451
1452 if (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
)
1453 {
1454 rtx old_body = PATTERN (insn);
1455 int old_code = INSN_CODE (insn)(((insn)->u.fld[5]).rt_int);
1456 rtx old_notes = REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx);
1457 int did_elimination = 0;
1458 int operands_changed = 0;
1459
1460 /* Skip insns that only set an equivalence. */
1461 if (will_delete_init_insn_p (insn))
1462 continue;
1463
1464 /* If needed, eliminate any eliminable registers. */
1465 if (num_eliminable || num_eliminable_invariants)
1466 did_elimination = eliminate_regs_in_insn (insn, 0);
1467
1468 /* Analyze the instruction. */
1469 operands_changed = find_reloads (insn, 0, spill_indirect_levels(this_target_reload->x_spill_indirect_levels),
1470 global, spill_reg_order);
1471
1472 /* If a no-op set needs more than one reload, this is likely
1473 to be something that needs input address reloads. We
1474 can't get rid of this cleanly later, and it is of no use
1475 anyway, so discard it now.
1476 We only do this when expensive_optimizations is enabled,
1477 since this complements reload inheritance / output
1478 reload deletion, and it can make debugging harder. */
1479 if (flag_expensive_optimizationsglobal_options.x_flag_expensive_optimizations && n_reloads > 1)
1480 {
1481 rtx set = single_set (insn);
1482 if (set
1483 &&
1484 ((SET_SRC (set)(((set)->u.fld[1]).rt_rtx) == SET_DEST (set)(((set)->u.fld[0]).rt_rtx)
1485 && REG_P (SET_SRC (set))(((enum rtx_code) ((((set)->u.fld[1]).rt_rtx))->code) ==
REG)
1486 && REGNO (SET_SRC (set))(rhs_regno((((set)->u.fld[1]).rt_rtx))) >= FIRST_PSEUDO_REGISTER76)
1487 || (REG_P (SET_SRC (set))(((enum rtx_code) ((((set)->u.fld[1]).rt_rtx))->code) ==
REG)
&& REG_P (SET_DEST (set))(((enum rtx_code) ((((set)->u.fld[0]).rt_rtx))->code) ==
REG)
1488 && reg_renumber[REGNO (SET_SRC (set))(rhs_regno((((set)->u.fld[1]).rt_rtx)))] < 0
1489 && reg_renumber[REGNO (SET_DEST (set))(rhs_regno((((set)->u.fld[0]).rt_rtx)))] < 0
1490 && reg_equiv_memory_loc (REGNO (SET_SRC (set)))(*reg_equivs)[((rhs_regno((((set)->u.fld[1]).rt_rtx))))].memory_loc != NULLnullptr
1491 && reg_equiv_memory_loc (REGNO (SET_DEST (set)))(*reg_equivs)[((rhs_regno((((set)->u.fld[0]).rt_rtx))))].memory_loc != NULLnullptr
1492 && rtx_equal_p (reg_equiv_memory_loc (REGNO (SET_SRC (set)))(*reg_equivs)[((rhs_regno((((set)->u.fld[1]).rt_rtx))))].memory_loc,
1493 reg_equiv_memory_loc (REGNO (SET_DEST (set)))(*reg_equivs)[((rhs_regno((((set)->u.fld[0]).rt_rtx))))].memory_loc))))
1494 {
1495 if (ira_conflicts_p)
1496 /* Inform IRA about the insn deletion. */
1497 ira_mark_memory_move_deletion (REGNO (SET_DEST (set))(rhs_regno((((set)->u.fld[0]).rt_rtx))),
1498 REGNO (SET_SRC (set))(rhs_regno((((set)->u.fld[1]).rt_rtx))));
1499 delete_insn (insn);
1500 /* Delete it from the reload chain. */
1501 if (chain->prev)
1502 chain->prev->next = next;
1503 else
1504 reload_insn_chain = next;
1505 if (next)
1506 next->prev = chain->prev;
1507 chain->next = unused_insn_chains;
1508 unused_insn_chains = chain;
1509 continue;
1510 }
1511 }
1512 if (num_eliminable)
1513 update_eliminable_offsets ();
1514
1515 /* Remember for later shortcuts which insns had any reloads or
1516 register eliminations. */
1517 chain->need_elim = did_elimination;
1518 chain->need_reload = n_reloads > 0;
1519 chain->need_operand_change = operands_changed;
1520
1521 /* Discard any register replacements done. */
1522 if (did_elimination)
1523 {
1524 obstack_free (&reload_obstack, reload_insn_firstobj)__extension__ ({ struct obstack *__o = (&reload_obstack);
void *__obj = (void *) (reload_insn_firstobj); if (__obj >
(void *) __o->chunk && __obj < (void *) __o->
chunk_limit) __o->next_free = __o->object_base = (char *
) __obj; else _obstack_free (__o, __obj); })
;
1525 PATTERN (insn) = old_body;
1526 INSN_CODE (insn)(((insn)->u.fld[5]).rt_int) = old_code;
1527 REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx) = old_notes;
1528 something_needs_elimination = 1;
1529 }
1530
1531 something_needs_operands_changed |= operands_changed;
1532
1533 if (n_reloads != 0)
1534 {
1535 copy_reloads (chain);
1536 *pprev_reload = chain;
1537 pprev_reload = &chain->next_need_reload;
1538 }
1539 }
1540 }
1541 *pprev_reload = 0;
1542}
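
/* A self-contained sketch of the pprev_reload idiom above, with
   hypothetical types: append matching nodes through a pointer-to-pointer
   "tail hook", so the first element needs no special case and the list is
   terminated by a single store, mirroring "*pprev_reload = 0;".  */

#include <stddef.h>

struct node
{
  int needs_reload;
  struct node *next;		   /* the full chain */
  struct node *next_need_reload;   /* the sublist being built */
};

static struct node *
collect_needing_reload (struct node *all)
{
  struct node *head = NULL;
  struct node **pprev = &head;

  for (struct node *n = all; n; n = n->next)
    if (n->needs_reload)
      {
	*pprev = n;			/* append in traversal order */
	pprev = &n->next_need_reload;
      }
  *pprev = NULL;			/* terminate the sublist */
  return head;
}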
1543
1544/* This function is called from the register allocator to set up estimates
1545 for the cost of eliminating pseudos which have REG_EQUIV equivalences to
1546 an invariant. The structure is similar to calculate_needs_all_insns. */
1547
1548void
1549calculate_elim_costs_all_insns (void)
1550{
1551 int *reg_equiv_init_cost;
1552 basic_block bb;
1553 int i;
1554
1555 reg_equiv_init_cost = XCNEWVEC (int, max_regno)((int *) xcalloc ((max_regno), sizeof (int)));
1556 init_elim_table ();
1557 init_eliminable_invariants (get_insns (), false);
1558
1559 set_initial_elim_offsets ();
1560 set_initial_label_offsets ();
1561
1562 FOR_EACH_BB_FN (bb, cfun)for (bb = ((cfun + 0))->cfg->x_entry_block_ptr->next_bb
; bb != ((cfun + 0))->cfg->x_exit_block_ptr; bb = bb->
next_bb)
1563 {
1564 rtx_insn *insn;
1565 elim_bb = bb;
1566
1567 FOR_BB_INSNS (bb, insn)for ((insn) = (bb)->il.x.head_; (insn) && (insn) !=
NEXT_INSN ((bb)->il.x.rtl->end_); (insn) = NEXT_INSN (
insn))
1568 {
1569 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1570 include REG_LABEL_OPERAND and REG_LABEL_TARGET), we need to see
1571 what effects this has on the known offsets at labels. */
1572
1573 if (LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL) || JUMP_P (insn)(((enum rtx_code) (insn)->code) == JUMP_INSN) || JUMP_TABLE_DATA_P (insn)(((enum rtx_code) (insn)->code) == JUMP_TABLE_DATA)
1574 || (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
&& REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx) != 0))
1575 set_label_offsets (insn, insn, 0);
1576
1577 if (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
)
1578 {
1579 rtx set = single_set (insn);
1580
1581 /* Skip insns that only set an equivalence. */
1582 if (set && REG_P (SET_DEST (set))(((enum rtx_code) ((((set)->u.fld[0]).rt_rtx))->code) ==
REG)
1583 && reg_renumber[REGNO (SET_DEST (set))(rhs_regno((((set)->u.fld[0]).rt_rtx)))] < 0
1584 && (reg_equiv_constant (REGNO (SET_DEST (set)))(*reg_equivs)[((rhs_regno((((set)->u.fld[0]).rt_rtx))))].constant
1585 || reg_equiv_invariant (REGNO (SET_DEST (set)))(*reg_equivs)[((rhs_regno((((set)->u.fld[0]).rt_rtx))))].invariant))
1586 {
1587 unsigned regno = REGNO (SET_DEST (set))(rhs_regno((((set)->u.fld[0]).rt_rtx)));
1588 rtx_insn_list *init = reg_equiv_init (regno)(*reg_equivs)[(regno)].init;
1589 if (init)
1590 {
1591 rtx t = eliminate_regs_1 (SET_SRC (set)(((set)->u.fld[1]).rt_rtx), VOIDmode((void) 0, E_VOIDmode), insn,
1592 false, true);
1593 machine_mode mode = GET_MODE (SET_DEST (set))((machine_mode) ((((set)->u.fld[0]).rt_rtx))->mode);
1594 int cost = set_src_cost (t, mode,
1595 optimize_bb_for_speed_p (bb));
1596 int freq = REG_FREQ_FROM_BB (bb)((optimize_function_for_size_p ((cfun + 0)) || !(cfun + 0)->
cfg->count_max.initialized_p ()) ? 1000 : ((bb)->count.
to_frequency ((cfun + 0)) * 1000 / 10000) ? ((bb)->count.to_frequency
((cfun + 0)) * 1000 / 10000) : 1)
;
1597
1598 reg_equiv_init_cost[regno] = cost * freq;
1599 continue;
1600 }
1601 }
1602 /* If needed, eliminate any eliminable registers. */
1603 if (num_eliminable || num_eliminable_invariants)
1604 elimination_costs_in_insn (insn);
1605
1606 if (num_eliminable)
1607 update_eliminable_offsets ();
1608 }
1609 }
1610 }
1611 for (i = FIRST_PSEUDO_REGISTER76; i < max_regno; i++)
1612 {
1613 if (reg_equiv_invariant (i)(*reg_equivs)[(i)].invariant)
1614 {
1615 if (reg_equiv_init (i)(*reg_equivs)[(i)].init)
1616 {
1617 int cost = reg_equiv_init_cost[i];
1618 if (dump_file)
1619 fprintf (dump_file,
1620 "Reg %d has equivalence, initial gains %d\n", i, cost);
1621 if (cost != 0)
1622 ira_adjust_equiv_reg_cost (i, cost);
1623 }
1624 else
1625 {
1626 if (dump_file)
1627 fprintf (dump_file,
1628 "Reg %d had equivalence, but can't be eliminated\n",
1629 i);
1630 ira_adjust_equiv_reg_cost (i, 0);
1631 }
1632 }
1633 }
1634
1635 free (reg_equiv_init_cost);
1636 free (offsets_known_at);
1637 free (offsets_at);
1638 offsets_at = NULLnullptr;
1639 offsets_known_at = NULLnullptr;
1640}
1641
1642/* Comparison function for qsort to decide which of two reloads
1643 should be handled first. *P1 and *P2 are the reload numbers. */
1644
1645static int
1646reload_reg_class_lower (const void *r1p, const void *r2p)
1647{
1648 int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1649 int t;
1650
1651 /* Consider required reloads before optional ones. */
1652 t = rld[r1].optional - rld[r2].optional;
1653 if (t != 0)
1654 return t;
1655
1656 /* Count all solitary classes before non-solitary ones. */
1657 t = ((reg_class_size(this_target_hard_regs->x_reg_class_size)[(int) rld[r2].rclass] == 1)
1658 - (reg_class_size(this_target_hard_regs->x_reg_class_size)[(int) rld[r1].rclass] == 1));
1659 if (t != 0)
1660 return t;
1661
1662 /* Aside from solitaires, consider all multi-reg groups first. */
1663 t = rld[r2].nregs - rld[r1].nregs;
1664 if (t != 0)
1665 return t;
1666
1667 /* Consider reloads in order of increasing reg-class number. */
1668 t = (int) rld[r1].rclass - (int) rld[r2].rclass;
1669 if (t != 0)
1670 return t;
1671
1672 /* If reloads are equally urgent, sort by reload number,
1673 so that the results of qsort leave nothing to chance. */
1674 return r1 - r2;
1675}
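
/* A stand-alone sketch of the comparator pattern above: compare by a
   cascade of criteria and fall back to the element index, so that qsort,
   which makes no stability guarantee, still produces a fully deterministic
   result.  The struct and its fields are hypothetical.  */

#include <stdlib.h>

struct item { int optional; int rclass; int index; };

static int
item_cmp (const void *ap, const void *bp)
{
  const struct item *a = (const struct item *) ap;
  const struct item *b = (const struct item *) bp;
  int t;

  if ((t = a->optional - b->optional) != 0)	/* required ones first */
    return t;
  if ((t = a->rclass - b->rclass) != 0)		/* then by class number */
    return t;
  return a->index - b->index;			/* tie-break: input order */
}

/* Usage: qsort (items, n_items, sizeof (struct item), item_cmp);  */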
1676
1677/* The cost of spilling each hard reg. */
1678static int spill_cost[FIRST_PSEUDO_REGISTER76];
1679
1680/* When spilling multiple hard registers, we use SPILL_COST for the first
1681 spilled hard reg and SPILL_ADD_COST for subsequent regs. SPILL_ADD_COST
1682 is accumulated only at the first hard reg of a multi-reg pseudo.  */
1683static int spill_add_cost[FIRST_PSEUDO_REGISTER76];
1684
1685/* Map of hard regno to pseudo regno currently occupying the hard
1686 reg. */
1687static int hard_regno_to_pseudo_regno[FIRST_PSEUDO_REGISTER76];
1688
1689/* Update the spill cost arrays, considering that pseudo REG is live. */
1690
1691static void
1692count_pseudo (int reg)
1693{
1694 int freq = REG_FREQ (reg)(reg_info_p[reg].freq);
1695 int r = reg_renumber[reg];
1696 int nregs;
1697
1698 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1699 if (ira_conflicts_p && r < 0)
1700 return;
1701
1702 if (REGNO_REG_SET_P (&pseudos_counted, reg)bitmap_bit_p (&pseudos_counted, reg)
1703 || REGNO_REG_SET_P (&spilled_pseudos, reg)bitmap_bit_p (&spilled_pseudos, reg))
1704 return;
1705
1706 SET_REGNO_REG_SET (&pseudos_counted, reg)bitmap_set_bit (&pseudos_counted, reg);
1707
1708 gcc_assert (r >= 0)((void)(!(r >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1708, __FUNCTION__), 0 : 0))
;
1709
1710 spill_add_cost[r] += freq;
1711 nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg)((machine_mode) (regno_reg_rtx[reg])->mode));
1712 while (nregs-- > 0)
1713 {
1714 hard_regno_to_pseudo_regno[r + nregs] = reg;
1715 spill_cost[r + nregs] += freq;
1716 }
1717}
1718
1719/* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1720 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1721
1722static void
1723order_regs_for_reload (class insn_chain *chain)
1724{
1725 unsigned i;
1726 HARD_REG_SET used_by_pseudos;
1727 HARD_REG_SET used_by_pseudos2;
1728 reg_set_iterator rsi;
1729
1730 bad_spill_regs = fixed_reg_set(this_target_hard_regs->x_fixed_reg_set);
1731
1732 memset (spill_cost, 0, sizeof spill_cost);
1733 memset (spill_add_cost, 0, sizeof spill_add_cost);
1734 for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++)
1735 hard_regno_to_pseudo_regno[i] = -1;
1736
1737 /* Count number of uses of each hard reg by pseudo regs allocated to it
1738 and then order them by decreasing use. First exclude hard registers
1739 that are live in or across this insn. */
1740
1741 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout)do { CLEAR_HARD_REG_SET (used_by_pseudos); reg_set_to_hard_reg_set
(&used_by_pseudos, &chain->live_throughout); } while
(0)
;
1742 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set)do { CLEAR_HARD_REG_SET (used_by_pseudos2); reg_set_to_hard_reg_set
(&used_by_pseudos2, &chain->dead_or_set); } while
(0)
;
1743 bad_spill_regs |= used_by_pseudos;
1744 bad_spill_regs |= used_by_pseudos2;
1745
1746 /* Now find out which pseudos are allocated to each hard reg, and update
1747 hard_reg_n_uses. */
1748 CLEAR_REG_SET (&pseudos_counted)bitmap_clear (&pseudos_counted);
1749
1750 EXECUTE_IF_SET_IN_REG_SETfor (bmp_iter_set_init (&(rsi), (&chain->live_throughout
), (76), &(i)); bmp_iter_set (&(rsi), &(i)); bmp_iter_next
(&(rsi), &(i)))
1751 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)for (bmp_iter_set_init (&(rsi), (&chain->live_throughout
), (76), &(i)); bmp_iter_set (&(rsi), &(i)); bmp_iter_next
(&(rsi), &(i)))
1752 {
1753 count_pseudo (i);
1754 }
1755 EXECUTE_IF_SET_IN_REG_SETfor (bmp_iter_set_init (&(rsi), (&chain->dead_or_set
), (76), &(i)); bmp_iter_set (&(rsi), &(i)); bmp_iter_next
(&(rsi), &(i)))
1756 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)for (bmp_iter_set_init (&(rsi), (&chain->dead_or_set
), (76), &(i)); bmp_iter_set (&(rsi), &(i)); bmp_iter_next
(&(rsi), &(i)))
1757 {
1758 count_pseudo (i);
1759 }
1760 CLEAR_REG_SET (&pseudos_counted)bitmap_clear (&pseudos_counted);
1761}
1762
1763/* Vector of reload-numbers showing the order in which the reloads should
1764 be processed. */
1765static short reload_order[MAX_RELOADS(2 * 30 * (2 + 1))];
1766
1767/* This is used to keep track of the spill regs used in one insn. */
1768static HARD_REG_SET used_spill_regs_local;
1769
1770/* We decided to spill hard register SPILLED, which has a size of
1771 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1772 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1773 update SPILL_COST/SPILL_ADD_COST. */
1774
1775static void
1776count_spilled_pseudo (int spilled, int spilled_nregs, int reg)
1777{
1778 int freq = REG_FREQ (reg)(reg_info_p[reg].freq);
1779 int r = reg_renumber[reg];
1780 int nregs;
1781
1782 /* Ignore spilled pseudo-registers which can be here only if IRA is used. */
1783 if (ira_conflicts_p && r < 0)
1784 return;
1785
1786 gcc_assert (r >= 0)((void)(!(r >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1786, __FUNCTION__), 0 : 0))
;
1787
1788 nregs = hard_regno_nregs (r, PSEUDO_REGNO_MODE (reg)((machine_mode) (regno_reg_rtx[reg])->mode));
1789
1790 if (REGNO_REG_SET_P (&spilled_pseudos, reg)bitmap_bit_p (&spilled_pseudos, reg)
1791 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1792 return;
1793
1794 SET_REGNO_REG_SET (&spilled_pseudos, reg)bitmap_set_bit (&spilled_pseudos, reg);
1795
1796 spill_add_cost[r] -= freq;
1797 while (nregs-- > 0)
1798 {
1799 hard_regno_to_pseudo_regno[r + nregs] = -1;
1800 spill_cost[r + nregs] -= freq;
1801 }
1802}
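
/* The early return above is the standard disjointness test for the
   half-open ranges [spilled, spilled + spilled_nregs) and [r, r + nregs):
   they overlap exactly when neither range ends at or before the start of
   the other.  A hypothetical helper spelling the same test out:  */

static inline int
hard_reg_ranges_overlap_p (int a, int a_nregs, int b, int b_nregs)
{
  return ! (a + a_nregs <= b || b + b_nregs <= a);
}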
1803
1804/* Find reload register to use for reload number ORDER. */
1805
1806static int
1807find_reg (class insn_chain *chain, int order)
1808{
1809 int rnum = reload_order[order];
1810 struct reload *rl = rld + rnum;
1811 int best_cost = INT_MAX2147483647;
1812 int best_reg = -1;
1813 unsigned int i, j, n;
1814 int k;
1815 HARD_REG_SET not_usable;
1816 HARD_REG_SET used_by_other_reload;
1817 reg_set_iterator rsi;
1818 static int regno_pseudo_regs[FIRST_PSEUDO_REGISTER76];
1819 static int best_regno_pseudo_regs[FIRST_PSEUDO_REGISTER76];
1820
1821 not_usable = (bad_spill_regs
1822 | bad_spill_regs_global
1823 | ~reg_class_contents(this_target_hard_regs->x_reg_class_contents)[rl->rclass]);
1824
1825 CLEAR_HARD_REG_SET (used_by_other_reload);
1826 for (k = 0; k < order; k++)
1827 {
1828 int other = reload_order[k];
1829
1830 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1831 for (j = 0; j < rld[other].nregs; j++)
1832 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1833 }
1834
1835 for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++)
1836 {
1837#ifdef REG_ALLOC_ORDER
1838 unsigned int regno = reg_alloc_order(this_target_hard_regs->x_reg_alloc_order)[i];
1839#else
1840 unsigned int regno = i;
1841#endif
1842
1843 if (! TEST_HARD_REG_BIT (not_usable, regno)
1844 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1845 && targetm.hard_regno_mode_ok (regno, rl->mode))
1846 {
1847 int this_cost = spill_cost[regno];
1848 int ok = 1;
1849 unsigned int this_nregs = hard_regno_nregs (regno, rl->mode);
1850
1851 for (j = 1; j < this_nregs; j++)
1852 {
1853 this_cost += spill_add_cost[regno + j];
1854 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1855 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1856 ok = 0;
1857 }
1858 if (! ok)
1859 continue;
1860
1861 if (ira_conflicts_p)
1862 {
1863 /* Ask IRA to find a better pseudo-register for
1864 spilling. */
1865 for (n = j = 0; j < this_nregs; j++)
1866 {
1867 int r = hard_regno_to_pseudo_regno[regno + j];
1868
1869 if (r < 0)
1870 continue;
1871 if (n == 0 || regno_pseudo_regs[n - 1] != r)
1872 regno_pseudo_regs[n++] = r;
1873 }
1874 regno_pseudo_regs[n++] = -1;
1875 if (best_reg < 0
1876 || ira_better_spill_reload_regno_p (regno_pseudo_regs,
1877 best_regno_pseudo_regs,
1878 rl->in, rl->out,
1879 chain->insn))
1880 {
1881 best_reg = regno;
1882 for (j = 0;; j++)
1883 {
1884 best_regno_pseudo_regs[j] = regno_pseudo_regs[j];
1885 if (regno_pseudo_regs[j] < 0)
1886 break;
1887 }
1888 }
1889 continue;
1890 }
1891
1892 if (rl->in && REG_P (rl->in)(((enum rtx_code) (rl->in)->code) == REG) && REGNO (rl->in)(rhs_regno(rl->in)) == regno)
1893 this_cost--;
1894 if (rl->out && REG_P (rl->out)(((enum rtx_code) (rl->out)->code) == REG) && REGNO (rl->out)(rhs_regno(rl->out)) == regno)
1895 this_cost--;
1896 if (this_cost < best_cost
1897 /* Among registers with equal cost, prefer caller-saved ones, or
1898 use REG_ALLOC_ORDER if it is defined. */
1899 || (this_cost == best_cost
1900#ifdef REG_ALLOC_ORDER
1901 && (inv_reg_alloc_order(this_target_hard_regs->x_inv_reg_alloc_order)[regno]
1902 < inv_reg_alloc_order(this_target_hard_regs->x_inv_reg_alloc_order)[best_reg])
1903#else
1904 && crtl(&x_rtl)->abi->clobbers_full_reg_p (regno)
1905 && !crtl(&x_rtl)->abi->clobbers_full_reg_p (best_reg)
1906#endif
1907 ))
1908 {
1909 best_reg = regno;
1910 best_cost = this_cost;
1911 }
1912 }
1913 }
1914 if (best_reg == -1)
1915 return 0;
1916
1917 if (dump_file)
1918 fprintf (dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1919
1920 rl->nregs = hard_regno_nregs (best_reg, rl->mode);
1921 rl->regno = best_reg;
1922
1923 EXECUTE_IF_SET_IN_REG_SETfor (bmp_iter_set_init (&(rsi), (&chain->live_throughout
), (76), &(j)); bmp_iter_set (&(rsi), &(j)); bmp_iter_next
(&(rsi), &(j)))
1924 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j, rsi)for (bmp_iter_set_init (&(rsi), (&chain->live_throughout
), (76), &(j)); bmp_iter_set (&(rsi), &(j)); bmp_iter_next
(&(rsi), &(j)))
1925 {
1926 count_spilled_pseudo (best_reg, rl->nregs, j);
1927 }
1928
1929 EXECUTE_IF_SET_IN_REG_SETfor (bmp_iter_set_init (&(rsi), (&chain->dead_or_set
), (76), &(j)); bmp_iter_set (&(rsi), &(j)); bmp_iter_next
(&(rsi), &(j)))
1930 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j, rsi)for (bmp_iter_set_init (&(rsi), (&chain->dead_or_set
), (76), &(j)); bmp_iter_set (&(rsi), &(j)); bmp_iter_next
(&(rsi), &(j)))
1931 {
1932 count_spilled_pseudo (best_reg, rl->nregs, j);
1933 }
1934
1935 for (i = 0; i < rl->nregs; i++)
1936 {
1937 gcc_assert (spill_cost[best_reg + i] == 0)((void)(!(spill_cost[best_reg + i] == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1937, __FUNCTION__), 0 : 0))
;
1938 gcc_assert (spill_add_cost[best_reg + i] == 0)((void)(!(spill_add_cost[best_reg + i] == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1938, __FUNCTION__), 0 : 0))
;
1939 gcc_assert (hard_regno_to_pseudo_regno[best_reg + i] == -1)((void)(!(hard_regno_to_pseudo_regno[best_reg + i] == -1) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 1939, __FUNCTION__), 0 : 0))
;
1940 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1941 }
1942 return 1;
1943}
1944
1945/* Find more reload regs to satisfy the remaining need of an insn, which
1946 is given by CHAIN.
1947 Do it by ascending class number, since otherwise a reg
1948 might be spilled for a big class and might fail to count
1949 for a smaller class even though it belongs to that class. */
1950
1951static void
1952find_reload_regs (class insn_chain *chain)
1953{
1954 int i;
1955
1956 /* In order to be certain of getting the registers we need,
1957 we must sort the reloads into order of increasing register class.
1958 Then our grabbing of reload registers will parallel the process
1959 that provided the reload registers. */
1960 for (i = 0; i < chain->n_reloads; i++)
1961 {
1962 /* Show whether this reload already has a hard reg. */
1963 if (chain->rld[i].reg_rtx)
1964 {
1965 chain->rld[i].regno = REGNO (chain->rld[i].reg_rtx)(rhs_regno(chain->rld[i].reg_rtx));
1966 chain->rld[i].nregs = REG_NREGS (chain->rld[i].reg_rtx)((&(chain->rld[i].reg_rtx)->u.reg)->nregs);
1967 }
1968 else
1969 chain->rld[i].regno = -1;
1970 reload_order[i] = i;
1971 }
1972
1973 n_reloads = chain->n_reloads;
1974 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1975
1976 CLEAR_HARD_REG_SET (used_spill_regs_local);
1977
1978 if (dump_file)
1979 fprintf (dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1980
1981 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower)gcc_qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower
)
;
1982
1983 /* Compute the order of preference for hard registers to spill. */
1984
1985 order_regs_for_reload (chain);
1986
1987 for (i = 0; i < n_reloads; i++)
1988 {
1989 int r = reload_order[i];
1990
1991 /* Ignore reloads that got marked inoperative. */
1992 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1993 && ! rld[r].optional
1994 && rld[r].regno == -1)
1995 if (! find_reg (chain, i))
1996 {
1997 if (dump_file)
1998 fprintf (dump_file, "reload failure for reload %d\n", r);
1999 spill_failure (chain->insn, rld[r].rclass);
2000 failure = 1;
2001 return;
2002 }
2003 }
2004
2005 chain->used_spill_regs = used_spill_regs_local;
2006 used_spill_regs |= used_spill_regs_local;
2007
2008 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
2009}
2010
2011static void
2012select_reload_regs (void)
2013{
2014 class insn_chain *chain;
2015
2016 /* Try to satisfy the needs for each insn. */
2017 for (chain = insns_need_reload; chain != 0;
2018 chain = chain->next_need_reload)
2019 find_reload_regs (chain);
2020}
2021
2022/* Delete all insns that were inserted by emit_caller_save_insns during
2023 this iteration. */
2024static void
2025delete_caller_save_insns (void)
2026{
2027 class insn_chain *c = reload_insn_chain;
2028
2029 while (c != 0)
2030 {
2031 while (c != 0 && c->is_caller_save_insn)
2032 {
2033 class insn_chain *next = c->next;
2034 rtx_insn *insn = c->insn;
2035
2036 if (c == reload_insn_chain)
2037 reload_insn_chain = next;
2038 delete_insn (insn);
2039
2040 if (next)
2041 next->prev = c->prev;
2042 if (c->prev)
2043 c->prev->next = next;
2044 c->next = unused_insn_chains;
2045 unused_insn_chains = c;
2046 c = next;
2047 }
2048 if (c != 0)
2049 c = c->next;
2050 }
2051}
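
/* A minimal sketch of the unlink-and-recycle idiom used above (and in
   calculate_needs_all_insns): splice a node out of a doubly linked chain,
   then push it onto a singly linked free list for reuse rather than
   freeing it.  The types and the free_nodes list are hypothetical.  */

struct dlnode
{
  struct dlnode *prev, *next;
};

static struct dlnode *free_nodes;

static void
recycle_node (struct dlnode **headp, struct dlnode *c)
{
  if (c->prev)
    c->prev->next = c->next;
  else
    *headp = c->next;		/* C was the head of the chain */
  if (c->next)
    c->next->prev = c->prev;

  c->next = free_nodes;		/* push onto the free list */
  free_nodes = c;
}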
2052
2053/* Handle the failure to find a register to spill.
2054 INSN should be one of the insns which needed this particular spill reg. */
2055
2056static void
2057spill_failure (rtx_insn *insn, enum reg_class rclass)
2058{
2059 if (asm_noperands (PATTERN (insn)) >= 0)
2060 error_for_asm (insn, "cannot find a register in class %qs while "
2061 "reloading %<asm%>",
2062 reg_class_names[rclass]);
2063 else
2064 {
2065 error ("unable to find a register to spill in class %qs",
2066 reg_class_names[rclass]);
2067
2068 if (dump_file)
2069 {
2070 fprintf (dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
2071 debug_reload_to_stream (dump_file);
2072 }
2073 fatal_insn ("this is the insn:", insn)_fatal_insn ("this is the insn:", insn, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 2073, __FUNCTION__)
;
2074 }
2075}
2076
2077/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2078 data that is dead in INSN. */
2079
2080static void
2081delete_dead_insn (rtx_insn *insn)
2082{
2083 rtx_insn *prev = prev_active_insn (insn);
2084 rtx prev_dest;
2085
2086 /* If the previous insn sets a register that dies in our insn make
2087 a note that we want to run DCE immediately after reload.
2088
2089 We used to delete the previous insn & recurse, but that's wrong for
2090 block local equivalences. Instead of trying to figure out the exact
2091 circumstances where we can delete the potentially dead insns, just
2092 let DCE do the job. */
2093 if (prev && BLOCK_FOR_INSN (prev) == BLOCK_FOR_INSN (insn)
2094 && GET_CODE (PATTERN (prev))((enum rtx_code) (PATTERN (prev))->code) == SET
2095 && (prev_dest = SET_DEST (PATTERN (prev))(((PATTERN (prev))->u.fld[0]).rt_rtx), REG_P (prev_dest)(((enum rtx_code) (prev_dest)->code) == REG))
2096 && reg_mentioned_p (prev_dest, PATTERN (insn))
2097 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)(rhs_regno(prev_dest)))
2098 && ! side_effects_p (SET_SRC (PATTERN (prev))(((PATTERN (prev))->u.fld[1]).rt_rtx)))
2099 need_dce = 1;
2100
2101 SET_INSN_DELETED (insn)set_insn_deleted (insn);;
2102}
2103
2104/* Modify the home of pseudo-reg I.
2105 The new home is present in reg_renumber[I].
2106
2107 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2108 or it may be -1, meaning there is none or it is not relevant.
2109 This is used so that all pseudos spilled from a given hard reg
2110 can share one stack slot. */
2111
2112static void
2113alter_reg (int i, int from_reg, bool dont_share_p)
2114{
2115 /* When outputting an inline function, this can happen
2116 for a reg that isn't actually used. */
2117 if (regno_reg_rtx[i] == 0)
2118 return;
2119
2120 /* If the reg got changed to a MEM at rtl-generation time,
2121 ignore it. */
2122 if (!REG_P (regno_reg_rtx[i])(((enum rtx_code) (regno_reg_rtx[i])->code) == REG))
2123 return;
2124
2125 /* Modify the reg-rtx to contain the new hard reg
2126 number or else to contain its pseudo reg number. */
2127 SET_REGNO (regno_reg_rtx[i],(df_ref_change_reg_with_loc (regno_reg_rtx[i], reg_renumber[i
] >= 0 ? reg_renumber[i] : i))
2128 reg_renumber[i] >= 0 ? reg_renumber[i] : i)(df_ref_change_reg_with_loc (regno_reg_rtx[i], reg_renumber[i
] >= 0 ? reg_renumber[i] : i))
;
2129
2130 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2131 allocate a stack slot for it. */
2132
2133 if (reg_renumber[i] < 0
2134 && REG_N_REFS (i) > 0
2135 && reg_equiv_constant (i)(*reg_equivs)[(i)].constant == 0
2136 && (reg_equiv_invariant (i)(*reg_equivs)[(i)].invariant == 0
2137 || reg_equiv_init (i)(*reg_equivs)[(i)].init == 0)
2138 && reg_equiv_memory_loc (i)(*reg_equivs)[(i)].memory_loc == 0)
2139 {
2140 rtx x = NULL_RTX(rtx) 0;
2141 machine_mode mode = GET_MODE (regno_reg_rtx[i])((machine_mode) (regno_reg_rtx[i])->mode);
2142 poly_uint64 inherent_size = GET_MODE_SIZE (mode);
2143 unsigned int inherent_align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode);
2144 machine_mode wider_mode = wider_subreg_mode (mode, reg_max_ref_mode[i]);
2145 poly_uint64 total_size = GET_MODE_SIZE (wider_mode);
2146 /* ??? Seems strange to derive the minimum alignment from the size,
2147 but that's the traditional behavior. For polynomial-size modes,
2148 the natural extension is to use the minimum possible size. */
2149 unsigned int min_align
2150 = constant_lower_bound (GET_MODE_BITSIZE (reg_max_ref_mode[i]));
2151 poly_int64 adjust = 0;
2152
2153 something_was_spilled = true;
2154
2155 if (ira_conflicts_p)
2156 {
2157 /* Mark the spill for IRA. */
2158 SET_REGNO_REG_SET (&spilled_pseudos, i)bitmap_set_bit (&spilled_pseudos, i);
2159 if (!dont_share_p)
2160 x = ira_reuse_stack_slot (i, inherent_size, total_size);
2161 }
2162
2163 if (x)
2164 ;
2165
2166 /* Each pseudo reg has an inherent size which comes from its own mode,
2167 and a total size which provides room for paradoxical subregs
2168 which refer to the pseudo reg in wider modes.
2169
2170 We can use a slot already allocated if it provides both
2171 enough inherent space and enough total space.
2172 Otherwise, we allocate a new slot, making sure that it has no less
2173 inherent space, and no less total space, than the previous slot.  */
2174 else if (from_reg == -1 || (!dont_share_p && ira_conflicts_p))
2175 {
2176 rtx stack_slot;
2177
2178 /* The sizes are taken from a subreg operation, which guarantees
2179 that they're ordered. */
2180 gcc_checking_assert (ordered_p (total_size, inherent_size))((void)(!(ordered_p (total_size, inherent_size)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 2180, __FUNCTION__), 0 : 0))
;
2181
2182 /* No known place to spill from => no slot to reuse. */
2183 x = assign_stack_local (mode, total_size,
2184 min_align > inherent_align
2185 || maybe_gt (total_size, inherent_size)maybe_lt (inherent_size, total_size)
2186 ? -1 : 0);
2187
2188 stack_slot = x;
2189
2190 /* Cancel the big-endian correction done in assign_stack_local.
2191 Get the address of the beginning of the slot. This is so we
2192 can do a big-endian correction unconditionally below. */
2193 if (BYTES_BIG_ENDIAN0)
2194 {
2195 adjust = inherent_size - total_size;
2196 if (maybe_ne (adjust, 0))
2197 {
2198 poly_uint64 total_bits = total_size * BITS_PER_UNIT(8);
2199 machine_mode mem_mode
2200 = int_mode_for_size (total_bits, 1).else_blk ();
2201 stack_slot = adjust_address_nv (x, mem_mode, adjust)adjust_address_1 (x, mem_mode, adjust, 0, 1, 0, 0);
2202 }
2203 }
2204
2205 if (! dont_share_p && ira_conflicts_p)
2206	  /* Inform IRA about allocating a new stack slot.  */
2207 ira_mark_new_stack_slot (stack_slot, i, total_size);
2208 }
2209
2210 /* Reuse a stack slot if possible. */
2211 else if (spill_stack_slot[from_reg] != 0
2212 && known_ge (spill_stack_slot_width[from_reg], total_size)(!maybe_lt (spill_stack_slot_width[from_reg], total_size))
2213 && known_ge (GET_MODE_SIZE(!maybe_lt (GET_MODE_SIZE (((machine_mode) (spill_stack_slot[
from_reg])->mode)), inherent_size))
2214 (GET_MODE (spill_stack_slot[from_reg])),(!maybe_lt (GET_MODE_SIZE (((machine_mode) (spill_stack_slot[
from_reg])->mode)), inherent_size))
2215 inherent_size)(!maybe_lt (GET_MODE_SIZE (((machine_mode) (spill_stack_slot[
from_reg])->mode)), inherent_size))
2216 && MEM_ALIGN (spill_stack_slot[from_reg])(get_mem_attrs (spill_stack_slot[from_reg])->align) >= min_align)
2217 x = spill_stack_slot[from_reg];
2218
2219 /* Allocate a bigger slot. */
2220 else
2221 {
2222 /* Compute maximum size needed, both for inherent size
2223 and for total size. */
2224 rtx stack_slot;
2225
2226 if (spill_stack_slot[from_reg])
2227 {
2228 if (partial_subreg_p (mode,
2229				GET_MODE (spill_stack_slot[from_reg])))
2230	    mode = GET_MODE (spill_stack_slot[from_reg]);
2231 total_size = ordered_max (total_size,
2232 spill_stack_slot_width[from_reg]);
2233	  if (MEM_ALIGN (spill_stack_slot[from_reg]) > min_align)
2234	    min_align = MEM_ALIGN (spill_stack_slot[from_reg]);
2235 }
2236
2237 /* The sizes are taken from a subreg operation, which guarantees
2238 that they're ordered. */
2239	  gcc_checking_assert (ordered_p (total_size, inherent_size));
2240
2241 /* Make a slot with that size. */
2242 x = assign_stack_local (mode, total_size,
2243 min_align > inherent_align
2244				  || maybe_gt (total_size, inherent_size)
2245 ? -1 : 0);
2246 stack_slot = x;
2247
2248 /* Cancel the big-endian correction done in assign_stack_local.
2249 Get the address of the beginning of the slot. This is so we
2250 can do a big-endian correction unconditionally below. */
2251	  if (BYTES_BIG_ENDIAN)
2252 {
2253 adjust = GET_MODE_SIZE (mode) - total_size;
2254 if (maybe_ne (adjust, 0))
2255 {
2256		  poly_uint64 total_bits = total_size * BITS_PER_UNIT;
2257 machine_mode mem_mode
2258 = int_mode_for_size (total_bits, 1).else_blk ();
2259		  stack_slot = adjust_address_nv (x, mem_mode, adjust);
2260 }
2261 }
2262
2263 spill_stack_slot[from_reg] = stack_slot;
2264 spill_stack_slot_width[from_reg] = total_size;
2265 }
2266
2267 /* On a big endian machine, the "address" of the slot
2268 is the address of the low part that fits its inherent mode. */
2269 adjust += subreg_size_lowpart_offset (inherent_size, total_size);
2270
2271 /* If we have any adjustment to make, or if the stack slot is the
2272 wrong mode, make a new stack slot. */
2273      x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2274
2275 /* Set all of the memory attributes as appropriate for a spill. */
2276 set_mem_attrs_for_spill (x);
2277
2278 /* Save the stack slot for later. */
2279      reg_equiv_memory_loc (i) = x;
2280 }
2281}
2282
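Aside: throughout this report the macro expansions render every known_ge (a, b) in the slot-reuse tests above as !maybe_lt (a, b). A minimal self-contained model of that duality (hypothetical types and names, not GCC code; real poly_int64 values generalize to more coefficients):

    #include <stdint.h>

    /* Model a poly_int64-style size as c0 + c1*N for a runtime N >= 0.  */
    struct poly64 { int64_t c0, c1; };

    /* maybe_lt: a < b holds for SOME admissible N (N = 0 exercises c0,
       large N exercises c1).  */
    static int maybe_lt (struct poly64 a, struct poly64 b)
    {
      return a.c0 < b.c0 || a.c1 < b.c1;
    }

    /* known_ge: a >= b for EVERY admissible N -- by definition the
       negation of maybe_lt, matching the expansions shown above.  */
    static int known_ge (struct poly64 a, struct poly64 b)
    {
      return !maybe_lt (a, b);
    }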
2283/* Mark the slots in regs_ever_live for the hard regs used by
2284 pseudo-reg number REGNO, accessed in MODE. */
2285
2286static void
2287mark_home_live_1 (int regno, machine_mode mode)
2288{
2289 int i, lim;
2290
2291 i = reg_renumber[regno];
2292 if (i < 0)
2293 return;
2294 lim = end_hard_regno (mode, i);
2295 while (i < lim)
2296 df_set_regs_ever_live (i++, true);
2297}
2298
2299/* Mark the slots in regs_ever_live for the hard regs
2300 used by pseudo-reg number REGNO. */
2301
2302void
2303mark_home_live (int regno)
2304{
2305 if (reg_renumber[regno] >= 0)
2306    mark_home_live_1 (regno, PSEUDO_REGNO_MODE (regno));
2307}
2308
2309/* This function handles the tracking of elimination offsets around branches.
2310
2311 X is a piece of RTL being scanned.
2312
2313 INSN is the insn that it came from, if any.
2314
2315 INITIAL_P is nonzero if we are to set the offset to be the initial
2316 offset and zero if we are setting the offset of the label to be the
2317 current offset. */
2318
2319static void
2320set_label_offsets (rtx x, rtx_insn *insn, int initial_p)
2321{
2322  enum rtx_code code = GET_CODE (x);
2323 rtx tem;
2324 unsigned int i;
2325 struct elim_table *p;
2326
2327 switch (code)
2328 {
2329 case LABEL_REF:
2330      if (LABEL_REF_NONLOCAL_P (x))
2331 return;
2332
2333 x = label_ref_label (x);
2334
2335 /* fall through */
2336
2337 case CODE_LABEL:
2338 /* If we know nothing about this label, set the desired offsets. Note
2339 that this sets the offset at a label to be the offset before a label
2340 if we don't know anything about the label. This is not correct for
2341 the label after a BARRIER, but is the best guess we can make. If
2342 we guessed wrong, we will suppress an elimination that might have
2343 been possible had we been able to guess correctly. */
2344
2345      if (! offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num])
2346	{
2347	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2348	    offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i]
2349	      = (initial_p ? reg_eliminate[i].initial_offset
2350		 : reg_eliminate[i].offset);
2351	  offsets_known_at[CODE_LABEL_NUMBER (x) - first_label_num] = 1;
2352 }
2353
2354 /* Otherwise, if this is the definition of a label and it is
2355 preceded by a BARRIER, set our offsets to the known offset of
2356 that label. */
2357
2358 else if (x == insn
2359 && (tem = prev_nonnote_insn (insn)) != 0
2360	       && BARRIER_P (tem))
2361 set_offsets_for_label (insn);
2362 else
2363 /* If neither of the above cases is true, compare each offset
2364 with those previously recorded and suppress any eliminations
2365 where the offsets disagree. */
2366
2367      for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2368	if (maybe_ne (offsets_at[CODE_LABEL_NUMBER (x) - first_label_num][i],
2369 (initial_p ? reg_eliminate[i].initial_offset
2370 : reg_eliminate[i].offset)))
2371 reg_eliminate[i].can_eliminate = 0;
2372
2373 return;
2374
2375 case JUMP_TABLE_DATA:
2376 set_label_offsets (PATTERN (insn), insn, initial_p);
2377 return;
2378
2379 case JUMP_INSN:
2380 set_label_offsets (PATTERN (insn), insn, initial_p);
2381
2382 /* fall through */
2383
2384 case INSN:
2385 case CALL_INSN:
2386 /* Any labels mentioned in REG_LABEL_OPERAND notes can be branched
2387 to indirectly and hence must have all eliminations at their
2388 initial offsets. */
2389      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2390	if (REG_NOTE_KIND (tem) == REG_LABEL_OPERAND)
2391	  set_label_offsets (XEXP (tem, 0), insn, 1);
2392 return;
2393
2394 case PARALLEL:
2395 case ADDR_VEC:
2396 case ADDR_DIFF_VEC:
2397 /* Each of the labels in the parallel or address vector must be
2398 at their initial offsets. We want the first field for PARALLEL
2399 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2400
2401      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2402	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2403			   insn, initial_p);
2404 return;
2405
2406 case SET:
2407 /* We only care about setting PC. If the source is not RETURN,
2408 IF_THEN_ELSE, or a label, disable any eliminations not at
2409 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2410 isn't one of those possibilities. For branches to a label,
2411 call ourselves recursively.
2412
2413 Note that this can disable elimination unnecessarily when we have
2414 a non-local goto since it will look like a non-constant jump to
2415 someplace in the current function. This isn't a significant
2416 problem since such jumps will normally be when all elimination
2417 pairs are back to their initial offsets. */
2418
2419      if (SET_DEST (x) != pc_rtx)
2420 return;
2421
2422      switch (GET_CODE (SET_SRC (x)))
2423 {
2424 case PC:
2425 case RETURN:
2426 return;
2427
2428 case LABEL_REF:
2429	  set_label_offsets (SET_SRC (x), insn, initial_p);
2430 return;
2431
2432 case IF_THEN_ELSE:
2433	  tem = XEXP (SET_SRC (x), 1);
2434	  if (GET_CODE (tem) == LABEL_REF)
2435	    set_label_offsets (label_ref_label (tem), insn, initial_p);
2436	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2437	    break;
2438
2439	  tem = XEXP (SET_SRC (x), 2);
2440	  if (GET_CODE (tem) == LABEL_REF)
2441	    set_label_offsets (label_ref_label (tem), insn, initial_p);
2442	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2443	    break;
2444 return;
2445
2446 default:
2447 break;
2448 }
2449
2450 /* If we reach here, all eliminations must be at their initial
2451 offset because we are doing a jump to a variable address. */
2452      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2453 if (maybe_ne (p->offset, p->initial_offset))
2454 p->can_eliminate = 0;
2455 break;
2456
2457 default:
2458 break;
2459 }
2460}
2461
2462/* This function examines every reg that occurs in X and adjusts the
2463 costs for its elimination which are gathered by IRA. INSN is the
2464 insn in which X occurs. We do not recurse into MEM expressions. */
2465
2466static void
2467note_reg_elim_costly (const_rtx x, rtx insn)
2468{
2469 subrtx_iterator::array_type array;
2470  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
2471 {
2472 const_rtx x = *iter;
2473      if (MEM_P (x))
2474 iter.skip_subrtxes ();
2475      else if (REG_P (x)
2476	       && REGNO (x) >= FIRST_PSEUDO_REGISTER
2477	       && reg_equiv_init (REGNO (x))
2478	       && reg_equiv_invariant (REGNO (x)))
2479 {
2480	  rtx t = reg_equiv_invariant (REGNO (x));
2481	  rtx new_rtx = eliminate_regs_1 (t, Pmode, insn, true, true);
2482	  int cost = set_src_cost (new_rtx, Pmode,
2483				   optimize_bb_for_speed_p (elim_bb));
2484	  int freq = REG_FREQ_FROM_BB (elim_bb);
2485
2486 if (cost != 0)
2487	    ira_adjust_equiv_reg_cost (REGNO (x), -cost * freq);
2488 }
2489 }
2490}
2491
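Aside: note_reg_elim_costly above uses the FOR_EACH_SUBRTX walk, whose expansion in this report shows the underlying protocol (construct a subrtx_iterator, loop until at_end (), call skip_subrtxes () to prune a subtree). A minimal usage sketch assuming GCC's rtl.h and rtl-iter.h; count_pseudo_regs is a hypothetical helper, not part of reload1.c:

    /* Count pseudo-register references in X, skipping everything inside
       a MEM -- the same pruning note_reg_elim_costly performs.  */
    static int
    count_pseudo_regs (const_rtx x)
    {
      int n = 0;
      subrtx_iterator::array_type array;
      FOR_EACH_SUBRTX (iter, array, x, NONCONST)
        {
          const_rtx sub = *iter;
          if (MEM_P (sub))
            iter.skip_subrtxes ();
          else if (REG_P (sub) && REGNO (sub) >= FIRST_PSEUDO_REGISTER)
            n++;
        }
      return n;
    }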
2492/* Scan X and replace any eliminable registers (such as fp) with a
2493 replacement (such as sp), plus an offset.
2494
2495 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2496 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2497 MEM, we are allowed to replace a sum of a register and the constant zero
2498 with the register, which we cannot do outside a MEM. In addition, we need
2499 to record the fact that a register is referenced outside a MEM.
2500
2501 If INSN is an insn, it is the insn containing X. If we replace a REG
2502 in a SET_DEST with an equivalent MEM and INSN is nonzero, write a
2503 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2504 the REG is being modified.
2505
2506 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2507 That's used when we eliminate in expressions stored in notes.
2508 This means, do not set ref_outside_mem even if the reference
2509 is outside of MEMs.
2510
2511 If FOR_COSTS is true, we are being called before reload in order to
2512 estimate the costs of keeping registers with an equivalence unallocated.
2513
2514 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2515 replacements done assuming all offsets are at their initial values. If
2516 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2517 encounter, return the actual location so that find_reloads will do
2518 the proper thing. */
2519
2520static rtx
2521eliminate_regs_1 (rtx x, machine_mode mem_mode, rtx insn,
2522 bool may_use_invariant, bool for_costs)
2523{
2524  enum rtx_code code = GET_CODE (x);
2525 struct elim_table *ep;
2526 int regno;
2527 rtx new_rtx;
2528 int i, j;
2529 const char *fmt;
2530 int copied = 0;
2531
2532 if (! current_function_decl)
2533 return x;
2534
2535 switch (code)
2536 {
2537    CASE_CONST_ANY:
2538 case CONST:
2539 case SYMBOL_REF:
2540 case CODE_LABEL:
2541 case PC:
2542 case CC0:
2543 case ASM_INPUT:
2544 case ADDR_VEC:
2545 case ADDR_DIFF_VEC:
2546 case RETURN:
2547 return x;
2548
2549 case REG:
2550      regno = REGNO (x);
2551
2552 /* First handle the case where we encounter a bare register that
2553 is eliminable. Replace it with a PLUS. */
2554      if (regno < FIRST_PSEUDO_REGISTER)
2555 {
2556	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2557 ep++)
2558 if (ep->from_rtx == x && ep->can_eliminate)
2559	      return plus_constant (Pmode, ep->to_rtx, ep->previous_offset);
2560
2561 }
2562 else if (reg_renumber && reg_renumber[regno] < 0
2563 && reg_equivs
2564	       && reg_equiv_invariant (regno))
2565 {
2566	  if (may_use_invariant || (insn && DEBUG_INSN_P (insn)))
2567	    return eliminate_regs_1 (copy_rtx (reg_equiv_invariant (regno)),
2568 mem_mode, insn, true, for_costs);
2569 /* There exists at least one use of REGNO that cannot be
2570 eliminated. Prevent the defining insn from being deleted. */
2571	  reg_equiv_init (regno) = NULL;
2572 if (!for_costs)
2573 alter_reg (regno, -1, true);
2574 }
2575 return x;
2576
2577 /* You might think handling MINUS in a manner similar to PLUS is a
2578 good idea. It is not. It has been tried multiple times and every
2579 time the change has had to have been reverted.
2580
2581 Other parts of reload know a PLUS is special (gen_reload for example)
2582       and require special code to handle a reloaded PLUS operand.
2583
2584 Also consider backends where the flags register is clobbered by a
2585 MINUS, but we can emit a PLUS that does not clobber flags (IA-32,
2586 lea instruction comes to mind). If we try to reload a MINUS, we
2587 may kill the flags register that was holding a useful value.
2588
2589 So, please before trying to handle MINUS, consider reload as a
2590 whole instead of this little section as well as the backend issues. */
2591 case PLUS:
2592 /* If this is the sum of an eliminable register and a constant, rework
2593 the sum. */
2594      if (REG_P (XEXP (x, 0))
2595	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2596	  && CONSTANT_P (XEXP (x, 1)))
2597 {
2598	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2599	       ep++)
2600	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2601 {
2602 /* The only time we want to replace a PLUS with a REG (this
2603 occurs when the constant operand of the PLUS is the negative
2604 of the offset) is when we are inside a MEM. We won't want
2605 to do so at other times because that would change the
2606 structure of the insn in a way that reload can't handle.
2607 We special-case the commonest situation in
2608 eliminate_regs_in_insn, so just replace a PLUS with a
2609 PLUS here, unless inside a MEM. In DEBUG_INSNs, it is
2610 always ok to replace a PLUS with just a REG. */
2611		if ((mem_mode != 0 || (insn && DEBUG_INSN_P (insn)))
2612		    && CONST_INT_P (XEXP (x, 1))
2613		    && known_eq (INTVAL (XEXP (x, 1)), -ep->previous_offset))
2614		  return ep->to_rtx;
2615		else
2616		  return gen_rtx_PLUS (Pmode, ep->to_rtx,
2617				       plus_constant (Pmode, XEXP (x, 1),
2618						      ep->previous_offset));
2619 }
2620
2621 /* If the register is not eliminable, we are done since the other
2622 operand is a constant. */
2623 return x;
2624 }
2625
2626 /* If this is part of an address, we want to bring any constant to the
2627 outermost PLUS. We will do this by doing register replacement in
2628 our operands and seeing if a constant shows up in one of them.
2629
2630 Note that there is no risk of modifying the structure of the insn,
2631 since we only get called for its operands, thus we are either
2632 modifying the address inside a MEM, or something like an address
2633 operand of a load-address insn. */
2634
2635 {
2636	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2637				     for_costs);
2638	rtx new1 = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2639				     for_costs);
2640
2641	if (reg_renumber && (new0 != XEXP (x, 0) || new1 != XEXP (x, 1)))
2642 {
2643 /* If one side is a PLUS and the other side is a pseudo that
2644 didn't get a hard register but has a reg_equiv_constant,
2645 we must replace the constant here since it may no longer
2646 be in the position of any operand. */
2647	    if (GET_CODE (new0) == PLUS && REG_P (new1)
2648		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
2649		&& reg_renumber[REGNO (new1)] < 0
2650		&& reg_equivs
2651		&& reg_equiv_constant (REGNO (new1)) != 0)
2652	      new1 = reg_equiv_constant (REGNO (new1));
2653	    else if (GET_CODE (new1) == PLUS && REG_P (new0)
2654		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2655		     && reg_renumber[REGNO (new0)] < 0
2656		     && reg_equiv_constant (REGNO (new0)) != 0)
2657	      new0 = reg_equiv_constant (REGNO (new0));
2658
2659	    new_rtx = form_sum (GET_MODE (x), new0, new1);
2660
2661 /* As above, if we are not inside a MEM we do not want to
2662 turn a PLUS into something else. We might try to do so here
2663 for an addition of 0 if we aren't optimizing. */
2664	    if (! mem_mode && GET_CODE (new_rtx) != PLUS)
2665	      return gen_rtx_PLUS (GET_MODE (x), new_rtx, const0_rtx);
2666 else
2667 return new_rtx;
2668 }
2669 }
2670 return x;
2671
2672 case MULT:
2673 /* If this is the product of an eliminable register and a
2674 constant, apply the distribute law and move the constant out
2675 so that we have (plus (mult ..) ..). This is needed in order
2676 to keep load-address insns valid. This case is pathological.
2677 We ignore the possibility of overflow here. */
2678      if (REG_P (XEXP (x, 0))
2679	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2680	  && CONST_INT_P (XEXP (x, 1)))
2681	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2682	     ep++)
2683	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2684 {
2685 if (! mem_mode
2686 /* Refs inside notes or in DEBUG_INSNs don't count for
2687 this purpose. */
2688		&& ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2689				    || GET_CODE (insn) == INSN_LIST
2690				    || DEBUG_INSN_P (insn))))
2691 ep->ref_outside_mem = 1;
2692
2693 return
2694	      plus_constant (Pmode,
2695			     gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2696			     ep->previous_offset * INTVAL (XEXP (x, 1)));
2697 }
2698
2699 /* fall through */
2700
2701 case CALL:
2702 case COMPARE:
2703 /* See comments before PLUS about handling MINUS. */
2704 case MINUS:
2705 case DIV: case UDIV:
2706 case MOD: case UMOD:
2707 case AND: case IOR: case XOR:
2708 case ROTATERT: case ROTATE:
2709 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2710 case NE: case EQ:
2711 case GE: case GT: case GEU: case GTU:
2712 case LE: case LT: case LEU: case LTU:
2713 {
2714	rtx new0 = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2715				     for_costs);
2716	rtx new1 = XEXP (x, 1)
2717	  ? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, false,
2718			      for_costs) : 0;
2719
2720	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2721	  return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2722 }
2723 return x;
2724
2725 case EXPR_LIST:
2726 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2727      if (XEXP (x, 0))
2728	{
2729	  new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, true,
2730				      for_costs);
2731	  if (new_rtx != XEXP (x, 0))
2732 {
2733 /* If this is a REG_DEAD note, it is not valid anymore.
2734 Using the eliminated version could result in creating a
2735 REG_DEAD note for the stack or frame pointer. */
2736	      if (REG_NOTE_KIND (x) == REG_DEAD)
2737		return (XEXP (x, 1)
2738			? eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2739					    for_costs)
2740			: NULL_RTX);
2741
2742	      x = alloc_reg_note (REG_NOTE_KIND (x), new_rtx, XEXP (x, 1));
2743 }
2744 }
2745
2746 /* fall through */
2747
2748 case INSN_LIST:
2749 case INT_LIST:
2750 /* Now do eliminations in the rest of the chain. If this was
2751 an EXPR_LIST, this might result in allocating more memory than is
2752 strictly needed, but it simplifies the code. */
2753      if (XEXP (x, 1))
2754	{
2755	  new_rtx = eliminate_regs_1 (XEXP (x, 1), mem_mode, insn, true,
2756				      for_costs);
2757	  if (new_rtx != XEXP (x, 1))
2758	    return
2759	      gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new_rtx);
2760 }
2761 return x;
2762
2763 case PRE_INC:
2764 case POST_INC:
2765 case PRE_DEC:
2766 case POST_DEC:
2767 /* We do not support elimination of a register that is modified.
2768       elimination_effects has already made sure that this does not
2769 happen. */
2770 return x;
2771
2772 case PRE_MODIFY:
2773 case POST_MODIFY:
2774 /* We do not support elimination of a register that is modified.
2775       elimination_effects has already made sure that this does not
2776 happen. The only remaining case we need to consider here is
2777 that the increment value may be an eliminable register. */
2778      if (GET_CODE (XEXP (x, 1)) == PLUS
2779	  && XEXP (XEXP (x, 1), 0) == XEXP (x, 0))
2780 {
2781	  rtx new_rtx = eliminate_regs_1 (XEXP (XEXP (x, 1), 1), mem_mode,
2782					  insn, true, for_costs);
2783
2784	  if (new_rtx != XEXP (XEXP (x, 1), 1))
2785	    return gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (x, 0),
2786				   gen_rtx_PLUS (GET_MODE (x),
2787						 XEXP (x, 0), new_rtx));
2788 }
2789 return x;
2790
2791 case STRICT_LOW_PART:
2792 case NEG: case NOT:
2793 case SIGN_EXTEND: case ZERO_EXTEND:
2794 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2795 case FLOAT: case FIX:
2796 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2797 case ABS:
2798 case SQRT:
2799 case FFS:
2800 case CLZ:
2801 case CTZ:
2802 case POPCOUNT:
2803 case PARITY:
2804 case BSWAP:
2805      new_rtx = eliminate_regs_1 (XEXP (x, 0), mem_mode, insn, false,
2806				  for_costs);
2807      if (new_rtx != XEXP (x, 0))
2808	return gen_rtx_fmt_e (code, GET_MODE (x), new_rtx);
2809 return x;
2810
2811 case SUBREG:
2812 /* Similar to above processing, but preserve SUBREG_BYTE.
2813 Convert (subreg (mem)) to (mem) if not paradoxical.
2814 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2815 pseudo didn't get a hard reg, we must replace this with the
2816 eliminated version of the memory location because push_reload
2817 may do the replacement in certain circumstances. */
2818      if (REG_P (SUBREG_REG (x))
2819	  && !paradoxical_subreg_p (x)
2820	  && reg_equivs
2821	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
2822	{
2823	  new_rtx = SUBREG_REG (x);
2824	}
2825      else
2826	new_rtx = eliminate_regs_1 (SUBREG_REG (x), mem_mode, insn, false, for_costs);
2827
2828      if (new_rtx != SUBREG_REG (x))
2829 {
2830	  poly_int64 x_size = GET_MODE_SIZE (GET_MODE (x));
2831	  poly_int64 new_size = GET_MODE_SIZE (GET_MODE (new_rtx));
2832
2833	  if (MEM_P (new_rtx)
2834	      && ((partial_subreg_p (GET_MODE (x), GET_MODE (new_rtx))
2835		   /* On RISC machines, combine can create rtl of the form
2836		      (set (subreg:m1 (reg:m2 R) 0) ...)
2837		      where m1 < m2, and expects something interesting to
2838		      happen to the entire word.  Moreover, it will use the
2839		      (reg:m2 R) later, expecting all bits to be preserved.
2840		      So if the number of words is the same, preserve the
2841		      subreg so that push_reload can see it.  */
2842		   && !(WORD_REGISTER_OPERATIONS
2843			&& known_equal_after_align_down (x_size - 1,
2844							 new_size - 1,
2845							 UNITS_PER_WORD)))
2846		  || known_eq (x_size, new_size))
2847	      )
2848	    return adjust_address_nv (new_rtx, GET_MODE (x), SUBREG_BYTE (x));
2849	  else if (insn && GET_CODE (insn) == DEBUG_INSN)
2850	    return gen_rtx_raw_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2851	  else
2852	    return gen_rtx_SUBREG (GET_MODE (x), new_rtx, SUBREG_BYTE (x));
2853 }
2854
2855 return x;
2856
2857 case MEM:
2858 /* Our only special processing is to pass the mode of the MEM to our
2859 recursive call and copy the flags. While we are here, handle this
2860 case more efficiently. */
2861
2862      new_rtx = eliminate_regs_1 (XEXP (x, 0), GET_MODE (x), insn, true,
2863				  for_costs);
2864      if (for_costs
2865	  && memory_address_p (GET_MODE (x), XEXP (x, 0))
2866	  && !memory_address_p (GET_MODE (x), new_rtx))
2867	note_reg_elim_costly (XEXP (x, 0), insn);
2868
2869 return replace_equiv_address_nv (x, new_rtx);
2870
2871 case USE:
2872 /* Handle insn_list USE that a call to a pure function may generate. */
2873      new_rtx = eliminate_regs_1 (XEXP (x, 0), VOIDmode, insn, false,
2874				  for_costs);
2875      if (new_rtx != XEXP (x, 0))
2876	return gen_rtx_USE (GET_MODE (x), new_rtx);
2877 return x;
2878
2879 case CLOBBER:
2880 case ASM_OPERANDS:
2881      gcc_assert (insn && DEBUG_INSN_P (insn));
2882 break;
2883
2884 case SET:
2885      gcc_unreachable ();
2886
2887 default:
2888 break;
2889 }
2890
2891 /* Process each of our operands recursively. If any have changed, make a
2892 copy of the rtx. */
2893  fmt = GET_RTX_FORMAT (code);
2894  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2895 {
2896 if (*fmt == 'e')
2897 {
2898	  new_rtx = eliminate_regs_1 (XEXP (x, i), mem_mode, insn, false,
2899				      for_costs);
2900	  if (new_rtx != XEXP (x, i) && ! copied)
2901	    {
2902	      x = shallow_copy_rtx (x);
2903	      copied = 1;
2904	    }
2905	  XEXP (x, i) = new_rtx;
2906 }
2907 else if (*fmt == 'E')
2908 {
2909 int copied_vec = 0;
2910	  for (j = 0; j < XVECLEN (x, i); j++)
2911	    {
2912	      new_rtx = eliminate_regs_1 (XVECEXP (x, i, j), mem_mode, insn, false,
2913					  for_costs);
2914	      if (new_rtx != XVECEXP (x, i, j) && ! copied_vec)
2915		{
2916		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2917					     XVEC (x, i)->elem);
2918		  if (! copied)
2919		    {
2920		      x = shallow_copy_rtx (x);
2921		      copied = 1;
2922		    }
2923		  XVEC (x, i) = new_v;
2924		  copied_vec = 1;
2925		}
2926	      XVECEXP (x, i, j) = new_rtx;
2927 }
2928 }
2929 }
2930
2931 return x;
2932}
2933
2934rtx
2935eliminate_regs (rtx x, machine_mode mem_mode, rtx insn)
2936{
2937  if (reg_eliminate == NULL)
2938    {
2939      gcc_assert (targetm.no_register_allocation);
2940 return x;
2941 }
2942 return eliminate_regs_1 (x, mem_mode, insn, false, false);
2943}
2944
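Aside: a worked illustration (hypothetical, not from the source) of the REG and PLUS cases of eliminate_regs_1 above, for an elimination rule replacing the frame pointer with the stack pointer at previous_offset 16:

    (reg fp)                       ->  (plus (reg sp) (const_int 16))
    (plus (reg fp) (const_int 8))  ->  (plus (reg sp) (const_int 24))
    (mem (plus (reg fp) (const_int -16)))
                                   ->  (mem (reg sp))

The last form is the MEM-only special case noted in the PLUS handling: when the constant exactly cancels the offset, the PLUS may collapse to a bare register, which is only valid inside a MEM.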
2945/* Scan rtx X for modifications of elimination target registers. Update
2946 the table of eliminables to reflect the changed state. MEM_MODE is
2947 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2948
2949static void
2950elimination_effects (rtx x, machine_mode mem_mode)
2951{
2952  enum rtx_code code = GET_CODE (x);
2953 struct elim_table *ep;
2954 int regno;
2955 int i, j;
2956 const char *fmt;
2957
2958 switch (code)
2959 {
2960    CASE_CONST_ANY:
2961 case CONST:
2962 case SYMBOL_REF:
2963 case CODE_LABEL:
2964 case PC:
2965 case CC0:
2966 case ASM_INPUT:
2967 case ADDR_VEC:
2968 case ADDR_DIFF_VEC:
2969 case RETURN:
2970 return;
2971
2972 case REG:
2973      regno = REGNO (x);
2974
2975      /* First handle the case where we encounter a bare register that
2976	 is eliminable.  Replace it with a PLUS.  */
2977      if (regno < FIRST_PSEUDO_REGISTER)
2978	{
2979	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2980 ep++)
2981 if (ep->from_rtx == x && ep->can_eliminate)
2982 {
2983 if (! mem_mode)
2984 ep->ref_outside_mem = 1;
2985 return;
2986 }
2987
2988 }
2989 else if (reg_renumber[regno] < 0
2990 && reg_equivs
2991	       && reg_equiv_constant (regno)
2992	       && ! function_invariant_p (reg_equiv_constant (regno)))
2993	elimination_effects (reg_equiv_constant (regno), mem_mode);
2994 return;
2995
2996 case PRE_INC:
2997 case POST_INC:
2998 case PRE_DEC:
2999 case POST_DEC:
3000 case POST_MODIFY:
3001 case PRE_MODIFY:
3002 /* If we modify the source of an elimination rule, disable it. */
3003      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3004	if (ep->from_rtx == XEXP (x, 0))
3005 ep->can_eliminate = 0;
3006
3007 /* If we modify the target of an elimination rule by adding a constant,
3008 update its offset. If we modify the target in any other way, we'll
3009 have to disable the rule as well. */
3010      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3011	if (ep->to_rtx == XEXP (x, 0))
3012 {
3013 poly_int64 size = GET_MODE_SIZE (mem_mode);
3014
3015 /* If more bytes than MEM_MODE are pushed, account for them. */
3016#ifdef PUSH_ROUNDING
3017	    if (ep->to_rtx == stack_pointer_rtx)
3018	      size = PUSH_ROUNDING (size);
3019#endif
3020 if (code == PRE_DEC || code == POST_DEC)
3021 ep->offset += size;
3022 else if (code == PRE_INC || code == POST_INC)
3023 ep->offset -= size;
3024 else if (code == PRE_MODIFY || code == POST_MODIFY)
3025 {
3026	      if (GET_CODE (XEXP (x, 1)) == PLUS
3027		  && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
3028		  && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
3029		ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
3030 else
3031 ep->can_eliminate = 0;
3032 }
3033 }
3034
3035 /* These two aren't unary operators. */
3036 if (code == POST_MODIFY || code == PRE_MODIFY)
3037 break;
3038
3039 /* Fall through to generic unary operation case. */
3040 gcc_fallthrough ();
3041 case STRICT_LOW_PART:
3042 case NEG: case NOT:
3043 case SIGN_EXTEND: case ZERO_EXTEND:
3044 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
3045 case FLOAT: case FIX:
3046 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
3047 case ABS:
3048 case SQRT:
3049 case FFS:
3050 case CLZ:
3051 case CTZ:
3052 case POPCOUNT:
3053 case PARITY:
3054 case BSWAP:
3055      elimination_effects (XEXP (x, 0), mem_mode);
3056 return;
3057
3058 case SUBREG:
3059      if (REG_P (SUBREG_REG (x))
3060	  && !paradoxical_subreg_p (x)
3061	  && reg_equivs
3062	  && reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
3063	return;
3064
3065      elimination_effects (SUBREG_REG (x), mem_mode);
3066 return;
3067
3068 case USE:
3069      /* If using a register that is the source of an elimination we still
3070 think can be performed, note it cannot be performed since we don't
3071 know how this register is used. */
3072      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3073	if (ep->from_rtx == XEXP (x, 0))
3074 ep->can_eliminate = 0;
3075
3076      elimination_effects (XEXP (x, 0), mem_mode);
3077 return;
3078
3079 case CLOBBER:
3080 /* If clobbering a register that is the replacement register for an
3081 elimination we still think can be performed, note that it cannot
3082 be performed. Otherwise, we need not be concerned about it. */
3083      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3084	if (ep->to_rtx == XEXP (x, 0))
3085 ep->can_eliminate = 0;
3086
3087      elimination_effects (XEXP (x, 0), mem_mode);
3088 return;
3089
3090 case SET:
3091 /* Check for setting a register that we know about. */
3092      if (REG_P (SET_DEST (x)))
3093 {
3094 /* See if this is setting the replacement register for an
3095 elimination.
3096
3097 If DEST is the hard frame pointer, we do nothing because we
3098 assume that all assignments to the frame pointer are for
3099 non-local gotos and are being done at a time when they are valid
3100 and do not disturb anything else. Some machines want to
3101 eliminate a fake argument pointer (or even a fake frame pointer)
3102 with either the real frame or the stack pointer. Assignments to
3103 the hard frame pointer must not prevent this elimination. */
3104
3105      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3106	   ep++)
3107	if (ep->to_rtx == SET_DEST (x)
3108	    && SET_DEST (x) != hard_frame_pointer_rtx)
3109 {
3110 /* If it is being incremented, adjust the offset. Otherwise,
3111 this elimination can't be done. */
3112	    rtx src = SET_SRC (x);
3113
3114	    if (GET_CODE (src) == PLUS
3115		&& XEXP (src, 0) == SET_DEST (x)
3116		&& CONST_INT_P (XEXP (src, 1)))
3117	      ep->offset -= INTVAL (XEXP (src, 1));
3118 else
3119 ep->can_eliminate = 0;
3120 }
3121 }
3122
3123  elimination_effects (SET_DEST (x), VOIDmode);
3124  elimination_effects (SET_SRC (x), VOIDmode);
3125 return;
3126
3127 case MEM:
3128 /* Our only special processing is to pass the mode of the MEM to our
3129 recursive call. */
3130      elimination_effects (XEXP (x, 0), GET_MODE (x));
3131 return;
3132
3133 default:
3134 break;
3135 }
3136
3137  fmt = GET_RTX_FORMAT (code);
3138  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3139    {
3140      if (*fmt == 'e')
3141	elimination_effects (XEXP (x, i), mem_mode);
3142      else if (*fmt == 'E')
3143	for (j = 0; j < XVECLEN (x, i); j++)
3144	  elimination_effects (XVECEXP (x, i, j), mem_mode);
3145 }
3146}
3147
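Aside: a small worked example (hypothetical, not from the source) of the autoincrement bookkeeping in elimination_effects above. Pushing a 4-byte value through (pre_dec (reg sp)) lowers sp by 4 (PUSH_ROUNDING may round the size up first, per the ix86_push_rounding expansion shown), so a rule eliminating to sp must grow its offset by 4 for from_rtx + offset to keep naming the same location:

    before:  fp == sp + 16                           (ep->offset == 16)
    insn:    (set (mem:SI (pre_dec (reg sp))) ...)   ; sp -= 4
    after:   fp == sp + 20                           (ep->offset == 20)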
3148/* Descend through rtx X and verify that no references to eliminable registers
3149 remain. If any do remain, mark the involved register as not
3150 eliminable. */
3151
3152static void
3153check_eliminable_occurrences (rtx x)
3154{
3155 const char *fmt;
3156 int i;
3157 enum rtx_code code;
3158
3159 if (x == 0)
3160 return;
3161
3162  code = GET_CODE (x);
3163
3164  if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3165 {
3166 struct elim_table *ep;
3167
3168      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3169 if (ep->from_rtx == x)
3170 ep->can_eliminate = 0;
3171 return;
3172 }
3173
3174  fmt = GET_RTX_FORMAT (code);
3175  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3176 {
3177 if (*fmt == 'e')
3178	check_eliminable_occurrences (XEXP (x, i));
3179 else if (*fmt == 'E')
3180 {
3181 int j;
3182	  for (j = 0; j < XVECLEN (x, i); j++)
3183	    check_eliminable_occurrences (XVECEXP (x, i, j));
3184 }
3185 }
3186}
3187
3188/* Scan INSN and eliminate all eliminable registers in it.
3189
3190 If REPLACE is nonzero, do the replacement destructively. Also
3191   delete the insn as dead if it is setting an eliminable register.
3192
3193 If REPLACE is zero, do all our allocations in reload_obstack.
3194
3195 If no eliminations were done and this insn doesn't require any elimination
3196 processing (these are not identical conditions: it might be updating sp,
3197 but not referencing fp; this needs to be seen during reload_as_needed so
3198 that the offset between fp and sp can be taken into consideration), zero
3199 is returned. Otherwise, 1 is returned. */
3200
3201static int
3202eliminate_regs_in_insn (rtx_insn *insn, int replace)
3203{
3204 int icode = recog_memoized (insn);
3205 rtx old_body = PATTERN (insn);
3206 int insn_is_asm = asm_noperands (old_body) >= 0;
1
Assuming the condition is false
3207 rtx old_set = single_set (insn);
3208 rtx new_body;
3209 int val = 0;
3210 int i;
3211  rtx substed_operand[MAX_RECOG_OPERANDS];
3212  rtx orig_operand[MAX_RECOG_OPERANDS];
3213 struct elim_table *ep;
3214 rtx plus_src, plus_cst_src;
3215
3216 if (! insn_is_asm
1.1
'insn_is_asm' is 0
&& icode
1.2
'icode' is >= 0
< 0)
2
Taking false branch
3217 {
3218      gcc_assert (DEBUG_INSN_P (insn)
3219		  || GET_CODE (PATTERN (insn)) == USE
3220		  || GET_CODE (PATTERN (insn)) == CLOBBER
3221		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
3222      if (DEBUG_BIND_INSN_P (insn))
3223	INSN_VAR_LOCATION_LOC (insn)
3224	  = eliminate_regs (INSN_VAR_LOCATION_LOC (insn), VOIDmode, insn);
3225 return 0;
3226 }
3227
3228 /* We allow one special case which happens to work on all machines we
3229 currently support: a single set with the source or a REG_EQUAL
3230 note being a PLUS of an eliminable register and a constant. */
3231 plus_src = plus_cst_src = 0;
3232 if (old_set
2.1
'old_set' is null
	      && REG_P (SET_DEST (old_set)))
3233 {
3234      if (GET_CODE (SET_SRC (old_set)) == PLUS)
3235	plus_src = SET_SRC (old_set);
3236 /* First see if the source is of the form (plus (...) CST). */
3237 if (plus_src
3238	  && CONST_INT_P (XEXP (plus_src, 1)))
3239 plus_cst_src = plus_src;
3240      else if (REG_P (SET_SRC (old_set))
3241 || plus_src)
3242 {
3243 /* Otherwise, see if we have a REG_EQUAL note of the form
3244 (plus (...) CST). */
3245 rtx links;
3246	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
3247	    {
3248	      if ((REG_NOTE_KIND (links) == REG_EQUAL
3249		   || REG_NOTE_KIND (links) == REG_EQUIV)
3250		  && GET_CODE (XEXP (links, 0)) == PLUS
3251		  && CONST_INT_P (XEXP (XEXP (links, 0), 1)))
3252 {
3253		  plus_cst_src = XEXP (links, 0);
3254 break;
3255 }
3256 }
3257 }
3258
3259 /* Check that the first operand of the PLUS is a hard reg or
3260 the lowpart subreg of one. */
3261 if (plus_cst_src)
3262 {
3263	  rtx reg = XEXP (plus_cst_src, 0);
3264	  if (GET_CODE (reg) == SUBREG && subreg_lowpart_p (reg))
3265	    reg = SUBREG_REG (reg);
3266
3267	  if (!REG_P (reg) || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3268 plus_cst_src = 0;
3269 }
3270 }
3271 if (plus_cst_src
2.2
'plus_cst_src' is null
)
3
Taking false branch
3272 {
3273      rtx reg = XEXP (plus_cst_src, 0);
3274      poly_int64 offset = INTVAL (XEXP (plus_cst_src, 1));
3275
3276      if (GET_CODE (reg) == SUBREG)
3277	reg = SUBREG_REG (reg);
3278
3279      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3280 if (ep->from_rtx == reg && ep->can_eliminate)
3281 {
3282 rtx to_rtx = ep->to_rtx;
3283 offset += ep->offset;
3284	  offset = trunc_int_for_mode (offset, GET_MODE (plus_cst_src));
3285
3286	  if (GET_CODE (XEXP (plus_cst_src, 0)) == SUBREG)
3287	    to_rtx = gen_lowpart (GET_MODE (XEXP (plus_cst_src, 0)),
3288 to_rtx);
3289 /* If we have a nonzero offset, and the source is already
3290 a simple REG, the following transformation would
3291 increase the cost of the insn by replacing a simple REG
3292 with (plus (reg sp) CST). So try only when we already
3293 had a PLUS before. */
3294	  if (known_eq (offset, 0) || plus_src)
3295 {
3296	      rtx new_src = plus_constant (GET_MODE (to_rtx),
3297 to_rtx, offset);
3298
3299 new_body = old_body;
3300 if (! replace)
3301 {
3302 new_body = copy_insn (old_body);
3303		  if (REG_NOTES (insn))
3304		    REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3305 }
3306 PATTERN (insn) = new_body;
3307 old_set = single_set (insn);
3308
3309 /* First see if this insn remains valid when we make the
3310 change. If not, try to replace the whole pattern with
3311 a simple set (this may help if the original insn was a
3312 PARALLEL that was only recognized as single_set due to
3313 REG_UNUSED notes). If this isn't valid either, keep
3314 the INSN_CODE the same and let reload fix it up. */
3315	      if (!validate_change (insn, &SET_SRC (old_set), new_src, 0))
3316		{
3317		  rtx new_pat = gen_rtx_SET (SET_DEST (old_set), new_src);
3318
3319		  if (!validate_change (insn, &PATTERN (insn), new_pat, 0))
3320		    SET_SRC (old_set) = new_src;
3321 }
3322 }
3323 else
3324 break;
3325
3326 val = 1;
3327 /* This can't have an effect on elimination offsets, so skip right
3328 to the end. */
3329 goto done;
3330 }
3331 }
3332
3333 /* Determine the effects of this insn on elimination offsets. */
3334  elimination_effects (old_body, VOIDmode);
3335
3336 /* Eliminate all eliminable registers occurring in operands that
3337 can be handled by reload. */
3338 extract_insn (insn);
3339 for (i = 0; i < recog_data.n_operands; i++)
4
Assuming 'i' is < field 'n_operands'
5
Loop condition is true. Entering loop body
14
Assuming 'i' is >= field 'n_operands'
15
Loop condition is false. Execution continues on line 3395
3340 {
3341 orig_operand[i] = recog_data.operand[i];
3342 substed_operand[i] = recog_data.operand[i];
3343
3344 /* For an asm statement, every operand is eliminable. */
3345 if (insn_is_asm
5.1
'insn_is_asm' is 0
|| insn_data[icode].operand[i].eliminable)
6
Assuming field 'eliminable' is not equal to 0
7
Taking true branch
3346 {
3347 bool is_set_src, in_plus;
3348
3349 /* Check for setting a register that we know about. */
3350 if (recog_data.operand_type[i] != OP_IN
8
Assuming the condition is false
3351	      && REG_P (orig_operand[i]))
3352 {
3353 /* If we are assigning to a register that can be eliminated, it
3354 must be as part of a PARALLEL, since the code above handles
3355 single SETs. We must indicate that we can no longer
3356 eliminate this reg. */
3357	      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3358		   ep++)
3359 if (ep->from_rtx == orig_operand[i])
3360 ep->can_eliminate = 0;
3361 }
3362
3363 /* Companion to the above plus substitution, we can allow
3364 invariants as the source of a plain move. */
3365 is_set_src = false;
3366 if (old_set
8.1
'old_set' is null
3367	      && recog_data.operand_loc[i] == &SET_SRC (old_set))
3368	    is_set_src = true;
3369	  in_plus = false;
3370	  if (plus_src
8.2
'plus_src' is null
3371	      && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)
3372		  || recog_data.operand_loc[i] == &XEXP (plus_src, 1)))
3373	    in_plus = true;
3374
3375	  substed_operand[i]
3376	    = eliminate_regs_1 (recog_data.operand[i], VOIDmode,
3377				replace ? insn : NULL_RTX,
9
Assuming 'replace' is 0
10
'?' condition is false
3378 is_set_src
10.1
'is_set_src' is false
				|| in_plus, false);
3379	  if (substed_operand[i] != orig_operand[i])
11
Assuming the condition is true
12
Taking true branch
3380	    val = 1;
3381	  /* Terminate the search in check_eliminable_occurrences at
3382	     this point.  */
3383	  *recog_data.operand_loc[i] = 0;
3384
3385	  /* If an output operand changed from a REG to a MEM and INSN is an
3386	     insn, write a CLOBBER insn.  */
3387	  if (recog_data.operand_type[i] != OP_IN
13
Assuming the condition is false
3388	      && REG_P (orig_operand[i])
3389	      && MEM_P (substed_operand[i])
3390	      && replace)
3391	    emit_insn_after (gen_clobber (orig_operand[i]), insn);
3392	}
3393    }
3394
3395  for (i = 0; i < recog_data.n_dups; i++)
16
Assuming 'i' is >= field 'n_dups'
17
Loop condition is false. Execution continues on line 3400
3396    *recog_data.dup_loc[i]
3397      = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3398
3399  /* If any eliminable remain, they aren't eliminable anymore.  */
3400  check_eliminable_occurrences (old_body);
3401
3402  /* Substitute the operands; the new values are in the substed_operand
3403     array.  */
3404  for (i = 0; i < recog_data.n_operands; i++)
18
Assuming 'i' is >= field 'n_operands'
19
Loop condition is false. Execution continues on line 3406
3405    *recog_data.operand_loc[i] = substed_operand[i];
3406  for (i = 0; i < recog_data.n_dups; i++)
20
Assuming 'i' is >= field 'n_dups'
21
Loop condition is false. Execution continues on line 3418
3407    *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3408
3409  /* If we are replacing a body that was a (set X (plus Y Z)), try to
3410     re-recognize the insn.  We do this in case we had a simple addition
3411     but now can do this as a load-address.  This saves an insn in this
3412     common case.
3413     If re-recognition fails, the old insn code number will still be used,
3414     and some register operands may have changed into PLUS expressions.
3415     These will be handled by find_reloads by loading them into a register
3416     again.  */
3417
3418  if (val
21.1
'val' is 1
)
22
Taking true branch
3419    {
3420      /* If we aren't replacing things permanently and we changed something,
3421	 make another copy to ensure that all the RTL is new.  Otherwise
3422	 things can go wrong if find_reload swaps commutative operands
3423	 and one is inside RTL that has been copied while the other is not.  */
3424      new_body = old_body;
3425      if (! replace
22.1
'replace' is 0
)
23
Taking true branch
3426 {
3427 new_body = copy_insn (old_body);
3428 if (REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx))
24
Assuming field 'rt_rtx' is null
25
Taking false branch
3429 REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx) = copy_insn_1 (REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx));
3430 }
3431 PATTERN (insn) = new_body;
3432
3433 /* If we had a move insn but now we don't, rerecognize it. This will
3434 cause spurious re-recognition if the old move had a PARALLEL since
3435 the new one still will, but we can't call single_set without
3436 having put NEW_BODY into the insn and the re-recognition won't
3437 hurt in this rare case. */
3438 /* ??? Why this huge if statement - why don't we just rerecognize the
3439 thing always? */
3440 if (! insn_is_asm
25.1
'insn_is_asm' is 0
3441 && old_set
25.2
'old_set' is equal to null
!= 0
3442 && ((REG_P (SET_SRC (old_set))(((enum rtx_code) ((((old_set)->u.fld[1]).rt_rtx))->code) == REG)
3443 && (GET_CODE (new_body)((enum rtx_code) (new_body)->code) != SET
3444 || !REG_P (SET_SRC (new_body))(((enum rtx_code) ((((new_body)->u.fld[1]).rt_rtx))->code) == REG)))
3445 /* If this was a load from or store to memory, compare
3446 the MEM in recog_data.operand to the one in the insn.
3447 If they are not equal, then rerecognize the insn. */
3448 || (old_set != 0
3449 && ((MEM_P (SET_SRC (old_set))(((enum rtx_code) ((((old_set)->u.fld[1]).rt_rtx))->code) == MEM)
3450 && SET_SRC (old_set)(((old_set)->u.fld[1]).rt_rtx) != recog_data.operand[1])
3451 || (MEM_P (SET_DEST (old_set))(((enum rtx_code) ((((old_set)->u.fld[0]).rt_rtx))->code) == MEM)
3452 && SET_DEST (old_set)(((old_set)->u.fld[0]).rt_rtx) != recog_data.operand[0])))
3453 /* If this was an add insn before, rerecognize. */
3454 || GET_CODE (SET_SRC (old_set))((enum rtx_code) ((((old_set)->u.fld[1]).rt_rtx))->code) == PLUS))
3455 {
3456 int new_icode = recog (PATTERN (insn), insn, 0);
3457 if (new_icode >= 0)
3458 INSN_CODE (insn)(((insn)->u.fld[5]).rt_int) = new_icode;
3459 }
3460 }
3461
3462 /* Restore the old body. If there were any changes to it, we made a copy
3463 of it while the changes were still in place, so we'll correctly return
3464 a modified insn below. */
3465 if (! replace
25.3
'replace' is 0
)
26
Taking true branch
3466 {
3467 /* Restore the old body. */
3468 for (i = 0; i < recog_data.n_operands; i++)
27
Assuming 'i' is < field 'n_operands'
28
Loop condition is true. Entering loop body
31
The value 1 is assigned to 'i'
32
Assuming 'i' is < field 'n_operands'
33
Loop condition is true. Entering loop body
3469 /* Restoring a top-level match_parallel would clobber the new_body
3470 we installed in the insn. */
3471 if (recog_data.operand_loc[i] != &PATTERN (insn))
29
Assuming the condition is false
30
Taking false branch
34
Assuming the condition is true
35
Taking true branch
3472 *recog_data.operand_loc[i] = orig_operand[i];
36
Assigned value is garbage or undefined
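Reading the path events together: steps 18-21 assumed the substitution loops at 3404 and 3406 run zero times, while steps 27-35 assume the restore loop at 3468 runs at least twice, so event 36 means that on the explored path orig_operand[1] is read at 3472 without ever having been written; the loop that fills orig_operand[] from recog_data.operand[] earlier in the function was evidently assumed to run fewer iterations than the restore loop. A reduced sketch of that shape, with hypothetical names in place of the reload data structures:

/* Reduced sketch (hypothetical names): two loops share a mutable bound,
   so there is a path on which the fill loop runs zero times and the
   restore loop then reads elements that were never written. */
struct recog_like { int n_operands; int operand[30]; };
static struct recog_like rd;          /* stands in for recog_data */

static void reextract (void)          /* stands in for intervening calls */
{
  rd.n_operands = 2;
}

void restore_path (void)
{
  int orig[30];                       /* like orig_operand[]: undefined */
  for (int i = 0; i < rd.n_operands; i++)
    orig[i] = rd.operand[i];          /* analyzer: assume 0 iterations */
  reextract ();                       /* bound may change in between */
  for (int i = 0; i < rd.n_operands; i++)
    rd.operand[i] = orig[i];          /* analyzer: i reaches 1, and
                                         orig[1] is garbage/undefined */
}

Whether the combined path is actually feasible in reload depends on facts about recog_data that the checker does not track across the intervening calls, which is the usual source of this style of report.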
3473 for (i = 0; i < recog_data.n_dups; i++) 3474 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]]; 3475 } 3476 3477 /* Update all elimination pairs to reflect the status after the current 3478 insn. The changes we make were determined by the earlier call to 3479 elimination_effects. 3480 3481 We also detect cases where register elimination cannot be done, 3482 namely, if a register would be both changed and referenced outside a MEM 3483 in the resulting insn since such an insn is often undefined and, even if 3484 not, we cannot know what meaning will be given to it. Note that it is 3485 valid to have a register used in an address in an insn that changes it 3486 (presumably with a pre- or post-increment or decrement). 3487 3488 If anything changes, return nonzero. */ 3489 3490 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3491 { 3492 if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem) 3493 ep->can_eliminate = 0; 3494 3495 ep->ref_outside_mem = 0; 3496 3497 if (maybe_ne (ep->previous_offset, ep->offset)) 3498 val = 1; 3499 } 3500 3501 done: 3502 /* If we changed something, perform elimination in REG_NOTES. This is 3503 needed even when REPLACE is zero because a REG_DEAD note might refer 3504 to a register that we eliminate and could cause a different number 3505 of spill registers to be needed in the final reload pass than in 3506 the pre-passes. */ 3507 if (val && REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx) != 0) 3508 REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx) 3509 = eliminate_regs_1 (REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx), VOIDmode((void) 0, E_VOIDmode), REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx), true, 3510 false); 3511 3512 return val; 3513} 3514 3515/* Like eliminate_regs_in_insn, but only estimate costs for the use of the 3516 register allocator. INSN is the instruction we need to examine, we perform 3517 eliminations in its operands and record cases where eliminating a reg with 3518 an invariant equivalence would add extra cost. */ 3519 3520#pragma GCC diagnostic push 3521#pragma GCC diagnostic warning "-Wmaybe-uninitialized" 3522static void 3523elimination_costs_in_insn (rtx_insn *insn) 3524{ 3525 int icode = recog_memoized (insn); 3526 rtx old_body = PATTERN (insn); 3527 int insn_is_asm = asm_noperands (old_body) >= 0; 3528 rtx old_set = single_set (insn); 3529 int i; 3530 rtx orig_operand[MAX_RECOG_OPERANDS30]; 3531 rtx orig_dup[MAX_RECOG_OPERANDS30]; 3532 struct elim_table *ep; 3533 rtx plus_src, plus_cst_src; 3534 bool sets_reg_p; 3535 3536 if (! insn_is_asm && icode < 0) 3537 { 3538 gcc_assert (DEBUG_INSN_P (insn)((void)(!((((enum rtx_code) (insn)->code) == DEBUG_INSN) ||
((enum rtx_code) (PATTERN (insn))->code) == USE || ((enum
rtx_code) (PATTERN (insn))->code) == CLOBBER || ((enum rtx_code
) (PATTERN (insn))->code) == ASM_INPUT) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 3541, __FUNCTION__), 0 : 0))
3539 || GET_CODE (PATTERN (insn)) == USE((void)(!((((enum rtx_code) (insn)->code) == DEBUG_INSN) ||
((enum rtx_code) (PATTERN (insn))->code) == USE || ((enum
rtx_code) (PATTERN (insn))->code) == CLOBBER || ((enum rtx_code
) (PATTERN (insn))->code) == ASM_INPUT) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 3541, __FUNCTION__), 0 : 0))
3540 || GET_CODE (PATTERN (insn)) == CLOBBER((void)(!((((enum rtx_code) (insn)->code) == DEBUG_INSN) ||
((enum rtx_code) (PATTERN (insn))->code) == USE || ((enum
rtx_code) (PATTERN (insn))->code) == CLOBBER || ((enum rtx_code
) (PATTERN (insn))->code) == ASM_INPUT) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 3541, __FUNCTION__), 0 : 0))
3541 || GET_CODE (PATTERN (insn)) == ASM_INPUT)((void)(!((((enum rtx_code) (insn)->code) == DEBUG_INSN) ||
((enum rtx_code) (PATTERN (insn))->code) == USE || ((enum
rtx_code) (PATTERN (insn))->code) == CLOBBER || ((enum rtx_code
) (PATTERN (insn))->code) == ASM_INPUT) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 3541, __FUNCTION__), 0 : 0))
; 3542 return; 3543 } 3544 3545 if (old_set != 0 && REG_P (SET_DEST (old_set))(((enum rtx_code) ((((old_set)->u.fld[0]).rt_rtx))->code
) == REG)
3546 && REGNO (SET_DEST (old_set))(rhs_regno((((old_set)->u.fld[0]).rt_rtx))) < FIRST_PSEUDO_REGISTER76) 3547 { 3548 /* Check for setting an eliminable register. */ 3549 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3550 if (ep->from_rtx == SET_DEST (old_set)(((old_set)->u.fld[0]).rt_rtx) && ep->can_eliminate) 3551 return; 3552 } 3553 3554 /* We allow one special case which happens to work on all machines we 3555 currently support: a single set with the source or a REG_EQUAL 3556 note being a PLUS of an eliminable register and a constant. */ 3557 plus_src = plus_cst_src = 0; 3558 sets_reg_p = false; 3559 if (old_set && REG_P (SET_DEST (old_set))(((enum rtx_code) ((((old_set)->u.fld[0]).rt_rtx))->code
) == REG)
) 3560 { 3561 sets_reg_p = true; 3562 if (GET_CODE (SET_SRC (old_set))((enum rtx_code) ((((old_set)->u.fld[1]).rt_rtx))->code
)
== PLUS) 3563 plus_src = SET_SRC (old_set)(((old_set)->u.fld[1]).rt_rtx); 3564 /* First see if the source is of the form (plus (...) CST). */ 3565 if (plus_src 3566 && CONST_INT_P (XEXP (plus_src, 1))(((enum rtx_code) ((((plus_src)->u.fld[1]).rt_rtx))->code
) == CONST_INT)
) 3567 plus_cst_src = plus_src; 3568 else if (REG_P (SET_SRC (old_set))(((enum rtx_code) ((((old_set)->u.fld[1]).rt_rtx))->code
) == REG)
3569 || plus_src) 3570 { 3571 /* Otherwise, see if we have a REG_EQUAL note of the form 3572 (plus (...) CST). */ 3573 rtx links; 3574 for (links = REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx); links; links = XEXP (links, 1)(((links)->u.fld[1]).rt_rtx)) 3575 { 3576 if ((REG_NOTE_KIND (links)((enum reg_note) ((machine_mode) (links)->mode)) == REG_EQUAL 3577 || REG_NOTE_KIND (links)((enum reg_note) ((machine_mode) (links)->mode)) == REG_EQUIV) 3578 && GET_CODE (XEXP (links, 0))((enum rtx_code) ((((links)->u.fld[0]).rt_rtx))->code) == PLUS 3579 && CONST_INT_P (XEXP (XEXP (links, 0), 1))(((enum rtx_code) (((((((links)->u.fld[0]).rt_rtx))->u.
fld[1]).rt_rtx))->code) == CONST_INT)
) 3580 { 3581 plus_cst_src = XEXP (links, 0)(((links)->u.fld[0]).rt_rtx); 3582 break; 3583 } 3584 } 3585 } 3586 } 3587 3588 /* Determine the effects of this insn on elimination offsets. */ 3589 elimination_effects (old_body, VOIDmode((void) 0, E_VOIDmode)); 3590 3591 /* Eliminate all eliminable registers occurring in operands that 3592 can be handled by reload. */ 3593 extract_insn (insn); 3594 int n_dups = recog_data.n_dups; 3595 for (i = 0; i < n_dups; i++) 3596 orig_dup[i] = *recog_data.dup_loc[i]; 3597 3598 int n_operands = recog_data.n_operands; 3599 for (i = 0; i < n_operands; i++) 3600 { 3601 orig_operand[i] = recog_data.operand[i]; 3602 3603 /* For an asm statement, every operand is eliminable. */ 3604 if (insn_is_asm || insn_data[icode].operand[i].eliminable) 3605 { 3606 bool is_set_src, in_plus; 3607 3608 /* Check for setting a register that we know about. */ 3609 if (recog_data.operand_type[i] != OP_IN 3610 && REG_P (orig_operand[i])(((enum rtx_code) (orig_operand[i])->code) == REG)) 3611 { 3612 /* If we are assigning to a register that can be eliminated, it 3613 must be as part of a PARALLEL, since the code above handles 3614 single SETs. We must indicate that we can no longer 3615 eliminate this reg. */ 3616 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; 3617 ep++) 3618 if (ep->from_rtx == orig_operand[i]) 3619 ep->can_eliminate = 0; 3620 } 3621 3622 /* Companion to the above plus substitution, we can allow 3623 invariants as the source of a plain move. */ 3624 is_set_src = false; 3625 if (old_set && recog_data.operand_loc[i] == &SET_SRC (old_set)(((old_set)->u.fld[1]).rt_rtx)) 3626 is_set_src = true; 3627 if (is_set_src && !sets_reg_p) 3628 note_reg_elim_costly (SET_SRC (old_set)(((old_set)->u.fld[1]).rt_rtx), insn); 3629 in_plus = false; 3630 if (plus_src && sets_reg_p 3631 && (recog_data.operand_loc[i] == &XEXP (plus_src, 0)(((plus_src)->u.fld[0]).rt_rtx) 3632 || recog_data.operand_loc[i] == &XEXP (plus_src, 1)(((plus_src)->u.fld[1]).rt_rtx))) 3633 in_plus = true; 3634 3635 eliminate_regs_1 (recog_data.operand[i], VOIDmode((void) 0, E_VOIDmode), 3636 NULL_RTX(rtx) 0, 3637 is_set_src || in_plus, true); 3638 /* Terminate the search in check_eliminable_occurrences at 3639 this point. */ 3640 *recog_data.operand_loc[i] = 0; 3641 } 3642 } 3643 3644 for (i = 0; i < n_dups; i++) 3645 *recog_data.dup_loc[i] 3646 = *recog_data.operand_loc[(int) recog_data.dup_num[i]]; 3647 3648 /* If any eliminable remain, they aren't eliminable anymore. */ 3649 check_eliminable_occurrences (old_body); 3650 3651 /* Restore the old body. */ 3652 for (i = 0; i < n_operands; i++) 3653 *recog_data.operand_loc[i] = orig_operand[i]; 3654 for (i = 0; i < n_dups; i++) 3655 *recog_data.dup_loc[i] = orig_dup[i]; 3656 3657 /* Update all elimination pairs to reflect the status after the current 3658 insn. The changes we make were determined by the earlier call to 3659 elimination_effects. */ 3660 3661 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3662 { 3663 if (maybe_ne (ep->previous_offset, ep->offset) && ep->ref_outside_mem) 3664 ep->can_eliminate = 0; 3665 3666 ep->ref_outside_mem = 0; 3667 } 3668 3669 return; 3670} 3671#pragma GCC diagnostic pop 3672 3673/* Loop through all elimination pairs. 3674 Recalculate the number not at initial offset. 
3675 3676 Compute the maximum offset (minimum offset if the stack does not 3677 grow downward) for each elimination pair. */ 3678 3679static void 3680update_eliminable_offsets (void) 3681{ 3682 struct elim_table *ep; 3683 3684 num_not_at_initial_offset = 0; 3685 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3686 { 3687 ep->previous_offset = ep->offset; 3688 if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset)) 3689 num_not_at_initial_offset++; 3690 } 3691} 3692 3693/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register 3694 replacement we currently believe is valid, mark it as not eliminable if X 3695 modifies DEST in any way other than by adding a constant integer to it. 3696 3697 If DEST is the frame pointer, we do nothing because we assume that 3698 all assignments to the hard frame pointer are nonlocal gotos and are being 3699 done at a time when they are valid and do not disturb anything else. 3700 Some machines want to eliminate a fake argument pointer with either the 3701 frame or stack pointer. Assignments to the hard frame pointer must not 3702 prevent this elimination. 3703 3704 Called via note_stores from reload before starting its passes to scan 3705 the insns of the function. */ 3706 3707static void 3708mark_not_eliminable (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED__attribute__ ((__unused__))) 3709{ 3710 unsigned int i; 3711 3712 /* A SUBREG of a hard register here is just changing its mode. We should 3713 not see a SUBREG of an eliminable hard register, but check just in 3714 case. */ 3715 if (GET_CODE (dest)((enum rtx_code) (dest)->code) == SUBREG) 3716 dest = SUBREG_REG (dest)(((dest)->u.fld[0]).rt_rtx); 3717 3718 if (dest == hard_frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_HARD_FRAME_POINTER])) 3719 return; 3720 3721 for (i = 0; i < NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0])); i++) 3722 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx 3723 && (GET_CODE (x)((enum rtx_code) (x)->code) != SET 3724 || GET_CODE (SET_SRC (x))((enum rtx_code) ((((x)->u.fld[1]).rt_rtx))->code) != PLUS 3725 || XEXP (SET_SRC (x), 0)((((((x)->u.fld[1]).rt_rtx))->u.fld[0]).rt_rtx) != dest 3726 || !CONST_INT_P (XEXP (SET_SRC (x), 1))(((enum rtx_code) (((((((x)->u.fld[1]).rt_rtx))->u.fld[
1]).rt_rtx))->code) == CONST_INT)
)) 3727 { 3728 reg_eliminate[i].can_eliminate_previous 3729 = reg_eliminate[i].can_eliminate = 0; 3730 num_eliminable--; 3731 } 3732} 3733 3734/* Verify that the initial elimination offsets did not change since the 3735 last call to set_initial_elim_offsets. This is used to catch cases 3736 where something illegal happened during reload_as_needed that could 3737 cause incorrect code to be generated if we did not check for it. */ 3738 3739static bool 3740verify_initial_elim_offsets (void) 3741{ 3742 poly_int64 t; 3743 struct elim_table *ep; 3744 3745 if (!num_eliminable) 3746 return true; 3747 3748 targetm.compute_frame_layout (); 3749 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3750 { 3751 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t)((t) = ix86_initial_elimination_offset ((ep->from), (ep->
to)))
; 3752 if (maybe_ne (t, ep->initial_offset)) 3753 return false; 3754 } 3755 3756 return true; 3757} 3758 3759/* Reset all offsets on eliminable registers to their initial values. */ 3760 3761static void 3762set_initial_elim_offsets (void) 3763{ 3764 struct elim_table *ep = reg_eliminate; 3765 3766 targetm.compute_frame_layout (); 3767 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3768 { 3769 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset)((ep->initial_offset) = ix86_initial_elimination_offset ((
ep->from), (ep->to)))
; 3770 ep->previous_offset = ep->offset = ep->initial_offset; 3771 } 3772 3773 num_not_at_initial_offset = 0; 3774} 3775 3776/* Subroutine of set_initial_label_offsets called via for_each_eh_label. */ 3777 3778static void 3779set_initial_eh_label_offset (rtx label) 3780{ 3781 set_label_offsets (label, NULLnullptr, 1); 3782} 3783 3784/* Initialize the known label offsets. 3785 Set a known offset for each forced label to be at the initial offset 3786 of each elimination. We do this because we assume that all 3787 computed jumps occur from a location where each elimination is 3788 at its initial offset. 3789 For all other labels, show that we don't know the offsets. */ 3790 3791static void 3792set_initial_label_offsets (void) 3793{ 3794 memset (offsets_known_at, 0, num_labels); 3795 3796 unsigned int i; 3797 rtx_insn *insn; 3798 FOR_EACH_VEC_SAFE_ELT (forced_labels, i, insn)for (i = 0; vec_safe_iterate ((((&x_rtl)->expr.x_forced_labels
)), (i), &(insn)); ++(i))
3799 set_label_offsets (insn, NULLnullptr, 1); 3800 3801 for (rtx_insn_list *x = nonlocal_goto_handler_labels((&x_rtl)->x_nonlocal_goto_handler_labels); x; x = x->next ()) 3802 if (x->insn ()) 3803 set_label_offsets (x->insn (), NULLnullptr, 1); 3804 3805 for_each_eh_label (set_initial_eh_label_offset); 3806} 3807 3808/* Set all elimination offsets to the known values for the code label given 3809 by INSN. */ 3810 3811static void 3812set_offsets_for_label (rtx_insn *insn) 3813{ 3814 unsigned int i; 3815 int label_nr = CODE_LABEL_NUMBER (insn)(((insn)->u.fld[5]).rt_int); 3816 struct elim_table *ep; 3817 3818 num_not_at_initial_offset = 0; 3819 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0])); ep++, i++) 3820 { 3821 ep->offset = ep->previous_offset 3822 = offsets_at[label_nr - first_label_num][i]; 3823 if (ep->can_eliminate && maybe_ne (ep->offset, ep->initial_offset)) 3824 num_not_at_initial_offset++; 3825 } 3826} 3827 3828/* See if anything that happened changes which eliminations are valid. 3829 For example, on the SPARC, whether or not the frame pointer can 3830 be eliminated can depend on what registers have been used. We need 3831 not check some conditions again (such as flag_omit_frame_pointer) 3832 since they can't have changed. */ 3833 3834static void 3835update_eliminables (HARD_REG_SET *pset) 3836{ 3837 int previous_frame_pointer_needed = frame_pointer_needed((&x_rtl)->frame_pointer_needed); 3838 struct elim_table *ep; 3839 3840 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3841 if ((ep->from == HARD_FRAME_POINTER_REGNUM6 3842 && targetm.frame_pointer_required ()) 3843 || ! targetm.can_eliminate (ep->from, ep->to) 3844 ) 3845 ep->can_eliminate = 0; 3846 3847 /* Look for the case where we have discovered that we can't replace 3848 register A with register B and that means that we will now be 3849 trying to replace register A with register C. This means we can 3850 no longer replace register C with register B and we need to disable 3851 such an elimination, if it exists. This occurs often with A == ap, 3852 B == sp, and C == fp. */ 3853 3854 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3855 { 3856 struct elim_table *op; 3857 int new_to = -1; 3858 3859 if (! ep->can_eliminate && ep->can_eliminate_previous) 3860 { 3861 /* Find the current elimination for ep->from, if there is a 3862 new one. */ 3863 for (op = reg_eliminate; 3864 op < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; op++) 3865 if (op->from == ep->from && op->can_eliminate) 3866 { 3867 new_to = op->to; 3868 break; 3869 } 3870 3871 /* See if there is an elimination of NEW_TO -> EP->TO. If so, 3872 disable it. */ 3873 for (op = reg_eliminate; 3874 op < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; op++) 3875 if (op->from == new_to && op->to == ep->to) 3876 op->can_eliminate = 0; 3877 } 3878 } 3879 3880 /* See if any registers that we thought we could eliminate the previous 3881 time are no longer eliminable. If so, something has changed and we 3882 must spill the register. Also, recompute the number of eliminable 3883 registers and see if the frame pointer is needed; it is if there is 3884 no elimination of the frame pointer that we can perform. 
*/ 3885 3886 frame_pointer_needed((&x_rtl)->frame_pointer_needed) = 1; 3887 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3888 { 3889 if (ep->can_eliminate 3890 && ep->from == FRAME_POINTER_REGNUM19 3891 && ep->to != HARD_FRAME_POINTER_REGNUM6 3892 && (! SUPPORTS_STACK_ALIGNMENT((((unsigned int) 1 << 28) * 8) > ((((global_options
.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi
() == MS_ABI) ? 128 : ((8) * (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4))))
3893 || ! crtl(&x_rtl)->stack_realign_needed)) 3894 frame_pointer_needed((&x_rtl)->frame_pointer_needed) = 0; 3895 3896 if (! ep->can_eliminate && ep->can_eliminate_previous) 3897 { 3898 ep->can_eliminate_previous = 0; 3899 SET_HARD_REG_BIT (*pset, ep->from); 3900 num_eliminable--; 3901 } 3902 } 3903 3904 /* If we didn't need a frame pointer last time, but we do now, spill 3905 the hard frame pointer. */ 3906 if (frame_pointer_needed((&x_rtl)->frame_pointer_needed) && ! previous_frame_pointer_needed) 3907 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM6); 3908} 3909 3910/* Call update_eliminables an spill any registers we can't eliminate anymore. 3911 Return true iff a register was spilled. */ 3912 3913static bool 3914update_eliminables_and_spill (void) 3915{ 3916 int i; 3917 bool did_spill = false; 3918 HARD_REG_SET to_spill; 3919 CLEAR_HARD_REG_SET (to_spill); 3920 update_eliminables (&to_spill); 3921 used_spill_regs &= ~to_spill; 3922 3923 for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++) 3924 if (TEST_HARD_REG_BIT (to_spill, i)) 3925 { 3926 spill_hard_reg (i, 1); 3927 did_spill = true; 3928 3929 /* Regardless of the state of spills, if we previously had 3930 a register that we thought we could eliminate, but now 3931 cannot eliminate, we must run another pass. 3932 3933 Consider pseudos which have an entry in reg_equiv_* which 3934 reference an eliminable register. We must make another pass 3935 to update reg_equiv_* so that we do not substitute in the 3936 old value from when we thought the elimination could be 3937 performed. */ 3938 } 3939 return did_spill; 3940} 3941 3942/* Return true if X is used as the target register of an elimination. */ 3943 3944bool 3945elimination_target_reg_p (rtx x) 3946{ 3947 struct elim_table *ep; 3948 3949 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3950 if (ep->to_rtx == x && ep->can_eliminate) 3951 return true; 3952 3953 return false; 3954} 3955 3956/* Initialize the table of registers to eliminate. 3957 Pre-condition: global flag frame_pointer_needed has been set before 3958 calling this function. */ 3959 3960static void 3961init_elim_table (void) 3962{ 3963 struct elim_table *ep; 3964 const struct elim_table_1 *ep1; 3965 3966 if (!reg_eliminate) 3967 reg_eliminate = XCNEWVEC (struct elim_table, NUM_ELIMINABLE_REGS)((struct elim_table *) xcalloc (((sizeof (reg_eliminate_1) / sizeof
((reg_eliminate_1)[0]))), sizeof (struct elim_table)))
; 3968 3969 num_eliminable = 0; 3970 3971 for (ep = reg_eliminate, ep1 = reg_eliminate_1; 3972 ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++, ep1++) 3973 { 3974 ep->from = ep1->from; 3975 ep->to = ep1->to; 3976 ep->can_eliminate = ep->can_eliminate_previous 3977 = (targetm.can_eliminate (ep->from, ep->to) 3978 && ! (ep->to == STACK_POINTER_REGNUM7 3979 && frame_pointer_needed((&x_rtl)->frame_pointer_needed) 3980 && (! SUPPORTS_STACK_ALIGNMENT((((unsigned int) 1 << 28) * 8) > ((((global_options
.x_ix86_isa_flags & (1UL << 1)) != 0) && ix86_cfun_abi
() == MS_ABI) ? 128 : ((8) * (((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4))))
3981 || ! stack_realign_fp((&x_rtl)->stack_realign_needed && !(&x_rtl
)->need_drap)
))); 3982 } 3983 3984 /* Count the number of eliminable registers and build the FROM and TO 3985 REG rtx's. Note that code in gen_rtx_REG will cause, e.g., 3986 gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx. 3987 We depend on this. */ 3988 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]; ep++) 3989 { 3990 num_eliminable += ep->can_eliminate; 3991 ep->from_rtx = gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, ep->from); 3992 ep->to_rtx = gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, ep->to); 3993 } 3994} 3995 3996/* Find all the pseudo registers that didn't get hard regs 3997 but do have known equivalent constants or memory slots. 3998 These include parameters (known equivalent to parameter slots) 3999 and cse'd or loop-moved constant memory addresses. 4000 4001 Record constant equivalents in reg_equiv_constant 4002 so they will be substituted by find_reloads. 4003 Record memory equivalents in reg_mem_equiv so they can 4004 be substituted eventually by altering the REG-rtx's. */ 4005 4006static void 4007init_eliminable_invariants (rtx_insn *first, bool do_subregs) 4008{ 4009 int i; 4010 rtx_insn *insn; 4011 4012 grow_reg_equivs (); 4013 if (do_subregs) 4014 reg_max_ref_mode = XCNEWVEC (machine_mode, max_regno)((machine_mode *) xcalloc ((max_regno), sizeof (machine_mode)
))
; 4015 else 4016 reg_max_ref_mode = NULLnullptr; 4017 4018 num_eliminable_invariants = 0; 4019 4020 first_label_num = get_first_label_num (); 4021 num_labels = max_label_num () - first_label_num; 4022 4023 /* Allocate the tables used to store offset information at labels. */ 4024 offsets_known_at = XNEWVEC (char, num_labels)((char *) xmalloc (sizeof (char) * (num_labels))); 4025 offsets_at = (poly_int64_pod (*)[NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0]))]) 4026 xmalloc (num_labels * NUM_ELIMINABLE_REGS(sizeof (reg_eliminate_1) / sizeof ((reg_eliminate_1)[0])) * sizeof (poly_int64)); 4027 4028/* Look for REG_EQUIV notes; record what each pseudo is equivalent 4029 to. If DO_SUBREGS is true, also find all paradoxical subregs and 4030 find largest such for each pseudo. FIRST is the head of the insn 4031 list. */ 4032 4033 for (insn = first; insn; insn = NEXT_INSN (insn)) 4034 { 4035 rtx set = single_set (insn); 4036 4037 /* We may introduce USEs that we want to remove at the end, so 4038 we'll mark them with QImode. Make sure there are no 4039 previously-marked insns left by say regmove. */ 4040 if (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
&& GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == USE 4041 && GET_MODE (insn)((machine_mode) (insn)->mode) != VOIDmode((void) 0, E_VOIDmode)) 4042 PUT_MODE (insn, VOIDmode((void) 0, E_VOIDmode)); 4043 4044 if (do_subregs && NONDEBUG_INSN_P (insn)((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN))
) 4045 scan_paradoxical_subregs (PATTERN (insn)); 4046 4047 if (set != 0 && REG_P (SET_DEST (set))(((enum rtx_code) ((((set)->u.fld[0]).rt_rtx))->code) ==
REG)
) 4048 { 4049 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX(rtx) 0); 4050 rtx x; 4051 4052 if (! note) 4053 continue; 4054 4055 i = REGNO (SET_DEST (set))(rhs_regno((((set)->u.fld[0]).rt_rtx))); 4056 x = XEXP (note, 0)(((note)->u.fld[0]).rt_rtx); 4057 4058 if (i <= LAST_VIRTUAL_REGISTER(((76)) + 5)) 4059 continue; 4060 4061 /* If flag_pic and we have constant, verify it's legitimate. */ 4062 if (!CONSTANT_P (x)((rtx_class[(int) (((enum rtx_code) (x)->code))]) == RTX_CONST_OBJ
)
4063 || !flag_picglobal_options.x_flag_pic || LEGITIMATE_PIC_OPERAND_P (x)legitimate_pic_operand_p (x)) 4064 { 4065 /* It can happen that a REG_EQUIV note contains a MEM 4066 that is not a legitimate memory operand. As later 4067 stages of reload assume that all addresses found 4068 in the reg_equiv_* arrays were originally legitimate, 4069 we ignore such REG_EQUIV notes. */ 4070 if (memory_operand (x, VOIDmode((void) 0, E_VOIDmode))) 4071 { 4072 /* Always unshare the equivalence, so we can 4073 substitute into this insn without touching the 4074 equivalence. */ 4075 reg_equiv_memory_loc (i)(*reg_equivs)[(i)].memory_loc = copy_rtx (x); 4076 } 4077 else if (function_invariant_p (x)) 4078 { 4079 machine_mode mode; 4080 4081 mode = GET_MODE (SET_DEST (set))((machine_mode) ((((set)->u.fld[0]).rt_rtx))->mode); 4082 if (GET_CODE (x)((enum rtx_code) (x)->code) == PLUS) 4083 { 4084 /* This is PLUS of frame pointer and a constant, 4085 and might be shared. Unshare it. */ 4086 reg_equiv_invariant (i)(*reg_equivs)[(i)].invariant = copy_rtx (x); 4087 num_eliminable_invariants++; 4088 } 4089 else if (x == frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]) || x == arg_pointer_rtx((this_target_rtl->x_global_rtl)[GR_ARG_POINTER])) 4090 { 4091 reg_equiv_invariant (i)(*reg_equivs)[(i)].invariant = x; 4092 num_eliminable_invariants++; 4093 } 4094 else if (targetm.legitimate_constant_p (mode, x)) 4095 reg_equiv_constant (i)(*reg_equivs)[(i)].constant = x; 4096 else 4097 { 4098 reg_equiv_memory_loc (i)(*reg_equivs)[(i)].memory_loc = force_const_mem (mode, x); 4099 if (! reg_equiv_memory_loc (i)(*reg_equivs)[(i)].memory_loc) 4100 reg_equiv_init (i)(*reg_equivs)[(i)].init = NULLnullptr; 4101 } 4102 } 4103 else 4104 { 4105 reg_equiv_init (i)(*reg_equivs)[(i)].init = NULLnullptr; 4106 continue; 4107 } 4108 } 4109 else 4110 reg_equiv_init (i)(*reg_equivs)[(i)].init = NULLnullptr; 4111 } 4112 } 4113 4114 if (dump_file) 4115 for (i = FIRST_PSEUDO_REGISTER76; i < max_regno; i++) 4116 if (reg_equiv_init (i)(*reg_equivs)[(i)].init) 4117 { 4118 fprintf (dump_file, "init_insns for %u: ", i); 4119 print_inline_rtx (dump_file, reg_equiv_init (i)(*reg_equivs)[(i)].init, 20); 4120 fprintf (dump_file, "\n"); 4121 } 4122} 4123 4124/* Indicate that we no longer have known memory locations or constants. 4125 Free all data involved in tracking these. */ 4126 4127static void 4128free_reg_equiv (void) 4129{ 4130 int i; 4131 4132 free (offsets_known_at); 4133 free (offsets_at); 4134 offsets_at = 0; 4135 offsets_known_at = 0; 4136 4137 for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++) 4138 if (reg_equiv_alt_mem_list (i)(*reg_equivs)[(i)].alt_mem_list) 4139 free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i)(*reg_equivs)[(i)].alt_mem_list); 4140 vec_free (reg_equivs); 4141} 4142
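free_reg_equiv above releases offsets_at, which init_eliminable_invariants allocated at 4025-4026 with a single xmalloc cast to a pointer-to-array type, so that offsets_at[label][pair] indexes a dense num_labels by NUM_ELIMINABLE_REGS matrix. A standalone sketch of that allocation idiom, with hypothetical sizes and a plain long in place of poly_int64_pod:

#include <stdio.h>
#include <stdlib.h>

#define NUM_ELIM 4                  /* hypothetical NUM_ELIMINABLE_REGS */

int main (void)
{
  int num_labels = 10;

  /* One allocation, indexed as a 2-D array via a pointer-to-array cast. */
  long (*offsets_at)[NUM_ELIM]
    = (long (*)[NUM_ELIM]) malloc (num_labels * NUM_ELIM * sizeof (long));
  if (!offsets_at)
    return 1;

  offsets_at[3][2] = 42;            /* row = label, column = elimination pair */
  printf ("%ld\n", offsets_at[3][2]);
  free (offsets_at);
  return 0;
}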
4143/* Kick all pseudos out of hard register REGNO. 4144 4145 If CANT_ELIMINATE is nonzero, it means that we are doing this spill 4146 because we found we can't eliminate some register. In the case, no pseudos 4147 are allowed to be in the register, even if they are only in a block that 4148 doesn't require spill registers, unlike the case when we are spilling this 4149 hard reg to produce another spill register. 4150 4151 Return nonzero if any pseudos needed to be kicked out. */ 4152 4153static void 4154spill_hard_reg (unsigned int regno, int cant_eliminate) 4155{ 4156 int i; 4157 4158 if (cant_eliminate) 4159 { 4160 SET_HARD_REG_BIT (bad_spill_regs_global, regno); 4161 df_set_regs_ever_live (regno, true); 4162 } 4163 4164 /* Spill every pseudo reg that was allocated to this reg 4165 or to something that overlaps this reg. */ 4166 4167 for (i = FIRST_PSEUDO_REGISTER76; i < max_regno; i++) 4168 if (reg_renumber[i] >= 0 4169 && (unsigned int) reg_renumber[i] <= regno 4170 && end_hard_regno (PSEUDO_REGNO_MODE (i)((machine_mode) (regno_reg_rtx[i])->mode), reg_renumber[i]) > regno) 4171 SET_REGNO_REG_SET (&spilled_pseudos, i)bitmap_set_bit (&spilled_pseudos, i); 4172} 4173 4174/* After spill_hard_reg was called and/or find_reload_regs was run for all 4175 insns that need reloads, this function is used to actually spill pseudo 4176 registers and try to reallocate them. It also sets up the spill_regs 4177 array for use by choose_reload_regs. 4178 4179 GLOBAL nonzero means we should attempt to reallocate any pseudo registers 4180 that we displace from hard registers. */ 4181 4182static int 4183finish_spills (int global) 4184{ 4185 class insn_chain *chain; 4186 int something_changed = 0; 4187 unsigned i; 4188 reg_set_iterator rsi; 4189 4190 /* Build the spill_regs array for the function. */ 4191 /* If there are some registers still to eliminate and one of the spill regs 4192 wasn't ever used before, additional stack space may have to be 4193 allocated to store this register. Thus, we may have changed the offset 4194 between the stack and frame pointers, so mark that something has changed. 4195 4196 One might think that we need only set VAL to 1 if this is a call-used 4197 register. However, the set of registers that must be saved by the 4198 prologue is not identical to the call-used set. For example, the 4199 register used by the call insn for the return PC is a call-used register, 4200 but must be saved by the prologue. */ 4201 4202 n_spills = 0; 4203 for (i = 0; i < FIRST_PSEUDO_REGISTER76; i++) 4204 if (TEST_HARD_REG_BIT (used_spill_regs, i)) 4205 { 4206 spill_reg_order[i] = n_spills; 4207 spill_regs[n_spills++] = i; 4208 if (num_eliminable && ! df_regs_ever_live_p (i)) 4209 something_changed = 1; 4210 df_set_regs_ever_live (i, true); 4211 } 4212 else 4213 spill_reg_order[i] = -1; 4214 4215 EXECUTE_IF_SET_IN_REG_SET (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i, rsi)for (bmp_iter_set_init (&(rsi), (&spilled_pseudos), (
76), &(i)); bmp_iter_set (&(rsi), &(i)); bmp_iter_next
(&(rsi), &(i)))
4216 if (reg_renumber[i] >= 0) 4217 { 4218 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]); 4219 /* Mark it as no longer having a hard register home. */ 4220 reg_renumber[i] = -1; 4221 if (ira_conflicts_p) 4222 /* Inform IRA about the change. */ 4223 ira_mark_allocation_change (i); 4224 /* We will need to scan everything again. */ 4225 something_changed = 1; 4226 } 4227 4228 /* Retry global register allocation if possible. */ 4229 if (global && ira_conflicts_p) 4230 { 4231 unsigned int n; 4232 4233 memset (pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET)); 4234 /* For every insn that needs reloads, set the registers used as spill 4235 regs in pseudo_forbidden_regs for every pseudo live across the 4236 insn. */ 4237 for (chain = insns_need_reload; chain; chain = chain->next_need_reload) 4238 { 4239 EXECUTE_IF_SET_IN_REG_SETfor (bmp_iter_set_init (&(rsi), (&chain->live_throughout
), (76), &(i)); bmp_iter_set (&(rsi), &(i)); bmp_iter_next
(&(rsi), &(i)))
4240 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i, rsi)for (bmp_iter_set_init (&(rsi), (&chain->live_throughout
), (76), &(i)); bmp_iter_set (&(rsi), &(i)); bmp_iter_next
(&(rsi), &(i)))
4241 { 4242 pseudo_forbidden_regs[i] |= chain->used_spill_regs; 4243 } 4244 EXECUTE_IF_SET_IN_REG_SETfor (bmp_iter_set_init (&(rsi), (&chain->dead_or_set
), (76), &(i)); bmp_iter_set (&(rsi), &(i)); bmp_iter_next
(&(rsi), &(i)))
4245 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i, rsi)for (bmp_iter_set_init (&(rsi), (&chain->dead_or_set
), (76), &(i)); bmp_iter_set (&(rsi), &(i)); bmp_iter_next
(&(rsi), &(i)))
4246 { 4247 pseudo_forbidden_regs[i] |= chain->used_spill_regs; 4248 } 4249 } 4250 4251 /* Retry allocating the pseudos spilled in IRA and the 4252 reload. For each reg, merge the various reg sets that 4253 indicate which hard regs can't be used, and call 4254 ira_reassign_pseudos. */ 4255 for (n = 0, i = FIRST_PSEUDO_REGISTER76; i < (unsigned) max_regno; i++) 4256 if (reg_old_renumber[i] != reg_renumber[i]) 4257 { 4258 if (reg_renumber[i] < 0) 4259 temp_pseudo_reg_arr[n++] = i; 4260 else 4261 CLEAR_REGNO_REG_SET (&spilled_pseudos, i)bitmap_clear_bit (&spilled_pseudos, i); 4262 } 4263 if (ira_reassign_pseudos (temp_pseudo_reg_arr, n, 4264 bad_spill_regs_global, 4265 pseudo_forbidden_regs, pseudo_previous_regs, 4266 &spilled_pseudos)) 4267 something_changed = 1; 4268 } 4269 /* Fix up the register information in the insn chain. 4270 This involves deleting those of the spilled pseudos which did not get 4271 a new hard register home from the live_{before,after} sets. */ 4272 for (chain = reload_insn_chain; chain; chain = chain->next) 4273 { 4274 HARD_REG_SET used_by_pseudos; 4275 HARD_REG_SET used_by_pseudos2; 4276 4277 if (! ira_conflicts_p) 4278 { 4279 /* Don't do it for IRA because IRA and the reload still can 4280 assign hard registers to the spilled pseudos on next 4281 reload iterations. */ 4282 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos)bitmap_and_compl_into (&chain->live_throughout, &spilled_pseudos
)
; 4283 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos)bitmap_and_compl_into (&chain->dead_or_set, &spilled_pseudos
)
; 4284 } 4285 /* Mark any unallocated hard regs as available for spills. That 4286 makes inheritance work somewhat better. */ 4287 if (chain->need_reload) 4288 { 4289 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout)do { CLEAR_HARD_REG_SET (used_by_pseudos); reg_set_to_hard_reg_set
(&used_by_pseudos, &chain->live_throughout); } while
(0)
; 4290 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set)do { CLEAR_HARD_REG_SET (used_by_pseudos2); reg_set_to_hard_reg_set
(&used_by_pseudos2, &chain->dead_or_set); } while
(0)
; 4291 used_by_pseudos |= used_by_pseudos2; 4292 4293 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout); 4294 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set); 4295 /* Value of chain->used_spill_regs from previous iteration 4296 may be not included in the value calculated here because 4297 of possible removing caller-saves insns (see function 4298 delete_caller_save_insns. */ 4299 chain->used_spill_regs = ~used_by_pseudos & used_spill_regs; 4300 } 4301 } 4302 4303 CLEAR_REG_SET (&changed_allocation_pseudos)bitmap_clear (&changed_allocation_pseudos); 4304 /* Let alter_reg modify the reg rtx's for the modified pseudos. */ 4305 for (i = FIRST_PSEUDO_REGISTER76; i < (unsigned)max_regno; i++) 4306 { 4307 int regno = reg_renumber[i]; 4308 if (reg_old_renumber[i] == regno) 4309 continue; 4310 4311 SET_REGNO_REG_SET (&changed_allocation_pseudos, i)bitmap_set_bit (&changed_allocation_pseudos, i); 4312 4313 alter_reg (i, reg_old_renumber[i], false); 4314 reg_old_renumber[i] = regno; 4315 if (dump_file) 4316 { 4317 if (regno == -1) 4318 fprintf (dump_file, " Register %d now on stack.\n\n", i); 4319 else 4320 fprintf (dump_file, " Register %d now in %d.\n\n", 4321 i, reg_renumber[i]); 4322 } 4323 } 4324 4325 return something_changed; 4326} 4327
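Once the macros expand, finish_spills manipulates HARD_REG_SETs with ordinary bit operations: used_by_pseudos |= used_by_pseudos2 at 4291 and chain->used_spill_regs = ~used_by_pseudos & used_spill_regs at 4299 compute "spill regs not occupied by live pseudos". A toy single-word model of those set operations, assuming a hypothetical target with at most 64 hard registers (GCC's real HARD_REG_SET is a fixed-width bit array):

#include <stdio.h>

typedef unsigned long long hreg_set;    /* toy one-word hard-reg set */

#define SET_BIT(S, R)   ((S) |= 1ULL << (R))
#define TEST_BIT(S, R)  (((S) >> (R)) & 1ULL)

int main (void)
{
  hreg_set used_spill_regs = 0, used_by_pseudos = 0;
  SET_BIT (used_spill_regs, 3);
  SET_BIT (used_spill_regs, 5);
  SET_BIT (used_by_pseudos, 5);
  /* Spill regs not used by live pseudos, mirroring
     chain->used_spill_regs = ~used_by_pseudos & used_spill_regs. */
  hreg_set avail = ~used_by_pseudos & used_spill_regs;
  printf ("reg 3 avail: %d, reg 5 avail: %d\n",
          (int) TEST_BIT (avail, 3), (int) TEST_BIT (avail, 5));
  return 0;
}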
4328/* Find all paradoxical subregs within X and update reg_max_ref_mode. */ 4329 4330static void 4331scan_paradoxical_subregs (rtx x) 4332{ 4333 int i; 4334 const char *fmt; 4335 enum rtx_code code = GET_CODE (x)((enum rtx_code) (x)->code); 4336 4337 switch (code) 4338 { 4339 case REG: 4340 case CONST: 4341 case SYMBOL_REF: 4342 case LABEL_REF: 4343 CASE_CONST_ANYcase CONST_INT: case CONST_WIDE_INT: case CONST_POLY_INT: case
CONST_DOUBLE: case CONST_FIXED: case CONST_VECTOR
: 4344 case CC0: 4345 case PC: 4346 case USE: 4347 case CLOBBER: 4348 return; 4349 4350 case SUBREG: 4351 if (REG_P (SUBREG_REG (x))(((enum rtx_code) ((((x)->u.fld[0]).rt_rtx))->code) == REG
)
) 4352 { 4353 unsigned int regno = REGNO (SUBREG_REG (x))(rhs_regno((((x)->u.fld[0]).rt_rtx))); 4354 if (partial_subreg_p (reg_max_ref_mode[regno], GET_MODE (x)((machine_mode) (x)->mode))) 4355 { 4356 reg_max_ref_mode[regno] = GET_MODE (x)((machine_mode) (x)->mode); 4357 mark_home_live_1 (regno, GET_MODE (x)((machine_mode) (x)->mode)); 4358 } 4359 } 4360 return; 4361 4362 default: 4363 break; 4364 } 4365 4366 fmt = GET_RTX_FORMAT (code)(rtx_format[(int) (code)]); 4367 for (i = GET_RTX_LENGTH (code)(rtx_length[(int) (code)]) - 1; i >= 0; i--) 4368 { 4369 if (fmt[i] == 'e') 4370 scan_paradoxical_subregs (XEXP (x, i)(((x)->u.fld[i]).rt_rtx)); 4371 else if (fmt[i] == 'E') 4372 { 4373 int j; 4374 for (j = XVECLEN (x, i)(((((x)->u.fld[i]).rt_rtvec))->num_elem) - 1; j >= 0; j--) 4375 scan_paradoxical_subregs (XVECEXP (x, i, j)(((((x)->u.fld[i]).rt_rtvec))->elem[j])); 4376 } 4377 } 4378} 4379 4380/* *OP_PTR and *OTHER_PTR are two operands to a conceptual reload. 4381 If *OP_PTR is a paradoxical subreg, try to remove that subreg 4382 and apply the corresponding narrowing subreg to *OTHER_PTR. 4383 Return true if the operands were changed, false otherwise. */ 4384 4385static bool 4386strip_paradoxical_subreg (rtx *op_ptr, rtx *other_ptr) 4387{ 4388 rtx op, inner, other, tem; 4389 4390 op = *op_ptr; 4391 if (!paradoxical_subreg_p (op)) 4392 return false; 4393 inner = SUBREG_REG (op)(((op)->u.fld[0]).rt_rtx); 4394 4395 other = *other_ptr; 4396 tem = gen_lowpart_common (GET_MODE (inner)((machine_mode) (inner)->mode), other); 4397 if (!tem) 4398 return false; 4399 4400 /* If the lowpart operation turned a hard register into a subreg, 4401 rather than simplifying it to another hard register, then the 4402 mode change cannot be properly represented. For example, OTHER 4403 might be valid in its current mode, but not in the new one. */ 4404 if (GET_CODE (tem)((enum rtx_code) (tem)->code) == SUBREG 4405 && REG_P (other)(((enum rtx_code) (other)->code) == REG) 4406 && HARD_REGISTER_P (other)((((rhs_regno(other))) < 76))) 4407 return false; 4408 4409 *op_ptr = inner; 4410 *other_ptr = tem; 4411 return true; 4412} 4413
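strip_paradoxical_subreg's doc comment describes the rewrite abstractly: drop the widening subreg from one operand and apply the matching narrowing to the other. A toy model of that exchange, using plain bit widths in place of machine modes (hypothetical representation, not RTL):

#include <stdio.h>

/* Toy operand: a register accessed at some width; outer_bits > inner_bits
   models a paradoxical subreg. */
struct operand { const char *reg; int inner_bits; int outer_bits; };

/* Mirror of the rewrite above: if *op is paradoxical, access it at its
   natural width and narrow *other to match. */
static int
strip_paradoxical (struct operand *op, struct operand *other)
{
  if (op->outer_bits <= op->inner_bits)
    return 0;                          /* not a paradoxical subreg */
  other->outer_bits = op->inner_bits;  /* narrowing subreg on OTHER */
  op->outer_bits = op->inner_bits;     /* plain reg again */
  return 1;
}

int main (void)
{
  struct operand op = { "pseudo100", 32, 64 };  /* (subreg:DI (reg:SI 100) 0) */
  struct operand other = { "hard5", 64, 64 };   /* (reg:DI 5) */
  if (strip_paradoxical (&op, &other))
    printf ("%s at %d bits; %s narrowed to %d bits\n",
            op.reg, op.outer_bits, other.reg, other.outer_bits);
  return 0;
}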
4414/* A subroutine of reload_as_needed. If INSN has a REG_EH_REGION note, 4415 examine all of the reload insns between PREV and NEXT exclusive, and 4416 annotate all that may trap. */ 4417 4418static void 4419fixup_eh_region_note (rtx_insn *insn, rtx_insn *prev, rtx_insn *next) 4420{ 4421 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX(rtx) 0); 4422 if (note == NULLnullptr) 4423 return; 4424 if (!insn_could_throw_p (insn)) 4425 remove_note (insn, note); 4426 copy_reg_eh_region_note_forward (note, NEXT_INSN (prev), next); 4427} 4428 4429/* Reload pseudo-registers into hard regs around each insn as needed. 4430 Additional register load insns are output before the insn that needs it 4431 and perhaps store insns after insns that modify the reloaded pseudo reg. 4432 4433 reg_last_reload_reg and reg_reloaded_contents keep track of 4434 which registers are already available in reload registers. 4435 We update these for the reloads that we perform, 4436 as the insns are scanned. */ 4437 4438static void 4439reload_as_needed (int live_known) 4440{ 4441 class insn_chain *chain; 4442#if AUTO_INC_DEC0 4443 int i; 4444#endif 4445 rtx_note *marker; 4446 4447 memset (spill_reg_rtx, 0, sizeof spill_reg_rtx); 4448 memset (spill_reg_store, 0, sizeof spill_reg_store); 4449 reg_last_reload_reg = XCNEWVEC (rtx, max_regno)((rtx *) xcalloc ((max_regno), sizeof (rtx))); 4450 INIT_REG_SET (&reg_has_output_reload)bitmap_initialize (&reg_has_output_reload, &reg_obstack
)
; 4451 CLEAR_HARD_REG_SET (reg_reloaded_valid); 4452 4453 set_initial_elim_offsets (); 4454 4455 /* Generate a marker insn that we will move around. */ 4456 marker = emit_note (NOTE_INSN_DELETED); 4457 unlink_insn_chain (marker, marker); 4458 4459 for (chain = reload_insn_chain; chain; chain = chain->next) 4460 { 4461 rtx_insn *prev = 0; 4462 rtx_insn *insn = chain->insn; 4463 rtx_insn *old_next = NEXT_INSN (insn); 4464#if AUTO_INC_DEC0 4465 rtx_insn *old_prev = PREV_INSN (insn); 4466#endif 4467 4468 if (will_delete_init_insn_p (insn)) 4469 continue; 4470 4471 /* If we pass a label, copy the offsets from the label information 4472 into the current offsets of each elimination. */ 4473 if (LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL)) 4474 set_offsets_for_label (insn); 4475 4476 else if (INSN_P (insn)(((((enum rtx_code) (insn)->code) == INSN) || (((enum rtx_code
) (insn)->code) == JUMP_INSN) || (((enum rtx_code) (insn)->
code) == CALL_INSN)) || (((enum rtx_code) (insn)->code) ==
DEBUG_INSN))
) 4477 { 4478 regset_head regs_to_forget; 4479 INIT_REG_SET (&regs_to_forget)bitmap_initialize (&regs_to_forget, &reg_obstack); 4480 note_stores (insn, forget_old_reloads_1, &regs_to_forget); 4481 4482 /* If this is a USE and CLOBBER of a MEM, ensure that any 4483 references to eliminable registers have been removed. */ 4484 4485 if ((GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == USE 4486 || GET_CODE (PATTERN (insn))((enum rtx_code) (PATTERN (insn))->code) == CLOBBER) 4487 && MEM_P (XEXP (PATTERN (insn), 0))(((enum rtx_code) ((((PATTERN (insn))->u.fld[0]).rt_rtx))->
code) == MEM)
) 4488 XEXP (XEXP (PATTERN (insn), 0), 0)((((((PATTERN (insn))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx
)
4489 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0)((((((PATTERN (insn))->u.fld[0]).rt_rtx))->u.fld[0]).rt_rtx
)
, 4490 GET_MODE (XEXP (PATTERN (insn), 0))((machine_mode) ((((PATTERN (insn))->u.fld[0]).rt_rtx))->
mode)
, 4491 NULL_RTX(rtx) 0); 4492 4493 /* If we need to do register elimination processing, do so. 4494 This might delete the insn, in which case we are done. */ 4495 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim) 4496 { 4497 eliminate_regs_in_insn (insn, 1); 4498 if (NOTE_P (insn)(((enum rtx_code) (insn)->code) == NOTE)) 4499 { 4500 update_eliminable_offsets (); 4501 CLEAR_REG_SET (&regs_to_forget)bitmap_clear (&regs_to_forget); 4502 continue; 4503 } 4504 } 4505 4506 /* If need_elim is nonzero but need_reload is zero, one might think 4507 that we could simply set n_reloads to 0. However, find_reloads 4508 could have done some manipulation of the insn (such as swapping 4509 commutative operands), and these manipulations are lost during 4510 the first pass for every insn that needs register elimination. 4511 So the actions of find_reloads must be redone here. */ 4512 4513 if (! chain->need_elim && ! chain->need_reload 4514 && ! chain->need_operand_change) 4515 n_reloads = 0; 4516 /* First find the pseudo regs that must be reloaded for this insn. 4517 This info is returned in the tables reload_... (see reload.h). 4518 Also modify the body of INSN by substituting RELOAD 4519 rtx's for those pseudo regs. */ 4520 else 4521 { 4522 CLEAR_REG_SET (&reg_has_output_reload)bitmap_clear (&reg_has_output_reload); 4523 CLEAR_HARD_REG_SET (reg_is_output_reload); 4524 4525 find_reloads (insn, 1, spill_indirect_levels(this_target_reload->x_spill_indirect_levels), live_known, 4526 spill_reg_order); 4527 } 4528 4529 if (n_reloads > 0) 4530 { 4531 rtx_insn *next = NEXT_INSN (insn); 4532 4533 /* ??? PREV can get deleted by reload inheritance. 4534 Work around this by emitting a marker note. */ 4535 prev = PREV_INSN (insn); 4536 reorder_insns_nobb (marker, marker, prev); 4537 4538 /* Now compute which reload regs to reload them into. Perhaps 4539 reusing reload regs from previous insns, or else output 4540 load insns to reload them. Maybe output store insns too. 4541 Record the choices of reload reg in reload_reg_rtx. */ 4542 choose_reload_regs (chain); 4543 4544 /* Generate the insns to reload operands into or out of 4545 their reload regs. */ 4546 emit_reload_insns (chain); 4547 4548 /* Substitute the chosen reload regs from reload_reg_rtx 4549 into the insn's body (or perhaps into the bodies of other 4550 load and store insn that we just made for reloading 4551 and that we moved the structure into). */ 4552 subst_reloads (insn); 4553 4554 prev = PREV_INSN (marker); 4555 unlink_insn_chain (marker, marker); 4556 4557 /* Adjust the exception region notes for loads and stores. */ 4558 if (cfun(cfun + 0)->can_throw_non_call_exceptions && !CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN)) 4559 fixup_eh_region_note (insn, prev, next); 4560 4561 /* Adjust the location of REG_ARGS_SIZE. */ 4562 rtx p = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX(rtx) 0); 4563 if (p) 4564 { 4565 remove_note (insn, p); 4566 fixup_args_size_notes (prev, PREV_INSN (next), 4567 get_args_size (p)); 4568 } 4569 4570 /* If this was an ASM, make sure that all the reload insns 4571 we have generated are valid. If not, give an error 4572 and delete them. */ 4573 if (asm_noperands (PATTERN (insn)) >= 0) 4574 for (rtx_insn *p = NEXT_INSN (prev); 4575 p != next; 4576 p = NEXT_INSN (p)) 4577 if (p != insn && INSN_P (p)(((((enum rtx_code) (p)->code) == INSN) || (((enum rtx_code
) (p)->code) == JUMP_INSN) || (((enum rtx_code) (p)->code
) == CALL_INSN)) || (((enum rtx_code) (p)->code) == DEBUG_INSN
))
4578 && GET_CODE (PATTERN (p))((enum rtx_code) (PATTERN (p))->code) != USE 4579 && (recog_memoized (p) < 0 4580 || (extract_insn (p), 4581 !(constrain_operands (1, 4582 get_enabled_alternatives (p)))))) 4583 { 4584 error_for_asm (insn, 4585 "%<asm%> operand requires " 4586 "impossible reload"); 4587 delete_insn (p); 4588 } 4589 } 4590 4591 if (num_eliminable && chain->need_elim) 4592 update_eliminable_offsets (); 4593 4594 /* Any previously reloaded spilled pseudo reg, stored in this insn, 4595 is no longer validly lying around to save a future reload. 4596 Note that this does not detect pseudos that were reloaded 4597 for this insn in order to be stored in 4598 (obeying register constraints). That is correct; such reload 4599 registers ARE still valid. */ 4600 forget_marked_reloads (&regs_to_forget); 4601 CLEAR_REG_SET (&regs_to_forget)bitmap_clear (&regs_to_forget); 4602 4603 /* There may have been CLOBBER insns placed after INSN. So scan 4604 between INSN and NEXT and use them to forget old reloads. */ 4605 for (rtx_insn *x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x)) 4606 if (NONJUMP_INSN_P (x)(((enum rtx_code) (x)->code) == INSN) && GET_CODE (PATTERN (x))((enum rtx_code) (PATTERN (x))->code) == CLOBBER) 4607 note_stores (x, forget_old_reloads_1, NULLnullptr); 4608 4609#if AUTO_INC_DEC0 4610 /* Likewise for regs altered by auto-increment in this insn. 4611 REG_INC notes have been changed by reloading: 4612 find_reloads_address_1 records substitutions for them, 4613 which have been performed by subst_reloads above. */ 4614 for (i = n_reloads - 1; i >= 0; i--) 4615 { 4616 rtx in_reg = rld[i].in_reg; 4617 if (in_reg) 4618 { 4619 enum rtx_code code = GET_CODE (in_reg)((enum rtx_code) (in_reg)->code); 4620 /* PRE_INC / PRE_DEC will have the reload register ending up 4621 with the same value as the stack slot, but that doesn't 4622 hold true for POST_INC / POST_DEC. Either we have to 4623 convert the memory access to a true POST_INC / POST_DEC, 4624 or we can't use the reload register for inheritance. */ 4625 if ((code == POST_INC || code == POST_DEC) 4626 && TEST_HARD_REG_BIT (reg_reloaded_valid, 4627 REGNO (rld[i].reg_rtx)(rhs_regno(rld[i].reg_rtx))) 4628 /* Make sure it is the inc/dec pseudo, and not 4629 some other (e.g. output operand) pseudo. */ 4630 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)(rhs_regno(rld[i].reg_rtx))] 4631 == REGNO (XEXP (in_reg, 0))(rhs_regno((((in_reg)->u.fld[0]).rt_rtx))))) 4632 4633 { 4634 rtx reload_reg = rld[i].reg_rtx; 4635 machine_mode mode = GET_MODE (reload_reg)((machine_mode) (reload_reg)->mode); 4636 int n = 0; 4637 rtx_insn *p; 4638 4639 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p)) 4640 { 4641 /* We really want to ignore REG_INC notes here, so 4642 use PATTERN (p) as argument to reg_set_p . */ 4643 if (reg_set_p (reload_reg, PATTERN (p))) 4644 break; 4645 n = count_occurrences (PATTERN (p), reload_reg, 0); 4646 if (! n) 4647 continue; 4648 if (n == 1) 4649 { 4650 rtx replace_reg 4651 = gen_rtx_fmt_e (code, mode, reload_reg)gen_rtx_fmt_e_stat ((code), (mode), (reload_reg) ); 4652 4653 validate_replace_rtx_group (reload_reg, 4654 replace_reg, p); 4655 n = verify_changes (0); 4656 4657 /* We must also verify that the constraints 4658 are met after the replacement. Make sure 4659 extract_insn is only called for an insn 4660 where the replacements were found to be 4661 valid so far. 
*/ 4662 if (n) 4663 { 4664 extract_insn (p); 4665 n = constrain_operands (1, 4666 get_enabled_alternatives (p)); 4667 } 4668 4669 /* If the constraints were not met, then 4670 undo the replacement, else confirm it. */ 4671 if (!n) 4672 cancel_changes (0); 4673 else 4674 confirm_change_group (); 4675 } 4676 break; 4677 } 4678 if (n == 1) 4679 { 4680 add_reg_note (p, REG_INC, reload_reg); 4681 /* Mark this as having an output reload so that the 4682 REG_INC processing code below won't invalidate 4683 the reload for inheritance. */ 4684 SET_HARD_REG_BIT (reg_is_output_reload, 4685 REGNO (reload_reg)(rhs_regno(reload_reg))); 4686 SET_REGNO_REG_SET (&reg_has_output_reload,bitmap_set_bit (&reg_has_output_reload, (rhs_regno((((in_reg
)->u.fld[0]).rt_rtx))))
4687 REGNO (XEXP (in_reg, 0)))bitmap_set_bit (&reg_has_output_reload, (rhs_regno((((in_reg
)->u.fld[0]).rt_rtx))))
; 4688 } 4689 else 4690 forget_old_reloads_1 (XEXP (in_reg, 0)(((in_reg)->u.fld[0]).rt_rtx), NULL_RTX(rtx) 0, 4691 NULLnullptr); 4692 } 4693 else if ((code == PRE_INC || code == PRE_DEC) 4694 && TEST_HARD_REG_BIT (reg_reloaded_valid, 4695 REGNO (rld[i].reg_rtx)(rhs_regno(rld[i].reg_rtx))) 4696 /* Make sure it is the inc/dec pseudo, and not 4697 some other (e.g. output operand) pseudo. */ 4698 && ((unsigned) reg_reloaded_contents[REGNO (rld[i].reg_rtx)(rhs_regno(rld[i].reg_rtx))] 4699 == REGNO (XEXP (in_reg, 0))(rhs_regno((((in_reg)->u.fld[0]).rt_rtx))))) 4700 { 4701 SET_HARD_REG_BIT (reg_is_output_reload, 4702 REGNO (rld[i].reg_rtx)(rhs_regno(rld[i].reg_rtx))); 4703 SET_REGNO_REG_SET (&reg_has_output_reload,bitmap_set_bit (&reg_has_output_reload, (rhs_regno((((in_reg
)->u.fld[0]).rt_rtx))))
4704 REGNO (XEXP (in_reg, 0)))bitmap_set_bit (&reg_has_output_reload, (rhs_regno((((in_reg
)->u.fld[0]).rt_rtx))))
; 4705 } 4706 else if (code == PRE_INC || code == PRE_DEC 4707 || code == POST_INC || code == POST_DEC) 4708 { 4709 int in_regno = REGNO (XEXP (in_reg, 0))(rhs_regno((((in_reg)->u.fld[0]).rt_rtx))); 4710 4711 if (reg_last_reload_reg[in_regno] != NULL_RTX(rtx) 0) 4712 { 4713 int in_hard_regno; 4714 bool forget_p = true; 4715 4716 in_hard_regno = REGNO (reg_last_reload_reg[in_regno])(rhs_regno(reg_last_reload_reg[in_regno])); 4717 if (TEST_HARD_REG_BIT (reg_reloaded_valid, 4718 in_hard_regno)) 4719 { 4720 for (rtx_insn *x = (old_prev ? 4721 NEXT_INSN (old_prev) : insn); 4722 x != old_next; 4723 x = NEXT_INSN (x)) 4724 if (x == reg_reloaded_insn[in_hard_regno]) 4725 { 4726 forget_p = false; 4727 break; 4728 } 4729 } 4730 /* If for some reasons, we didn't set up 4731 reg_last_reload_reg in this insn, 4732 invalidate inheritance from previous 4733 insns for the incremented/decremented 4734 register. Such registers will be not in 4735 reg_has_output_reload. Invalidate it 4736 also if the corresponding element in 4737 reg_reloaded_insn is also 4738 invalidated. */ 4739 if (forget_p) 4740 forget_old_reloads_1 (XEXP (in_reg, 0)(((in_reg)->u.fld[0]).rt_rtx), 4741 NULL_RTX(rtx) 0, NULLnullptr); 4742 } 4743 } 4744 } 4745 } 4746 /* If a pseudo that got a hard register is auto-incremented, 4747 we must purge records of copying it into pseudos without 4748 hard registers. */ 4749 for (rtx x = REG_NOTES (insn)(((insn)->u.fld[6]).rt_rtx); x; x = XEXP (x, 1)(((x)->u.fld[1]).rt_rtx)) 4750 if (REG_NOTE_KIND (x)((enum reg_note) ((machine_mode) (x)->mode)) == REG_INC) 4751 { 4752 /* See if this pseudo reg was reloaded in this insn. 4753 If so, its last-reload info is still valid 4754 because it is based on this insn's reload. */ 4755 for (i = 0; i < n_reloads; i++) 4756 if (rld[i].out == XEXP (x, 0)(((x)->u.fld[0]).rt_rtx)) 4757 break; 4758 4759 if (i == n_reloads) 4760 forget_old_reloads_1 (XEXP (x, 0)(((x)->u.fld[0]).rt_rtx), NULL_RTX(rtx) 0, NULLnullptr); 4761 } 4762#endif 4763 } 4764 /* A reload reg's contents are unknown after a label. */ 4765 if (LABEL_P (insn)(((enum rtx_code) (insn)->code) == CODE_LABEL)) 4766 CLEAR_HARD_REG_SET (reg_reloaded_valid); 4767 4768 /* Don't assume a reload reg is still good after a call insn 4769 if it is a call-used reg, or if it contains a value that will 4770 be partially clobbered by the call. */ 4771 else if (CALL_P (insn)(((enum rtx_code) (insn)->code) == CALL_INSN)) 4772 { 4773 reg_reloaded_valid 4774 &= ~insn_callee_abi (insn).full_and_partial_reg_clobbers (); 4775 4776 /* If this is a call to a setjmp-type function, we must not 4777 reuse any reload reg contents across the call; that will 4778 just be clobbered by other uses of the register in later 4779 code, before the longjmp. */ 4780 if (find_reg_note (insn, REG_SETJMP, NULL_RTX(rtx) 0)) 4781 CLEAR_HARD_REG_SET (reg_reloaded_valid); 4782 } 4783 } 4784 4785 /* Clean up. */ 4786 free (reg_last_reload_reg); 4787 CLEAR_REG_SET (&reg_has_output_reload)bitmap_clear (&reg_has_output_reload); 4788} 4789 4790/* Discard all record of any value reloaded from X, 4791 or reloaded in X from someplace else; 4792 unless X is an output reload reg of the current insn. 4793 4794 X may be a hard reg (the reload reg) 4795 or it may be a pseudo reg that was reloaded from. 4796 4797 When DATA is non-NULL just mark the registers in regset 4798 to be forgotten later. 
4799
4800static void
4801forget_old_reloads_1 (rtx x, const_rtx, void *data)
4802{
4803  unsigned int regno;
4804  unsigned int nr;
4805  regset regs = (regset) data;
4806
4807  /* note_stores does give us subregs of hard regs,
4808     and subreg_regno_offset requires a hard reg.  */
4809  while (GET_CODE (x) == SUBREG)
4810    {
4811      /* We ignore the subreg offset when calculating the regno,
4812	 because we are using the entire underlying hard register
4813	 below.  */
4814      x = SUBREG_REG (x);
4815    }
4816
4817  if (!REG_P (x))
4818    return;
4819
4820  regno = REGNO (x);
4821
4822  if (regno >= FIRST_PSEUDO_REGISTER)
4823    nr = 1;
4824  else
4825    {
4826      unsigned int i;
4827
4828      nr = REG_NREGS (x);
4829      /* Storing into a spilled-reg invalidates its contents.
4830	 This can happen if a block-local pseudo is allocated to that reg
4831	 and it wasn't spilled because this block's total need is 0.
4832	 Then some insn might have an optional reload and use this reg.  */
4833      if (!regs)
4834	for (i = 0; i < nr; i++)
4835	  /* But don't do this if the reg actually serves as an output
4836	     reload reg in the current instruction.  */
4837	  if (n_reloads == 0
4838	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4839	    {
4840	      CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4841	      spill_reg_store[regno + i] = 0;
4842	    }
4843    }
4844
4845  if (regs)
4846    while (nr-- > 0)
4847      SET_REGNO_REG_SET (regs, regno + nr);
4848  else
4849    {
4850      /* Since the value of X has changed,
4851	 forget any value previously copied from it.  */
4852
4853      while (nr-- > 0)
4854	/* But don't forget a copy if this is the output reload
4855	   that establishes the copy's validity.  */
4856	if (n_reloads == 0
4857	    || !REGNO_REG_SET_P (&reg_has_output_reload, regno + nr))
4858	  reg_last_reload_reg[regno + nr] = 0;
4859    }
4860}
4861
4862/* Forget the reloads marked in regset by the previous function.  */
4863static void
4864forget_marked_reloads (regset regs)
4865{
4866  unsigned int reg;
4867  reg_set_iterator rsi;
4868  EXECUTE_IF_SET_IN_REG_SET (regs, 0, reg, rsi)
4869    {
4870      if (reg < FIRST_PSEUDO_REGISTER
4871	  /* But don't do this if the reg actually serves as an output
4872	     reload reg in the current instruction.  */
4873	  && (n_reloads == 0
4874	      || ! TEST_HARD_REG_BIT (reg_is_output_reload, reg)))
4875	{
4876	  CLEAR_HARD_REG_BIT (reg_reloaded_valid, reg);
4877	  spill_reg_store[reg] = 0;
4878	}
4879      if (n_reloads == 0
4880	  || !REGNO_REG_SET_P (&reg_has_output_reload, reg))
4881	reg_last_reload_reg[reg] = 0;
4882    }
4883}
4884
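
Taken together, forget_old_reloads_1 and forget_marked_reloads implement deferred invalidation: with a NULL DATA the callback drops a register's tracked state immediately, and when handed a regset it only records the register numbers so the caller can batch the forgetting after the whole insn has been scanned. A minimal standalone sketch of that pattern (plain C, all names hypothetical, with a 64-bit mask standing in for a regset):

#include <stdint.h>
#include <stdio.h>

static uint64_t valid_mask = ~0ull;   /* which regs still hold known values */

/* Immediate form: drop REGNO now.  Deferred form: just record it in *PENDING.  */
static void forget_reg (unsigned regno, uint64_t *pending)
{
  if (pending)
    *pending |= 1ull << regno;        /* mark for later */
  else
    valid_mask &= ~(1ull << regno);   /* invalidate immediately */
}

static void forget_marked (uint64_t pending)
{
  valid_mask &= ~pending;             /* batch invalidation */
}

int main (void)
{
  uint64_t pending = 0;
  forget_reg (3, &pending);           /* deferred */
  forget_reg (7, NULL);               /* immediate */
  forget_marked (pending);
  printf ("valid_mask = %#llx\n", (unsigned long long) valid_mask);
  return 0;
}

Deferring matters when one scan both consumes and updates the tracking state: mutating mid-walk could invalidate entries the rest of the walk still needs.
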
4885/* The following HARD_REG_SETs indicate when each hard register is
4886   used for a reload of various parts of the current insn.  */
4887
4888/* If reg is unavailable for all reloads.  */
4889static HARD_REG_SET reload_reg_unavailable;
4890/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
4891static HARD_REG_SET reload_reg_used;
4892/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
4893static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4894/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
4895static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4896/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
4897static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4898/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
4899static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4900/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
4901static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4902/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
4903static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4904/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
4905static HARD_REG_SET reload_reg_used_in_op_addr;
4906/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
4907static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4908/* If reg is in use for a RELOAD_FOR_INSN reload.  */
4909static HARD_REG_SET reload_reg_used_in_insn;
4910/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
4911static HARD_REG_SET reload_reg_used_in_other_addr;
4912
4913/* If reg is in use as a reload reg for any sort of reload.  */
4914static HARD_REG_SET reload_reg_used_at_all;
4915
4916/* If reg is used as an inherited reload.  We just mark the first register
4917   in the group.  */
4918static HARD_REG_SET reload_reg_used_for_inherit;
4919
4920/* Records which hard regs are used in any way, either as explicit use or
4921   by being allocated to a pseudo during any point of the current insn.  */
4922static HARD_REG_SET reg_used_in_insn;
4923
4924/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4925   TYPE.  MODE is used to indicate how many consecutive regs are
4926   actually used.  */
4927
4928static void
4929mark_reload_reg_in_use (unsigned int regno, int opnum, enum reload_type type,
4930			machine_mode mode)
4931{
4932  switch (type)
4933    {
4934    case RELOAD_OTHER:
4935      add_to_hard_reg_set (&reload_reg_used, mode, regno);
4936      break;
4937
4938    case RELOAD_FOR_INPUT_ADDRESS:
4939      add_to_hard_reg_set (&reload_reg_used_in_input_addr[opnum], mode, regno);
4940      break;
4941
4942    case RELOAD_FOR_INPADDR_ADDRESS:
4943      add_to_hard_reg_set (&reload_reg_used_in_inpaddr_addr[opnum], mode, regno);
4944      break;
4945
4946    case RELOAD_FOR_OUTPUT_ADDRESS:
4947      add_to_hard_reg_set (&reload_reg_used_in_output_addr[opnum], mode, regno);
4948      break;
4949
4950    case RELOAD_FOR_OUTADDR_ADDRESS:
4951      add_to_hard_reg_set (&reload_reg_used_in_outaddr_addr[opnum], mode, regno);
4952      break;
4953
4954    case RELOAD_FOR_OPERAND_ADDRESS:
4955      add_to_hard_reg_set (&reload_reg_used_in_op_addr, mode, regno);
4956      break;
4957
4958    case RELOAD_FOR_OPADDR_ADDR:
4959      add_to_hard_reg_set (&reload_reg_used_in_op_addr_reload, mode, regno);
4960      break;
4961
4962    case RELOAD_FOR_OTHER_ADDRESS:
4963      add_to_hard_reg_set (&reload_reg_used_in_other_addr, mode, regno);
4964      break;
4965
4966    case RELOAD_FOR_INPUT:
4967      add_to_hard_reg_set (&reload_reg_used_in_input[opnum], mode, regno);
4968      break;
4969
4970    case RELOAD_FOR_OUTPUT:
4971      add_to_hard_reg_set (&reload_reg_used_in_output[opnum], mode, regno);
4972      break;
4973
4974    case RELOAD_FOR_INSN:
4975      add_to_hard_reg_set (&reload_reg_used_in_insn, mode, regno);
4976      break;
4977    }
4978
4979  add_to_hard_reg_set (&reload_reg_used_at_all, mode, regno);
4980}
4981
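
mark_reload_reg_in_use dispatches on the reload type to choose one tracking set, then records the register run both there and in the aggregate reload_reg_used_at_all set, so later queries can cheaply ask either "used for this kind of reload?" or "used at all?". A self-contained sketch of that double bookkeeping (hypothetical names; fixed-size bool arrays stand in for HARD_REG_SET):

#include <stdbool.h>
#include <stdio.h>

#define NREGS 64
enum use_kind { USE_INPUT, USE_OUTPUT, NUM_KINDS };

static bool used_in[NUM_KINDS][NREGS];  /* per-kind usage */
static bool used_at_all[NREGS];         /* aggregate usage */

/* Mark NREGS_USED consecutive registers starting at REGNO for KIND.  */
static void mark_in_use (unsigned regno, unsigned nregs_used, enum use_kind kind)
{
  for (unsigned r = regno; r < regno + nregs_used; r++)
    {
      used_in[kind][r] = true;   /* the kind-specific set */
      used_at_all[r] = true;     /* and the aggregate set */
    }
}

int main (void)
{
  mark_in_use (4, 2, USE_INPUT);   /* e.g. a two-register value in regs 4-5 */
  printf ("reg 5 used for input: %d, used at all: %d\n",
	  used_in[USE_INPUT][5], used_at_all[5]);
  return 0;
}

The clearing path below is the inverse operation, complicated by the fact that several reloads may share a register.
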
4982/* Similarly, but show REGNO is no longer in use for a reload.  */
4983
4984static void
4985clear_reload_reg_in_use (unsigned int regno, int opnum,
4986			 enum reload_type type, machine_mode mode)
4987{
4988  unsigned int nregs = hard_regno_nregs (regno, mode);
4989  unsigned int start_regno, end_regno, r;
4990  int i;
4991  /* A complication is that for some reload types, inheritance might
4992     allow multiple reloads of the same type to share a reload register.
4993     We set check_opnum if we have to check only reloads with the same
4994     operand number, and check_any if we have to check all reloads.  */
4995  int check_opnum = 0;
4996  int check_any = 0;
4997  HARD_REG_SET *used_in_set;
4998
4999  switch (type)
5000    {
5001    case RELOAD_OTHER:
5002      used_in_set = &reload_reg_used;
5003      break;
5004
5005    case RELOAD_FOR_INPUT_ADDRESS:
5006      used_in_set = &reload_reg_used_in_input_addr[opnum];
5007      break;
5008
5009    case RELOAD_FOR_INPADDR_ADDRESS:
5010      check_opnum = 1;
5011      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
5012      break;
5013
5014    case RELOAD_FOR_OUTPUT_ADDRESS:
5015      used_in_set = &reload_reg_used_in_output_addr[opnum];
5016      break;
5017
5018    case RELOAD_FOR_OUTADDR_ADDRESS:
5019      check_opnum = 1;
5020      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
5021      break;
5022
5023    case RELOAD_FOR_OPERAND_ADDRESS:
5024      used_in_set = &reload_reg_used_in_op_addr;
5025      break;
5026
5027    case RELOAD_FOR_OPADDR_ADDR:
5028      check_any = 1;
5029      used_in_set = &reload_reg_used_in_op_addr_reload;
5030      break;
5031
5032    case RELOAD_FOR_OTHER_ADDRESS:
5033      used_in_set = &reload_reg_used_in_other_addr;
5034      check_any = 1;
5035      break;
5036
5037    case RELOAD_FOR_INPUT:
5038      used_in_set = &reload_reg_used_in_input[opnum];
5039      break;
5040
5041    case RELOAD_FOR_OUTPUT:
5042      used_in_set = &reload_reg_used_in_output[opnum];
5043      break;
5044
5045    case RELOAD_FOR_INSN:
5046      used_in_set = &reload_reg_used_in_insn;
5047      break;
5048    default:
5049      gcc_unreachable ();
5050    }
5051  /* We resolve conflicts with remaining reloads of the same type by
5052     excluding the intervals of reload registers used by them from the
5053     interval of freed reload registers.  Since we only keep track of
5054     one set of interval bounds, we might have to exclude somewhat
5055     more than what would be necessary if we used a HARD_REG_SET here.
5056     But this should only happen very infrequently, so there should
5057     be no reason to worry about it.  */
5058
5059  start_regno = regno;
5060  end_regno = regno + nregs;
5061  if (check_opnum || check_any)
5062    {
5063      for (i = n_reloads - 1; i >= 0; i--)
5064	{
5065	  if (rld[i].when_needed == type
5066	      && (check_any || rld[i].opnum == opnum)
5067	      && rld[i].reg_rtx)
5068	    {
5069	      unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
5070	      unsigned int conflict_end
5071		= end_hard_regno (rld[i].mode, conflict_start);
5072
5073	      /* If there is an overlap with the first to-be-freed register,
5074		 adjust the interval start.  */
5075	      if (conflict_start <= start_regno && conflict_end > start_regno)
5076		start_regno = conflict_end;
5077	      /* Otherwise, if there is a conflict with one of the other
5078		 to-be-freed registers, adjust the interval end.  */
5079	      if (conflict_start > start_regno && conflict_start < end_regno)
5080		end_regno = conflict_start;
5081	    }
5082	}
5083    }
5084
5085  for (r = start_regno; r < end_regno; r++)
5086    CLEAR_HARD_REG_BIT (*used_in_set, r);
5087}
5088
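
The interval trick documented inside clear_reload_reg_in_use deserves a standalone illustration: rather than computing the exact set of registers to free, the code keeps a single [start, end) range and shrinks it around each still-live conflicting interval, deliberately accepting that it may free less than a precise bitset computation would. A sketch (hypothetical names):

#include <stdio.h>

/* Shrink the to-be-freed range [*start, *end) so it excludes the
   still-busy range [busy_start, busy_end).  Because only one pair of
   bounds is kept, this can under-free, which is safe: a register left
   marked busy is merely unavailable for reuse a little longer.  */
static void exclude_interval (unsigned *start, unsigned *end,
			      unsigned busy_start, unsigned busy_end)
{
  if (busy_start <= *start && busy_end > *start)
    *start = busy_end;              /* overlap at the front: move start up */
  if (busy_start > *start && busy_start < *end)
    *end = busy_start;              /* overlap further in: cut the tail */
}

int main (void)
{
  unsigned start = 8, end = 16;     /* want to free regs 8..15 */
  exclude_interval (&start, &end, 6, 10);   /* regs 6..9 still busy */
  exclude_interval (&start, &end, 14, 18);  /* regs 14..17 still busy */
  printf ("actually freed: [%u, %u)\n", start, end);  /* [10, 14) */
  return 0;
}
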
5089/* 1 if reg REGNO is free as a reload reg for a reload of the sort
5090   specified by OPNUM and TYPE.  */
5091
5092static int
5093reload_reg_free_p (unsigned int regno, int opnum, enum reload_type type)
5094{
5095  int i;
5096
5097  /* In use for a RELOAD_OTHER means it's not available for anything.  */
5098  if (TEST_HARD_REG_BIT (reload_reg_used, regno)
5099      || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
5100    return 0;
5101
5102  switch (type)
5103    {
5104    case RELOAD_OTHER:
5105      /* In use for anything means we can't use it for RELOAD_OTHER.  */
5106      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
5107	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5108	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5109	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5110	return 0;
5111
5112      for (i = 0; i < reload_n_operands; i++)
5113	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5114	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5115	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5116	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5117	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5118	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5119	  return 0;
5120
5121      return 1;
5122
5123    case RELOAD_FOR_INPUT:
5124      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5125	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
5126	return 0;
5127
5128      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5129	return 0;
5130
5131      /* If it is used for some other input, can't use it.  */
5132      for (i = 0; i < reload_n_operands; i++)
5133	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5134	  return 0;
5135
5136      /* If it is used in a later operand's address, can't use it.  */
5137      for (i = opnum + 1; i < reload_n_operands; i++)
5138	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5139	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5140	  return 0;
5141
5142      return 1;
5143
5144    case RELOAD_FOR_INPUT_ADDRESS:
5145      /* Can't use a register if it is used for an input address for this
5146	 operand or used as an input in an earlier one.  */
5147      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
5148	  || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5149	return 0;
5150
5151      for (i = 0; i < opnum; i++)
5152	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5153	  return 0;
5154
5155      return 1;
5156
5157    case RELOAD_FOR_INPADDR_ADDRESS:
5158      /* Can't use a register if it is used for an input address
5159	 for this operand or used as an input in an earlier
5160	 one.  */
5161      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
5162	return 0;
5163
5164      for (i = 0; i < opnum; i++)
5165	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5166	  return 0;
5167
5168      return 1;
5169
5170    case RELOAD_FOR_OUTPUT_ADDRESS:
5171      /* Can't use a register if it is used for an output address for this
5172	 operand or used as an output in this or a later operand.  Note
5173	 that multiple output operands are emitted in reverse order, so
5174	 the conflicting ones are those with lower indices.  */
5175      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
5176	return 0;
5177
5178      for (i = 0; i <= opnum; i++)
5179	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5180	  return 0;
5181
5182      return 1;
5183
5184    case RELOAD_FOR_OUTADDR_ADDRESS:
5185      /* Can't use a register if it is used for an output address
5186	 for this operand or used as an output in this or a
5187	 later operand.  Note that multiple output operands are
5188	 emitted in reverse order, so the conflicting ones are
5189	 those with lower indices.  */
5190      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5191	return 0;
5192
5193      for (i = 0; i <= opnum; i++)
5194	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5195	  return 0;
5196
5197      return 1;
5198
5199    case RELOAD_FOR_OPERAND_ADDRESS:
5200      for (i = 0; i < reload_n_operands; i++)
5201	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5202	  return 0;
5203
5204      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5205	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5206
5207    case RELOAD_FOR_OPADDR_ADDR:
5208      for (i = 0; i < reload_n_operands; i++)
5209	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5210	  return 0;
5211
5212      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
5213
5214    case RELOAD_FOR_OUTPUT:
5215      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
5216	 outputs, or an operand address for this or an earlier output.
5217	 Note that multiple output operands are emitted in reverse order,
5218	 so the conflicting ones are those with higher indices.  */
5219      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
5220	return 0;
5221
5222      for (i = 0; i < reload_n_operands; i++)
5223	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5224	  return 0;
5225
5226      for (i = opnum; i < reload_n_operands; i++)
5227	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5228	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5229	  return 0;
5230
5231      return 1;
5232
5233    case RELOAD_FOR_INSN:
5234      for (i = 0; i < reload_n_operands; i++)
5235	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
5236	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5237	  return 0;
5238
5239      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5240	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
5241
5242    case RELOAD_FOR_OTHER_ADDRESS:
5243      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
5244
5245    default:
5246      gcc_unreachable ();
5247    }
5248}
5249
5250/* Return 1 if the value in reload reg REGNO, as used by the reload with
5251   the number RELOADNUM, is still available in REGNO at the end of the insn.
5252
5253   We can assume that the reload reg was already tested for availability
5254   at the time it is needed, and we should not check this again,
5255   in case the reg has already been marked in use.  */
5256
5257static int
5258reload_reg_reaches_end_p (unsigned int regno, int reloadnum)
5259{
5260  int opnum = rld[reloadnum].opnum;
5261  enum reload_type type = rld[reloadnum].when_needed;
5262  int i;
5263
5264  /* See if there is a reload with the same type for this operand, using
5265     the same register.  This case is not handled by the code below.  */
5266  for (i = reloadnum + 1; i < n_reloads; i++)
5267    {
5268      rtx reg;
5269
5270      if (rld[i].opnum != opnum || rld[i].when_needed != type)
5271	continue;
5272      reg = rld[i].reg_rtx;
5273      if (reg == NULL_RTX)
5274	continue;
5275      if (regno >= REGNO (reg) && regno < END_REGNO (reg))
5276	return 0;
5277    }
5278
5279  switch (type)
5280    {
5281    case RELOAD_OTHER:
5282      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
5283	 its value must reach the end.  */
5284      return 1;
5285
5286      /* If this use is for part of the insn,
5287	 its value reaches the end if no subsequent part uses the same
5288	 register.  Just like the above function, don't try to do this
5289	 with lots of fallthroughs.  */
5290
5291    case RELOAD_FOR_OTHER_ADDRESS:
5292      /* Here we check for everything else, since these don't conflict
5293	 with anything else and everything comes later.  */
5294
5295      for (i = 0; i < reload_n_operands; i++)
5296	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5297	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5298	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5299	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5300	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5301	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5302	  return 0;
5303
5304      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5305	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
5306	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5307	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5308
5309    case RELOAD_FOR_INPUT_ADDRESS:
5310    case RELOAD_FOR_INPADDR_ADDRESS:
5311      /* Similar, except that we check only for this and subsequent inputs
5312	 and the address of only subsequent inputs and we do not need
5313	 to check for RELOAD_OTHER objects since they are known not to
5314	 conflict.  */
5315
5316      for (i = opnum; i < reload_n_operands; i++)
5317	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5318	  return 0;
5319
5320      /* The reload register of a RELOAD_FOR_INPADDR_ADDRESS reload
5321	 could be killed if the register is also used by a reload of type
5322	 RELOAD_FOR_INPUT_ADDRESS, so check for that.  */
5323      if (type == RELOAD_FOR_INPADDR_ADDRESS
5324	  && TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
5325	return 0;
5326
5327      for (i = opnum + 1; i < reload_n_operands; i++)
5328	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5329	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5330	  return 0;
5331
5332      for (i = 0; i < reload_n_operands; i++)
5333	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5334	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5335	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5336	  return 0;
5337
5338      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5339	return 0;
5340
5341      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5342	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5343	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5344
5345    case RELOAD_FOR_INPUT:
5346      /* Similar to input address, except we start at the next operand for
5347	 both input and input address and we do not check for
5348	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5349	 would conflict.  */
5350
5351      for (i = opnum + 1; i < reload_n_operands; i++)
5352	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5353	    || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5354	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5355	  return 0;
5356
5357      /* ... fall through ...  */
5358
5359    case RELOAD_FOR_OPERAND_ADDRESS:
5360      /* Check outputs and their addresses.  */
5361
5362      for (i = 0; i < reload_n_operands; i++)
5363	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5364	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5365	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5366	  return 0;
5367
5368      return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
5369
5370    case RELOAD_FOR_OPADDR_ADDR:
5371      for (i = 0; i < reload_n_operands; i++)
5372	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5373	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5374	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5375	  return 0;
5376
5377      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5378	      && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5379	      && !TEST_HARD_REG_BIT (reload_reg_used, regno));
5380
5381    case RELOAD_FOR_INSN:
5382      /* These conflict with other outputs with RELOAD_OTHER.  So
5383	 we need only check for output addresses.  */
5384
5385      opnum = reload_n_operands;
5386
5387      /* fall through */
5388
5389    case RELOAD_FOR_OUTPUT:
5390    case RELOAD_FOR_OUTPUT_ADDRESS:
5391    case RELOAD_FOR_OUTADDR_ADDRESS:
5392      /* We already know these can't conflict with a later output.  So the
5393	 only things to check are later output addresses.
5394	 Note that multiple output operands are emitted in reverse order,
5395	 so the conflicting ones are those with lower indices.  */
5396      for (i = 0; i < opnum; i++)
5397	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5398	    || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5399	  return 0;
5400
5401      /* The reload register of a RELOAD_FOR_OUTADDR_ADDRESS reload
5402	 could be killed if the register is also used by a reload of type
5403	 RELOAD_FOR_OUTPUT_ADDRESS, so check for that.  */
5404      if (type == RELOAD_FOR_OUTADDR_ADDRESS
5405	  && TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
5406	return 0;
5407
5408      return 1;
5409
5410    default:
5411      gcc_unreachable ();
5412    }
5413}
5414
5415/* Like reload_reg_reaches_end_p, but check that the condition holds for
5416   every register in REG.  */
5417
5418static bool
5419reload_reg_rtx_reaches_end_p (rtx reg, int reloadnum)
5420{
5421  unsigned int i;
5422
5423  for (i = REGNO (reg); i < END_REGNO (reg); i++)
5424    if (!reload_reg_reaches_end_p (i, reloadnum))
5425      return false;
5426  return true;
5427}
5428
5429
5430/* Return whether R1 and R2 are uniquely chained: the value of one
5431   is used by the other, and that value is not used by any other
5432   reload for this insn.  This is used to partially undo the decision
5433   made in find_reloads when, in the case of multiple
5434   RELOAD_FOR_OPERAND_ADDRESS reloads, it converts all
5435   RELOAD_FOR_OPADDR_ADDR reloads into RELOAD_FOR_OPERAND_ADDRESS
5436   reloads.  This code tries to avoid the conflict created by that
5437   change.  It might be cleaner to explicitly keep track of which
5438   RELOAD_FOR_OPADDR_ADDR reload is associated with which
5439   RELOAD_FOR_OPERAND_ADDRESS reload, rather than to try to detect
5440   this after the fact.  */
5441static bool
5442reloads_unique_chain_p (int r1, int r2)
5443{
5444  int i;
5445
5446  /* We only check input reloads.  */
5447  if (! rld[r1].in || ! rld[r2].in)
5448    return false;
5449
5450  /* Avoid anything with output reloads.  */
5451  if (rld[r1].out || rld[r2].out)
5452    return false;
5453
5454  /* "chained" means one reload is a component of the other reload,
5455     not the same as the other reload.  */
5456  if (rld[r1].opnum != rld[r2].opnum
5457      || rtx_equal_p (rld[r1].in, rld[r2].in)
5458      || rld[r1].optional || rld[r2].optional
5459      || ! (reg_mentioned_p (rld[r1].in, rld[r2].in)
5460	    || reg_mentioned_p (rld[r2].in, rld[r1].in)))
5461    return false;
5462
5463  /* The following loop assumes that r1 is the reload that feeds r2.  */
5464  if (r1 > r2)
5465    std::swap (r1, r2);
5466
5467  for (i = 0; i < n_reloads; i ++)
5468    /* Look for input reloads that aren't our two.  */
5469    if (i != r1 && i != r2 && rld[i].in)
5470      {
5471	/* If our reload is mentioned at all, it isn't a simple chain.  */
5472	if (reg_mentioned_p (rld[r1].in, rld[i].in))
5473	  return false;
5474      }
5475  return true;
5476}
5477
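
reloads_unique_chain_p reduces to a small graph question: treating "reload I's input mentions reload J's input" as an edge, R1 and R2 are uniquely chained when one feeds the other and no third reload consumes the feeder's value. A toy version over an explicit adjacency matrix (all names hypothetical):

#include <stdbool.h>
#include <stdio.h>

/* Toy model: uses[i][j] means reload i's input mentions reload j's.  */
#define N 4
static bool uses[N][N];

/* R1 and R2 are uniquely chained if one feeds the other and no
   third reload mentions the feeder's value.  */
static bool unique_chain_p (int r1, int r2, int n)
{
  if (uses[r1][r2])            /* r2 feeds r1: swap so r1 is the feeder */
    { int t = r1; r1 = r2; r2 = t; }
  if (!uses[r2][r1])
    return false;              /* not chained at all */
  for (int i = 0; i < n; i++)
    if (i != r1 && i != r2 && uses[i][r1])
      return false;            /* someone else consumes r1's value */
  return true;
}

int main (void)
{
  uses[1][0] = true;           /* reload 1's input contains reload 0's */
  printf ("%d\n", unique_chain_p (0, 1, N));   /* 1 */
  uses[2][0] = true;           /* a third reload also uses it */
  printf ("%d\n", unique_chain_p (0, 1, N));   /* 0 */
  return 0;
}
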
5478/* Recursively change all occurrences of WHAT in *WHERE
5479   to REPL.  */
5480static void
5481substitute (rtx *where, const_rtx what, rtx repl)
5482{
5483  const char *fmt;
5484  int i;
5485  enum rtx_code code;
5486
5487  if (*where == 0)
5488    return;
5489
5490  if (*where == what || rtx_equal_p (*where, what))
5491    {
5492      /* Record the location of the changed rtx.  */
5493      substitute_stack.safe_push (where);
5494      *where = repl;
5495      return;
5496    }
5497
5498  code = GET_CODE (*where);
5499  fmt = GET_RTX_FORMAT (code);
5500  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5501    {
5502      if (fmt[i] == 'E')
5503	{
5504	  int j;
5505
5506	  for (j = XVECLEN (*where, i) - 1; j >= 0; j--)
5507	    substitute (&XVECEXP (*where, i, j), what, repl);
5508	}
5509      else if (fmt[i] == 'e')
5510	substitute (&XEXP (*where, i), what, repl);
5511    }
5512}
5513
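
substitute records the address of every link it rewrites in substitute_stack, which is what lets gen_reload_chain_without_interm_reg_p below undo the substitution once its experiment is over. The record-then-restore idiom in miniature, over a toy binary tree instead of rtx (all names hypothetical):

#include <stdio.h>

typedef struct node { int value; struct node *kid[2]; } node;

/* Undo log: addresses of every pointer we overwrote.  */
static node **undo_stack[64];
static int undo_top;

/* Recursively replace every link that points at WHAT with REPL,
   remembering each changed location so it can be restored.  */
static void substitute (node **where, node *what, node *repl)
{
  if (!*where)
    return;
  if (*where == what)
    {
      undo_stack[undo_top++] = where;
      *where = repl;
      return;
    }
  for (int i = 0; i < 2; i++)
    substitute (&(*where)->kid[i], what, repl);
}

static void undo_all (node *what)
{
  while (undo_top > 0)
    *undo_stack[--undo_top] = what;
}

int main (void)
{
  node a = {1, {0, 0}}, b = {2, {&a, &a}}, repl = {9, {0, 0}};
  node *root = &b;
  substitute (&root, &a, &repl);
  printf ("after: %d %d\n", root->kid[0]->value, root->kid[1]->value);    /* 9 9 */
  undo_all (&a);
  printf ("restored: %d %d\n", root->kid[0]->value, root->kid[1]->value); /* 1 1 */
  return 0;
}
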
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/reload1.c"
, 5553, __FUNCTION__), 0 : 0))
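
The function below answers the question in the comment by experiment: it builds the candidate insn HR = HR + <something> speculatively, asks the target whether the result matches a valid pattern, and then deletes the trial whatever the answer. The same emit/validate/roll-back shape, stripped of all rtl machinery (standalone C, hypothetical validator):

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

/* Stand-in for the insn stream: a list of emitted strings.  */
static const char *stream[16];
static int stream_len;

static int emit (const char *insn) { stream[stream_len++] = insn; return stream_len - 1; }
static void delete_since (int mark) { stream_len = mark; }

/* Hypothetical target test: addition is only valid with a register
   second operand on this toy machine.  */
static bool target_accepts (const char *insn)
{
  return strstr (insn, "+ r") != NULL;
}

/* Can the combined operation be done as one insn?  Emit it, test, roll back.  */
static bool chain_without_temp_p (const char *trial)
{
  int mark = stream_len;
  emit (trial);
  bool ok = target_accepts (trial);
  delete_since (mark);          /* the trial never survives */
  return ok;
}

int main (void)
{
  printf ("%d\n", chain_without_temp_p ("hr = hr + r2"));      /* 1 */
  printf ("%d\n", chain_without_temp_p ("hr = hr + 0x1234"));  /* 0 */
  return 0;
}
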
5538static bool
5539gen_reload_chain_without_interm_reg_p (int r1, int r2)
5540{
5541  /* Assume other cases in gen_reload are not possible for
5542     chain reloads or do need intermediate hard registers.  */
5543  bool result = true;
5544  int regno, code;
5545  rtx out, in;
5546  rtx_insn *insn;
5547  rtx_insn *last = get_last_insn ();
5548
5549  /* Make r2 a component of r1.  */
5550  if (reg_mentioned_p (rld[r1].in, rld[r2].in))
5551    std::swap (r1, r2);
5552
5553  gcc_assert (reg_mentioned_p (rld[r2].in, rld[r1].in));
5554  regno = rld[r1].regno >= 0 ? rld[r1].regno : rld[r2].regno;
5555  gcc_assert (regno >= 0);
5556  out = gen_rtx_REG (rld[r1].mode, regno);
5557  in = rld[r1].in;
5558  substitute (&in, rld[r2].in, gen_rtx_REG (rld[r2].mode, regno));
5559
5560  /* If IN is a paradoxical SUBREG, remove it and try to put the
5561     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
5562  strip_paradoxical_subreg (&in, &out);
5563
5564  if (GET_CODE (in) == PLUS
5565      && (REG_P (XEXP (in, 0))
5566	  || GET_CODE (XEXP (in, 0)) == SUBREG
5567	  || MEM_P (XEXP (in, 0)))
5568      && (REG_P (XEXP (in, 1))
5569	  || GET_CODE (XEXP (in, 1)) == SUBREG
5570	  || CONSTANT_P (XEXP (in, 1))
5571	  || MEM_P (XEXP (in, 1))))
5572    {
5573      insn = emit_insn (gen_rtx_SET (out, in));
5574      code = recog_memoized (insn);
5575      result = false;
5576
5577      if (code >= 0)
5578	{
5579	  extract_insn (insn);
5580	  /* We want constrain_operands to treat this insn strictly in
5581	     its validity determination, i.e., the way it would after
5582	     reload has completed.  */
5583	  result = constrain_operands (1, get_enabled_alternatives (insn));
5584	}
5585
5586      delete_insns_since (last);
5587    }
5588
5589  /* Restore the original value at each changed address within R1.  */
5590  while (!substitute_stack.is_empty ())
5591    {
5592      rtx *where = substitute_stack.pop ();
5593      *where = rld[r2].in;
5594    }
5595
5596  return result;
5597}
5598
5599/* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5600   Return 0 otherwise.
5601
5602   This function uses the same algorithm as reload_reg_free_p above.  */
5603
5604static int
5605reloads_conflict (int r1, int r2)
5606{
5607  enum reload_type r1_type = rld[r1].when_needed;
5608  enum reload_type r2_type = rld[r2].when_needed;
5609  int r1_opnum = rld[r1].opnum;
5610  int r2_opnum = rld[r2].opnum;
5611
5612  /* RELOAD_OTHER conflicts with everything.  */
5613  if (r2_type == RELOAD_OTHER)
5614    return 1;
5615
5616  /* Otherwise, check conflicts differently for each type.  */
5617
5618  switch (r1_type)
5619    {
5620    case RELOAD_FOR_INPUT:
5621      return (r2_type == RELOAD_FOR_INSN
5622	      || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5623	      || r2_type == RELOAD_FOR_OPADDR_ADDR
5624	      || r2_type == RELOAD_FOR_INPUT
5625