Bug Summary

File: build/gcc/recog.c
Warning: line 377, column 19
Although the value stored to 'pat' is used in the enclosing expression, the value is never actually read from 'pat'
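
For readers unfamiliar with this checker (a dead-store diagnostic from the 'deadcode' package enabled in the invocation below), here is a minimal, hypothetical C++ example of the flagged pattern; the names are illustrative and not taken from recog.c:

static int *slot;

void
store_value (int *value)
{
  int *tmp;
  /* The value stored to 'tmp' is used by the enclosing assignment to
     'slot', but 'tmp' itself is never read again, so the store to
     'tmp' is dead -- the same shape as recog.c line 377 below.  */
  slot = tmp = value;
}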

Annotated Source Code

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name recog.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-FWGuVM.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/recog.c
1/* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8Software Foundation; either version 3, or (at your option) any later
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "backend.h"
25#include "target.h"
26#include "rtl.h"
27#include "tree.h"
28#include "cfghooks.h"
29#include "df.h"
30#include "memmodel.h"
31#include "tm_p.h"
32#include "insn-config.h"
33#include "regs.h"
34#include "emit-rtl.h"
35#include "recog.h"
36#include "insn-attr.h"
37#include "addresses.h"
38#include "cfgrtl.h"
39#include "cfgbuild.h"
40#include "cfgcleanup.h"
41#include "reload.h"
42#include "tree-pass.h"
43#include "function-abi.h"
44
45#ifndef STACK_POP_CODE
46#if STACK_GROWS_DOWNWARD
47#define STACK_POP_CODE POST_INC
48#else
49#define STACK_POP_CODE POST_DEC
50#endif
51#endif
52
53static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
54static void validate_replace_src_1 (rtx *, void *);
55static rtx_insn *split_insn (rtx_insn *);
56
57struct target_recog default_target_recog;
58#if SWITCHABLE_TARGET
59struct target_recog *this_target_recog = &default_target_recog;
60#endif
61
62/* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in reginfo.c and final.c and reload.c.
67
68 init_recog and init_recog_no_volatile are responsible for setting this. */
69
70int volatile_ok;
71
72struct recog_data_d recog_data;
73
74/* Contains a vector of operand_alternative structures, such that
75 operand OP of alternative A is at index A * n_operands + OP.
76 Set up by preprocess_constraints. */
77const operand_alternative *recog_op_alt;
78
79/* Used to provide recog_op_alt for asms. */
80static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
81 * MAX_RECOG_ALTERNATIVES];
82
83/* On return from `constrain_operands', indicate which alternative
84 was satisfied. */
85
86int which_alternative;
87
88/* Nonzero after end of reload pass.
89 Set to 1 or 0 by toplev.c.
90 Controls the significance of (SUBREG (MEM)). */
91
92int reload_completed;
93
94/* Nonzero after thread_prologue_and_epilogue_insns has run. */
95int epilogue_completed;
96
97/* Initialize data used by the function `recog'.
98 This must be called once in the compilation of a function
99 before any insn recognition may be done in the function. */
100
101void
102init_recog_no_volatile (void)
103{
104 volatile_ok = 0;
105}
106
107void
108init_recog (void)
109{
110 volatile_ok = 1;
111}
112
113
114/* Return true if labels in asm operands BODY are LABEL_REFs. */
115
116static bool
117asm_labels_ok (rtx body)
118{
119 rtx asmop;
120 int i;
121
122 asmop = extract_asm_operands (body);
123 if (asmop == NULL_RTX)
124 return true;
125
126 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
127 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
128 return false;
129
130 return true;
131}
132
133/* Check that X is an insn-body for an `asm' with operands
134 and that the operands mentioned in it are legitimate. */
135
136int
137check_asm_operands (rtx x)
138{
139 int noperands;
140 rtx *operands;
141 const char **constraints;
142 int i;
143
144 if (!asm_labels_ok (x))
145 return 0;
146
147 /* Post-reload, be more strict with things. */
148 if (reload_completed)
149 {
150 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
151 rtx_insn *insn = make_insn_raw (x);
152 extract_insn (insn);
153 constrain_operands (1, get_enabled_alternatives (insn));
154 return which_alternative >= 0;
155 }
156
157 noperands = asm_noperands (x);
158 if (noperands < 0)
159 return 0;
160 if (noperands == 0)
161 return 1;
162
163 operands = XALLOCAVEC (rtx, noperands);
164 constraints = XALLOCAVEC (const char *, noperands);
165
166 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
167
168 for (i = 0; i < noperands; i++)
169 {
170 const char *c = constraints[i];
171 if (c[0] == '%')
172 c++;
173 if (! asm_operand_ok (operands[i], c, constraints))
174 return 0;
175 }
176
177 return 1;
178}
179
180/* Static data for the next two routines. */
181
182struct change_t
183{
184 rtx object;
185 int old_code;
186 int old_len;
187 bool unshare;
188 rtx *loc;
189 rtx old;
190};
191
192static change_t *changes;
193static int changes_allocated;
194
195static int num_changes = 0;
196static int temporarily_undone_changes = 0;
197
198/* Validate a proposed change to OBJECT. LOC is the location in the rtl
199 at which NEW_RTX will be placed. If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
200 will also be changed to NEW_LEN, which is no greater than the current
201 XVECLEN. If OBJECT is zero, no validation is done, the change is
202 simply made.
203
204 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
205 will be called with the address and mode as parameters. If OBJECT is
206 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
207 the change in place.
208
209 IN_GROUP is nonzero if this is part of a group of changes that must be
210 performed as a group. In that case, the changes will be stored. The
211 function `apply_change_group' will validate and apply the changes.
212
213 If IN_GROUP is zero, this is a single change. Try to recognize the insn
214 or validate the memory reference with the change applied. If the result
215 is not valid for the machine, suppress the change and return zero.
216 Otherwise, perform the change and return 1. */
217
218static bool
219validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
220 bool unshare, int new_len = -1)
221{
222 gcc_assert (temporarily_undone_changes == 0);
223 rtx old = *loc;
224
225 /* Single-element parallels aren't valid and won't match anything.
226 Replace them with the single element. */
227 if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
228 {
229 new_rtx = XVECEXP (new_rtx, 0, 0);
230 new_len = -1;
231 }
232
233 if ((old == new_rtx || rtx_equal_p (old, new_rtx))
234 && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
235 return 1;
236
237 gcc_assert ((in_group != 0 || num_changes == 0)
238 && (new_len < 0 || new_rtx == *loc));
239
240 *loc = new_rtx;
241
242 /* Save the information describing this change. */
243 if (num_changes >= changes_allocated)
244 {
245 if (changes_allocated == 0)
246 /* This value allows for repeated substitutions inside complex
247 indexed addresses, or changes in up to 5 insns. */
248 changes_allocated = MAX_RECOG_OPERANDS * 5;
249 else
250 changes_allocated *= 2;
251
252 changes = XRESIZEVEC (change_t, changes, changes_allocated);
253 }
254
255 changes[num_changes].object = object;
256 changes[num_changes].loc = loc;
257 changes[num_changes].old = old;
258 changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
259 changes[num_changes].unshare = unshare;
260
261 if (new_len >= 0)
262 XVECLEN (new_rtx, 0) = new_len;
263
264 if (object && !MEM_P (object))
265 {
266 /* Set INSN_CODE to force rerecognition of insn. Save old code in
267 case invalid. */
268 changes[num_changes].old_code = INSN_CODE (object);
269 INSN_CODE (object) = -1;
270 }
271
272 num_changes++;
273
274 /* If we are making a group of changes, return 1. Otherwise, validate the
275 change group we made. */
276
277 if (in_group)
278 return 1;
279 else
280 return apply_change_group ();
281}
282
283/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
284 UNSHARE to false. */
285
286bool
287validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
288{
289 return validate_change_1 (object, loc, new_rtx, in_group, false);
290}
291
292/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
293 UNSHARE to true. */
294
295bool
296validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
297{
298 return validate_change_1 (object, loc, new_rtx, in_group, true);
299}
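/* Editor's aside (an illustrative sketch, not part of recog.c): the
   grouped-change idiom these wrappers support.  Changes queued with
   IN_GROUP nonzero are only stored; apply_change_group then validates
   and commits them as a unit, cancelling all of them on failure:

     validate_change (insn, &SET_SRC (pat), new_src, 1);
     validate_change (insn, &SET_DEST (pat), new_dest, 1);
     if (!apply_change_group ())
       ;  // every queued change has been undone

   'insn', 'pat', 'new_src' and 'new_dest' are placeholder names.  */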
300
301/* Change XVECLEN (*LOC, 0) to NEW_LEN. OBJECT, IN_GROUP and the return
302 value are as for validate_change_1. */
303
304bool
305validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
306{
307 return validate_change_1 (object, loc, *loc, in_group, false, new_len);
308}
309
310/* Keep X canonicalized if some changes have made it non-canonical; only
311 modifies the operands of X, not (for example) its code. Simplifications
312 are not the job of this routine.
313
314 Return true if anything was changed. */
315bool
316canonicalize_change_group (rtx_insn *insn, rtx x)
317{
318 if (COMMUTATIVE_P (x)
319 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
320 {
321 /* Oops, the caller has made X no longer canonical.
322 Let's redo the changes in the correct order. */
323 rtx tem = XEXP (x, 0);
324 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
325 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
326 return true;
327 }
328 else
329 return false;
330}
331
332
333/* This subroutine of apply_change_group verifies whether the changes to INSN
334 were valid; i.e. whether INSN can still be recognized.
335
336 If IN_GROUP is true clobbers which have to be added in order to
337 match the instructions will be added to the current change group.
338 Otherwise the changes will take effect immediately. */
339
340int
341insn_invalid_p (rtx_insn *insn, bool in_group)
342{
343 rtx pat = PATTERN (insn);
344 int num_clobbers = 0;
345 /* If we are before reload and the pattern is a SET, see if we can add
346 clobbers. */
347 int icode = recog (pat, insn,
348 (GET_CODE (pat) == SET
349 && ! reload_completed
350 && ! reload_in_progress)
351 ? &num_clobbers : 0);
352 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
353
354
355 /* If this is an asm and the operands aren't legal, then fail. Likewise if
356 this is not an asm and the insn wasn't recognized. */
357 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
358 || (!is_asm && icode < 0))
359 return 1;
360
361 /* If we have to add CLOBBERs, fail if we have to add ones that reference
362 hard registers since our callers can't know if they are live or not.
363 Otherwise, add them. */
364 if (num_clobbers > 0)
365 {
366 rtx newpat;
367
368 if (added_clobbers_hard_reg_p (icode))
369 return 1;
370
371 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
372 XVECEXP (newpat, 0, 0) = pat;
373 add_clobbers (newpat, icode);
374 if (in_group)
375 validate_change (insn, &PATTERN (insn), newpat, 1);
376 else
377 PATTERN (insn) = pat = newpat;
Although the value stored to 'pat' is used in the enclosing expression, the value is never actually read from 'pat'
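/* Editor's note: the report above flags a dead store.  After line 377
   'pat' is never read again (the code below only uses 'insn'), so a
   possible cleanup -- a sketch, not a committed GCC change -- is to
   drop the chained assignment:

       PATTERN (insn) = newpat;

   which keeps the only store that is actually observed.  */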
378 }
379
380 /* After reload, verify that all constraints are satisfied. */
381 if (reload_completed)
382 {
383 extract_insn (insn);
384
385 if (! constrain_operands (1, get_preferred_alternatives (insn)))
386 return 1;
387 }
388
389 INSN_CODE (insn) = icode;
390 return 0;
391}
392
393/* Return number of changes made and not validated yet. */
394int
395num_changes_pending (void)
396{
397 return num_changes;
398}
399
400/* Tentatively apply the changes numbered NUM and up.
401 Return 1 if all changes are valid, zero otherwise. */
402
403int
404verify_changes (int num)
405{
406 int i;
407 rtx last_validated = NULL_RTX;
408
409 /* The changes have been applied and all INSN_CODEs have been reset to force
410 rerecognition.
411
412 The changes are valid if we aren't given an object, or if we are
413 given a MEM and it still is a valid address, or if this is in insn
414 and it is recognized. In the latter case, if reload has completed,
415 we also require that the operands meet the constraints for
416 the insn. */
417
418 for (i = num; i < num_changes; i++)
419 {
420 rtx object = changes[i].object;
421
422 /* If there is no object to test or if it is the same as the one we
423 already tested, ignore it. */
424 if (object == 0 || object == last_validated)
425 continue;
426
427 if (MEM_P (object))
428 {
429 if (! memory_address_addr_space_p (GET_MODE (object),
430 XEXP (object, 0),
431 MEM_ADDR_SPACE (object)))
432 break;
433 }
434 else if (/* changes[i].old might be zero, e.g. when putting a
435 REG_FRAME_RELATED_EXPR into a previously empty list. */
436 changes[i].old
437 && REG_P (changes[i].old)
438 && asm_noperands (PATTERN (object)) > 0
439 && register_asm_p (changes[i].old))
440 {
441 /* Don't allow changes of hard register operands to inline
442 assemblies if they have been defined as register asm ("x"). */
443 break;
444 }
445 else if (DEBUG_INSN_P (object))
446 continue;
447 else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
448 {
449 rtx pat = PATTERN (object);
450
451 /* Perhaps we couldn't recognize the insn because there were
452 extra CLOBBERs at the end. If so, try to re-recognize
453 without the last CLOBBER (later iterations will cause each of
454 them to be eliminated, in turn). But don't do this if we
455 have an ASM_OPERAND. */
456 if (GET_CODE (pat) == PARALLEL
457 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
458 && asm_noperands (PATTERN (object)) < 0)
459 {
460 rtx newpat;
461
462 if (XVECLEN (pat, 0) == 2)
463 newpat = XVECEXP (pat, 0, 0);
464 else
465 {
466 int j;
467
468 newpat
469 = gen_rtx_PARALLEL (VOIDmode,
470 rtvec_alloc (XVECLEN (pat, 0) - 1));
471 for (j = 0; j < XVECLEN (newpat, 0); j++)
472 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
473 }
474
475 /* Add a new change to this group to replace the pattern
476 with this new pattern. Then consider this change
477 as having succeeded. The change we added will
478 cause the entire call to fail if things remain invalid.
479
480 Note that this can lose if a later change than the one
481 we are processing specified &XVECEXP (PATTERN (object), 0, X)
482 but this shouldn't occur. */
483
484 validate_change (object, &PATTERN (object), newpat, 1);
485 continue;
486 }
487 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
488 || GET_CODE (pat) == VAR_LOCATION)
489 /* If this insn is a CLOBBER or USE, it is always valid, but is
490 never recognized. */
491 continue;
492 else
493 break;
494 }
495 last_validated = object;
496 }
497
498 return (i == num_changes);
499}
500
501/* A group of changes has previously been issued with validate_change
502 and verified with verify_changes. Call df_insn_rescan for each of
503 the insn changed and clear num_changes. */
504
505void
506confirm_change_group (void)
507{
508 int i;
509 rtx last_object = NULL;
510
511 gcc_assert (temporarily_undone_changes == 0);
512 for (i = 0; i < num_changes; i++)
513 {
514 rtx object = changes[i].object;
515
516 if (changes[i].unshare)
517 *changes[i].loc = copy_rtx (*changes[i].loc);
518
519 /* Avoid unnecessary rescanning when multiple changes to same instruction
520 are made. */
521 if (object)
522 {
523 if (object != last_object && last_object && INSN_P (last_object))
524 df_insn_rescan (as_a <rtx_insn *> (last_object));
525 last_object = object;
526 }
527 }
528
529 if (last_object && INSN_P (last_object))
530 df_insn_rescan (as_a <rtx_insn *> (last_object));
531 num_changes = 0;
532}
533
534/* Apply a group of changes previously issued with `validate_change'.
535 If all changes are valid, call confirm_change_group and return 1,
536 otherwise, call cancel_changes and return 0. */
537
538int
539apply_change_group (void)
540{
541 if (verify_changes (0))
542 {
543 confirm_change_group ();
544 return 1;
545 }
546 else
547 {
548 cancel_changes (0);
549 return 0;
550 }
551}
552
553
554/* Return the number of changes so far in the current group. */
555
556int
557num_validated_changes (void)
558{
559 return num_changes;
560}
561
562/* Retract the changes numbered NUM and up. */
563
564void
565cancel_changes (int num)
566{
567 gcc_assert (temporarily_undone_changes == 0);
568 int i;
569
570 /* Back out all the changes. Do this in the opposite order in which
571 they were made. */
572 for (i = num_changes - 1; i >= num; i--)
573 {
574 if (changes[i].old_len >= 0)
575 XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
576 else
577 *changes[i].loc = changes[i].old;
578 if (changes[i].object && !MEM_P (changes[i].object))
579 INSN_CODE (changes[i].object) = changes[i].old_code;
580 }
581 num_changes = num;
582}
583
584/* Swap the status of change NUM from being applied to not being applied,
585 or vice versa. */
586
587static void
588swap_change (int num)
589{
590 if (changes[num].old_len >= 0)
591 std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
592 else
593 std::swap (*changes[num].loc, changes[num].old);
594 if (changes[num].object && !MEM_P (changes[num].object))
595 std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
596}
597
598/* Temporarily undo all the changes numbered NUM and up, with a view
599 to reapplying them later. The next call to the changes machinery
600 must be:
601
602 redo_changes (NUM)
603
604 otherwise things will end up in an invalid state. */
605
606void
607temporarily_undo_changes (int num)
608{
609 gcc_assert (temporarily_undone_changes == 0 && num <= num_changes);
610 for (int i = num_changes - 1; i >= num; i--)
611 swap_change (i);
612 temporarily_undone_changes = num_changes - num;
613}
614
615/* Redo the changes that were temporarily undone by:
616
617 temporarily_undo_changes (NUM). */
618
619void
620redo_changes (int num)
621{
622 gcc_assert (temporarily_undone_changes == num_changes - num);
623 for (int i = num; i < num_changes; ++i)
624 swap_change (i);
625 temporarily_undone_changes = 0;
626}
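/* Editor's aside (illustrative only, not part of recog.c): as the
   comments above require, these two functions must be used as a
   strictly nested pair, e.g.:

     temporarily_undo_changes (n);  // back out changes n..num_changes-1
     ... examine the insn in its earlier form ...
     redo_changes (n);              // reapply before any other change

   Anything else in between would trip the gcc_assert checks on
   temporarily_undone_changes.  */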
627
628/* Reduce conditional compilation elsewhere. */
629/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
630 rtx. */
631
632static void
633simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
634 machine_mode op0_mode)
635{
636 rtx x = *loc;
637 enum rtx_code code = GET_CODE (x);
638 rtx new_rtx = NULL_RTX;
639 scalar_int_mode is_mode;
640
641 if (SWAPPABLE_OPERANDS_P (x)
642 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
643 {
644 validate_unshare_change (object, loc,
645 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
646 : swap_condition (code),
647 GET_MODE (x), XEXP (x, 1),
648 XEXP (x, 0)), 1);
649 x = *loc;
650 code = GET_CODE (x);
651 }
652
653 /* Canonicalize arithmetics with all constant operands. */
654 switch (GET_RTX_CLASS (code))
655 {
656 case RTX_UNARY:
657 if (CONSTANT_P (XEXP (x, 0)))
658 new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
659 op0_mode);
660 break;
661 case RTX_COMM_ARITH:
662 case RTX_BIN_ARITH:
663 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
664 new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
665 XEXP (x, 1));
666 break;
667 case RTX_COMPARE:
668 case RTX_COMM_COMPARE:
669 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
670 new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
671 XEXP (x, 0), XEXP (x, 1));
672 break;
673 default:
674 break;
675 }
676 if (new_rtx)
677 {
678 validate_change (object, loc, new_rtx, 1);
679 return;
680 }
681
682 switch (code)
683 {
684 case PLUS:
685 /* If we have a PLUS whose second operand is now a CONST_INT, use
686 simplify_gen_binary to try to simplify it.
687 ??? We may want later to remove this, once simplification is
688 separated from this function. */
689 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
690 validate_change (object, loc,
691 simplify_gen_binary
692 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
693 break;
694 case MINUS:
695 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
696 validate_change (object, loc,
697 simplify_gen_binary
698 (PLUS, GET_MODE (x), XEXP (x, 0),
699 simplify_gen_unary (NEG,
700 GET_MODE (x), XEXP (x, 1),
701 GET_MODE (x))), 1);
702 break;
703 case ZERO_EXTEND:
704 case SIGN_EXTEND:
705 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
706 {
707 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
708 op0_mode);
709 /* If any of the above failed, substitute in something that
710 we know won't be recognized. */
711 if (!new_rtx)
712 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
713 validate_change (object, loc, new_rtx, 1);
714 }
715 break;
716 case SUBREG:
717 /* All subregs possible to simplify should be simplified. */
718 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
719 SUBREG_BYTE (x));
720
721 /* Subregs of VOIDmode operands are incorrect. */
722 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
723 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
724 if (new_rtx)
725 validate_change (object, loc, new_rtx, 1);
726 break;
727 case ZERO_EXTRACT:
728 case SIGN_EXTRACT:
729 /* If we are replacing a register with memory, try to change the memory
730 to be the mode required for memory in extract operations (this isn't
731 likely to be an insertion operation; if it was, nothing bad will
732 happen, we might just fail in some cases). */
733
734 if (MEM_P (XEXP (x, 0))
735 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
736 && CONST_INT_P (XEXP (x, 1))
737 && CONST_INT_P (XEXP (x, 2))
738 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
739 MEM_ADDR_SPACE (XEXP (x, 0)))
740 && !MEM_VOLATILE_P (XEXP (x, 0)))
741 {
742 int pos = INTVAL (XEXP (x, 2));
743 machine_mode new_mode = is_mode;
744 if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
745 new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
746 else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
747 new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
748 scalar_int_mode wanted_mode = (new_mode == VOIDmode
749 ? word_mode
750 : as_a <scalar_int_mode> (new_mode));
751
752 /* If we have a narrower mode, we can do something. */
753 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
754 {
755 int offset = pos / BITS_PER_UNIT;
756 rtx newmem;
757
758 /* If the bytes and bits are counted differently, we
759 must adjust the offset. */
760 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
761 offset =
762 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
763 offset);
764
765 gcc_assert (GET_MODE_PRECISION (wanted_mode)
766 == GET_MODE_BITSIZE (wanted_mode));
767 pos %= GET_MODE_BITSIZE (wanted_mode);
768
769 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
770
771 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
772 validate_change (object, &XEXP (x, 0), newmem, 1);
773 }
774 }
775
776 break;
777
778 default:
779 break;
780 }
781}
782
783/* Replace every occurrence of FROM in X with TO. Mark each change with
784 validate_change passing OBJECT. */
785
786static void
787validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
788 bool simplify)
789{
790 int i, j;
791 const char *fmt;
792 rtx x = *loc;
793 enum rtx_code code;
794 machine_mode op0_mode = VOIDmode;
795 int prev_changes = num_changes;
796
797 if (!x)
798 return;
799
800 code = GET_CODE (x);
801 fmt = GET_RTX_FORMAT (code);
802 if (fmt[0] == 'e')
803 op0_mode = GET_MODE (XEXP (x, 0));
804
805 /* X matches FROM if it is the same rtx or they are both referring to the
806 same register in the same mode. Avoid calling rtx_equal_p unless the
807 operands look similar. */
808
809 if (x == from
810 || (REG_P (x) && REG_P (from)
811 && GET_MODE (x) == GET_MODE (from)
812 && REGNO (x) == REGNO (from))
813 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
814 && rtx_equal_p (x, from)))
815 {
816 validate_unshare_change (object, loc, to, 1);
817 return;
818 }
819
820 /* Call ourself recursively to perform the replacements.
821 We must not replace inside already replaced expression, otherwise we
822 get infinite recursion for replacements like (reg X)->(subreg (reg X))
823 so we must special case shared ASM_OPERANDS. */
824
825 if (GET_CODE (x) == PARALLEL)
826 {
827 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
828 {
829 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
830 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
831 {
832 /* Verify that operands are really shared. */
833 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
834 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
835 (x, 0, j))));
836 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
837 from, to, object, simplify);
838 }
839 else
840 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
841 simplify);
842 }
843 }
844 else
845 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
846 {
847 if (fmt[i] == 'e')
848 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
849 else if (fmt[i] == 'E')
850 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
851 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
852 simplify);
853 }
854
855 /* If we didn't substitute, there is nothing more to do. */
856 if (num_changes == prev_changes)
857 return;
858
859 /* ??? The regmove is no more, so is this aberration still necessary? */
860 /* Allow substituted expression to have different mode. This is used by
861 regmove to change mode of pseudo register. */
862 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
863 op0_mode = GET_MODE (XEXP (x, 0));
864
865 /* Do changes needed to keep rtx consistent. Don't do any other
866 simplifications, as it is not our job. */
867 if (simplify)
868 simplify_while_replacing (loc, to, object, op0_mode);
869}
870
871/* Try replacing every occurrence of FROM in subexpression LOC of INSN
872 with TO. After all changes have been made, validate by seeing
873 if INSN is still valid. */
874
875int
876validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
877{
878 validate_replace_rtx_1 (loc, from, to, insn, true);
879 return apply_change_group ();
880}
881
882/* Try replacing every occurrence of FROM in INSN with TO. After all
883 changes have been made, validate by seeing if INSN is still valid. */
884
885int
886validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
887{
888 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
889 return apply_change_group ();
890}
891
892/* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
893 is a part of INSN. After all changes have been made, validate by seeing if
894 INSN is still valid.
895 validate_replace_rtx (from, to, insn) is equivalent to
896 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
897
898int
899validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
900{
901 validate_replace_rtx_1 (where, from, to, insn, true);
902 return apply_change_group ();
903}
904
905/* Same as above, but do not simplify rtx afterwards. */
906int
907validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
908 rtx_insn *insn)
909{
910 validate_replace_rtx_1 (where, from, to, insn, false);
911 return apply_change_group ();
912
913}
914
915/* Try replacing every occurrence of FROM in INSN with TO. This also
916 will replace in REG_EQUAL and REG_EQUIV notes. */
917
918void
919validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
920{
921 rtx note;
922 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
923 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
924 if (REG_NOTE_KIND (note) == REG_EQUAL
925 || REG_NOTE_KIND (note) == REG_EQUIV)
926 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
927}
928
929/* Function called by note_uses to replace used subexpressions. */
930struct validate_replace_src_data
931{
932 rtx from; /* Old RTX */
933 rtx to; /* New RTX */
934 rtx_insn *insn; /* Insn in which substitution is occurring. */
935};
936
937static void
938validate_replace_src_1 (rtx *x, void *data)
939{
940 struct validate_replace_src_data *d
941 = (struct validate_replace_src_data *) data;
942
943 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
944}
945
946/* Try replacing every occurrence of FROM in INSN with TO, avoiding
947 SET_DESTs. */
948
949void
950validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
951{
952 struct validate_replace_src_data d;
953
954 d.from = from;
955 d.to = to;
956 d.insn = insn;
957 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
958}
959
960/* Try simplify INSN.
961 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
962 pattern and return true if something was simplified. */
963
964bool
965validate_simplify_insn (rtx_insn *insn)
966{
967 int i;
968 rtx pat = NULL;
969 rtx newpat = NULL;
970
971 pat = PATTERN (insn);
972
973 if (GET_CODE (pat) == SET)
974 {
975 newpat = simplify_rtx (SET_SRC (pat));
976 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
977 validate_change (insn, &SET_SRC (pat), newpat, 1);
978 newpat = simplify_rtx (SET_DEST (pat));
979 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
980 validate_change (insn, &SET_DEST (pat), newpat, 1);
981 }
982 else if (GET_CODE (pat) == PARALLEL)
983 for (i = 0; i < XVECLEN (pat, 0); i++)
984 {
985 rtx s = XVECEXP (pat, 0, i);
986
987 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
988 {
989 newpat = simplify_rtx (SET_SRC (s));
990 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
991 validate_change (insn, &SET_SRC (s), newpat, 1);
992 newpat = simplify_rtx (SET_DEST (s));
993 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
994 validate_change (insn, &SET_DEST (s), newpat, 1);
995 }
996 }
997 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
998}
999
1000/* Try to process the address of memory expression MEM. Return true on
1001 success; leave the caller to clean up on failure. */
1002
1003bool
1004insn_propagation::apply_to_mem_1 (rtx mem)
1005{
1006 auto old_num_changes = num_validated_changes ();
1007 mem_depth += 1;
1008 bool res = apply_to_rvalue_1 (&XEXP (mem, 0));
1009 mem_depth -= 1;
1010 if (!res)
1011 return false;
1012
1013 if (old_num_changes != num_validated_changes ()
1014 && should_check_mems
1015 && !check_mem (old_num_changes, mem))
1016 return false;
1017
1018 return true;
1019}
1020
1021/* Try to process the rvalue expression at *LOC. Return true on success;
1022 leave the caller to clean up on failure. */
1023
1024bool
1025insn_propagation::apply_to_rvalue_1 (rtx *loc)
1026{
1027 rtx x = *loc;
1028 enum rtx_code code = GET_CODE (x);
1029 machine_mode mode = GET_MODE (x);
1030
1031 auto old_num_changes = num_validated_changes ();
1032 if (from && GET_CODE (x) == GET_CODE (from) && rtx_equal_p (x, from))
1033 {
1034 /* Don't replace register asms in asm statements; we mustn't
1035 change the user's register allocation. */
1036 if (REG_P (x)
1037 && HARD_REGISTER_P (x)
1038 && register_asm_p (x)
1039 && asm_noperands (PATTERN (insn)) > 0)
1040 return false;
1041
1042 if (should_unshare)
1043 validate_unshare_change (insn, loc, to, 1);
1044 else
1045 validate_change (insn, loc, to, 1);
1046 if (mem_depth && !REG_P (to) && !CONSTANT_P (to))
1047 {
1048 /* We're substituting into an address, but TO will have the
1049 form expected outside an address. Canonicalize it if
1050 necessary. */
1051 insn_propagation subprop (insn);
1052 subprop.mem_depth += 1;
1053 if (!subprop.apply_to_rvalue (loc))
1054 gcc_unreachable ();
1055 if (should_unshare
1056 && num_validated_changes () != old_num_changes + 1)
1057 {
1058 /* TO is owned by someone else, so create a copy and
1059 return TO to its original form. */
1060 rtx to = copy_rtx (*loc);
1061 cancel_changes (old_num_changes);
1062 validate_change (insn, loc, to, 1);
1063 }
1064 }
1065 num_replacements += 1;
1066 should_unshare = true;
1067 result_flags |= UNSIMPLIFIED;
1068 return true;
1069 }
1070
1071 /* Recursively apply the substitution and see if we can simplify
1072 the result. This specifically shouldn't use simplify_gen_* for
1073 speculative simplifications, since we want to avoid generating new
1074 expressions where possible. */
1075 auto old_result_flags = result_flags;
1076 rtx newx = NULL_RTX;
1077 bool recurse_p = false;
1078 switch (GET_RTX_CLASS (code))
1079 {
1080 case RTX_UNARY:
1081 {
1082 machine_mode op0_mode = GET_MODE (XEXP (x, 0));
1083 if (!apply_to_rvalue_1 (&XEXP (x, 0)))
1084 return false;
1085 if (from && old_num_changes == num_validated_changes ())
1086 return true;
1087
1088 newx = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
1089 break;
1090 }
1091
1092 case RTX_BIN_ARITH:
1093 case RTX_COMM_ARITH:
1094 {
1095 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1096 || !apply_to_rvalue_1 (&XEXP (x, 1)))
1097 return false;
1098 if (from && old_num_changes == num_validated_changes ())
1099 return true;
1100
1101 if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
1102 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
1103 newx = simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
1104 else
1105 newx = simplify_binary_operation (code, mode,
1106 XEXP (x, 0), XEXP (x, 1));
1107 break;
1108 }
1109
1110 case RTX_COMPARE:
1111 case RTX_COMM_COMPARE:
1112 {
1113 machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode
1114 ? GET_MODE (XEXP (x, 0))
1115 : GET_MODE (XEXP (x, 1)));
1116 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1117 || !apply_to_rvalue_1 (&XEXP (x, 1)))
1118 return false;
1119 if (from && old_num_changes == num_validated_changes ())
1120 return true;
1121
1122 newx = simplify_relational_operation (code, mode, op_mode,
1123 XEXP (x, 0), XEXP (x, 1));
1124 break;
1125 }
1126
1127 case RTX_TERNARY:
1128 case RTX_BITFIELD_OPS:
1129 {
1130 machine_mode op0_mode = GET_MODE (XEXP (x, 0));
1131 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1132 || !apply_to_rvalue_1 (&XEXP (x, 1))
1133 || !apply_to_rvalue_1 (&XEXP (x, 2)))
1134 return false;
1135 if (from && old_num_changes == num_validated_changes ())
1136 return true;
1137
1138 newx = simplify_ternary_operation (code, mode, op0_mode,
1139 XEXP (x, 0), XEXP (x, 1),
1140 XEXP (x, 2));
1141 break;
1142 }
1143
1144 case RTX_EXTRA:
1145 if (code == SUBREG)
1146 {
1147 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
1148 if (!apply_to_rvalue_1 (&SUBREG_REG (x)))
1149 return false;
1150 if (from && old_num_changes == num_validated_changes ())
1151 return true;
1152
1153 rtx inner = SUBREG_REG (x);
1154 newx = simplify_subreg (mode, inner, inner_mode, SUBREG_BYTE (x));
1155 /* Reject the same cases that simplify_gen_subreg would. */
1156 if (!newx
1157 && (GET_CODE (inner) == SUBREG
1158 || GET_CODE (inner) == CONCAT
1159 || GET_MODE (inner) == VOIDmode
1160 || !validate_subreg (mode, inner_mode,
1161 inner, SUBREG_BYTE (x))))
1162 {
1163 failure_reason = "would create an invalid subreg";
1164 return false;
1165 }
1166 break;
1167 }
1168 else
1169 recurse_p = true;
1170 break;
1171
1172 case RTX_OBJ:
1173 if (code == LO_SUM)
1174 {
1175 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1176 || !apply_to_rvalue_1 (&XEXP (x, 1)))
1177 return false;
1178 if (from && old_num_changes == num_validated_changes ())
1179 return true;
1180
1181 /* (lo_sum (high x) y) -> y where x and y have the same base. */
1182 rtx op0 = XEXP (x, 0);
1183 rtx op1 = XEXP (x, 1);
1184 if (GET_CODE (op0) == HIGH)
1185 {
1186 rtx base0, base1, offset0, offset1;
1187 split_const (XEXP (op0, 0), &base0, &offset0);
1188 split_const (op1, &base1, &offset1);
1189 if (rtx_equal_p (base0, base1))
1190 newx = op1;
1191 }
1192 }
1193 else if (code == REG)
1194 {
1195 if (from && REG_P (from) && reg_overlap_mentioned_p (x, from))
1196 {
1197 failure_reason = "inexact register overlap";
1198 return false;
1199 }
1200 }
1201 else if (code == MEM)
1202 return apply_to_mem_1 (x);
1203 else
1204 recurse_p = true;
1205 break;
1206
1207 case RTX_CONST_OBJ:
1208 break;
1209
1210 case RTX_AUTOINC:
1211 if (from && reg_overlap_mentioned_p (XEXP (x, 0), from))
1212 {
1213 failure_reason = "is subject to autoinc";
1214 return false;
1215 }
1216 recurse_p = true;
1217 break;
1218
1219 case RTX_MATCH:
1220 case RTX_INSN:
1221 gcc_unreachable ();
1222 }
1223
1224 if (recurse_p)
1225 {
1226 const char *fmt = GET_RTX_FORMAT (code);
1227 for (int i = 0; fmt[i]; i++)
1228 switch (fmt[i])
1229 {
1230 case 'E':
1231 for (int j = 0; j < XVECLEN (x, i); j++)
1232 if (!apply_to_rvalue_1 (&XVECEXP (x, i, j)))
1233 return false;
1234 break;
1235
1236 case 'e':
1237 if (XEXP (x, i) && !apply_to_rvalue_1 (&XEXP (x, i)))
1238 return false;
1239 break;
1240 }
1241 }
1242 else if (newx && !rtx_equal_p (x, newx))
1243 {
1244 /* All substitutions made by OLD_NUM_CHANGES onwards have been
1245 simplified. */
1246 result_flags = ((result_flags & ~UNSIMPLIFIED)
1247 | (old_result_flags & UNSIMPLIFIED));
1248
1249 if (should_note_simplifications)
1250 note_simplification (old_num_changes, old_result_flags, x, newx);
1251
1252 /* There's no longer any point unsharing the substitutions made
1253 for subexpressions, since we'll just copy this one instead. */
1254 bool unshare = false;
1255 for (int i = old_num_changes; i < num_changes; ++i)
1256 {
1257 unshare |= changes[i].unshare;
1258 changes[i].unshare = false;
1259 }
1260 if (unshare)
1261 validate_unshare_change (insn, loc, newx, 1);
1262 else
1263 validate_change (insn, loc, newx, 1);
1264 }
1265
1266 return true;
1267}
1268
1269/* Try to process the lvalue expression at *LOC. Return true on success;
1270 leave the caller to clean up on failure. */
1271
1272bool
1273insn_propagation::apply_to_lvalue_1 (rtx dest)
1274{
1275 rtx old_dest = dest;
1276 while (GET_CODE (dest) == SUBREG
1277 || GET_CODE (dest) == ZERO_EXTRACT
1278 || GET_CODE (dest) == STRICT_LOW_PART)
1279 {
1280 if (GET_CODE (dest) == ZERO_EXTRACT
1281 && (!apply_to_rvalue_1 (&XEXP (dest, 1))
1282 || !apply_to_rvalue_1 (&XEXP (dest, 2))))
1283 return false;
1284 dest = XEXP (dest, 0);
1285 }
1286
1287 if (MEM_P (dest))
1288 return apply_to_mem_1 (dest);
1289
1290 /* Check whether the substitution is safe in the presence of this lvalue. */
1291 if (!from
1292 || dest == old_dest
1293 || !REG_P (dest)
1294 || !reg_overlap_mentioned_p (dest, from))
1295 return true;
1296
1297 if (SUBREG_P (old_dest)
1298 && SUBREG_REG (old_dest) == dest
1299 && !read_modify_subreg_p (old_dest))
1300 return true;
1301
1302 failure_reason = "is part of a read-write destination";
1303 return false;
1304}
1305
1306/* Try to process the instruction pattern at *LOC. Return true on success;
1307 leave the caller to clean up on failure. */
1308
1309bool
1310insn_propagation::apply_to_pattern_1 (rtx *loc)
1311{
1312 rtx body = *loc;
1313 switch (GET_CODE (body))
1314 {
1315 case COND_EXEC:
1316 return (apply_to_rvalue_1 (&COND_EXEC_TEST (body))
1317 && apply_to_pattern_1 (&COND_EXEC_CODE (body)));
1318
1319 case PARALLEL:
1320 {
1321 int last = XVECLEN (body, 0) - 1;
1322 for (int i = 0; i < last; ++i)
1323 if (!apply_to_pattern_1 (&XVECEXP (body, 0, i)))
1324 return false;
1325 return apply_to_pattern_1 (&XVECEXP (body, 0, last));
1326 }
1327
1328 case ASM_OPERANDS:
1329 for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (body); i < len; ++i)
1330 if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body, i)))
1331 return false;
1332 return true;
1333
1334 case CLOBBER:
1335 return apply_to_lvalue_1 (XEXP (body, 0));
1336
1337 case SET:
1338 return (apply_to_lvalue_1 (SET_DEST (body))
1339 && apply_to_rvalue_1 (&SET_SRC (body)));
1340
1341 default:
1342 /* All the other possibilities never store and can use a normal
1343 rtx walk. This includes:
1344
1345 - USE
1346 - TRAP_IF
1347 - PREFETCH
1348 - UNSPEC
1349 - UNSPEC_VOLATILE. */
1350 return apply_to_rvalue_1 (loc);
1351 }
1352}
1353
1354 /* Apply this insn_propagation object's simplification or substitution
1355    to the instruction pattern at LOC.  */
1356
1357 bool
1358 insn_propagation::apply_to_pattern (rtx *loc)
1359 {
1360   unsigned int num_changes = num_validated_changes ();
1361   bool res = apply_to_pattern_1 (loc);
1362   if (!res)
1363     cancel_changes (num_changes);
1364   return res;
1365 }
1366
1367 /* Apply this insn_propagation object's simplification or substitution
1368    to the rvalue expression at LOC.  */
1369
1370 bool
1371 insn_propagation::apply_to_rvalue (rtx *loc)
1372 {
1373   unsigned int num_changes = num_validated_changes ();
1374   bool res = apply_to_rvalue_1 (loc);
1375   if (!res)
1376     cancel_changes (num_changes);
1377   return res;
1378 }
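/* Illustrative sketch, not part of recog.c: a caller such as a forward
   propagation pass might drive the interface above roughly like this,
   where USE_INSN, DEST and SRC are hypothetical placeholders for the
   using instruction and the (from, to) pair being substituted:

     insn_propagation prop (use_insn, dest, src);
     if (prop.apply_to_pattern (&PATTERN (use_insn))
         && verify_changes (0))
       confirm_change_group ();
     else
       cancel_changes (0);

   Substitutions are queued as a change group (in_group == 1), so nothing
   is committed until the caller confirms the whole group.  */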
1379
1380 /* Check whether INSN matches a specific alternative of an .md pattern.  */
1381
1382 bool
1383 valid_insn_p (rtx_insn *insn)
1384 {
1385   recog_memoized (insn);
1386   if (INSN_CODE (insn) < 0)
1387     return false;
1388   extract_insn (insn);
1389   /* We don't know whether the insn will be in code that is optimized
1390      for size or speed, so consider all enabled alternatives.  */
1391   if (!constrain_operands (1, get_enabled_alternatives (insn)))
1392     return false;
1393   return true;
1394 }
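/* Illustrative sketch, not part of recog.c: a pass that builds a
   candidate instruction can test it with valid_insn_p before committing
   to a transformation (DEST and SRC are hypothetical placeholders):

     rtx_insn *candidate = make_insn_raw (gen_rtx_SET (dest, src));
     if (!valid_insn_p (candidate))
       return false;   /* No .md alternative matches; give up.  */
*/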
1395
1396 /* Return 1 if OP is a valid general operand for machine mode MODE.
1397    This is either a register reference, a memory reference,
1398    or a constant.  In the case of a memory reference, the address
1399    is checked for general validity for the target machine.
1400
1401    Register and memory references must have mode MODE in order to be valid,
1402    but some constants have no machine mode and are valid for any mode.
1403
1404    If MODE is VOIDmode, OP is checked for validity for whatever mode
1405    it has.
1406
1407    The main use of this function is as a predicate in match_operand
1408    expressions in the machine description.  */
1409
1410 int
1411 general_operand (rtx op, machine_mode mode)
1412 {
1413   enum rtx_code code = GET_CODE (op);
1414
1415   if (mode == VOIDmode)
1416     mode = GET_MODE (op);
1417
1418   /* Don't accept CONST_INT or anything similar
1419      if the caller wants something floating.  */
1420   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1421       && GET_MODE_CLASS (mode) != MODE_INT
1422       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1423     return 0;
1424
1425   if (CONST_INT_P (op)
1426       && mode != VOIDmode
1427       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1428     return 0;
1429
1430   if (CONSTANT_P (op))
1431     return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1432              || mode == VOIDmode)
1433             && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1434             && targetm.legitimate_constant_p (mode == VOIDmode
1435                                               ? GET_MODE (op)
1436                                               : mode, op));
1437
1438   /* Except for certain constants with VOIDmode, already checked for,
1439      OP's mode must match MODE if MODE specifies a mode.  */
1440
1441   if (GET_MODE (op) != mode)
1442     return 0;
1443
1444   if (code == SUBREG)
1445     {
1446       rtx sub = SUBREG_REG (op);
1447
1448 #ifdef INSN_SCHEDULING
1449       /* On machines that have insn scheduling, we want all memory
1450          references to be explicit, so outlaw paradoxical SUBREGs.
1451          However, we must allow them after reload so that they can
1452          get cleaned up by cleanup_subreg_operands.  */
1453       if (!reload_completed && MEM_P (sub)
1454           && paradoxical_subreg_p (op))
1455         return 0;
1456 #endif
1457       /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1458          may result in an incorrect reference.  We should simplify all valid
1459          subregs of MEM anyway.  But allow this after reload because we
1460          might be called from cleanup_subreg_operands.
1461
1462          ??? This is a kludge.  */
1463       if (!reload_completed
1464           && maybe_ne (SUBREG_BYTE (op), 0)
1465           && MEM_P (sub))
1466         return 0;
1467
1468       if (REG_P (sub)
1469           && REGNO (sub) < FIRST_PSEUDO_REGISTER
1470           && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1471           && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1472           && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1473           /* LRA can generate some invalid SUBREGs just for matched
1474              operand reload presentation.  LRA needs to treat them as
1475              valid.  */
1476           && ! LRA_SUBREG_P (op))
1477         return 0;
1478
1479       /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
1480          create such rtl, and we must reject it.  */
1481       if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1482           /* LRA can use a subreg to store a floating point value in an
1483              integer mode.  Although the floating point and the
1484              integer modes need the same number of hard registers, the
1485              size of the floating point mode can be less than the integer
1486              mode.  */
1487           && ! lra_in_progress
1488           && paradoxical_subreg_p (op))
1489         return 0;
1490
1491       op = sub;
1492       code = GET_CODE (op);
1493     }
1494
1495   if (code == REG)
1496     return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1497             || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1498
1499   if (code == MEM)
1500     {
1501       rtx y = XEXP (op, 0);
1502
1503       if (! volatile_ok && MEM_VOLATILE_P (op))
1504         return 0;
1505
1506       /* Use the mem's mode, since it will be reloaded thus.  LRA can
1507          generate move insns with invalid addresses, which are made valid
1508          and efficiently calculated by LRA through further numerous
1509          transformations.  */
1510       if (lra_in_progress
1511           || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1512         return 1;
1513     }
1514
1515   return 0;
1516 }
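/* Example of the match_operand use mentioned above, as it might appear
   in a machine description (illustrative fragment only; the insn name is
   hypothetical):

     (define_insn "*movsi_example"
       [(set (match_operand:SI 0 "register_operand" "=r")
             (match_operand:SI 1 "general_operand" "g"))]
       ...)

   Operand 1 may then be a register, a valid memory reference, or a
   legitimate constant, exactly the three cases accepted above.  */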
1517
1518 /* Return 1 if OP is a valid memory address for a memory reference
1519    of mode MODE.
1520
1521    The main use of this function is as a predicate in match_operand
1522    expressions in the machine description.  */
1523
1524 int
1525 address_operand (rtx op, machine_mode mode)
1526 {
1527   /* Wrong mode for an address expr.  */
1528   if (GET_MODE (op) != VOIDmode
1529       && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1530     return false;
1531
1532   return memory_address_p (mode, op);
1533 }
1534
1535 /* Return 1 if OP is a register reference of mode MODE.
1536    If MODE is VOIDmode, accept a register in any mode.
1537
1538    The main use of this function is as a predicate in match_operand
1539    expressions in the machine description.  */
1540
1541 int
1542 register_operand (rtx op, machine_mode mode)
1543 {
1544   if (GET_CODE (op) == SUBREG)
1545     {
1546       rtx sub = SUBREG_REG (op);
1547
1548       /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1549          because it is guaranteed to be reloaded into one.
1550          Just make sure the MEM is valid in itself.
1551          (Ideally, (SUBREG (MEM)...) should not exist after reload,
1552          but currently it does result from (SUBREG (REG)...) where the
1553          reg went on the stack.)  */
1554       if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1555         return 0;
1556     }
1557   else if (!REG_P (op))
1558     return 0;
1559   return general_operand (op, mode);
1560 }
1561
1562 /* Return 1 for a register in Pmode; ignore the tested mode.  */
1563
1564 int
1565 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1566 {
1567   return register_operand (op, Pmode);
1568 }
1569
1570 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1571    or a hard register.  */
1572
1573 int
1574 scratch_operand (rtx op, machine_mode mode)
1575 {
1576   if (GET_MODE (op) != mode && mode != VOIDmode)
1577     return 0;
1578
1579   return (GET_CODE (op) == SCRATCH
1580           || (REG_P (op)
1581               && (lra_in_progress
1582                   || (REGNO (op) < FIRST_PSEUDO_REGISTER
1583                       && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1584 }
1585
1586 /* Return 1 if OP is a valid immediate operand for mode MODE.
1587
1588    The main use of this function is as a predicate in match_operand
1589    expressions in the machine description.  */
1590
1591 int
1592 immediate_operand (rtx op, machine_mode mode)
1593 {
1594   /* Don't accept CONST_INT or anything similar
1595      if the caller wants something floating.  */
1596   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1597       && GET_MODE_CLASS (mode) != MODE_INT
1598       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1599     return 0;
1600
1601   if (CONST_INT_P (op)
1602       && mode != VOIDmode
1603       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1604     return 0;
1605
1606   return (CONSTANT_P (op)
1607           && (GET_MODE (op) == mode || mode == VOIDmode
1608               || GET_MODE (op) == VOIDmode)
1609           && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1610           && targetm.legitimate_constant_p (mode == VOIDmode
1611                                             ? GET_MODE (op)
1612                                             : mode, op));
1613 }
1614
1615 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */
1616
1617 int
1618 const_int_operand (rtx op, machine_mode mode)
1619 {
1620   if (!CONST_INT_P (op))
1621     return 0;
1622
1623   if (mode != VOIDmode
1624       && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1625     return 0;
1626
1627   return 1;
1628 }
1629
1630 #if TARGET_SUPPORTS_WIDE_INT
1631 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1632    of mode MODE.  */
1633 int
1634 const_scalar_int_operand (rtx op, machine_mode mode)
1635 {
1636   if (!CONST_SCALAR_INT_P (op))
1637     return 0;
1638
1639   if (CONST_INT_P (op))
1640     return const_int_operand (op, mode);
1641
1642   if (mode != VOIDmode)
1643     {
1644       scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
1645       int prec = GET_MODE_PRECISION (int_mode);
1646       int bitsize = GET_MODE_BITSIZE (int_mode);
1647
1648       if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1649         return 0;
1650
1651       if (prec == bitsize)
1652         return 1;
1653       else
1654         {
1655           /* Multiword partial int.  */
1656           HOST_WIDE_INT x
1657             = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1658           return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1659         }
1660     }
1661   return 1;
1662 }
1663
1664 /* Returns 1 if OP is an operand that is a constant integer or constant
1665    floating-point number of MODE.  */
1666
1667 int
1668 const_double_operand (rtx op, machine_mode mode)
1669 {
1670   return (GET_CODE (op) == CONST_DOUBLE)
1671          && (GET_MODE (op) == mode || mode == VOIDmode);
1672 }
1673 #else
1674 /* Returns 1 if OP is an operand that is a constant integer or constant
1675    floating-point number of MODE.  */
1676
1677 int
1678 const_double_operand (rtx op, machine_mode mode)
1679 {
1680   /* Don't accept CONST_INT or anything similar
1681      if the caller wants something floating.  */
1682   if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1683       && GET_MODE_CLASS (mode) != MODE_INT
1684       && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1685     return 0;
1686
1687   return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1688           && (mode == VOIDmode || GET_MODE (op) == mode
1689               || GET_MODE (op) == VOIDmode));
1690 }
1691 #endif
1692 /* Return 1 if OP is a general operand that is not an immediate
1693    operand of mode MODE.  */
1694
1695 int
1696 nonimmediate_operand (rtx op, machine_mode mode)
1697 {
1698   return (general_operand (op, mode) && ! CONSTANT_P (op));
1699 }
1700
1701 /* Return 1 if OP is a register reference or immediate value of mode MODE.  */
1702
1703 int
1704 nonmemory_operand (rtx op, machine_mode mode)
1705 {
1706   if (CONSTANT_P (op))
1707     return immediate_operand (op, mode);
1708   return register_operand (op, mode);
1709 }
1710
1711 /* Return 1 if OP is a valid operand that stands for pushing a
1712    value of mode MODE onto the stack.
1713
1714    The main use of this function is as a predicate in match_operand
1715    expressions in the machine description.  */
1716
1717 int
1718 push_operand (rtx op, machine_mode mode)
1719 {
1720   if (!MEM_P (op))
1721     return 0;
1722
1723   if (mode != VOIDmode && GET_MODE (op) != mode)
1724     return 0;
1725
1726   poly_int64 rounded_size = GET_MODE_SIZE (mode);
1727
1728 #ifdef PUSH_ROUNDING
1729   rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
1730 #endif
1731
1732   op = XEXP (op, 0);
1733
1734   if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
1735     {
1736       if (GET_CODE (op) != STACK_PUSH_CODE)
1737         return 0;
1738     }
1739   else
1740     {
1741       poly_int64 offset;
1742       if (GET_CODE (op) != PRE_MODIFY
1743           || GET_CODE (XEXP (op, 1)) != PLUS
1744           || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1745           || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
1746           || (STACK_GROWS_DOWNWARD
1747               ? maybe_ne (offset, -rounded_size)
1748               : maybe_ne (offset, rounded_size)))
1749         return 0;
1750     }
1751
1752   return XEXP (op, 0) == stack_pointer_rtx;
1753 }
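/* For example, with the default STACK_PUSH_CODE of PRE_DEC on a
   downward-growing stack, a simple SImode push has the form
   (mem:SI (pre_dec:SI (reg:SI sp))), and when PUSH_ROUNDING pads the
   4-byte value to 8 bytes the accepted PRE_MODIFY form is
   (mem:SI (pre_modify (reg:SI sp) (plus (reg:SI sp) (const_int -8)))).  */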
1754
1755 /* Return 1 if OP is a valid operand that stands for popping a
1756    value of mode MODE off the stack.
1757
1758    The main use of this function is as a predicate in match_operand
1759    expressions in the machine description.  */
1760
1761 int
1762 pop_operand (rtx op, machine_mode mode)
1763 {
1764   if (!MEM_P (op))
1765     return 0;
1766
1767   if (mode != VOIDmode && GET_MODE (op) != mode)
1768     return 0;
1769
1770   op = XEXP (op, 0);
1771
1772   if (GET_CODE (op) != STACK_POP_CODE)
1773     return 0;
1774
1775   return XEXP (op, 0) == stack_pointer_rtx;
1776 }
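/* Correspondingly, with the default STACK_POP_CODE of POST_INC, an
   SImode pop has the form (mem:SI (post_inc:SI (reg:SI sp))).  */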
1777
1778 /* Return 1 if ADDR is a valid memory address
1779    for mode MODE in address space AS.  */
1780
1781 int
1782 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1783                              rtx addr, addr_space_t as)
1784 {
1785 #ifdef GO_IF_LEGITIMATE_ADDRESS
1786   gcc_assert (ADDR_SPACE_GENERIC_P (as));
1787   GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1788   return 0;
1789
1790  win:
1791   return 1;
1792 #else
1793   return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1794 #endif
1795 }
1796
1797 /* Return 1 if OP is a valid memory reference with mode MODE,
1798    including a valid address.
1799
1800    The main use of this function is as a predicate in match_operand
1801    expressions in the machine description.  */
1802
1803 int
1804 memory_operand (rtx op, machine_mode mode)
1805 {
1806   rtx inner;
1807
1808   if (! reload_completed)
1809     /* Note that no SUBREG is a memory operand before the end of the reload
1810        pass, because (SUBREG (MEM...)) forces reloading into a register.  */
1811     return MEM_P (op) && general_operand (op, mode);
1812
1813   if (mode != VOIDmode && GET_MODE (op) != mode)
1814     return 0;
1815
1816   inner = op;
1817   if (GET_CODE (inner) == SUBREG)
1818     inner = SUBREG_REG (inner);
1819
1820   return (MEM_P (inner) && general_operand (op, mode));
1821 }
1822
1823 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1824    that is, a memory reference whose address is a general_operand.  */
1825
1826 int
1827 indirect_operand (rtx op, machine_mode mode)
1828 {
1829   /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
1830   if (! reload_completed
1831       && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1832     {
1833       if (mode != VOIDmode && GET_MODE (op) != mode)
1834         return 0;
1835
1836       /* The only way that we can have a general_operand as the resulting
1837          address is if OFFSET is zero and the address already is an operand
1838          or if the address is (plus Y (const_int -OFFSET)) and Y is an
1839          operand.  */
1840       poly_int64 offset;
1841       rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
1842       return (known_eq (offset + SUBREG_BYTE (op), 0)
1843               && general_operand (addr, Pmode));
1844     }
1845
1846   return (MEM_P (op)
1847           && memory_operand (op, mode)
1848           && general_operand (XEXP (op, 0), Pmode));
1849 }
1850
1851 /* Return 1 if this is an ordered comparison operator (not including
1852    ORDERED and UNORDERED).  */
1853
1854 int
1855 ordered_comparison_operator (rtx op, machine_mode mode)
1856 {
1857   if (mode != VOIDmode && GET_MODE (op) != mode)
1858     return false;
1859   switch (GET_CODE (op))
1860     {
1861     case EQ:
1862     case NE:
1863     case LT:
1864     case LTU:
1865     case LE:
1866     case LEU:
1867     case GT:
1868     case GTU:
1869     case GE:
1870     case GEU:
1871       return true;
1872     default:
1873       return false;
1874     }
1875 }
1876
1877 /* Return 1 if this is a comparison operator.  This allows the use of
1878    MATCH_OPERATOR to recognize all the branch insns.  */
1879
1880 int
1881 comparison_operator (rtx op, machine_mode mode)
1882 {
1883   return ((mode == VOIDmode || GET_MODE (op) == mode)
1884           && COMPARISON_P (op));
1885 }
1886
1887 /* If BODY is an insn body that uses ASM_OPERANDS, return it.  */
1888
1889 rtx
1890 extract_asm_operands (rtx body)
1891 {
1892   rtx tmp;
1893   switch (GET_CODE (body))
1894     {
1895     case ASM_OPERANDS:
1896       return body;
1897
1898     case SET:
1899       /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
1900       tmp = SET_SRC (body);
1901       if (GET_CODE (tmp) == ASM_OPERANDS)
1902         return tmp;
1903       break;
1904
1905     case PARALLEL:
1906       tmp = XVECEXP (body, 0, 0);
1907       if (GET_CODE (tmp) == ASM_OPERANDS)
1908         return tmp;
1909       if (GET_CODE (tmp) == SET)
1910         {
1911           tmp = SET_SRC (tmp);
1912           if (GET_CODE (tmp) == ASM_OPERANDS)
1913             return tmp;
1914         }
1915       break;
1916
1917     default:
1918       break;
1919     }
1920   return NULL;
1921 }
1922
1923 /* If BODY is an insn body that uses ASM_OPERANDS,
1924    return the number of operands (both input and output) in the insn.
1925    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1926    return 0.
1927    Otherwise return -1.  */
1928
1929 int
1930 asm_noperands (const_rtx body)
1931 {
1932   rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1933   int i, n_sets = 0;
1934
1935   if (asm_op == NULL)
1936     {
1937       if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1938           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1939         {
1940           /* body is [(asm_input ...) (clobber (reg ...))...].  */
1941           for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1942             if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1943               return -1;
1944           return 0;
1945         }
1946       return -1;
1947     }
1948
1949   if (GET_CODE (body) == SET)
1950     n_sets = 1;
1951   else if (GET_CODE (body) == PARALLEL)
1952     {
1953       if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1954         {
1955           /* Multiple output operands, or 1 output plus some clobbers:
1956              body is
1957              [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
1958           /* Count backwards through CLOBBERs to determine number of SETs.  */
1959           for (i = XVECLEN (body, 0); i > 0; i--)
1960             {
1961               if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1962                 break;
1963               if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1964                 return -1;
1965             }
1966
1967           /* N_SETS is now number of output operands.  */
1968           n_sets = i;
1969
1970           /* Verify that all the SETs we have
1971              came from a single original asm_operands insn
1972              (so that invalid combinations are blocked).  */
1973           for (i = 0; i < n_sets; i++)
1974             {
1975               rtx elt = XVECEXP (body, 0, i);
1976               if (GET_CODE (elt) != SET)
1977                 return -1;
1978               if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1979                 return -1;
1980               /* If these ASM_OPERANDS rtx's came from different original insns
1981                  then they aren't allowed together.  */
1982               if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1983                   != ASM_OPERANDS_INPUT_VEC (asm_op))
1984                 return -1;
1985             }
1986         }
1987       else
1988         {
1989           /* 0 outputs, but some clobbers:
1990              body is [(asm_operands ...) (clobber (reg ...))...].  */
1991           /* Make sure all the other parallel things really are clobbers.  */
1992           for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1993             if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1994               return -1;
1995         }
1996     }
1997
1998   return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1999           + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
2000 }
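/* Worked example (illustrative): for

     asm ("add %0, %1" : "=r" (x) : "r" (y));

   BODY is (set (reg x) (asm_operands ...)) with one entry in the input
   vector, so asm_noperands returns 2: one output SET plus one input,
   with no labels.  */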
2001
2002 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
2003    copy its operands (both input and output) into the vector OPERANDS,
2004    the locations of the operands within the insn into the vector OPERAND_LOCS,
2005    and the constraints for the operands into CONSTRAINTS.
2006    Write the modes of the operands into MODES.
2007    Write the location info into LOC.
2008    Return the assembler-template.
2009    If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
2010    return the basic assembly string.
2011
2012    If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
2013    we don't store that info.  */
2014
2015 const char *
2016 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
2017                      const char **constraints, machine_mode *modes,
2018                      location_t *loc)
2019 {
2020   int nbase = 0, n, i;
2021   rtx asmop;
2022
2023   switch (GET_CODE (body))
2024     {
2025     case ASM_OPERANDS:
2026       /* Zero output asm: BODY is (asm_operands ...).  */
2027       asmop = body;
2028       break;
2029
2030     case SET:
2031       /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
2032       asmop = SET_SRC (body);
2033
2034       /* The output is in the SET.
2035          Its constraint is in the ASM_OPERANDS itself.  */
2036       if (operands)
2037         operands[0] = SET_DEST (body);
2038       if (operand_locs)
2039         operand_locs[0] = &SET_DEST (body);
2040       if (constraints)
2041         constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
2042       if (modes)
2043         modes[0] = GET_MODE (SET_DEST (body));
2044       nbase = 1;
2045       break;
2046
2047     case PARALLEL:
2048       {
2049         int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
2050
2051         asmop = XVECEXP (body, 0, 0);
2052         if (GET_CODE (asmop) == SET)
2053           {
2054             asmop = SET_SRC (asmop);
2055
2056             /* At least one output, plus some CLOBBERs.  The outputs are in
2057                the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
2058             for (i = 0; i < nparallel; i++)
2059               {
2060                 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
2061                   break;                /* Past last SET */
2062                 gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
2063                 if (operands)
2064                   operands[i] = SET_DEST (XVECEXP (body, 0, i));
2065                 if (operand_locs)
2066                   operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
2067                 if (constraints)
2068                   constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
2069                 if (modes)
2070                   modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
2071               }
2072             nbase = i;
2073           }
2074         else if (GET_CODE (asmop) == ASM_INPUT)
2075           {
2076             if (loc)
2077               *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
2078             return XSTR (asmop, 0);
2079           }
2080         break;
2081       }
2082
2083     default:
2084       gcc_unreachable ();
2085     }
2086
2087   n = ASM_OPERANDS_INPUT_LENGTH (asmop);
2088   for (i = 0; i < n; i++)
2089     {
2090       if (operand_locs)
2091         operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
2092       if (operands)
2093         operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
2094       if (constraints)
2095         constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
2096       if (modes)
2097         modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
2098     }
2099   nbase += n;
2100
2101   n = ASM_OPERANDS_LABEL_LENGTH (asmop);
2102   for (i = 0; i < n; i++)
2103     {
2104       if (operand_locs)
2105         operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
2106       if (operands)
2107         operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
2108       if (constraints)
2109         constraints[nbase + i] = "";
2110       if (modes)
2111         modes[nbase + i] = Pmode;
2112     }
2113
2114   if (loc)
2115     *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
2116
2117   return ASM_OPERANDS_TEMPLATE (asmop);
2118 }
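/* Sketch of a typical call, not part of recog.c: the caller provides
   arrays of at least MAX_RECOG_OPERANDS entries and passes NULL for
   whatever it does not need:

     rtx operands[MAX_RECOG_OPERANDS];
     const char *constraints[MAX_RECOG_OPERANDS];
     machine_mode modes[MAX_RECOG_OPERANDS];
     location_t loc;
     const char *templ
       = decode_asm_operands (PATTERN (insn), operands, NULL,
                              constraints, modes, &loc);
*/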
2119
2120 /* Parse inline assembly string STRING and determine which operands are
2121    referenced by % markers.  For the first NOPERANDS operands, set USED[I]
2122    to true if operand I is referenced.
2123
2124    This is intended to distinguish barrier-like asms such as:
2125
2126       asm ("" : "=m" (...));
2127
2128    from real references such as:
2129
2130       asm ("sw\t$0, %0" : "=m" (...));  */
2131
2132 void
2133 get_referenced_operands (const char *string, bool *used,
2134                          unsigned int noperands)
2135 {
2136   memset (used, 0, sizeof (bool) * noperands);
2137   const char *p = string;
2138   while (*p)
2139     switch (*p)
2140       {
2141       case '%':
2142         p += 1;
2143         /* A letter followed by a digit indicates an operand number.  */
2144         if (ISALPHA (p[0]) && ISDIGIT (p[1]))
2145           p += 1;
2146         if (ISDIGIT (*p))
2147           {
2148             char *endptr;
2149             unsigned long opnum = strtoul (p, &endptr, 10);
2150             if (endptr != p && opnum < noperands)
2151               used[opnum] = true;
2152             p = endptr;
2153           }
2154         else
2155           p += 1;
2156         break;
2157
2158       default:
2159         p++;
2160         break;
2161       }
2162 }
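/* For instance, using the templates from the comment above: scanning
   "sw\t$0, %0" with noperands == 1 sets used[0] to true because of the
   "%0" reference, while scanning the empty template "" leaves used[0]
   false, marking the asm as barrier-like.  */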
2163
2164 /* Check if an asm_operand matches its constraints.
2165    Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */
2166
2167 int
2168 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
2169 {
2170   int result = 0;
2171   bool incdec_ok = false;
2172
2173   /* Use constrain_operands after reload.  */
2174   gcc_assert (!reload_completed);
2175
2176   /* Empty constraint string is the same as "X,...,X", i.e. X for as
2177      many alternatives as required to match the other operands.  */
2178   if (*constraint == '\0')
2179     result = 1;
2180
2181   while (*constraint)
2182     {
2183       enum constraint_num cn;
2184       char c = *constraint;
2185       int len;
2186       switch (c)
2187         {
2188         case ',':
2189           constraint++;
2190           continue;
2191
2192         case '0': case '1': case '2': case '3': case '4':
2193         case '5': case '6': case '7': case '8': case '9':
2194           /* If caller provided constraints pointer, look up
2195              the matching constraint.  Otherwise, our caller should have
2196              given us the proper matching constraint, but we can't
2197              actually fail the check if they didn't.  Indicate that
2198              results are inconclusive.  */
2199           if (constraints)
2200             {
2201               char *end;
2202               unsigned long match;
2203
2204               match = strtoul (constraint, &end, 10);
2205               if (!result)
2206                 result = asm_operand_ok (op, constraints[match], NULL);
2207               constraint = (const char *) end;
2208             }
2209           else
2210             {
2211               do
2212                 constraint++;
2213               while (ISDIGIT (*constraint));
2214               if (! result)
2215                 result = -1;
2216             }
2217           continue;
2218
2219           /* The rest of the compiler assumes that reloading the address
2220              of a MEM into a register will make it fit an 'o' constraint.
2221              That is, if it sees a MEM operand for an 'o' constraint,
2222              it assumes that (mem (base-reg)) will fit.
2223
2224              That assumption fails on targets that don't have offsettable
2225              addresses at all.  We therefore need to treat 'o' asm
2226              constraints as a special case and only accept operands that
2227              are already offsettable, thus proving that at least one
2228              offsettable address exists.  */
2229         case 'o': /* offsettable */
2230           if (offsettable_nonstrict_memref_p (op))
2231             result = 1;
2232           break;
2233
2234         case 'g':
2235           if (general_operand (op, VOIDmode))
2236             result = 1;
2237           break;
2238
2239         case '<':
2240         case '>':
2241           /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
2242              to exist, excepting those that expand_call created.  Further,
2243              on some machines which do not have generalized auto inc/dec,
2244              an inc/dec is not a memory_operand.
2245
2246              Match any memory and hope things are resolved after reload.  */
2247           incdec_ok = true;
2248           /* FALLTHRU */
2249         default:
2250           cn = lookup_constraint (constraint);
2251           rtx mem = NULL;
2252           switch (get_constraint_type (cn))
2253             {
2254             case CT_REGISTER:
2255               if (!result
2256                   && reg_class_for_constraint (cn) != NO_REGS
2257                   && GET_MODE (op) != BLKmode
2258                   && register_operand (op, VOIDmode))
2259                 result = 1;
2260               break;
2261
2262             case CT_CONST_INT:
2263               if (!result
2264                   && CONST_INT_P (op)
2265                   && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2266                 result = 1;
2267               break;
2268
2269             case CT_MEMORY:
2270               mem = op;
2271               /* Fall through.  */
2272             case CT_SPECIAL_MEMORY:
2273               /* Every memory operand can be reloaded to fit.  */
2274               if (!mem)
2275                 mem = extract_mem_from_operand (op);
2276               result = result || memory_operand (mem, VOIDmode);
2277               break;
2278
2279             case CT_ADDRESS:
2280               /* Every address operand can be reloaded to fit.  */
2281               result = result || address_operand (op, VOIDmode);
2282               break;
2283
2284             case CT_FIXED_FORM:
2285               result = result || constraint_satisfied_p (op, cn);
2286               break;
2287             }
2288           break;
2289         }
2290       len = CONSTRAINT_LEN (c, constraint);
2291       do
2292         constraint++;
2293       while (--len && *constraint && *constraint != ',');
2294       if (len)
2295         return 0;
2296     }
2297
2298   /* For operands without < or > constraints reject side-effects.  */
2299   if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
2300     switch (GET_CODE (XEXP (op, 0)))
2301       {
2302       case PRE_INC:
2303       case POST_INC:
2304       case PRE_DEC:
2305       case POST_DEC:
2306       case PRE_MODIFY:
2307       case POST_MODIFY:
2308         return 0;
2309       default:
2310         break;
2311       }
2312
2313   return result;
2314 }
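/* For example, asm_operand_ok (op, "rm", NULL) is positive when OP is
   either a valid register operand or a memory operand (memories can
   always be reloaded to fit), while a matching-digit constraint such as
   "0" yields -1 (inconclusive) when no CONSTRAINTS array is supplied.  */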
2315
2316 /* Given an rtx *P, if it is a sum containing an integer constant term,
2317    return the location (type rtx *) of the pointer to that constant term.
2318    Otherwise, return a null pointer.  */
2319
2320 rtx *
2321 find_constant_term_loc (rtx *p)
2322 {
2323   rtx *tem;
2324   enum rtx_code code = GET_CODE (*p);
2325
2326   /* If *P IS such a constant term, P is its location.  */
2327
2328   if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2329       || code == CONST)
2330     return p;
2331
2332   /* Otherwise, if not a sum, it has no constant term.  */
2333
2334   if (GET_CODE (*p) != PLUS)
2335     return 0;
2336
2337   /* If one of the summands is constant, return its location.  */
2338
2339   if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2340       && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2341     return p;
2342
2343   /* Otherwise, check each summand for containing a constant term.  */
2344
2345   if (XEXP (*p, 0) != 0)
2346     {
2347       tem = find_constant_term_loc (&XEXP (*p, 0));
2348       if (tem != 0)
2349         return tem;
2350     }
2351
2352   if (XEXP (*p, 1) != 0)
2353     {
2354       tem = find_constant_term_loc (&XEXP (*p, 1));
2355       if (tem != 0)
2356         return tem;
2357     }
2358
2359   return 0;
2360 }
2361
2362 /* Return 1 if OP is a memory reference
2363    whose address contains no side effects
2364    and remains valid after the addition
2365    of a positive integer less than the
2366    size of the object being referenced.
2367
2368    We assume that the original address is valid and do not check it.
2369
2370    This uses strict_memory_address_p as a subroutine, so
2371    don't use it before reload.  */
2372
2373 int
2374 offsettable_memref_p (rtx op)
2375 {
2376   return ((MEM_P (op))
2377           && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
2378                                                MEM_ADDR_SPACE (op)));
2379 }
2380
2381 /* Similar, but don't require a strictly valid mem ref:
2382    consider pseudo-regs valid as index or base regs.  */
2383
2384 int
2385 offsettable_nonstrict_memref_p (rtx op)
2386 {
2387   return ((MEM_P (op))
2388           && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
2389                                                MEM_ADDR_SPACE (op)));
2390 }
2391
2392 /* Return 1 if Y is a memory address which contains no side effects
2393    and would remain valid for address space AS after the addition of
2394    a positive integer less than the size of that mode.
2395
2396    We assume that the original address is valid and do not check it.
2397    We do check that it is valid for narrower modes.
2398
2399    If STRICTP is nonzero, we require a strictly valid address,
2400    for the sake of use in reload.c.  */
2401
2402 int
2403 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
2404                                   addr_space_t as)
2405 {
2406   enum rtx_code ycode = GET_CODE (y);
2407   rtx z;
2408   rtx y1 = y;
2409   rtx *y2;
2410   int (*addressp) (machine_mode, rtx, addr_space_t) =
2411     (strictp ? strict_memory_address_addr_space_p
2412              : memory_address_addr_space_p);
2413   poly_int64 mode_sz = GET_MODE_SIZE (mode);
2414
2415   if (CONSTANT_ADDRESS_P (y))
2416     return 1;
2417
2418   /* Adjusting an offsettable address involves changing to a narrower mode.
2419      Make sure that's OK.  */
2420
2421   if (mode_dependent_address_p (y, as))
2422     return 0;
2423
2424   machine_mode address_mode = GET_MODE (y);
2425   if (address_mode == VOIDmode)
2426     address_mode = targetm.addr_space.address_mode (as);
2427 #ifdef POINTERS_EXTEND_UNSIGNED
2428   machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2429 #endif
2430
2431   /* ??? How much offset does an offsettable BLKmode reference need?
2432      Clearly that depends on the situation in which it's being used.
2433      However, the current situation in which we test 0xffffffff is
2434      less than ideal.  Caveat user.  */
2435   if (known_eq (mode_sz, 0))
2436     mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2437
2438   /* If the expression contains a constant term,
2439      see if it remains valid when the maximum possible offset is added.  */
2440
2441   if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2442     {
2443       int good;
2444
2445       y1 = *y2;
2446       *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2447       /* Use QImode because an odd displacement may be automatically invalid
2448          for any wider mode.  But it should be valid for a single byte.  */
2449       good = (*addressp) (QImode, y, as);
2450
2451       /* In any case, restore the old contents of the memory.  */
2452       *y2 = y1;
2453       return good;
2454     }
2455
2456   if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2457     return 0;
2458
2459   /* The offset added here is chosen as the maximum offset that
2460      any instruction could need to add when operating on something
2461      of the specified mode.  We assume that if Y and Y+c are
2462      valid addresses then so is Y+d for all 0<d<c.  adjust_address will
2463      go inside a LO_SUM here, so we do so as well.  */
2464   if (GET_CODE (y) == LO_SUM
2465       && mode != BLKmode
2466       && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
2467     z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2468                         plus_constant (address_mode, XEXP (y, 1),
2469                                        mode_sz - 1));
2470 #ifdef POINTERS_EXTEND_UNSIGNED
2471   /* Likewise for a ZERO_EXTEND from pointer_mode.  */
2472   else if (POINTERS_EXTEND_UNSIGNED > 0
2473            && GET_CODE (y) == ZERO_EXTEND
2474            && GET_MODE (XEXP (y, 0)) == pointer_mode)
2475     z = gen_rtx_ZERO_EXTEND (address_mode,
2476                              plus_constant (pointer_mode, XEXP (y, 0),
2477                                             mode_sz - 1));
2478 #endif
2479   else
2480     z = plus_constant (address_mode, y, mode_sz - 1);
2481
2482   /* Use QImode because an odd displacement may be automatically invalid
2483      for any wider mode.  But it should be valid for a single byte.  */
2484   return (*addressp) (QImode, z, as);
2485 }
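/* Worked example: for a 4-byte SImode reference whose address is
   (plus (reg X) (const_int 12)), the constant-term branch above
   temporarily rewrites the term and asks whether
   (plus (reg X) (const_int 15)), i.e. the original address plus
   mode_sz - 1, is still a valid QImode address.  If so, every byte of
   the object can be reached by offsetting the address.  */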
2486
2487 /* Return 1 if ADDR is an address-expression whose effect depends
2488    on the mode of the memory reference it is used in.
2489
2490    ADDRSPACE is the address space associated with the address.
2491
2492    Autoincrement addressing is a typical example of mode-dependence
2493    because the amount of the increment depends on the mode.  */
2494
2495 bool
2496 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2497 {
2498   /* Auto-increment addressing with anything other than post_modify
2499      or pre_modify always introduces a mode dependency.  Catch such
2500      cases now instead of deferring to the target.  */
2501   if (GET_CODE (addr) == PRE_INC
2502       || GET_CODE (addr) == POST_INC
2503       || GET_CODE (addr) == PRE_DEC
2504       || GET_CODE (addr) == POST_DEC)
2505     return true;
2506
2507   return targetm.mode_dependent_address_p (addr, addrspace);
2508 }
2509
2510 /* Return true if boolean attribute ATTR is supported.  */
2511
2512 static bool
2513 have_bool_attr (bool_attr attr)
2514 {
2515   switch (attr)
2516     {
2517     case BA_ENABLED:
2518       return HAVE_ATTR_enabled;
2519     case BA_PREFERRED_FOR_SIZE:
2520       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2521     case BA_PREFERRED_FOR_SPEED:
2522       return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2523     }
2524   gcc_unreachable ();
2525 }
2526
2527 /* Return the value of ATTR for instruction INSN.  */
2528
2529 static bool
2530 get_bool_attr (rtx_insn *insn, bool_attr attr)
2531 {
2532   switch (attr)
2533     {
2534     case BA_ENABLED:
2535       return get_attr_enabled (insn);
2536     case BA_PREFERRED_FOR_SIZE:
2537       return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2538     case BA_PREFERRED_FOR_SPEED:
2539       return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2540     }
2541   gcc_unreachable ();
2542 }
2543
2544 /* Like get_bool_attr_mask, but don't use the cache.  */
2545
2546 static alternative_mask
2547 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2548 {
2549   /* Temporarily install enough information for get_attr_<foo> to assume
2550      that the insn operands are already cached.  As above, the attribute
2551      mustn't depend on the values of operands, so we don't provide their
2552      real values here.  */
2553   rtx_insn *old_insn = recog_data.insn;
2554   int old_alternative = which_alternative;
2555
2556   recog_data.insn = insn;
2557   alternative_mask mask = ALL_ALTERNATIVES;
2558   int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2559   for (int i = 0; i < n_alternatives; i++)
2560     {
2561       which_alternative = i;
2562       if (!get_bool_attr (insn, attr))
2563         mask &= ~ALTERNATIVE_BIT (i);
2564     }
2565
2566   recog_data.insn = old_insn;
2567   which_alternative = old_alternative;
2568   return mask;
2569 }
2570
2571 /* Return the mask of operand alternatives that are allowed for INSN
2572    by boolean attribute ATTR.  This mask depends only on INSN and on
2573    the current target; it does not depend on things like the values of
2574    operands.  */
2575
2576 static alternative_mask
2577 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2578 {
2579   /* Quick exit for asms and for targets that don't use these attributes.  */
2580   int code = INSN_CODE (insn);
2581   if (code < 0 || !have_bool_attr (attr))
2582     return ALL_ALTERNATIVES;
2583
2584   /* Calling get_attr_<foo> can be expensive, so cache the mask
2585      for speed.  */
2586   if (!this_target_recog->x_bool_attr_masks[code][attr])
2587     this_target_recog->x_bool_attr_masks[code][attr]
2588       = get_bool_attr_mask_uncached (insn, attr);
2589   return this_target_recog->x_bool_attr_masks[code][attr];
2590 }
2591
2592 /* Return the set of alternatives of INSN that are allowed by the current
2593    target.  */
2594
2595 alternative_mask
2596 get_enabled_alternatives (rtx_insn *insn)
2597 {
2598   return get_bool_attr_mask (insn, BA_ENABLED);
2599 }
2600
2601 /* Return the set of alternatives of INSN that are allowed by the current
2602    target and are preferred for the current size/speed optimization
2603    choice.  */
2604
2605 alternative_mask
2606 get_preferred_alternatives (rtx_insn *insn)
2607 {
2608   if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2609     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2610   else
2611     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2612 }
2613
2614 /* Return the set of alternatives of INSN that are allowed by the current
2615    target and are preferred for the size/speed optimization choice
2616    associated with BB.  Passing a separate BB is useful if INSN has not
2617    been emitted yet or if we are considering moving it to a different
2618    block.  */
2619
2620 alternative_mask
2621 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2622 {
2623   if (optimize_bb_for_speed_p (bb))
2624     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2625   else
2626     return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2627 }
2628
2629 /* Assert that the cached boolean attributes for INSN are still accurate.
2630    The backend is required to define these attributes in a way that only
2631    depends on the current target (rather than operands, compiler phase,
2632    etc.).  */
2633
2634 bool
2635 check_bool_attrs (rtx_insn *insn)
2636 {
2637   int code = INSN_CODE (insn);
2638   if (code >= 0)
2639     for (int i = 0; i <= BA_LAST; ++i)
2640       {
2641         enum bool_attr attr = (enum bool_attr) i;
2642         if (this_target_recog->x_bool_attr_masks[code][attr])
2643           gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2644                       == get_bool_attr_mask_uncached (insn, attr));
2645       }
2646   return true;
2647 }
2648
2649 /* Like extract_insn, but save the insn extracted and don't extract again
2650    when called again for the same insn, expecting that recog_data still
2651    contains the valid information.  This is used primarily by the gen_attr
2652    infrastructure, which often extracts the same insn again and again.  */
2653 void
2654 extract_insn_cached (rtx_insn *insn)
2655 {
2656   if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2657     return;
2658   extract_insn (insn);
2659   recog_data.insn = insn;
2660 }
2661
2662 /* Do an uncached extract_insn and constrain_operands, and complain about
2663    failures.  This should be used when extracting a pre-existing
2664    constrained instruction if the caller wants to know which alternative
2665    was chosen.  */
2666 void
2667 extract_constrain_insn (rtx_insn *insn)
2668 {
2669   extract_insn (insn);
2670   if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2671     fatal_insn_not_found (insn);
2672 }
2673
2674 /* Do a cached extract_insn and constrain_operands, and complain about
2675    failures.  Used by insn_attrtab.  */
2676 void
2677 extract_constrain_insn_cached (rtx_insn *insn)
2678 {
2679   extract_insn_cached (insn);
2680   if (which_alternative == -1
2681       && !constrain_operands (reload_completed,
2682                               get_enabled_alternatives (insn)))
2683     fatal_insn_not_found (insn);
2684 }
2685
2686 /* Do cached constrain_operands on INSN and complain about failures.  */
2687 int
2688 constrain_operands_cached (rtx_insn *insn, int strict)
2689 {
2690   if (which_alternative == -1)
2691     return constrain_operands (strict, get_enabled_alternatives (insn));
2692   else
2693     return 1;
2694 }
2694
2695/* Analyze INSN and fill in recog_data. */
2696
2697void
2698extract_insn (rtx_insn *insn)
2699{
2700 int i;
2701 int icode;
2702 int noperands;
2703 rtx body = PATTERN (insn);
2704
2705 recog_data.n_operands = 0;
2706 recog_data.n_alternatives = 0;
2707 recog_data.n_dups = 0;
2708 recog_data.is_asm = false;
2709
2710 switch (GET_CODE (body)((enum rtx_code) (body)->code))
2711 {
2712 case USE:
2713 case CLOBBER:
2714 case ASM_INPUT:
2715 case ADDR_VEC:
2716 case ADDR_DIFF_VEC:
2717 case VAR_LOCATION:
2718 case DEBUG_MARKER:
2719 return;
2720
2721 case SET:
2722 if (GET_CODE (SET_SRC (body))((enum rtx_code) ((((body)->u.fld[1]).rt_rtx))->code) == ASM_OPERANDS)
2723 goto asm_insn;
2724 else
2725 goto normal_insn;
2726 case PARALLEL:
2727 if ((GET_CODE (XVECEXP (body, 0, 0))((enum rtx_code) ((((((body)->u.fld[0]).rt_rtvec))->elem
[0]))->code)
== SET
2728 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0)))((enum rtx_code) (((((((((body)->u.fld[0]).rt_rtvec))->
elem[0]))->u.fld[1]).rt_rtx))->code)
== ASM_OPERANDS)
2729 || GET_CODE (XVECEXP (body, 0, 0))((enum rtx_code) ((((((body)->u.fld[0]).rt_rtvec))->elem
[0]))->code)
== ASM_OPERANDS
2730 || GET_CODE (XVECEXP (body, 0, 0))((enum rtx_code) ((((((body)->u.fld[0]).rt_rtvec))->elem
[0]))->code)
== ASM_INPUT)
2731 goto asm_insn;
2732 else
2733 goto normal_insn;
2734 case ASM_OPERANDS:
2735 asm_insn:
2736 recog_data.n_operands = noperands = asm_noperands (body);
2737 if (noperands >= 0)
2738 {
2739 /* This insn is an `asm' with operands. */
2740
2741 /* expand_asm_operands makes sure there aren't too many operands. */
2742 gcc_assert (noperands <= MAX_RECOG_OPERANDS)((void)(!(noperands <= 30) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/recog.c"
, 2742, __FUNCTION__), 0 : 0))
;
2743
2744 /* Now get the operand values and constraints out of the insn. */
2745 decode_asm_operands (body, recog_data.operand,
2746 recog_data.operand_loc,
2747 recog_data.constraints,
2748 recog_data.operand_mode, NULLnullptr);
2749 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2750 if (noperands > 0)
2751 {
2752 const char *p = recog_data.constraints[0];
2753 recog_data.n_alternatives = 1;
2754 while (*p)
2755 recog_data.n_alternatives += (*p++ == ',');
2756 }
2757 recog_data.is_asm = true;
2758 break;
2759 }
2760 fatal_insn_not_found (insn);
2761
2762 default:
2763 normal_insn:
2764 /* Ordinary insn: recognize it, get the operands via insn_extract
2765 and get the constraints. */
2766
2767 icode = recog_memoized (insn);
2768 if (icode < 0)
2769 fatal_insn_not_found (insn);
2770
2771 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2772 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2773 recog_data.n_dups = insn_data[icode].n_dups;
2774
2775 insn_extract (insn);
2776
2777 for (i = 0; i < noperands; i++)
2778 {
2779 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2780 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2781 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2782 /* A VOIDmode match_operand gets its mode from the real operand. */
2783 if (recog_data.operand_mode[i] == VOIDmode)
2784 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2785 }
2786 }
2787 for (i = 0; i < noperands; i++)
2788 recog_data.operand_type[i]
2789 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2790 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2791 : OP_IN);
2792
2793 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2794
2795 recog_data.insn = NULL;
2796 which_alternative = -1;
2797}
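/* Editorial sketch: once extract_insn has run, recog_data describes the
   insn; e.g. counting output operands needs nothing beyond the fields
   filled in above:

     extract_insn (insn);
     int n_outputs = 0;
     for (int i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] == OP_OUT
           || recog_data.operand_type[i] == OP_INOUT)
         n_outputs++;
*/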
2798
2799/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2800 operands, N_ALTERNATIVES alternatives and constraint strings
2801 CONSTRAINTS. OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2802 and CONSTRAINTS has N_OPERANDS entries. OPLOC should be passed in
2803 if the insn is an asm statement and preprocessing should take the
2804 asm operands into account, e.g. to determine whether they could be
2805 addresses in constraints that require addresses; it should then
2806 point to an array of pointers to each operand. */
2807
2808void
2809preprocess_constraints (int n_operands, int n_alternatives,
2810 const char **constraints,
2811 operand_alternative *op_alt_base,
2812 rtx **oploc)
2813{
2814 for (int i = 0; i < n_operands; i++)
2815 {
2816 int j;
2817 struct operand_alternative *op_alt;
2818 const char *p = constraints[i];
2819
2820 op_alt = op_alt_base;
2821
2822 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2823 {
2824 op_alt[i].cl = NO_REGS;
2825 op_alt[i].constraint = p;
2826 op_alt[i].matches = -1;
2827 op_alt[i].matched = -1;
2828
2829 if (*p == '\0' || *p == ',')
2830 {
2831 op_alt[i].anything_ok = 1;
2832 continue;
2833 }
2834
2835 for (;;)
2836 {
2837 char c = *p;
2838 if (c == '#')
2839 do
2840 c = *++p;
2841 while (c != ',' && c != '\0');
2842 if (c == ',' || c == '\0')
2843 {
2844 p++;
2845 break;
2846 }
2847
2848 switch (c)
2849 {
2850 case '?':
2851 op_alt[i].reject += 6;
2852 break;
2853 case '!':
2854 op_alt[i].reject += 600;
2855 break;
2856 case '&':
2857 op_alt[i].earlyclobber = 1;
2858 break;
2859
2860 case '0': case '1': case '2': case '3': case '4':
2861 case '5': case '6': case '7': case '8': case '9':
2862 {
2863 char *end;
2864 op_alt[i].matches = strtoul (p, &end, 10);
2865 op_alt[op_alt[i].matches].matched = i;
2866 p = end;
2867 }
2868 continue;
2869
2870 case 'X':
2871 op_alt[i].anything_ok = 1;
2872 break;
2873
2874 case 'g':
2875 op_alt[i].cl =
2876 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2877 break;
2878
2879 default:
2880 enum constraint_num cn = lookup_constraint (p);
2881 enum reg_class cl;
2882 switch (get_constraint_type (cn))
2883 {
2884 case CT_REGISTER:
2885 cl = reg_class_for_constraint (cn);
2886 if (cl != NO_REGS)
2887 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2888 break;
2889
2890 case CT_CONST_INT:
2891 break;
2892
2893 case CT_MEMORY:
2894 case CT_SPECIAL_MEMORY:
2895 op_alt[i].memory_ok = 1;
2896 break;
2897
2898 case CT_ADDRESS:
2899 if (oploc && !address_operand (*oploc[i], VOIDmode))
2900 break;
2901
2902 op_alt[i].is_address = 1;
2903 op_alt[i].cl
2904 = (reg_class_subunion
2905 [(int) op_alt[i].cl]
2906 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2907 ADDRESS, SCRATCH)]);
2908 break;
2909
2910 case CT_FIXED_FORM:
2911 break;
2912 }
2913 break;
2914 }
2915 p += CONSTRAINT_LEN (c, p);
2916 }
2917 }
2918 }
2919}
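/* Editorial sketch: the table built above is alternative-major, as the
   op_alt += n_operands stride implies.  The entry for operand OPNO under
   alternative ALT is therefore:

     const operand_alternative *entry
       = &op_alt_base[ALT * n_operands + OPNO];

   e.g. with 3 operands and 2 alternatives, entries 0..2 describe the
   operands of alternative 0 and entries 3..5 those of alternative 1.  */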
2920
2921/* Return an array of operand_alternative structures for
2922 instruction ICODE. */
2923
2924const operand_alternative *
2925preprocess_insn_constraints (unsigned int icode)
2926{
2927 gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2928 if (this_target_recog->x_op_alt[icode])
2929 return this_target_recog->x_op_alt[icode];
2930
2931 int n_operands = insn_data[icode].n_operands;
2932 if (n_operands == 0)
2933 return 0;
2934 /* Always provide at least one alternative so that which_op_alt ()
2935 works correctly. If the instruction has 0 alternatives (i.e. all
2936 constraint strings are empty) then each operand in this alternative
2937 will have anything_ok set. */
2938 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2939 int n_entries = n_operands * n_alternatives;
2940
2941 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2942 const char **constraints = XALLOCAVEC (const char *, n_operands);
2943
2944 for (int i = 0; i < n_operands; ++i)
2945 constraints[i] = insn_data[icode].operand[i].constraint;
2946 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2947 NULL);
2948
2949 this_target_recog->x_op_alt[icode] = op_alt;
2950 return op_alt;
2951}
2952
2953/* After calling extract_insn, you can use this function to extract some
2954 information from the constraint strings into a more usable form.
2955 The collected data is stored in recog_op_alt. */
2956
2957void
2958preprocess_constraints (rtx_insn *insn)
2959{
2960 int icode = INSN_CODE (insn);
2961 if (icode >= 0)
2962 recog_op_alt = preprocess_insn_constraints (icode);
2963 else
2964 {
2965 int n_operands = recog_data.n_operands;
2966 int n_alternatives = recog_data.n_alternatives;
2967 int n_entries = n_operands * n_alternatives;
2968 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2969 preprocess_constraints (n_operands, n_alternatives,
2970 recog_data.constraints, asm_op_alt,
2971 NULL);
2972 recog_op_alt = asm_op_alt;
2973 }
2974}
2975
2976/* Check the operands of an insn against the insn's operand constraints
2977 and return 1 if they match any of the alternatives in ALTERNATIVES.
2978
2979 The information about the insn's operands, constraints, operand modes
2980 etc. is obtained from the global variables set up by extract_insn.
2981
2982 WHICH_ALTERNATIVE is set to a number which indicates which
2983 alternative of constraints was matched: 0 for the first alternative,
2984 1 for the next, etc.
2985
2986 In addition, when two operands are required to match
2987 and it happens that the output operand is (reg) while the
2988 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2989 make the output operand look like the input.
2990 This is because the output operand is the one the template will print.
2991
2992 This is used in final, just before printing the assembler code and by
2993 the routines that determine an insn's attribute.
2994
2995 If STRICT is a positive nonzero value, it means that we have been
2996 called after reload has been completed. In that case, we must
2997 do all checks strictly. If it is zero, it means that we have been called
2998 before reload has completed. In that case, we first try to see if we can
2999 find an alternative that matches strictly. If not, we try again, this
3000 time assuming that reload will fix up the insn. This provides a "best
3001 guess" for the alternative and is used to compute attributes of insns prior
3002 to reload. A negative value of STRICT is used for this internal call. */
3003
3004struct funny_match
3005{
3006 int this_op, other;
3007};
3008
3009int
3010constrain_operands (int strict, alternative_mask alternatives)
3011{
3012 const char *constraints[MAX_RECOG_OPERANDS];
3013 int matching_operands[MAX_RECOG_OPERANDS];
3014 int earlyclobber[MAX_RECOG_OPERANDS];
3015 int c;
3016
3017 struct funny_match funny_match[MAX_RECOG_OPERANDS];
3018 int funny_match_index;
3019
3020 which_alternative = 0;
3021 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
3022 return 1;
3023
3024 for (c = 0; c < recog_data.n_operands; c++)
3025 constraints[c] = recog_data.constraints[c];
3026
3027 do
3028 {
3029 int seen_earlyclobber_at = -1;
3030 int opno;
3031 int lose = 0;
3032 funny_match_index = 0;
3033
3034 if (!TEST_BIT (alternatives, which_alternative))
3035 {
3036 int i;
3037
3038 for (i = 0; i < recog_data.n_operands; i++)
3039 constraints[i] = skip_alternative (constraints[i]);
3040
3041 which_alternative++;
3042 continue;
3043 }
3044
3045 for (opno = 0; opno < recog_data.n_operands; opno++)
3046 matching_operands[opno] = -1;
3047
3048 for (opno = 0; opno < recog_data.n_operands; opno++)
3049 {
3050 rtx op = recog_data.operand[opno];
3051 machine_mode mode = GET_MODE (op);
3052 const char *p = constraints[opno];
3053 int offset = 0;
3054 int win = 0;
3055 int val;
3056 int len;
3057
3058 earlyclobber[opno] = 0;
3059
3060 /* A unary operator may be accepted by the predicate, but it
3061 is irrelevant for matching constraints. */
3062 /* For special_memory_operand, there could be a memory operand inside,
3063 and it would cause a mismatch for constraint_satisfied_p. */
3064 if (UNARY_P (op) && op == extract_mem_from_operand (op))
3065 op = XEXP (op, 0);
3066
3067 if (GET_CODE (op) == SUBREG)
3068 {
3069 if (REG_P (SUBREG_REG (op))
3070 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
3071 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
3072 GET_MODE (SUBREG_REG (op)),
3073 SUBREG_BYTE (op),
3074 GET_MODE (op));
3075 op = SUBREG_REG (op);
3076 }
3077
3078 /* An empty constraint or empty alternative
3079 allows anything which matched the pattern. */
3080 if (*p == 0 || *p == ',')
3081 win = 1;
3082
3083 do
3084 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
3085 {
3086 case '\0':
3087 len = 0;
3088 break;
3089 case ',':
3090 c = '\0';
3091 break;
3092
3093 case '#':
3094 /* Ignore rest of this alternative as far as
3095 constraint checking is concerned. */
3096 do
3097 p++;
3098 while (*p && *p != ',');
3099 len = 0;
3100 break;
3101
3102 case '&':
3103 earlyclobber[opno] = 1;
3104 if (seen_earlyclobber_at < 0)
3105 seen_earlyclobber_at = opno;
3106 break;
3107
3108 case '0': case '1': case '2': case '3': case '4':
3109 case '5': case '6': case '7': case '8': case '9':
3110 {
3111 /* This operand must be the same as a previous one.
3112 This kind of constraint is used for instructions such
3113 as add when they take only two operands.
3114
3115 Note that the lower-numbered operand is passed first.
3116
3117 If we are not testing strictly, assume that this
3118 constraint will be satisfied. */
3119
3120 char *end;
3121 int match;
3122
3123 match = strtoul (p, &end, 10);
3124 p = end;
3125
3126 if (strict < 0)
3127 val = 1;
3128 else
3129 {
3130 rtx op1 = recog_data.operand[match];
3131 rtx op2 = recog_data.operand[opno];
3132
3133 /* A unary operator may be accepted by the predicate,
3134 but it is irrelevant for matching constraints. */
3135 if (UNARY_P (op1))
3136 op1 = XEXP (op1, 0);
3137 if (UNARY_P (op2))
3138 op2 = XEXP (op2, 0);
3139
3140 val = operands_match_p (op1, op2);
3141 }
3142
3143 matching_operands[opno] = match;
3144 matching_operands[match] = opno;
3145
3146 if (val != 0)
3147 win = 1;
3148
3149 /* If output is *x and input is *--x, arrange later
3150 to change the output to *--x as well, since the
3151 output op is the one that will be printed. */
3152 if (val == 2 && strict > 0)
3153 {
3154 funny_match[funny_match_index].this_op = opno;
3155 funny_match[funny_match_index++].other = match;
3156 }
3157 }
3158 len = 0;
3159 break;
3160
3161 case 'p':
3162 /* p is used for address_operands. When we are called by
3163 gen_reload, no one will have checked that the address is
3164 strictly valid, i.e., that all pseudos requiring hard regs
3165 have gotten them. We also want to make sure we have a
3166 valid mode. */
3167 if ((GET_MODE (op) == VOIDmode
3168 || SCALAR_INT_MODE_P (GET_MODE (op)))
3169 && (strict <= 0
3170 || (strict_memory_address_p
3171 (recog_data.operand_mode[opno], op))))
3172 win = 1;
3173 break;
3174
3175 /* No need to check general_operand again;
3176 it was done in insn-recog.c. Well, except that reload
3177 doesn't check the validity of its replacements, but
3178 that should only matter when there's a bug. */
3179 case 'g':
3180 /* Anything goes unless it is a REG and really has a hard reg
3181 but the hard reg is not in the class GENERAL_REGS. */
3182 if (REG_P (op))
3183 {
3184 if (strict < 0
3185 || GENERAL_REGS == ALL_REGS
3186 || (reload_in_progress
3187 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3188 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
3189 win = 1;
3190 }
3191 else if (strict < 0 || general_operand (op, mode))
3192 win = 1;
3193 break;
3194
3195 default:
3196 {
3197 enum constraint_num cn = lookup_constraint (p);
3198 enum reg_class cl = reg_class_for_constraint (cn);
3199 if (cl != NO_REGS)
3200 {
3201 if (strict < 0
3202 || (strict == 0
3203 && REG_P (op)
3204 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3205 || (strict == 0 && GET_CODE (op) == SCRATCH)
3206 || (REG_P (op)
3207 && reg_fits_class_p (op, cl, offset, mode)))
3208 win = 1;
3209 }
3210
3211 else if (constraint_satisfied_p (op, cn))
3212 win = 1;
3213
3214 else if (insn_extra_memory_constraint (cn)
3215 /* Every memory operand can be reloaded to fit. */
3216 && ((strict < 0 && MEM_P (op))
3217 /* Before reload, accept what reload can turn
3218 into a mem. */
3219 || (strict < 0 && CONSTANT_P (op))
3220 /* Before reload, accept a pseudo or hard register,
3221 since LRA can turn it into a mem. */
3222 || (strict < 0 && targetm.lra_p () && REG_P (op))
3223 /* During reload, accept a pseudo */
3224 || (reload_in_progress && REG_P (op)
3225 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
3226 win = 1;
3227 else if (insn_extra_address_constraint (cn)
3228 /* Every address operand can be reloaded to fit. */
3229 && strict < 0)
3230 win = 1;
3231 /* Cater to architectures like IA-64 that define extra memory
3232 constraints without using define_memory_constraint. */
3233 else if (reload_in_progress
3234 && REG_P (op)
3235 && REGNO (op) >= FIRST_PSEUDO_REGISTER
3236 && reg_renumber[REGNO (op)] < 0
3237 && reg_equiv_mem (REGNO (op)) != 0
3238 && constraint_satisfied_p
3239 (reg_equiv_mem (REGNO (op)), cn))
3240 win = 1;
3241 break;
3242 }
3243 }
3244 while (p += len, c);
3245
3246 constraints[opno] = p;
3247 /* If this operand did not win somehow,
3248 this alternative loses. */
3249 if (! win)
3250 lose = 1;
3251 }
3252 /* This alternative won; the operands are ok.
3253 Change whichever operands this alternative says to change. */
3254 if (! lose)
3255 {
3256 int opno, eopno;
3257
3258 /* See if any earlyclobber operand conflicts with some other
3259 operand. */
3260
3261 if (strict > 0 && seen_earlyclobber_at >= 0)
3262 for (eopno = seen_earlyclobber_at;
3263 eopno < recog_data.n_operands;
3264 eopno++)
3265 /* Ignore earlyclobber operands now in memory,
3266 because we would often report failure when we have
3267 two memory operands, one of which was formerly a REG. */
3268 if (earlyclobber[eopno]
3269 && REG_P (recog_data.operand[eopno]))
3270 for (opno = 0; opno < recog_data.n_operands; opno++)
3271 if ((MEM_P (recog_data.operand[opno])
3272 || recog_data.operand_type[opno] != OP_OUT)
3273 && opno != eopno
3274 /* Ignore things like match_operator operands. */
3275 && *recog_data.constraints[opno] != 0
3276 && ! (matching_operands[opno] == eopno
3277 && operands_match_p (recog_data.operand[opno],
3278 recog_data.operand[eopno]))
3279 && ! safe_from_earlyclobber (recog_data.operand[opno],
3280 recog_data.operand[eopno]))
3281 lose = 1;
3282
3283 if (! lose)
3284 {
3285 while (--funny_match_index >= 0)
3286 {
3287 recog_data.operand[funny_match[funny_match_index].other]
3288 = recog_data.operand[funny_match[funny_match_index].this_op];
3289 }
3290
3291 /* For operands without < or > constraints reject side-effects. */
3292 if (AUTO_INC_DEC && recog_data.is_asm)
3293 {
3294 for (opno = 0; opno < recog_data.n_operands; opno++)
3295 if (MEM_P (recog_data.operand[opno]))
3296 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
3297 {
3298 case PRE_INC:
3299 case POST_INC:
3300 case PRE_DEC:
3301 case POST_DEC:
3302 case PRE_MODIFY:
3303 case POST_MODIFY:
3304 if (strchr (recog_data.constraints[opno], '<') == NULL
3305 && strchr (recog_data.constraints[opno], '>')
3306 == NULL)
3307 return 0;
3308 break;
3309 default:
3310 break;
3311 }
3312 }
3313
3314 return 1;
3315 }
3316 }
3317
3318 which_alternative++;
3319 }
3320 while (which_alternative < recog_data.n_alternatives);
3321
3322 which_alternative = -1;
3323 /* If we are about to reject this, but we are not to test strictly,
3324 try a very loose test. Only return failure if it fails also. */
3325 if (strict == 0)
3326 return constrain_operands (-1, alternatives);
3327 else
3328 return 0;
3329}
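/* Editorial sketch: the three STRICT regimes described above, as a caller
   would exercise them:

     extract_insn (insn);
     alternative_mask enabled = get_enabled_alternatives (insn);
     constrain_operands (0, enabled);   // pre-reload "best guess";
                                        //  retries internally with -1
     constrain_operands (1, enabled);   // post-reload, fully strict

   On success which_alternative holds the matched alternative's index; on
   failure it is left at -1.  */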
3330
3331/* Return true iff OPERAND (assumed to be a REG rtx)
3332 is a hard reg in class CLASS when its regno is offset by OFFSET
3333 and changed to mode MODE.
3334 If REG occupies multiple hard regs, all of them must be in CLASS. */
3335
3336bool
3337reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
3338 machine_mode mode)
3339{
3340 unsigned int regno = REGNO (operand);
3341
3342 if (cl == NO_REGS)
3343 return false;
3344
3345 /* Regno must not be a pseudo register. Offset may be negative. */
3346 return (HARD_REGISTER_NUM_P (regno)
3347 && HARD_REGISTER_NUM_P (regno + offset)
3348 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
3349 regno + offset));
3350}
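/* Editorial sketch: a typical query, with illustrative arguments:

     if (reg_fits_class_p (operand, GENERAL_REGS, 0, SImode))
       ...

   asks whether the hard register behind OPERAND, with no regno offset,
   lies entirely within GENERAL_REGS when accessed in SImode.  A pseudo
   register makes this return false via the HARD_REGISTER_NUM_P checks
   above.  */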
3351
3352/* Split single instruction. Helper function for split_all_insns and
3353 split_all_insns_noflow. Return last insn in the sequence if successful,
3354 or NULL if unsuccessful. */
3355
3356static rtx_insn *
3357split_insn (rtx_insn *insn)
3358{
3359 /* Split insns here to get max fine-grain parallelism. */
3360 rtx_insn *first = PREV_INSN (insn);
3361 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
3362 rtx insn_set, last_set, note;
3363
3364 if (last == insn)
3365 return NULL;
3366
3367 /* If the original instruction was a single set that was known to be
3368 equivalent to a constant, see if we can say the same about the last
3369 instruction in the split sequence. The two instructions must set
3370 the same destination. */
3371 insn_set = single_set (insn);
3372 if (insn_set)
3373 {
3374 last_set = single_set (last);
3375 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
3376 {
3377 note = find_reg_equal_equiv_note (insn);
3378 if (note && CONSTANT_P (XEXP (note, 0)))
3379 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
3380 else if (CONSTANT_P (SET_SRC (insn_set)))
3381 set_unique_reg_note (last, REG_EQUAL,
3382 copy_rtx (SET_SRC (insn_set)));
3383 }
3384 }
3385
3386 /* try_split returns the NOTE that INSN became. */
3387 SET_INSN_DELETED (insn);
3388
3389 /* ??? Coddle to md files that generate subregs in post-reload
3390 splitters instead of computing the proper hard register. */
3391 if (reload_completed && first != last)
3392 {
3393 first = NEXT_INSN (first);
3394 for (;;)
3395 {
3396 if (INSN_P (first))
3397 cleanup_subreg_operands (first);
3398 if (first == last)
3399 break;
3400 first = NEXT_INSN (first);
3401 }
3402 }
3403
3404 return last;
3405}
3406
3407/* Split all insns in the function. */
3408
3409void
3410split_all_insns (void)
3411{
3412 bool changed;
3413 bool need_cfg_cleanup = false;
3414 basic_block bb;
3415
3416 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
3417 bitmap_clear (blocks);
3418 changed = false;
3419
3420 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3421 {
3422 rtx_insn *insn, *next;
3423 bool finish = false;
3424
3425 rtl_profile_for_bb (bb);
3426 for (insn = BB_HEAD (bb); !finish ; insn = next)
3427 {
3428 /* Can't use `next_real_insn' because that might go across
3429 CODE_LABELS and short-out basic blocks. */
3430 next = NEXT_INSN (insn);
3431 finish = (insn == BB_END (bb));
3432
3433 /* If INSN has a REG_EH_REGION note and we split INSN, the
3434 resulting split may not have/need REG_EH_REGION notes.
3435
3436 If that happens and INSN was the last reference to the
3437 given EH region, then the EH region will become unreachable.
3438 We cannot leave the unreachable blocks in the CFG as that
3439 will trigger a checking failure.
3440
3441 So track if INSN has a REG_EH_REGION note. If so and we
3442 split INSN, then trigger a CFG cleanup. */
3443 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3444 if (INSN_P (insn))
3445 {
3446 rtx set = single_set (insn);
3447
3448 /* Don't split no-op move insns. These should silently
3449 disappear later in final. Splitting such insns would
3450 break the code that handles LIBCALL blocks. */
3451 if (set && set_noop_p (set))
3452 {
3453 /* Nops get in the way while scheduling, so delete them
3454 now if register allocation has already been done. It
3455 is too risky to try to do this before register
3456 allocation, and there are unlikely to be very many
3457 nops then anyways. */
3458 if (reload_completed)
3459 delete_insn_and_edges (insn);
3460 if (note)
3461 need_cfg_cleanup = true;
3462 }
3463 else
3464 {
3465 if (split_insn (insn))
3466 {
3467 bitmap_set_bit (blocks, bb->index);
3468 changed = true;
3469 if (note)
3470 need_cfg_cleanup = true;
3471 }
3472 }
3473 }
3474 }
3475 }
3476
3477 default_rtl_profile ();
3478 if (changed)
3479 {
3480 find_many_sub_basic_blocks (blocks);
3481
3482 /* Splitting could drop a REG_EH_REGION if it potentially
3483 trapped in its original form, but does not in its split
3484 form. Consider a FLOAT_TRUNCATE which splits into a memory
3485 store/load pair and -fnon-call-exceptions. */
3486 if (need_cfg_cleanup)
3487 cleanup_cfg (0);
3488 }
3489
3490 checking_verify_flow_info ();
3491}
3492
3493/* Same as split_all_insns, but do not expect CFG to be available.
3494 Used by machine dependent reorg passes. */
3495
3496unsigned int
3497split_all_insns_noflow (void)
3498{
3499 rtx_insn *next, *insn;
3500
3501 for (insn = get_insns (); insn; insn = next)
3502 {
3503 next = NEXT_INSN (insn);
3504 if (INSN_P (insn))
3505 {
3506 /* Don't split no-op move insns. These should silently
3507 disappear later in final. Splitting such insns would
3508 break the code that handles LIBCALL blocks. */
3509 rtx set = single_set (insn);
3510 if (set && set_noop_p (set))
3511 {
3512 /* Nops get in the way while scheduling, so delete them
3513 now if register allocation has already been done. It
3514 is too risky to try to do this before register
3515 allocation, and there are unlikely to be very many
3516 nops then anyways.
3517
3518 ??? Should we use delete_insn when the CFG isn't valid? */
3519 if (reload_completed)
3520 delete_insn_and_edges (insn);
3521 }
3522 else
3523 split_insn (insn);
3524 }
3525 }
3526 return 0;
3527}
3528
3529struct peep2_insn_data
3530{
3531 rtx_insn *insn;
3532 regset live_before;
3533};
3534
3535static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3536static int peep2_current;
3537
3538static bool peep2_do_rebuild_jump_labels;
3539static bool peep2_do_cleanup_cfg;
3540
3541/* The number of instructions available to match a peep2. */
3542int peep2_current_count;
3543
3544/* A marker indicating the last insn of the block. The live_before regset
3545 for this element is correct, indicating DF_LIVE_OUT for the block. */
3546#define PEEP2_EOB invalid_insn_rtx
3547
3548/* Wrap N to fit into the peep2_insn_data buffer. */
3549
3550static int
3551peep2_buf_position (int n)
3552{
3553 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3554 n -= MAX_INSNS_PER_PEEP2 + 1;
3555 return n;
3556}
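/* Editorial sketch: with MAX_INSNS_PER_PEEP2 == 5 (its value on this
   build), the buffer has 6 slots, so for example

     peep2_buf_position (4 + 3)  ==>  1

   i.e. index 7 wraps around to slot 1.  Callers only ever pass sums of
   two in-range values, so the single subtraction suffices.  */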
3557
3558/* Return the Nth non-note insn after `current', or return NULL_RTX if it
3559 does not exist. Used by the recognizer to find the next insn to match
3560 in a multi-insn pattern. */
3561
3562rtx_insn *
3563peep2_next_insn (int n)
3564{
3565 gcc_assert (n <= peep2_current_count);
3566
3567 n = peep2_buf_position (peep2_current + n);
3568
3569 return peep2_insn_data[n].insn;
3570}
3571
3572/* Return true if REGNO is dead before the Nth non-note insn
3573 after `current'. */
3574
3575int
3576peep2_regno_dead_p (int ofs, int regno)
3577{
3578 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3579
3580 ofs = peep2_buf_position (peep2_current + ofs);
3581
3582 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3583
3584 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3585}
3586
3587/* Similarly for a REG. */
3588
3589int
3590peep2_reg_dead_p (int ofs, rtx reg)
3591{
3592 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3593
3594 ofs = peep2_buf_position (peep2_current + ofs);
3595
3596 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3597
3598 unsigned int end_regno = END_REGNO (reg);
3599 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3600 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3601 return 0;
3602 return 1;
3603}
3604
3605/* Regno offset to be used in the register search. */
3606static int search_ofs;
3607
3608/* Try to find a hard register of mode MODE, matching the register class in
3609 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3610 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3611 in which case the only condition is that the register must be available
3612 before CURRENT_INSN.
3613 Registers that already have bits set in REG_SET will not be considered.
3614
3615 If an appropriate register is available, it will be returned and the
3616 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3617 returned. */
3618
3619rtx
3620peep2_find_free_register (int from, int to, const char *class_str,
3621 machine_mode mode, HARD_REG_SET *reg_set)
3622{
3623 enum reg_class cl;
3624 HARD_REG_SET live;
3625 df_ref def;
3626 int i;
3627
3628 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3629 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3630
3631 from = peep2_buf_position (peep2_current + from);
3632 to = peep2_buf_position (peep2_current + to);
3633
3634 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3635 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3636
3637 while (from != to)
3638 {
3639 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3640
3641 /* Don't use registers set or clobbered by the insn. */
3642 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3643 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3644
3645 from = peep2_buf_position (from + 1);
3646 }
3647
3648 cl = reg_class_for_constraint (lookup_constraint (class_str));
3649
3650 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3651 {
3652 int raw_regno, regno, success, j;
3653
3654 /* Distribute the free registers as much as possible. */
3655 raw_regno = search_ofs + i;
3656 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3657 raw_regno -= FIRST_PSEUDO_REGISTER;
3658#ifdef REG_ALLOC_ORDER
3659 regno = reg_alloc_order[raw_regno];
3660#else
3661 regno = raw_regno;
3662#endif
3663
3664 /* Can it support the mode we need? */
3665 if (!targetm.hard_regno_mode_ok (regno, mode))
3666 continue;
3667
3668 success = 1;
3669 for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3670 {
3671 /* Don't allocate fixed registers. */
3672 if (fixed_regs[regno + j])
3673 {
3674 success = 0;
3675 break;
3676 }
3677 /* Don't allocate global registers. */
3678 if (global_regs[regno + j])
3679 {
3680 success = 0;
3681 break;
3682 }
3683 /* Make sure the register is of the right class. */
3684 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3685 {
3686 success = 0;
3687 break;
3688 }
3689 /* And that we don't create an extra save/restore. */
3690 if (! crtl->abi->clobbers_full_reg_p (regno + j)
3691 && ! df_regs_ever_live_p (regno + j))
3692 {
3693 success = 0;
3694 break;
3695 }
3696
3697 if (! targetm.hard_regno_scratch_ok (regno + j))
3698 {
3699 success = 0;
3700 break;
3701 }
3702
3703 /* And we don't clobber traceback for noreturn functions. */
3704 if ((regno + j == FRAME_POINTER_REGNUM
3705 || regno + j == HARD_FRAME_POINTER_REGNUM)
3706 && (! reload_completed || frame_pointer_needed))
3707 {
3708 success = 0;
3709 break;
3710 }
3711
3712 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3713 || TEST_HARD_REG_BIT (live, regno + j))
3714 {
3715 success = 0;
3716 break;
3717 }
3718 }
3719
3720 if (success)
3721 {
3722 add_to_hard_reg_set (reg_set, mode, regno);
3723
3724 /* Start the next search with the next register. */
3725 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3726 raw_regno = 0;
3727 search_ofs = raw_regno;
3728
3729 return gen_rtx_REG (mode, regno);
3730 }
3731 }
3732
3733 search_ofs = 0;
3734 return NULL_RTX;
3735}
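/* Editorial sketch: code generated from a define_peephole2 typically
   calls this along the lines of (names and arguments illustrative):

     HARD_REG_SET regs_allocated;
     CLEAR_HARD_REG_SET (regs_allocated);
     rtx scratch = peep2_find_free_register (0, 2, "r", SImode,
                                             &regs_allocated);
     if (scratch == NULL_RTX)
       ;  // no suitable register: the peephole does not apply

   asking for a general register that is free from the start of matched
   insn 0 until the end of matched insn 2.  */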
3736
3737/* Forget all currently tracked instructions, only remember current
3738 LIVE regset. */
3739
3740static void
3741peep2_reinit_state (regset live)
3742{
3743 int i;
3744
3745 /* Indicate that all slots except the last holds invalid data. */
3746 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3747 peep2_insn_data[i].insn = NULL;
3748 peep2_current_count = 0;
3749
3750 /* Indicate that the last slot contains live_after data. */
3751 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3752 peep2_current = MAX_INSNS_PER_PEEP2;
3753
3754 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3755}
3756
3757/* Copies frame related info of an insn (OLD_INSN) to the single
3758 insn (NEW_INSN) that was obtained by splitting OLD_INSN. */
3759
3760void
3761copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
3762{
3763 bool any_note = false;
3764 rtx note;
3765
3766 if (!RTX_FRAME_RELATED_P (old_insn))
3767 return;
3768
3769 RTX_FRAME_RELATED_P (new_insn) = 1;
3770
3771 /* Allow the backend to fill in a note during the split. */
3772 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3773 switch (REG_NOTE_KIND (note))
3774 {
3775 case REG_FRAME_RELATED_EXPR:
3776 case REG_CFA_DEF_CFA:
3777 case REG_CFA_ADJUST_CFA:
3778 case REG_CFA_OFFSET:
3779 case REG_CFA_REGISTER:
3780 case REG_CFA_EXPRESSION:
3781 case REG_CFA_RESTORE:
3782 case REG_CFA_SET_VDRAP:
3783 any_note = true;
3784 break;
3785 default:
3786 break;
3787 }
3788
3789 /* If the backend didn't supply a note, copy one over. */
3790 if (!any_note)
3791 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3792 switch (REG_NOTE_KIND (note))
3793 {
3794 case REG_FRAME_RELATED_EXPR:
3795 case REG_CFA_DEF_CFA:
3796 case REG_CFA_ADJUST_CFA:
3797 case REG_CFA_OFFSET:
3798 case REG_CFA_REGISTER:
3799 case REG_CFA_EXPRESSION:
3800 case REG_CFA_RESTORE:
3801 case REG_CFA_SET_VDRAP:
3802 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3803 any_note = true;
3804 break;
3805 default:
3806 break;
3807 }
3808
3809 /* If there still isn't a note, make sure the unwind info sees the
3810 same expression as before the split. */
3811 if (!any_note)
3812 {
3813 rtx old_set, new_set;
3814
3815 /* The old insn had better have been simple, or annotated. */
3816 old_set = single_set (old_insn);
3817 gcc_assert (old_set != NULL);
3818
3819 new_set = single_set (new_insn);
3820 if (!new_set || !rtx_equal_p (new_set, old_set))
3821 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3822 }
3823
3824 /* Copy prologue/epilogue status. This is required in order to keep
3825 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3826 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3827}
3828
3829/* While scanning basic block BB, we found a match of length MATCH_LEN,
3830 starting at INSN. Perform the replacement, removing the old insns and
3831 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3832 if the replacement is rejected. */
3833
3834static rtx_insn *
3835peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3836{
3837 int i;
3838 rtx_insn *last, *before_try, *x;
3839 rtx eh_note, as_note;
3840 rtx_insn *old_insn;
3841 rtx_insn *new_insn;
3842 bool was_call = false;
3843
3844 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3845 match more than one insn, or to be split into more than one insn. */
3846 old_insn = peep2_insn_data[peep2_current].insn;
3847 if (RTX_FRAME_RELATED_P (old_insn))
3848 {
3849 if (match_len != 0)
3850 return NULL;
3851
3852 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3853 may be in the stream for the purpose of register allocation. */
3854 if (active_insn_p (attempt))
3855 new_insn = attempt;
3856 else
3857 new_insn = next_active_insn (attempt);
3858 if (next_active_insn (new_insn))
3859 return NULL;
3860
3861 /* We have a 1-1 replacement. Copy over any frame-related info. */
3862 copy_frame_info_to_split_insn (old_insn, new_insn);
3863 }
3864
3865 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3866 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3867 cfg-related call notes. */
3868 for (i = 0; i <= match_len; ++i)
3869 {
3870 int j;
3871 rtx note;
3872
3873 j = peep2_buf_position (peep2_current + i);
3874 old_insn = peep2_insn_data[j].insn;
3875 if (!CALL_P (old_insn))
3876 continue;
3877 was_call = true;
3878
3879 new_insn = attempt;
3880 while (new_insn != NULL_RTX)
3881 {
3882 if (CALL_P (new_insn))
3883 break;
3884 new_insn = NEXT_INSN (new_insn);
3885 }
3886
3887 gcc_assert (new_insn != NULL_RTX);
3888
3889 CALL_INSN_FUNCTION_USAGE (new_insn)
3890 = CALL_INSN_FUNCTION_USAGE (old_insn);
3891 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3892
3893 for (note = REG_NOTES (old_insn);
3894 note;
3895 note = XEXP (note, 1))
3896 switch (REG_NOTE_KIND (note))
3897 {
3898 case REG_NORETURN:
3899 case REG_SETJMP:
3900 case REG_TM:
3901 case REG_CALL_NOCF_CHECK:
3902 add_reg_note (new_insn, REG_NOTE_KIND (note),
3903 XEXP (note, 0));
3904 break;
3905 default:
3906 /* Discard all other reg notes. */
3907 break;
3908 }
3909
3910 /* Croak if there is another call in the sequence. */
3911 while (++i <= match_len)
3912 {
3913 j = peep2_buf_position (peep2_current + i);
3914 old_insn = peep2_insn_data[j].insn;
3915 gcc_assert (!CALL_P (old_insn));
3916 }
3917 break;
3918 }
3919
3920 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3921 move those notes over to the new sequence. */
3922 as_note = NULL;
3923 for (i = match_len; i >= 0; --i)
3924 {
3925 int j = peep2_buf_position (peep2_current + i);
3926 old_insn = peep2_insn_data[j].insn;
3927
3928 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3929 if (as_note)
3930 break;
3931 }
3932
3933 i = peep2_buf_position (peep2_current + match_len);
3934 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3935
3936 /* Replace the old sequence with the new. */
3937 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3938 last = emit_insn_after_setloc (attempt,
3939 peep2_insn_data[i].insn,
3940 INSN_LOCATION (peepinsn));
3941 if (JUMP_P (peepinsn) && JUMP_P (last))
3942 CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3943 before_try = PREV_INSN (insn);
3944 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3945
3946 /* Re-insert the EH_REGION notes. */
3947 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3948 {
3949 edge eh_edge;
3950 edge_iterator ei;
3951
3952 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3953 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3954 break;
3955
3956 if (eh_note)
3957 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3958
3959 if (eh_edge)
3960 for (x = last; x != before_try; x = PREV_INSN (x))
3961 if (x != BB_END (bb)
3962 && (can_throw_internal (x)
3963 || can_nonlocal_goto (x)))
3964 {
3965 edge nfte, nehe;
3966 int flags;
3967
3968 nfte = split_block (bb, x);
3969 flags = (eh_edge->flags
3970 & (EDGE_EH | EDGE_ABNORMAL));
3971 if (CALL_P (x))
3972 flags |= EDGE_ABNORMAL_CALL;
3973 nehe = make_edge (nfte->src, eh_edge->dest,
3974 flags);
3975
3976 nehe->probability = eh_edge->probability;
3977 nfte->probability = nehe->probability.invert ();
3978
3979 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3980 bb = nfte->src;
3981 eh_edge = nehe;
3982 }
3983
3984 /* Converting possibly trapping insn to non-trapping is
3985 possible. Zap dummy outgoing edges. */
3986 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3987 }
3988
3989 /* Re-insert the ARGS_SIZE notes. */
3990 if (as_note)
3991 fixup_args_size_notes (before_try, last, get_args_size (as_note));
3992
3993 /* Scan the new insns for embedded side effects and add appropriate
3994 REG_INC notes. */
3995 if (AUTO_INC_DEC)
3996 for (x = last; x != before_try; x = PREV_INSN (x))
3997 if (NONDEBUG_INSN_P (x))
3998 add_auto_inc_notes (x, PATTERN (x));
3999
4000 /* If we generated a jump instruction, it won't have
4001 JUMP_LABEL set. Recompute after we're done. */
4002 for (x = last; x != before_try; x = PREV_INSN (x))
4003 if (JUMP_P (x))
4004 {
4005 peep2_do_rebuild_jump_labels = true;
4006 break;
4007 }
4008
4009 return last;
4010}
4011
4012/* After performing a replacement in basic block BB, fix up the life
4013 information in our buffer. LAST is the last of the insns that we
4014 emitted as a replacement. PREV is the insn before the start of
4015 the replacement. MATCH_LEN is the number of instructions that were
4016 matched, and which now need to be replaced in the buffer. */
4017
4018static void
4019peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
4020 rtx_insn *prev)
4021{
4022 int i = peep2_buf_position (peep2_current + match_len + 1);
4023 rtx_insn *x;
4024 regset_head live;
4025
4026 INIT_REG_SET (&live);
4027 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
4028
4029 gcc_assert (peep2_current_count >= match_len + 1);
4030 peep2_current_count -= match_len + 1;
4031
4032 x = last;
4033 do
4034 {
4035 if (INSN_P (x))
4036 {
4037 df_insn_rescan (x);
4038 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
4039 {
4040 peep2_current_count++;
4041 if (--i < 0)
4042 i = MAX_INSNS_PER_PEEP2;
4043 peep2_insn_data[i].insn = x;
4044 df_simulate_one_insn_backwards (bb, x, &live);
4045 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
4046 }
4047 }
4048 x = PREV_INSN (x);
4049 }
4050 while (x != prev);
4051 CLEAR_REG_SET (&live);
4052
4053 peep2_current = i;
4054}
4055
4056/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
4057 Return true if we added it, false otherwise. The caller will try to match
4058 peepholes against the buffer if we return false; otherwise it will try to
4059 add more instructions to the buffer. */
4060
4061static bool
4062peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
4063{
4064 int pos;
4065
4066 /* Once we have filled the maximum number of insns the buffer can hold,
4067 allow the caller to match the insns against peepholes. We wait until
4068 the buffer is full in case the target has similar peepholes of different
4069 length; we always want to match the longest if possible. */
4070 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
4071 return false;
4072
4073 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
4074 any other pattern, lest it change the semantics of the frame info. */
4075 if (RTX_FRAME_RELATED_P (insn))
4076 {
4077 /* Let the buffer drain first. */
4078 if (peep2_current_count > 0)
4079 return false;
4080 /* Now the insn will be the only thing in the buffer. */
4081 }
4082
4083 pos = peep2_buf_position (peep2_current + peep2_current_count);
4084 peep2_insn_data[pos].insn = insn;
4085 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4086 peep2_current_count++;
4087
4088 df_simulate_one_insn_forwards (bb, insn, live);
4089 return true;
4090}
4091
4092/* Perform the peephole2 optimization pass. */
4093
4094static void
4095peephole2_optimize (void)
4096{
4097 rtx_insn *insn;
4098 bitmap live;
4099 int i;
4100 basic_block bb;
4101
4102 peep2_do_cleanup_cfg = false;
4103 peep2_do_rebuild_jump_labels = false;
4104
4105 df_set_flags (DF_LR_RUN_DCE);
4106 df_note_add_problem ();
4107 df_analyze ();
4108
4109 /* Initialize the regsets we're going to use. */
4110 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4111 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
4112 search_ofs = 0;
4113 live = BITMAP_ALLOC (&reg_obstack);
4114
4115 FOR_EACH_BB_REVERSE_FN (bb, cfun)
4116 {
4117 bool past_end = false;
4118 int pos;
4119
4120 rtl_profile_for_bb (bb);
4121
4122 /* Start up propagation. */
4123 bitmap_copy (live, DF_LR_IN (bb));
4124 df_simulate_initialize_forwards (bb, live);
4125 peep2_reinit_state (live);
4126
4127 insn = BB_HEAD (bb);
4128 for (;;)
4129 {
4130 rtx_insn *attempt, *head;
4131 int match_len;
4132
4133 if (!past_end && !NONDEBUG_INSN_P (insn))
4134 {
4135 next_insn:
4136 insn = NEXT_INSN (insn);
4137 if (insn == NEXT_INSN (BB_END (bb)))
4138 past_end = true;
4139 continue;
4140 }
4141 if (!past_end && peep2_fill_buffer (bb, insn, live))
4142 goto next_insn;
4143
4144 /* If we did not fill an empty buffer, it signals the end of the
4145 block. */
4146 if (peep2_current_count == 0)
4147 break;
4148
4149 /* The buffer filled to the current maximum, so try to match. */
4150
4151 pos = peep2_buf_position (peep2_current + peep2_current_count);
4152 peep2_insn_data[pos].insn = PEEP2_EOB;
4153 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4154
4155 /* Match the peephole. */
4156 head = peep2_insn_data[peep2_current].insn;
4157 attempt = peephole2_insns (PATTERN (head), head, &match_len);
4158 if (attempt != NULL)
4159 {
4160 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
4161 if (last)
4162 {
4163 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
4164 continue;
4165 }
4166 }
4167
4168 /* No match: advance the buffer by one insn. */
4169 peep2_current = peep2_buf_position (peep2_current + 1);
4170 peep2_current_count--;
4171 }
4172 }
4173
4174 default_rtl_profile ();
4175 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4176 BITMAP_FREE (peep2_insn_data[i].live_before);
4177 BITMAP_FREE (live);
4178 if (peep2_do_rebuild_jump_labels)
4179 rebuild_jump_labels (get_insns ());
4180 if (peep2_do_cleanup_cfg)
4181 cleanup_cfg (CLEANUP_CFG_CHANGED);
4182}
4183
4184/* Common predicates for use with define_bypass. */
4185
4186/* Helper function for store_data_bypass_p, handle just a single SET
4187 IN_SET. */
4188
4189static bool
4190store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
4191{
4192 if (!MEM_P (SET_DEST (in_set)))
4193 return false;
4194
4195 rtx out_set = single_set (out_insn);
4196 if (out_set)
4197 return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
4198
4199 rtx out_pat = PATTERN (out_insn);
4200 if (GET_CODE (out_pat) != PARALLEL)
4201 return false;
4202
4203 for (int i = 0; i < XVECLEN (out_pat, 0); i++)
4204 {
4205 rtx out_exp = XVECEXP (out_pat, 0, i);
4206
4207 if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
4208 continue;
4209
4210 gcc_assert (GET_CODE (out_exp) == SET);
4211
4212 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
4213 return false;
4214 }
4215
4216 return true;
4217}
4218
4219/* True if the dependency between OUT_INSN and IN_INSN is on the store
4220 data not the address operand(s) of the store. IN_INSN and OUT_INSN
4221 must be either a single_set or a PARALLEL with SETs inside. */
4222
4223int
4224store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4225{
4226 rtx in_set = single_set (in_insn);
4227 if (in_set)
4228 return store_data_bypass_p_1 (out_insn, in_set);
4229
4230 rtx in_pat = PATTERN (in_insn);
4231 if (GET_CODE (in_pat) != PARALLEL)
4232 return false;
4233
4234 for (int i = 0; i < XVECLEN (in_pat, 0); i++)
4235 {
4236 rtx in_exp = XVECEXP (in_pat, 0, i);
4237
4238 if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
4239 continue;
4240
4241 gcc_assert (GET_CODE (in_exp) == SET);
4242
4243 if (!store_data_bypass_p_1 (out_insn, in_exp))
4244 return false;
4245 }
4246
4247 return true;
4248}
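
store_data_bypass_p, like the other predicates in this file, walks the elements of a PARALLEL with XVECLEN/XVECEXP, skips CLOBBERs and USEs, and requires every remaining SET to pass the per-SET check. A toy sketch of that iteration idiom under the same skip/accept rules (hypothetical types, not GCC's):

/* Sketch of the PARALLEL-walking idiom: visit each element, skip
   clobbers and uses, and require every remaining SET to satisfy a
   predicate.  Simplified, hypothetical stand-ins for RTL.  */
#include <functional>
#include <vector>

enum toy_code { TOY_SET, TOY_CLOBBER, TOY_USE };

struct toy_exp { toy_code code; int id; };

static bool
all_sets_satisfy (const std::vector<toy_exp> &parallel,
                  const std::function<bool (const toy_exp &)> &pred)
{
  for (const toy_exp &e : parallel)
    {
      if (e.code == TOY_CLOBBER || e.code == TOY_USE)
        continue;          /* clobbers and uses carry no dependence */
      if (!pred (e))
        return false;      /* one failing SET rejects the whole insn */
    }
  return true;
}

int
main ()
{
  std::vector<toy_exp> par = { { TOY_SET, 0 }, { TOY_CLOBBER, 1 } };
  return all_sets_satisfy (par, [] (const toy_exp &e)
                           { return e.id == 0; }) ? 0 : 1;
}
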
4249
4250/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
4251 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
4252 or multiple set; IN_INSN should be single_set for truth, but for convenience
4253 of insn categorization may be any JUMP or CALL insn. */
4254
4255int
4256if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4257{
4258 rtx out_set, in_set;
4259
4260 in_set = single_set (in_insn);
4261 if (! in_set)
4262 {
4263 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
4264 return false;
4265 }
4266
4267 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
4268 return false;
4269 in_set = SET_SRC (in_set);
4270
4271 out_set = single_set (out_insn);
4272 if (out_set)
4273 {
4274 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4275 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4276 return false;
4277 }
4278 else
4279 {
4280 rtx out_pat;
4281 int i;
4282
4283 out_pat = PATTERN (out_insn);
4284 gcc_assert (GET_CODE (out_pat) == PARALLEL);
4285
4286 for (i = 0; i < XVECLEN (out_pat, 0); i++)
4287 {
4288 rtx exp = XVECEXP (out_pat, 0, i);
4289
4290 if (GET_CODE (exp) == CLOBBER)
4291 continue;
4292
4293 gcc_assert (GET_CODE (exp) == SET);
4294
4295 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4296 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4297 return false;
4298 }
4299 }
4300
4301 return true;
4302}
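
In RTL, an IF_THEN_ELSE has three operands: XEXP (x, 0) is the condition, XEXP (x, 1) the THEN value, and XEXP (x, 2) the ELSE value; if_test_bypass_p permits the producer's result to feed only the condition. A simplified sketch of that operand rule with toy types (not GCC's API):

/* Sketch of the operand layout checked by if_test_bypass_p, with toy
   stand-ins: each field names the register read by that operand.  */
#include <string>

struct toy_ite
{
  std::string cond, then_arm, else_arm;
};

/* Reject if the produced register feeds either data arm; feeding the
   condition alone is fine.  */
static bool
toy_if_test_bypass_p (const std::string &out_reg, const toy_ite &ite)
{
  return out_reg != ite.then_arm && out_reg != ite.else_arm;
}

int
main ()
{
  toy_ite ite = { "r1", "r2", "r3" };
  return toy_if_test_bypass_p ("r1", ite) ? 0 : 1;  /* condition only */
}
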
4303
4304static unsigned int
4305rest_of_handle_peephole2 (void)
4306{
4307 if (HAVE_peephole2)
4308 peephole2_optimize ();
4309
4310 return 0;
4311}
4312
4313namespace {
4314
4315const pass_data pass_data_peephole2 =
4316{
4317 RTL_PASS, /* type */
4318 "peephole2", /* name */
4319 OPTGROUP_NONE, /* optinfo_flags */
4320 TV_PEEPHOLE2, /* tv_id */
4321 0, /* properties_required */
4322 0, /* properties_provided */
4323 0, /* properties_destroyed */
4324 0, /* todo_flags_start */
4325 TODO_df_finish, /* todo_flags_finish */
4326};
4327
4328class pass_peephole2 : public rtl_opt_pass
4329{
4330public:
4331 pass_peephole2 (gcc::context *ctxt)
4332 : rtl_opt_pass (pass_data_peephole2, ctxt)
4333 {}
4334
4335 /* opt_pass methods: */
4336 /* The epiphany backend creates a second instance of this pass, so we need
4337 a clone method. */
4338 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
4339 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
4340 virtual unsigned int execute (function *)
4341 {
4342 return rest_of_handle_peephole2 ();
4343 }
4344
4345}; // class pass_peephole2
4346
4347} // anon namespace
4348
4349rtl_opt_pass *
4350make_pass_peephole2 (gcc::context *ctxt)
4351{
4352 return new pass_peephole2 (ctxt);
4353}
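
As the comment in pass_peephole2 notes, clone () exists because one backend (epiphany) instantiates the pass twice; it is the classic "virtual constructor" idiom. A minimal sketch of the pattern, with a hypothetical simplified hierarchy in place of opt_pass:

/* Minimal sketch of the clone() idiom used by the pass classes above:
   a virtual constructor so a pass can be instantiated more than once.
   Hypothetical, simplified hierarchy.  */
#include <memory>

struct toy_pass
{
  virtual ~toy_pass () {}
  virtual toy_pass *clone () = 0;   /* "virtual constructor" */
  virtual unsigned execute () = 0;
};

struct toy_peephole2 : toy_pass
{
  toy_pass *clone () override { return new toy_peephole2 (*this); }
  unsigned execute () override { return 0; }
};

int
main ()
{
  toy_peephole2 first;
  /* A second, independent instance, as a backend might request.  */
  std::unique_ptr<toy_pass> second (first.clone ());
  return second->execute ();
}
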
4354
4355namespace {
4356
4357const pass_data pass_data_split_all_insns =
4358{
4359 RTL_PASS, /* type */
4360 "split1", /* name */
4361 OPTGROUP_NONE, /* optinfo_flags */
4362 TV_NONE, /* tv_id */
4363 0, /* properties_required */
4364 PROP_rtl_split_insns, /* properties_provided */
4365 0, /* properties_destroyed */
4366 0, /* todo_flags_start */
4367 0, /* todo_flags_finish */
4368};
4369
4370class pass_split_all_insns : public rtl_opt_pass
4371{
4372public:
4373 pass_split_all_insns (gcc::context *ctxt)
4374 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
4375 {}
4376
4377 /* opt_pass methods: */
4378 /* The epiphany backend creates a second instance of this pass, so
4379 we need a clone method. */
4380 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
4381 virtual unsigned int execute (function *)
4382 {
4383 split_all_insns ();
4384 return 0;
4385 }
4386
4387}; // class pass_split_all_insns
4388
4389} // anon namespace
4390
4391rtl_opt_pass *
4392make_pass_split_all_insns (gcc::context *ctxt)
4393{
4394 return new pass_split_all_insns (ctxt);
4395}
4396
4397namespace {
4398
4399const pass_data pass_data_split_after_reload =
4400{
4401 RTL_PASS, /* type */
4402 "split2", /* name */
4403 OPTGROUP_NONE, /* optinfo_flags */
4404 TV_NONE, /* tv_id */
4405 0, /* properties_required */
4406 0, /* properties_provided */
4407 0, /* properties_destroyed */
4408 0, /* todo_flags_start */
4409 0, /* todo_flags_finish */
4410};
4411
4412class pass_split_after_reload : public rtl_opt_pass
4413{
4414public:
4415 pass_split_after_reload (gcc::context *ctxt)
4416 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
4417 {}
4418
4419 /* opt_pass methods: */
4420 virtual bool gate (function *)
4421 {
4422 /* If optimizing, then go ahead and split insns now. */
4423 return optimize > 0;
4424 }
4425
4426 virtual unsigned int execute (function *)
4427 {
4428 split_all_insns ();
4429 return 0;
4430 }
4431
4432}; // class pass_split_after_reload
4433
4434} // anon namespace
4435
4436rtl_opt_pass *
4437make_pass_split_after_reload (gcc::context *ctxt)
4438{
4439 return new pass_split_after_reload (ctxt);
4440}
4441
4442static bool
4443enable_split_before_sched2 (void)
4444{
4445#ifdef INSN_SCHEDULING
4446 return optimize > 0 && flag_schedule_insns_after_reload;
4447#else
4448 return false;
4449#endif
4450}
4451
4452namespace {
4453
4454const pass_data pass_data_split_before_sched2 =
4455{
4456 RTL_PASS, /* type */
4457 "split3", /* name */
4458 OPTGROUP_NONE, /* optinfo_flags */
4459 TV_NONE, /* tv_id */
4460 0, /* properties_required */
4461 0, /* properties_provided */
4462 0, /* properties_destroyed */
4463 0, /* todo_flags_start */
4464 0, /* todo_flags_finish */
4465};
4466
4467class pass_split_before_sched2 : public rtl_opt_pass
4468{
4469public:
4470 pass_split_before_sched2 (gcc::context *ctxt)
4471 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4472 {}
4473
4474 /* opt_pass methods: */
4475 virtual bool gate (function *)
4476 {
4477 return enable_split_before_sched2 ();
4478 }
4479
4480 virtual unsigned int execute (function *)
4481 {
4482 split_all_insns ();
4483 return 0;
4484 }
4485
4486}; // class pass_split_before_sched2
4487
4488} // anon namespace
4489
4490rtl_opt_pass *
4491make_pass_split_before_sched2 (gcc::context *ctxt)
4492{
4493 return new pass_split_before_sched2 (ctxt);
4494}
4495
4496namespace {
4497
4498const pass_data pass_data_split_before_regstack =
4499{
4500 RTL_PASS, /* type */
4501 "split4", /* name */
4502 OPTGROUP_NONE, /* optinfo_flags */
4503 TV_NONE, /* tv_id */
4504 0, /* properties_required */
4505 0, /* properties_provided */
4506 0, /* properties_destroyed */
4507 0, /* todo_flags_start */
4508 0, /* todo_flags_finish */
4509};
4510
4511class pass_split_before_regstack : public rtl_opt_pass
4512{
4513public:
4514 pass_split_before_regstack (gcc::context *ctxt)
4515 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4516 {}
4517
4518 /* opt_pass methods: */
4519 virtual bool gate (function *);
4520 virtual unsigned int execute (function *)
4521 {
4522 split_all_insns ();
4523 return 0;
4524 }
4525
4526}; // class pass_split_before_regstack
4527
4528bool
4529pass_split_before_regstack::gate (function *)
4530{
4531#if HAVE_ATTR_length && defined (STACK_REGS)
4532 /* If flow2 creates new instructions which need splitting
4533 and scheduling after reload is not done, they might not be
4534 split until final which doesn't allow splitting
4535 if HAVE_ATTR_length. */
4536 return !enable_split_before_sched2 ();
4537#else
4538 return false;
4539#endif
4540}
4541
4542} // anon namespace
4543
4544rtl_opt_pass *
4545make_pass_split_before_regstack (gcc::context *ctxt)
4546{
4547 return new pass_split_before_regstack (ctxt);
4548}
4549
4550namespace {
4551
4552const pass_data pass_data_split_for_shorten_branches =
4553{
4554 RTL_PASS, /* type */
4555 "split5", /* name */
4556 OPTGROUP_NONE, /* optinfo_flags */
4557 TV_NONE, /* tv_id */
4558 0, /* properties_required */
4559 0, /* properties_provided */
4560 0, /* properties_destroyed */
4561 0, /* todo_flags_start */
4562 0, /* todo_flags_finish */
4563};
4564
4565class pass_split_for_shorten_branches : public rtl_opt_pass
4566{
4567public:
4568 pass_split_for_shorten_branches (gcc::context *ctxt)
4569 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4570 {}
4571
4572 /* opt_pass methods: */
4573 virtual bool gate (function *)
4574 {
4575 /* The placement of the splitting that we do for shorten_branches
4576 depends on whether regstack is used by the target or not. */
4577#if HAVE_ATTR_length && !defined (STACK_REGS)
4578 return true;
4579#else
4580 return false;
4581#endif
4582 }
4583
4584 virtual unsigned int execute (function *)
4585 {
4586 return split_all_insns_noflow ();
4587 }
4588
4589}; // class pass_split_for_shorten_branches
4590
4591} // anon namespace
4592
4593rtl_opt_pass *
4594make_pass_split_for_shorten_branches (gcc::context *ctxt)
4595{
4596 return new pass_split_for_shorten_branches (ctxt);
4597}
4598
4599/* (Re)initialize the target information after a change in target. */
4600
4601void
4602recog_init ()
4603{
4604 /* The information is zero-initialized, so we don't need to do anything
4605 first time round. */
4606 if (!this_target_recog->x_initialized)
4607 {
4608 this_target_recog->x_initialized = true;
4609 return;
4610 }
4611 memset (this_target_recog->x_bool_attr_masks, 0,
4612 sizeof (this_target_recog->x_bool_attr_masks));
4613 for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4614 if (this_target_recog->x_op_alt[i])
4615 {
4616 free (this_target_recog->x_op_alt[i]);
4617 this_target_recog->x_op_alt[i] = 0;
4618 }
4619}
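
recog_init leans on static zero-initialization: the first call only flips x_initialized, and subsequent calls (after a target switch) wipe the cached, target-specific tables. A standalone sketch of this lazy-reset idiom with simplified, hypothetical state:

/* Sketch of the reinitialization idiom in recog_init: the first call
   relies on zero-initialized state and only sets a flag; later calls
   clear the caches.  Simplified, hypothetical types.  */
#include <cstring>

struct toy_target_recog
{
  bool initialized;
  int attr_masks[4];   /* stands in for x_bool_attr_masks */
};

static toy_target_recog toy_recog;  /* zero-initialized static */

static void
toy_recog_init ()
{
  if (!toy_recog.initialized)
    {
      /* First call: nothing cached yet, just mark as live.  */
      toy_recog.initialized = true;
      return;
    }
  /* Subsequent calls: drop cached, target-specific data.  */
  std::memset (toy_recog.attr_masks, 0, sizeof (toy_recog.attr_masks));
}

int
main ()
{
  toy_recog_init ();  /* cheap first call */
  toy_recog_init ();  /* resets caches after a target change */
  return 0;
}
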