Bug Summary

File: build/gcc/config/i386/i386.c
Warning: line 10183, column 19
Although the value stored to 'base_reg' is used in the enclosing expression, the value is never actually read from 'base_reg'
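
For context, a minimal sketch (not taken from i386.c; consume and dead_store_example are invented names) of the dead-store pattern this checker reports: the assigned value is consumed by the enclosing expression, yet the variable itself is never read afterwards, so the store to it is dead.

static int consume (int);

static int
dead_store_example (int addr)
{
  int base_reg;
  /* The value stored to base_reg feeds consume (), but base_reg is never
     read again, so the assignment is flagged as a dead store.  */
  return consume (base_reg = addr + 4);
}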

Annotated Source Code

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name i386.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-6vlSUu.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c
1/* Subroutines used for code generation on IA-32.
2 Copyright (C) 1988-2021 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 3, or (at your option)
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
19
20 #define IN_TARGET_CODE 1
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "backend.h"
26#include "rtl.h"
27#include "tree.h"
28#include "memmodel.h"
29#include "gimple.h"
30#include "cfghooks.h"
31#include "cfgloop.h"
32#include "df.h"
33#include "tm_p.h"
34#include "stringpool.h"
35#include "expmed.h"
36#include "optabs.h"
37#include "regs.h"
38#include "emit-rtl.h"
39#include "recog.h"
40#include "cgraph.h"
41#include "diagnostic.h"
42#include "cfgbuild.h"
43#include "alias.h"
44#include "fold-const.h"
45#include "attribs.h"
46#include "calls.h"
47#include "stor-layout.h"
48#include "varasm.h"
49#include "output.h"
50#include "insn-attr.h"
51#include "flags.h"
52#include "except.h"
53#include "explow.h"
54#include "expr.h"
55#include "cfgrtl.h"
56#include "common/common-target.h"
57#include "langhooks.h"
58#include "reload.h"
59#include "gimplify.h"
60#include "dwarf2.h"
61#include "tm-constrs.h"
62#include "cselib.h"
63#include "sched-int.h"
64#include "opts.h"
65#include "tree-pass.h"
66#include "context.h"
67#include "pass_manager.h"
68#include "target-globals.h"
69#include "gimple-iterator.h"
70#include "tree-vectorizer.h"
71#include "shrink-wrap.h"
72#include "builtins.h"
73#include "rtl-iter.h"
74#include "tree-iterator.h"
75#include "dbgcnt.h"
76#include "case-cfn-macros.h"
77#include "dojump.h"
78#include "fold-const-call.h"
79#include "tree-vrp.h"
80#include "tree-ssanames.h"
81#include "selftest.h"
82#include "selftest-rtl.h"
83#include "print-rtl.h"
84#include "intl.h"
85#include "ifcvt.h"
86#include "symbol-summary.h"
87#include "ipa-prop.h"
88#include "ipa-fnsummary.h"
89#include "wide-int-bitmask.h"
90#include "tree-vector-builder.h"
91#include "debug.h"
92#include "dwarf2out.h"
93#include "i386-options.h"
94#include "i386-builtins.h"
95#include "i386-expand.h"
96#include "i386-features.h"
97#include "function-abi.h"
98
99/* This file should be included last. */
100#include "target-def.h"
101
102static rtx legitimize_dllimport_symbol (rtx, bool);
103static rtx legitimize_pe_coff_extern_decl (rtx, bool);
104static void ix86_print_operand_address_as (FILE *, rtx, addr_space_t, bool);
105static void ix86_emit_restore_reg_using_pop (rtx);
106
107
108 #ifndef CHECK_STACK_LIMIT
109 #define CHECK_STACK_LIMIT (-1)
110 #endif
111
112 /* Return index of given mode in mult and division cost tables. */
113 #define MODE_INDEX(mode) \
114   ((mode) == QImode ? 0 \
115    : (mode) == HImode ? 1 \
116    : (mode) == SImode ? 2 \
117    : (mode) == DImode ? 3 \
118    : 4)
119
120
121 /* Set by -mtune. */
122 const struct processor_costs *ix86_tune_cost = NULL;
123
124 /* Set by -mtune or -Os. */
125 const struct processor_costs *ix86_cost = NULL;
126
127 /* In case the average insn count for single function invocation is
128    lower than this constant, emit fast (but longer) prologue and
129    epilogue code. */
130 #define FAST_PROLOGUE_INSN_COUNT 20
131
132 /* Names for 8 (low), 8 (high), and 16-bit registers, respectively. */
133 static const char *const qi_reg_name[] = QI_REGISTER_NAMES;
134 static const char *const qi_high_reg_name[] = QI_HIGH_REGISTER_NAMES;
135 static const char *const hi_reg_name[] = HI_REGISTER_NAMES;
136
137/* Array of the smallest class containing reg number REGNO, indexed by
138 REGNO. Used by REGNO_REG_CLASS in i386.h. */
139
140 enum reg_class const regclass_map[FIRST_PSEUDO_REGISTER] =
141{
142 /* ax, dx, cx, bx */
143 AREG, DREG, CREG, BREG,
144 /* si, di, bp, sp */
145 SIREG, DIREG, NON_Q_REGS, NON_Q_REGS,
146 /* FP registers */
147 FP_TOP_REG, FP_SECOND_REG, FLOAT_REGS, FLOAT_REGS,
148 FLOAT_REGS, FLOAT_REGS, FLOAT_REGS, FLOAT_REGS,
149 /* arg pointer, flags, fpsr, frame */
150 NON_Q_REGS, NO_REGS, NO_REGS, NON_Q_REGS,
151 /* SSE registers */
152 SSE_FIRST_REG, SSE_REGS, SSE_REGS, SSE_REGS,
153 SSE_REGS, SSE_REGS, SSE_REGS, SSE_REGS,
154 /* MMX registers */
155 MMX_REGS, MMX_REGS, MMX_REGS, MMX_REGS,
156 MMX_REGS, MMX_REGS, MMX_REGS, MMX_REGS,
157 /* REX registers */
158 GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, GENERAL_REGS,
159 GENERAL_REGS, GENERAL_REGS, GENERAL_REGS, GENERAL_REGS,
160 /* SSE REX registers */
161 SSE_REGS, SSE_REGS, SSE_REGS, SSE_REGS,
162 SSE_REGS, SSE_REGS, SSE_REGS, SSE_REGS,
163 /* AVX-512 SSE registers */
164 ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS,
165 ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS,
166 ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS,
167 ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS, ALL_SSE_REGS,
168 /* Mask registers. */
169 ALL_MASK_REGS, MASK_REGS, MASK_REGS, MASK_REGS,
170 MASK_REGS, MASK_REGS, MASK_REGS, MASK_REGS
171};
172
173/* The "default" register map used in 32bit mode. */
174
175 int const dbx_register_map[FIRST_PSEUDO_REGISTER] =
176{
177 /* general regs */
178 0, 2, 1, 3, 6, 7, 4, 5,
179 /* fp regs */
180 12, 13, 14, 15, 16, 17, 18, 19,
181 /* arg, flags, fpsr, frame */
182   IGNORED_DWARF_REGNUM, IGNORED_DWARF_REGNUM,
183   IGNORED_DWARF_REGNUM, IGNORED_DWARF_REGNUM,
184 /* SSE */
185 21, 22, 23, 24, 25, 26, 27, 28,
186 /* MMX */
187 29, 30, 31, 32, 33, 34, 35, 36,
188 /* extended integer registers */
189   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
190   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
191   /* extended sse registers */
192   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
193   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
194   /* AVX-512 registers 16-23 */
195   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
196   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
197   /* AVX-512 registers 24-31 */
198   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
199   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
200 /* Mask registers */
201 93, 94, 95, 96, 97, 98, 99, 100
202};
203
204/* The "default" register map used in 64bit mode. */
205
206 int const dbx64_register_map[FIRST_PSEUDO_REGISTER] =
207{
208 /* general regs */
209 0, 1, 2, 3, 4, 5, 6, 7,
210 /* fp regs */
211 33, 34, 35, 36, 37, 38, 39, 40,
212 /* arg, flags, fpsr, frame */
213   IGNORED_DWARF_REGNUM, IGNORED_DWARF_REGNUM,
214   IGNORED_DWARF_REGNUM, IGNORED_DWARF_REGNUM,
215 /* SSE */
216 17, 18, 19, 20, 21, 22, 23, 24,
217 /* MMX */
218 41, 42, 43, 44, 45, 46, 47, 48,
219 /* extended integer registers */
220 8, 9, 10, 11, 12, 13, 14, 15,
221 /* extended SSE registers */
222 25, 26, 27, 28, 29, 30, 31, 32,
223 /* AVX-512 registers 16-23 */
224 67, 68, 69, 70, 71, 72, 73, 74,
225 /* AVX-512 registers 24-31 */
226 75, 76, 77, 78, 79, 80, 81, 82,
227 /* Mask registers */
228 118, 119, 120, 121, 122, 123, 124, 125
229};
230
231/* Define the register numbers to be used in Dwarf debugging information.
232 The SVR4 reference port C compiler uses the following register numbers
233 in its Dwarf output code:
234 0 for %eax (gcc regno = 0)
235 1 for %ecx (gcc regno = 2)
236 2 for %edx (gcc regno = 1)
237 3 for %ebx (gcc regno = 3)
238 4 for %esp (gcc regno = 7)
239 5 for %ebp (gcc regno = 6)
240 6 for %esi (gcc regno = 4)
241 7 for %edi (gcc regno = 5)
242 The following three DWARF register numbers are never generated by
243 the SVR4 C compiler or by the GNU compilers, but SDB on x86/svr4
244 believed these numbers have these meanings.
245 8 for %eip (no gcc equivalent)
246 9 for %eflags (gcc regno = 17)
247 10 for %trapno (no gcc equivalent)
248 It is not at all clear how we should number the FP stack registers
249 for the x86 architecture. If the version of SDB on x86/svr4 were
250 a bit less brain dead with respect to floating-point then we would
251 have a precedent to follow with respect to DWARF register numbers
252 for x86 FP registers, but the SDB on x86/svr4 was so completely
253 broken with respect to FP registers that it is hardly worth thinking
254 of it as something to strive for compatibility with.
255 The version of x86/svr4 SDB I had does (partially)
256 seem to believe that DWARF register number 11 is associated with
257 the x86 register %st(0), but that's about all. Higher DWARF
258 register numbers don't seem to be associated with anything in
259 particular, and even for DWARF regno 11, SDB only seemed to under-
260 stand that it should say that a variable lives in %st(0) (when
261 asked via an `=' command) if we said it was in DWARF regno 11,
262 but SDB still printed garbage when asked for the value of the
263 variable in question (via a `/' command).
264 (Also note that the labels SDB printed for various FP stack regs
265 when doing an `x' command were all wrong.)
266 Note that these problems generally don't affect the native SVR4
267 C compiler because it doesn't allow the use of -O with -g and
268 because when it is *not* optimizing, it allocates a memory
269 location for each floating-point variable, and the memory
270 location is what gets described in the DWARF AT_location
271 attribute for the variable in question.
272 Regardless of the severe mental illness of the x86/svr4 SDB, we
273 do something sensible here and we use the following DWARF
274 register numbers. Note that these are all stack-top-relative
275 numbers.
276 11 for %st(0) (gcc regno = 8)
277 12 for %st(1) (gcc regno = 9)
278 13 for %st(2) (gcc regno = 10)
279 14 for %st(3) (gcc regno = 11)
280 15 for %st(4) (gcc regno = 12)
281 16 for %st(5) (gcc regno = 13)
282 17 for %st(6) (gcc regno = 14)
283 18 for %st(7) (gcc regno = 15)
284*/
285 int const svr4_dbx_register_map[FIRST_PSEUDO_REGISTER] =
286{
287 /* general regs */
288 0, 2, 1, 3, 6, 7, 5, 4,
289 /* fp regs */
290 11, 12, 13, 14, 15, 16, 17, 18,
291 /* arg, flags, fpsr, frame */
292   IGNORED_DWARF_REGNUM, 9,
293   IGNORED_DWARF_REGNUM, IGNORED_DWARF_REGNUM,
294 /* SSE registers */
295 21, 22, 23, 24, 25, 26, 27, 28,
296 /* MMX registers */
297 29, 30, 31, 32, 33, 34, 35, 36,
298 /* extended integer registers */
299   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
300   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
301   /* extended sse registers */
302   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
303   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
304   /* AVX-512 registers 16-23 */
305   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
306   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
307   /* AVX-512 registers 24-31 */
308   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
309   INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM, INVALID_REGNUM,
310 /* Mask registers */
311 93, 94, 95, 96, 97, 98, 99, 100
312};
313
314/* Define parameter passing and return registers. */
315
316static int const x86_64_int_parameter_registers[6] =
317{
318   DI_REG, SI_REG, DX_REG, CX_REG, R8_REG, R9_REG
319};
320
321static int const x86_64_ms_abi_int_parameter_registers[4] =
322{
323   CX_REG, DX_REG, R8_REG, R9_REG
324};
325
326static int const x86_64_int_return_registers[4] =
327{
328   AX_REG, DX_REG, DI_REG, SI_REG
329};
330
331/* Define the structure for the machine field in struct function. */
332
333struct GTY(()) stack_local_entry {
334 unsigned short mode;
335 unsigned short n;
336 rtx rtl;
337 struct stack_local_entry *next;
338};
339
340/* Which cpu are we scheduling for. */
341enum attr_cpu ix86_schedule;
342
343/* Which cpu are we optimizing for. */
344enum processor_type ix86_tune;
345
346/* Which instruction set architecture to use. */
347enum processor_type ix86_arch;
348
349/* True if processor has SSE prefetch instruction. */
350unsigned char ix86_prefetch_sse;
351
352/* Preferred alignment for stack boundary in bits. */
353unsigned int ix86_preferred_stack_boundary;
354
355/* Alignment for incoming stack boundary in bits specified at
356 command line. */
357unsigned int ix86_user_incoming_stack_boundary;
358
359/* Default alignment for incoming stack boundary in bits. */
360unsigned int ix86_default_incoming_stack_boundary;
361
362/* Alignment for incoming stack boundary in bits. */
363unsigned int ix86_incoming_stack_boundary;
364
365/* Calling abi specific va_list type nodes. */
366tree sysv_va_list_type_node;
367tree ms_va_list_type_node;
368
369/* Prefix built by ASM_GENERATE_INTERNAL_LABEL. */
370char internal_label_prefix[16];
371int internal_label_prefix_len;
372
373/* Fence to use after loop using movnt. */
374tree x86_mfence;
375
376/* Register class used for passing given 64bit part of the argument.
377 These represent classes as documented by the PS ABI, with the exception
378 of SSESF, SSEDF classes, that are basically SSE class, just gcc will
379 use SF or DFmode move instead of DImode to avoid reformatting penalties.
380
381 Similarly we play games with INTEGERSI_CLASS to use cheaper SImode moves
382 whenever possible (upper half does contain padding). */
383enum x86_64_reg_class
384 {
385 X86_64_NO_CLASS,
386 X86_64_INTEGER_CLASS,
387 X86_64_INTEGERSI_CLASS,
388 X86_64_SSE_CLASS,
389 X86_64_SSESF_CLASS,
390 X86_64_SSEDF_CLASS,
391 X86_64_SSEUP_CLASS,
392 X86_64_X87_CLASS,
393 X86_64_X87UP_CLASS,
394 X86_64_COMPLEX_X87_CLASS,
395 X86_64_MEMORY_CLASS
396 };
397
398 #define MAX_CLASSES 8
399
400/* Table of constants used by fldpi, fldln2, etc.... */
401 static REAL_VALUE_TYPE ext_80387_constants_table [5];
402static bool ext_80387_constants_init;
403
404
405static rtx ix86_function_value (const_tree, const_tree, bool);
406static bool ix86_function_value_regno_p (const unsigned int);
407static unsigned int ix86_function_arg_boundary (machine_mode,
408 const_tree);
409static rtx ix86_static_chain (const_tree, bool);
410static int ix86_function_regparm (const_tree, const_tree);
411static void ix86_compute_frame_layout (void);
412static tree ix86_canonical_va_list_type (tree);
413static unsigned int split_stack_prologue_scratch_regno (void);
414static bool i386_asm_output_addr_const_extra (FILE *, rtx);
415
416static bool ix86_can_inline_p (tree, tree);
417static unsigned int ix86_minimum_incoming_stack_boundary (bool);
418
419
420/* Whether -mtune= or -march= were specified */
421int ix86_tune_defaulted;
422int ix86_arch_specified;
423
424/* Return true if a red-zone is in use. We can't use red-zone when
425 there are local indirect jumps, like "indirect_jump" or "tablejump",
426 which jumps to another place in the function, since "call" in the
427 indirect thunk pushes the return address onto stack, destroying
428 red-zone.
429
430 TODO: If we can reserve the first 2 WORDs, for PUSH and, another
431 for CALL, in red-zone, we can allow local indirect jumps with
432 indirect thunk. */
433
434bool
435ix86_using_red_zone (void)
436{
437   return (TARGET_RED_ZONE
438           && !TARGET_64BIT_MS_ABI
439           && (!cfun->machine->has_local_indirect_jump
440               || cfun->machine->indirect_branch_type == indirect_branch_keep));
441 }
442
443/* Return true, if profiling code should be emitted before
444 prologue. Otherwise it returns false.
445 Note: For x86 with "hotfix" it is sorried. */
446static bool
447ix86_profile_before_prologue (void)
448{
449   return flag_fentry != 0;
450}
451
452/* Update register usage after having seen the compiler flags. */
453
454static void
455ix86_conditional_register_usage (void)
456{
457 int i, c_mask;
458
459 /* If there are no caller-saved registers, preserve all registers.
460 except fixed_regs and registers used for function return value
461 since aggregate_value_p checks call_used_regs[regno] on return
462 value. */
463   if (cfun && cfun->machine->no_caller_saved_registers)
464     for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
465       if (!fixed_regs[i] && !ix86_function_value_regno_p (i))
466         call_used_regs[i] = 0;
467
468   /* For 32-bit targets, disable the REX registers. */
469   if (! TARGET_64BIT)
470     {
471       for (i = FIRST_REX_INT_REG; i <= LAST_REX_INT_REG; i++)
472         CLEAR_HARD_REG_BIT (accessible_reg_set, i);
473       for (i = FIRST_REX_SSE_REG; i <= LAST_REX_SSE_REG; i++)
474         CLEAR_HARD_REG_BIT (accessible_reg_set, i);
475       for (i = FIRST_EXT_REX_SSE_REG; i <= LAST_EXT_REX_SSE_REG; i++)
476         CLEAR_HARD_REG_BIT (accessible_reg_set, i);
477     }
478
479   /* See the definition of CALL_USED_REGISTERS in i386.h. */
480   c_mask = CALL_USED_REGISTERS_MASK (TARGET_64BIT_MS_ABI);
481
482   CLEAR_HARD_REG_SET (reg_class_contents[(int)CLOBBERED_REGS]);
483
484   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
485     {
486       /* Set/reset conditionally defined registers from
487          CALL_USED_REGISTERS initializer. */
488       if (call_used_regs[i] > 1)
489         call_used_regs[i] = !!(call_used_regs[i] & c_mask);
490
491       /* Calculate registers of CLOBBERED_REGS register set
492          as call used registers from GENERAL_REGS register set. */
493       if (TEST_HARD_REG_BIT (reg_class_contents[(int)GENERAL_REGS], i)
494           && call_used_regs[i])
495         SET_HARD_REG_BIT (reg_class_contents[(int)CLOBBERED_REGS], i);
496     }
497
498   /* If MMX is disabled, disable the registers. */
499   if (! TARGET_MMX)
500     accessible_reg_set &= ~reg_class_contents[MMX_REGS];
501
502   /* If SSE is disabled, disable the registers. */
503   if (! TARGET_SSE)
504     accessible_reg_set &= ~reg_class_contents[ALL_SSE_REGS];
505
506   /* If the FPU is disabled, disable the registers. */
507   if (! (TARGET_80387 || TARGET_FLOAT_RETURNS_IN_80387))
508     accessible_reg_set &= ~reg_class_contents[FLOAT_REGS];
509
510   /* If AVX512F is disabled, disable the registers. */
511   if (! TARGET_AVX512F)
512     {
513       for (i = FIRST_EXT_REX_SSE_REG; i <= LAST_EXT_REX_SSE_REG; i++)
514         CLEAR_HARD_REG_BIT (accessible_reg_set, i);
515
516       accessible_reg_set &= ~reg_class_contents[ALL_MASK_REGS];
517     }
518 }
519
520/* Canonicalize a comparison from one we don't have to one we do have. */
521
522static void
523ix86_canonicalize_comparison (int *code, rtx *op0, rtx *op1,
524 bool op0_preserve_value)
525{
526 /* The order of operands in x87 ficom compare is forced by combine in
527 simplify_comparison () function. Float operator is treated as RTX_OBJ
528 with a precedence over other operators and is always put in the first
529 place. Swap condition and operands to match ficom instruction. */
530 if (!op0_preserve_value
531       && GET_CODE (*op0) == FLOAT && MEM_P (XEXP (*op0, 0)) && REG_P (*op1))
532 {
533 enum rtx_code scode = swap_condition ((enum rtx_code) *code);
534
535 /* We are called only for compares that are split to SAHF instruction.
536 Ensure that we have setcc/jcc insn for the swapped condition. */
537 if (ix86_fp_compare_code_to_integer (scode) != UNKNOWN)
538 {
539 std::swap (*op0, *op1);
540 *code = (int) scode;
541 }
542 }
543}
544
545
546/* Hook to determine if one function can safely inline another. */
547
548static bool
549ix86_can_inline_p (tree caller, tree callee)
550{
551   tree caller_tree = DECL_FUNCTION_SPECIFIC_TARGET (caller);
552   tree callee_tree = DECL_FUNCTION_SPECIFIC_TARGET (callee);
553
554 /* Changes of those flags can be tolerated for always inlines. Lets hope
555 user knows what he is doing. */
556   const unsigned HOST_WIDE_INT always_inline_safe_mask
557     = (MASK_USE_8BIT_IDIV | MASK_ACCUMULATE_OUTGOING_ARGS
558        | MASK_NO_ALIGN_STRINGOPS | MASK_AVX256_SPLIT_UNALIGNED_LOAD
559        | MASK_AVX256_SPLIT_UNALIGNED_STORE | MASK_CLD
560        | MASK_NO_FANCY_MATH_387 | MASK_IEEE_FP | MASK_INLINE_ALL_STRINGOPS
561        | MASK_INLINE_STRINGOPS_DYNAMICALLY | MASK_RECIP | MASK_STACK_PROBE
562        | MASK_STV | MASK_TLS_DIRECT_SEG_REFS | MASK_VZEROUPPER
563        | MASK_NO_PUSH_ARGS | MASK_OMIT_LEAF_FRAME_POINTER);
564
565
566 if (!callee_tree)
567     callee_tree = target_option_default_node;
568   if (!caller_tree)
569     caller_tree = target_option_default_node;
570 if (callee_tree == caller_tree)
571 return true;
572
573   struct cl_target_option *caller_opts = TREE_TARGET_OPTION (caller_tree);
574   struct cl_target_option *callee_opts = TREE_TARGET_OPTION (callee_tree);
575 bool ret = false;
576 bool always_inline
577     = (DECL_DISREGARD_INLINE_LIMITS (callee)
578        && lookup_attribute ("always_inline",
579                             DECL_ATTRIBUTES (callee)));
580
581 cgraph_node *callee_node = cgraph_node::get (callee);
582 /* Callee's isa options should be a subset of the caller's, i.e. a SSE4
583 function can inline a SSE2 function but a SSE2 function can't inline
584 a SSE4 function. */
585 if (((caller_opts->x_ix86_isa_flags & callee_opts->x_ix86_isa_flags)
586 != callee_opts->x_ix86_isa_flags)
587 || ((caller_opts->x_ix86_isa_flags2 & callee_opts->x_ix86_isa_flags2)
588 != callee_opts->x_ix86_isa_flags2))
589 ret = false;
590
591 /* See if we have the same non-isa options. */
592 else if ((!always_inline
593 && caller_opts->x_target_flags != callee_opts->x_target_flags)
594 || (caller_opts->x_target_flags & ~always_inline_safe_mask)
595 != (callee_opts->x_target_flags & ~always_inline_safe_mask))
596 ret = false;
597
598 /* See if arch, tune, etc. are the same. */
599 else if (caller_opts->arch != callee_opts->arch)
600 ret = false;
601
602 else if (!always_inline && caller_opts->tune != callee_opts->tune)
603 ret = false;
604
605 else if (caller_opts->x_ix86_fpmath != callee_opts->x_ix86_fpmath
606 /* If the calle doesn't use FP expressions differences in
607 ix86_fpmath can be ignored. We are called from FEs
608 for multi-versioning call optimization, so beware of
609 ipa_fn_summaries not available. */
610 && (! ipa_fn_summaries
611            || ipa_fn_summaries->get (callee_node) == NULL
612 || ipa_fn_summaries->get (callee_node)->fp_expressions))
613 ret = false;
614
615 else if (!always_inline
616 && caller_opts->branch_cost != callee_opts->branch_cost)
617 ret = false;
618
619 else
620 ret = true;
621
622 return ret;
623}
624
625/* Return true if this goes in large data/bss. */
626
627static bool
628ix86_in_large_data_p (tree exp)
629{
630   if (ix86_cmodel != CM_MEDIUM && ix86_cmodel != CM_MEDIUM_PIC)
631     return false;
632
633   if (exp == NULL_TREE)
634     return false;
635
636   /* Functions are never large data. */
637   if (TREE_CODE (exp) == FUNCTION_DECL)
638     return false;
639
640   /* Automatic variables are never large data. */
641   if (VAR_P (exp) && !is_global_var (exp))
642     return false;
643
644   if (VAR_P (exp) && DECL_SECTION_NAME (exp))
645     {
646       const char *section = DECL_SECTION_NAME (exp);
647       if (strcmp (section, ".ldata") == 0
648           || strcmp (section, ".lbss") == 0)
649         return true;
650       return false;
651     }
652   else
653     {
654       HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
655
656       /* If this is an incomplete type with size 0, then we can't put it
657          in data because it might be too big when completed. Also,
658          int_size_in_bytes returns -1 if size can vary or is larger than
659          an integer in which case also it is safer to assume that it goes in
660          large data. */
661       if (size <= 0 || size > ix86_section_threshold)
662 return true;
663 }
664
665 return false;
666}
667
668/* i386-specific section flag to mark large sections. */
669 #define SECTION_LARGE SECTION_MACH_DEP
670
671/* Switch to the appropriate section for output of DECL.
672 DECL is either a `VAR_DECL' node or a constant of some sort.
673 RELOC indicates whether forming the initial value of DECL requires
674 link-time relocations. */
675
676 ATTRIBUTE_UNUSED static section *
677 x86_64_elf_select_section (tree decl, int reloc,
678                            unsigned HOST_WIDE_INT align)
679{
680 if (ix86_in_large_data_p (decl))
681 {
682       const char *sname = NULL;
683       unsigned int flags = SECTION_WRITE | SECTION_LARGE;
684 switch (categorize_decl_for_section (decl, reloc))
685 {
686 case SECCAT_DATA:
687 sname = ".ldata";
688 break;
689 case SECCAT_DATA_REL:
690 sname = ".ldata.rel";
691 break;
692 case SECCAT_DATA_REL_LOCAL:
693 sname = ".ldata.rel.local";
694 break;
695 case SECCAT_DATA_REL_RO:
696 sname = ".ldata.rel.ro";
697 break;
698 case SECCAT_DATA_REL_RO_LOCAL:
699 sname = ".ldata.rel.ro.local";
700 break;
701 case SECCAT_BSS:
702 sname = ".lbss";
703           flags |= SECTION_BSS;
704 break;
705 case SECCAT_RODATA:
706 case SECCAT_RODATA_MERGE_STR:
707 case SECCAT_RODATA_MERGE_STR_INIT:
708 case SECCAT_RODATA_MERGE_CONST:
709 sname = ".lrodata";
710           flags &= ~SECTION_WRITE;
711 break;
712 case SECCAT_SRODATA:
713 case SECCAT_SDATA:
714 case SECCAT_SBSS:
715           gcc_unreachable ();
716 case SECCAT_TEXT:
717 case SECCAT_TDATA:
718 case SECCAT_TBSS:
719 /* We don't split these for medium model. Place them into
720 default sections and hope for best. */
721 break;
722 }
723 if (sname)
724 {
725 /* We might get called with string constants, but get_named_section
726 doesn't like them as they are not DECLs. Also, we need to set
727 flags in that case. */
728           if (!DECL_P (decl))
729             return get_section (sname, flags, NULL);
730 return get_named_section (decl, sname, reloc);
731 }
732 }
733 return default_elf_select_section (decl, reloc, align);
734}
735
736/* Select a set of attributes for section NAME based on the properties
737 of DECL and whether or not RELOC indicates that DECL's initializer
738 might contain runtime relocations. */
739
740 static unsigned int ATTRIBUTE_UNUSED
741 x86_64_elf_section_type_flags (tree decl, const char *name, int reloc)
742 {
743   unsigned int flags = default_section_type_flags (decl, name, reloc);
744
745   if (ix86_in_large_data_p (decl))
746     flags |= SECTION_LARGE;
747
748   if (decl == NULL_TREE
749       && (strcmp (name, ".ldata.rel.ro") == 0
750           || strcmp (name, ".ldata.rel.ro.local") == 0))
751     flags |= SECTION_RELRO;
752
753   if (strcmp (name, ".lbss") == 0
754       || strncmp (name, ".lbss.", sizeof (".lbss.") - 1) == 0
755       || strncmp (name, ".gnu.linkonce.lb.",
756                   sizeof (".gnu.linkonce.lb.") - 1) == 0)
757     flags |= SECTION_BSS;
758
759   return flags;
760 }
761
762/* Build up a unique section name, expressed as a
763 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
764 RELOC indicates whether the initial value of EXP requires
765 link-time relocations. */
766
767static void ATTRIBUTE_UNUSED__attribute__ ((__unused__))
768x86_64_elf_unique_section (tree decl, int reloc)
769{
770 if (ix86_in_large_data_p (decl))
771 {
772       const char *prefix = NULL;
773       /* We only need to use .gnu.linkonce if we don't have COMDAT groups. */
774       bool one_only = DECL_COMDAT_GROUP (decl) && !HAVE_COMDAT_GROUP;
775
776 switch (categorize_decl_for_section (decl, reloc))
777 {
778 case SECCAT_DATA:
779 case SECCAT_DATA_REL:
780 case SECCAT_DATA_REL_LOCAL:
781 case SECCAT_DATA_REL_RO:
782 case SECCAT_DATA_REL_RO_LOCAL:
783 prefix = one_only ? ".ld" : ".ldata";
784 break;
785 case SECCAT_BSS:
786 prefix = one_only ? ".lb" : ".lbss";
787 break;
788 case SECCAT_RODATA:
789 case SECCAT_RODATA_MERGE_STR:
790 case SECCAT_RODATA_MERGE_STR_INIT:
791 case SECCAT_RODATA_MERGE_CONST:
792 prefix = one_only ? ".lr" : ".lrodata";
793 break;
794 case SECCAT_SRODATA:
795 case SECCAT_SDATA:
796 case SECCAT_SBSS:
797           gcc_unreachable ();
798 case SECCAT_TEXT:
799 case SECCAT_TDATA:
800 case SECCAT_TBSS:
801 /* We don't split these for medium model. Place them into
802 default sections and hope for best. */
803 break;
804 }
805 if (prefix)
806 {
807 const char *name, *linkonce;
808 char *string;
809
810           name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
811           name = targetm.strip_name_encoding (name);
812
813           /* If we're using one_only, then there needs to be a .gnu.linkonce
814              prefix to the section name. */
815           linkonce = one_only ? ".gnu.linkonce" : "";
816
817           string = ACONCAT ((linkonce, prefix, ".", name, NULL));
818
819           set_decl_section_name (decl, string);
820           return;
821 }
822 }
823 default_unique_section (decl, reloc);
824}
825
826 #ifdef COMMON_ASM_OP
827
828 #ifndef LARGECOMM_SECTION_ASM_OP
829 #define LARGECOMM_SECTION_ASM_OP "\t.largecomm\t"
830#endif
831
832/* This says how to output assembler code to declare an
833 uninitialized external linkage data object.
834
835 For medium model x86-64 we need to use LARGECOMM_SECTION_ASM_OP opcode for
836 large objects. */
837void
838x86_elf_aligned_decl_common (FILE *file, tree decl,
839                              const char *name, unsigned HOST_WIDE_INT size,
840                              int align)
841 {
842   if ((ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_MEDIUM_PIC)
843       && size > (unsigned int)ix86_section_threshold)
844     {
845       switch_to_section (get_named_section (decl, ".lbss", 0));
846       fputs (LARGECOMM_SECTION_ASM_OP, file);
847     }
848   else
849     fputs (COMMON_ASM_OP, file);
850   assemble_name (file, name);
851   fprintf (file, "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
852            size, align / BITS_PER_UNIT);
853}
854#endif
855
856/* Utility function for targets to use in implementing
857 ASM_OUTPUT_ALIGNED_BSS. */
858
859void
860x86_output_aligned_bss (FILE *file, tree decl, const char *name,
861                         unsigned HOST_WIDE_INT size, int align)
862 {
863   if ((ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_MEDIUM_PIC)
864       && size > (unsigned int)ix86_section_threshold)
865     switch_to_section (get_named_section (decl, ".lbss", 0));
866   else
867     switch_to_section (bss_section);
868   ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
869 #ifdef ASM_DECLARE_OBJECT_NAME
870   last_assemble_variable_decl = decl;
871   ASM_DECLARE_OBJECT_NAME (file, name, decl);
872 #else
873   /* Standard thing is just output label for the object. */
874   ASM_OUTPUT_LABEL (file, name);
875 #endif /* ASM_DECLARE_OBJECT_NAME */
876   ASM_OUTPUT_SKIP (file, size ? size : 1);
877 }
878
879/* Decide whether we must probe the stack before any space allocation
880 on this target. It's essentially TARGET_STACK_PROBE except when
881 -fstack-check causes the stack to be already probed differently. */
882
883bool
884ix86_target_stack_probe (void)
885{
886 /* Do not probe the stack twice if static stack checking is enabled. */
887   if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
888 return false;
889
890   return TARGET_STACK_PROBE;
891}
892
893/* Decide whether we can make a sibling call to a function. DECL is the
894 declaration of the function being targeted by the call and EXP is the
895 CALL_EXPR representing the call. */
896
897static bool
898ix86_function_ok_for_sibcall (tree decl, tree exp)
899{
900 tree type, decl_or_type;
901 rtx a, b;
902 bool bind_global = decl && !targetm.binds_local_p (decl);
903
904 if (ix86_function_naked (current_function_decl))
905 return false;
906
907 /* Sibling call isn't OK if there are no caller-saved registers
908 since all registers must be preserved before return. */
909   if (cfun->machine->no_caller_saved_registers)
910 return false;
911
912 /* If we are generating position-independent code, we cannot sibcall
913 optimize direct calls to global functions, as the PLT requires
914 %ebx be live. (Darwin does not have a PLT.) */
915   if (!TARGET_MACHO
916       && !TARGET_64BIT
917       && flag_pic
918       && flag_plt
919       && bind_global)
920 return false;
921
922 /* If we need to align the outgoing stack, then sibcalling would
923 unalign the stack, which may break the called function. */
924 if (ix86_minimum_incoming_stack_boundary (true)
925       < PREFERRED_STACK_BOUNDARY)
926 return false;
927
928 if (decl)
929 {
930 decl_or_type = decl;
931       type = TREE_TYPE (decl);
932     }
933   else
934     {
935       /* We're looking at the CALL_EXPR, we need the type of the function. */
936       type = CALL_EXPR_FN (exp);                /* pointer expression */
937       type = TREE_TYPE (type);                  /* pointer type */
938       type = TREE_TYPE (type);                  /* function type */
939       decl_or_type = type;
940     }
941
942 /* If outgoing reg parm stack space changes, we cannot do sibcall. */
943   if ((OUTGOING_REG_PARM_STACK_SPACE (type)
944        != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl)))
945       || (REG_PARM_STACK_SPACE (decl_or_type)
946           != REG_PARM_STACK_SPACE (current_function_decl)))
947 {
948 maybe_complain_about_tail_call (exp,
949 "inconsistent size of stack space"
950 " allocated for arguments which are"
951 " passed in registers");
952 return false;
953 }
954
955 /* Check that the return value locations are the same. Like
956 if we are returning floats on the 80387 register stack, we cannot
957 make a sibcall from a function that doesn't return a float to a
958 function that does or, conversely, from a function that does return
959 a float to a function that doesn't; the necessary stack adjustment
960 would not be executed. This is also the place we notice
961 differences in the return value ABI. Note that it is ok for one
962 of the functions to have void return type as long as the return
963 value of the other is passed in a register. */
964   a = ix86_function_value (TREE_TYPE (exp), decl_or_type, false);
965   b = ix86_function_value (TREE_TYPE (DECL_RESULT (cfun->decl)),
966                            cfun->decl, false);
967   if (STACK_REG_P (a) || STACK_REG_P (b))
968     {
969       if (!rtx_equal_p (a, b))
970         return false;
971     }
972   else if (VOID_TYPE_P (TREE_TYPE (DECL_RESULT (cfun->decl))))
973     ;
974 else if (!rtx_equal_p (a, b))
975 return false;
976
977   if (TARGET_64BIT)
978     {
979       /* The SYSV ABI has more call-clobbered registers;
980          disallow sibcalls from MS to SYSV. */
981       if (cfun->machine->call_abi == MS_ABI
982 && ix86_function_type_abi (type) == SYSV_ABI)
983 return false;
984 }
985 else
986 {
987 /* If this call is indirect, we'll need to be able to use a
988 call-clobbered register for the address of the target function.
989 Make sure that all such registers are not used for passing
990 parameters. Note that DLLIMPORT functions and call to global
991 function via GOT slot are indirect. */
992 if (!decl
993           || (bind_global && flag_pic && !flag_plt)
994           || (TARGET_DLLIMPORT_DECL_ATTRIBUTES && DECL_DLLIMPORT_P (decl))
995           || flag_force_indirect_call)
996 {
997 /* Check if regparm >= 3 since arg_reg_available is set to
998 false if regparm == 0. If regparm is 1 or 2, there is
999 always a call-clobbered register available.
1000
1001 ??? The symbol indirect call doesn't need a call-clobbered
1002 register. But we don't know if this is a symbol indirect
1003 call or not here. */
1004 if (ix86_function_regparm (type, decl) >= 3
1005              && !cfun->machine->arg_reg_available)
1006 return false;
1007 }
1008 }
1009
1010 /* Otherwise okay. That also includes certain types of indirect calls. */
1011 return true;
1012}
1013
1014/* This function determines from TYPE the calling-convention. */
1015
1016unsigned int
1017ix86_get_callcvt (const_tree type)
1018{
1019 unsigned int ret = 0;
1020 bool is_stdarg;
1021 tree attrs;
1022
1023   if (TARGET_64BIT)
1024     return IX86_CALLCVT_CDECL;
1025
1026   attrs = TYPE_ATTRIBUTES (type);
1027   if (attrs != NULL_TREE)
1028     {
1029       if (lookup_attribute ("cdecl", attrs))
1030         ret |= IX86_CALLCVT_CDECL;
1031       else if (lookup_attribute ("stdcall", attrs))
1032         ret |= IX86_CALLCVT_STDCALL;
1033       else if (lookup_attribute ("fastcall", attrs))
1034         ret |= IX86_CALLCVT_FASTCALL;
1035       else if (lookup_attribute ("thiscall", attrs))
1036         ret |= IX86_CALLCVT_THISCALL;
1037
1038       /* Regparam isn't allowed for thiscall and fastcall. */
1039       if ((ret & (IX86_CALLCVT_THISCALL | IX86_CALLCVT_FASTCALL)) == 0)
1040         {
1041           if (lookup_attribute ("regparm", attrs))
1042             ret |= IX86_CALLCVT_REGPARM;
1043           if (lookup_attribute ("sseregparm", attrs))
1044             ret |= IX86_CALLCVT_SSEREGPARM;
1045         }
1046
1047       if (IX86_BASE_CALLCVT(ret) != 0)
1048         return ret;
1049     }
1050
1051   is_stdarg = stdarg_p (type);
1052   if (TARGET_RTD && !is_stdarg)
1053     return IX86_CALLCVT_STDCALL | ret;
1054
1055   if (ret != 0
1056       || is_stdarg
1057       || TREE_CODE (type) != METHOD_TYPE
1058       || ix86_function_type_abi (type) != MS_ABI)
1059     return IX86_CALLCVT_CDECL | ret;
1060
1061   return IX86_CALLCVT_THISCALL;
1062}
1063
1064/* Return 0 if the attributes for two types are incompatible, 1 if they
1065 are compatible, and 2 if they are nearly compatible (which causes a
1066 warning to be generated). */
1067
1068static int
1069ix86_comp_type_attributes (const_tree type1, const_tree type2)
1070{
1071 unsigned int ccvt1, ccvt2;
1072
1073   if (TREE_CODE (type1) != FUNCTION_TYPE
1074       && TREE_CODE (type1) != METHOD_TYPE)
1075 return 1;
1076
1077 ccvt1 = ix86_get_callcvt (type1);
1078 ccvt2 = ix86_get_callcvt (type2);
1079 if (ccvt1 != ccvt2)
1080 return 0;
1081   if (ix86_function_regparm (type1, NULL)
1082       != ix86_function_regparm (type2, NULL))
1083 return 0;
1084
1085 return 1;
1086}
1087
1088/* Return the regparm value for a function with the indicated TYPE and DECL.
1089 DECL may be NULL when calling function indirectly
1090 or considering a libcall. */
1091
1092static int
1093ix86_function_regparm (const_tree type, const_tree decl)
1094{
1095 tree attr;
1096 int regparm;
1097 unsigned int ccvt;
1098
1099   if (TARGET_64BIT)
1100     return (ix86_function_type_abi (type) == SYSV_ABI
1101             ? X86_64_REGPARM_MAX : X86_64_MS_REGPARM_MAX);
1102 ccvt = ix86_get_callcvt (type);
1103   regparm = ix86_regparm;
1104
1105   if ((ccvt & IX86_CALLCVT_REGPARM) != 0)
1106 {
1107       attr = lookup_attribute ("regparm", TYPE_ATTRIBUTES (type));
1108 if (attr)
1109 {
1110           regparm = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)));
1111 return regparm;
1112 }
1113 }
1114   else if ((ccvt & IX86_CALLCVT_FASTCALL) != 0)
1115 return 2;
1116   else if ((ccvt & IX86_CALLCVT_THISCALL) != 0)
1117 return 1;
1118
1119 /* Use register calling convention for local functions when possible. */
1120 if (decl
1121       && TREE_CODE (decl) == FUNCTION_DECL)
1122 {
1123 cgraph_node *target = cgraph_node::get (decl);
1124 if (target)
1125 target = target->function_symbol ();
1126
1127 /* Caller and callee must agree on the calling convention, so
1128 checking here just optimize means that with
1129 __attribute__((optimize (...))) caller could use regparm convention
1130 and callee not, or vice versa. Instead look at whether the callee
1131 is optimized or not. */
1132       if (target && opt_for_fn (target->decl, optimize)
1133           && !(profile_flag && !flag_fentry))
1134 {
1135 if (target->local && target->can_change_signature)
1136 {
1137 int local_regparm, globals = 0, regno;
1138
1139 /* Make sure no regparm register is taken by a
1140 fixed register variable. */
1141               for (local_regparm = 0; local_regparm < REGPARM_MAX;
1142 local_regparm++)
1143                 if (fixed_regs[local_regparm])
1144 break;
1145
1146 /* We don't want to use regparm(3) for nested functions as
1147 these use a static chain pointer in the third argument. */
1148               if (local_regparm == 3 && DECL_STATIC_CHAIN (target->decl))
1149 local_regparm = 2;
1150
1151 /* Save a register for the split stack. */
1152 if (flag_split_stackglobal_options.x_flag_split_stack)
1153 {
1154 if (local_regparm == 3)
1155 local_regparm = 2;
1156 else if (local_regparm == 2
1157 && DECL_STATIC_CHAIN (target->decl)((tree_check ((target->decl), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1157, __FUNCTION__, (FUNCTION_DECL)))->decl_with_vis.regdecl_flag
)
)
1158 local_regparm = 1;
1159 }
1160
1161	 /* Each fixed register usage increases register pressure,
1162	 so fewer registers should be used for argument passing.
1163	 This functionality can be overridden by an explicit
1164	 regparm value. */
1165 for (regno = AX_REG0; regno <= DI_REG5; regno++)
1166 if (fixed_regs(this_target_hard_regs->x_fixed_regs)[regno])
1167 globals++;
1168
1169 local_regparm
1170 = globals < local_regparm ? local_regparm - globals : 0;
1171
1172 if (local_regparm > regparm)
1173 regparm = local_regparm;
1174 }
1175 }
1176 }
1177
1178 return regparm;
1179}
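/* A minimal, illustrative sketch (not part of i386.c): how the regparm
   machinery above is exercised from user code on 32-bit x86.  With
   regparm(2), the first two integer arguments are expected in EAX and EDX
   instead of on the stack.  The function name is hypothetical. */

int __attribute__((regparm(2)))
example_add (int a, int b)      /* a arrives in %eax, b in %edx */
{
  return a + b;
}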
1180
1181/* Return 1 or 2 if we can pass up to SSE_REGPARM_MAX SFmode (1) and
1182	 DFmode (2) arguments in SSE registers for a function with the
1183	 indicated TYPE and DECL. DECL may be NULL when calling the function
1184	 indirectly or considering a libcall. Return -1 if any FP parameter
1185	 should be rejected with an error. This is used in situations where we
1186	 imply the SSE calling convention but the function is called from another
1187	 function with SSE disabled. Otherwise return 0. */
1188
1189static int
1190ix86_function_sseregparm (const_tree type, const_tree decl, bool warn)
1191{
1192 gcc_assert (!TARGET_64BIT)((void)(!(!((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1192, __FUNCTION__), 0 : 0))
;
1193
1194 /* Use SSE registers to pass SFmode and DFmode arguments if requested
1195 by the sseregparm attribute. */
1196 if (TARGET_SSEREGPARM((global_options.x_target_flags & (1U << 24)) != 0)
1197 || (type && lookup_attribute ("sseregparm", TYPE_ATTRIBUTES (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1197, __FUNCTION__))->type_common.attributes)
)))
1198 {
1199 if (!TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
)
1200 {
1201 if (warn)
1202 {
1203 if (decl)
1204 error ("calling %qD with attribute sseregparm without "
1205 "SSE/SSE2 enabled", decl);
1206 else
1207 error ("calling %qT with attribute sseregparm without "
1208 "SSE/SSE2 enabled", type);
1209 }
1210 return 0;
1211 }
1212
1213 return 2;
1214 }
1215
1216 if (!decl)
1217 return 0;
1218
1219 cgraph_node *target = cgraph_node::get (decl);
1220 if (target)
1221 target = target->function_symbol ();
1222
1223 /* For local functions, pass up to SSE_REGPARM_MAX SFmode
1224 (and DFmode for SSE2) arguments in SSE registers. */
1225 if (target
1226 /* TARGET_SSE_MATH */
1227 && (target_opts_for_fn (target->decl)->x_ix86_fpmath & FPMATH_SSE)
1228 && opt_for_fn (target->decl, optimize)(opts_for_fn (target->decl)->x_optimize)
1229 && !(profile_flagglobal_options.x_profile_flag && !flag_fentryglobal_options.x_flag_fentry))
1230 {
1231 if (target->local && target->can_change_signature)
1232 {
1233 /* Refuse to produce wrong code when local function with SSE enabled
1234 is called from SSE disabled function.
1235 FIXME: We need a way to detect these cases cross-ltrans partition
1236 and avoid using SSE calling conventions on local functions called
1237 from function with SSE disabled. For now at least delay the
1238 warning until we know we are going to produce wrong code.
1239 See PR66047 */
1240 if (!TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
&& warn)
1241 return -1;
1242 return TARGET_SSE2_P (target_opts_for_fn (target->decl)(((target_opts_for_fn (target->decl) ->x_ix86_isa_flags
) & (1UL << 51)) != 0)
1243 ->x_ix86_isa_flags)(((target_opts_for_fn (target->decl) ->x_ix86_isa_flags
) & (1UL << 51)) != 0)
? 2 : 1;
1244 }
1245 }
1246
1247 return 0;
1248}
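/* A minimal, illustrative sketch (not part of i386.c): the sseregparm
   attribute handled above requests that SFmode/DFmode arguments be passed in
   SSE registers on 32-bit targets; compiling without SSE enabled triggers
   the error emitted above.  The function name is hypothetical. */

double __attribute__((sseregparm))
example_scale (double x, double y)   /* with SSE enabled, x and y arrive in XMM registers */
{
  return x * y;
}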
1249
1250/* Return true if EAX is live at the start of the function. Used by
1251 ix86_expand_prologue to determine if we need special help before
1252 calling allocate_stack_worker. */
1253
1254static bool
1255ix86_eax_live_at_start_p (void)
1256{
1257 /* Cheat. Don't bother working forward from ix86_function_regparm
1258 to the function type to whether an actual argument is located in
1259 eax. Instead just look at cfg info, which is still close enough
1260 to correct at this point. This gives false positives for broken
1261 functions that might use uninitialized data that happens to be
1262 allocated in eax, but who cares? */
1263 return REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)), 0)bitmap_bit_p (df_get_live_out ((((cfun + 0))->cfg->x_entry_block_ptr
)), 0)
;
1264}
1265
1266static bool
1267ix86_keep_aggregate_return_pointer (tree fntype)
1268{
1269 tree attr;
1270
1271 if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
1272 {
1273 attr = lookup_attribute ("callee_pop_aggregate_return",
1274 TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1274, __FUNCTION__))->type_common.attributes)
);
1275 if (attr)
1276 return (TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attr)))((unsigned long) (*tree_int_cst_elt_check ((((tree_check ((((
tree_check ((attr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1276, __FUNCTION__, (TREE_LIST)))->list.value)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1276, __FUNCTION__, (TREE_LIST)))->list.value)), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1276, __FUNCTION__)))
== 0);
1277
1278 /* For 32-bit MS-ABI the default is to keep aggregate
1279 return pointer. */
1280 if (ix86_function_type_abi (fntype) == MS_ABI)
1281 return true;
1282 }
1283 return KEEP_AGGREGATE_RETURN_POINTER0 != 0;
1284}
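/* A minimal, illustrative sketch (not part of i386.c): the 32-bit attribute
   checked above decides who pops the hidden aggregate-return pointer; an
   argument of 1 makes the callee pop it, 0 leaves it to the caller.  The
   struct and function names are hypothetical. */

struct example_big { int v[4]; };

struct example_big __attribute__((callee_pop_aggregate_return(0)))
example_make_big (void);   /* caller, not callee, pops the return-slot pointer */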
1285
1286/* Value is the number of bytes of arguments automatically
1287 popped when returning from a subroutine call.
1288 FUNDECL is the declaration node of the function (as a tree),
1289 FUNTYPE is the data type of the function (as a tree),
1290 or for a library call it is an identifier node for the subroutine name.
1291 SIZE is the number of bytes of arguments passed on the stack.
1292
1293 On the 80386, the RTD insn may be used to pop them if the number
1294 of args is fixed, but if the number is variable then the caller
1295 must pop them all. RTD can't be used for library calls now
1296 because the library is compiled with the Unix compiler.
1297 Use of RTD is a selectable option, since it is incompatible with
1298 standard Unix calling sequences. If the option is not selected,
1299 the caller must always pop the args.
1300
1301 The attribute stdcall is equivalent to RTD on a per module basis. */
1302
1303static poly_int64
1304ix86_return_pops_args (tree fundecl, tree funtype, poly_int64 size)
1305{
1306 unsigned int ccvt;
1307
1308 /* None of the 64-bit ABIs pop arguments. */
1309 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
1310 return 0;
1311
1312 ccvt = ix86_get_callcvt (funtype);
1313
1314 if ((ccvt & (IX86_CALLCVT_STDCALL0x2 | IX86_CALLCVT_FASTCALL0x4
1315 | IX86_CALLCVT_THISCALL0x8)) != 0
1316 && ! stdarg_p (funtype))
1317 return size;
1318
1319 /* Lose any fake structure return argument if it is passed on the stack. */
1320 if (aggregate_value_p (TREE_TYPE (funtype)((contains_struct_check ((funtype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1320, __FUNCTION__))->typed.type)
, fundecl)
1321 && !ix86_keep_aggregate_return_pointer (funtype))
1322 {
1323 int nregs = ix86_function_regparm (funtype, fundecl);
1324 if (nregs == 0)
1325 return GET_MODE_SIZE (Pmode)((unsigned short) mode_to_bytes ((global_options.x_ix86_pmode
== PMODE_DI ? (scalar_int_mode ((scalar_int_mode::from_int) E_DImode
)) : (scalar_int_mode ((scalar_int_mode::from_int) E_SImode))
)).coeffs[0])
;
1326 }
1327
1328 return 0;
1329}
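/* A minimal, illustrative sketch (not part of i386.c): with stdcall the
   callee pops its own fixed arguments (the "return size" path above), which
   is also why a varargs function cannot use it.  The function name is
   hypothetical. */

int __attribute__((stdcall))
example_sum3 (int a, int b, int c);   /* callee returns with "ret $12", popping 12 bytes */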
1330
1331/* Implement the TARGET_LEGITIMATE_COMBINED_INSN hook. */
1332
1333static bool
1334ix86_legitimate_combined_insn (rtx_insn *insn)
1335{
1336 int i;
1337
1338	 /* Check operand constraints in case hard registers were propagated
1339	 into the insn pattern. This check prevents the combine pass from
1340	 generating insn patterns with invalid hard register operands.
1341	 These invalid insns can eventually confuse reload into erroring out
1342	 with a spill failure. See also PRs 46829 and 46843. */
1343
1344 gcc_assert (INSN_CODE (insn) >= 0)((void)(!((((insn)->u.fld[5]).rt_int) >= 0) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1344, __FUNCTION__), 0 : 0))
;
1345
1346 extract_insn (insn);
1347 preprocess_constraints (insn);
1348
1349 int n_operands = recog_data.n_operands;
1350 int n_alternatives = recog_data.n_alternatives;
1351 for (i = 0; i < n_operands; i++)
1352 {
1353 rtx op = recog_data.operand[i];
1354 machine_mode mode = GET_MODE (op)((machine_mode) (op)->mode);
1355 const operand_alternative *op_alt;
1356 int offset = 0;
1357 bool win;
1358 int j;
1359
1360 /* A unary operator may be accepted by the predicate, but it
1361 is irrelevant for matching constraints. */
1362 if (UNARY_P (op)((rtx_class[(int) (((enum rtx_code) (op)->code))]) == RTX_UNARY
)
)
1363 op = XEXP (op, 0)(((op)->u.fld[0]).rt_rtx);
1364
1365 if (SUBREG_P (op)(((enum rtx_code) (op)->code) == SUBREG))
1366 {
1367 if (REG_P (SUBREG_REG (op))(((enum rtx_code) ((((op)->u.fld[0]).rt_rtx))->code) ==
REG)
1368 && REGNO (SUBREG_REG (op))(rhs_regno((((op)->u.fld[0]).rt_rtx))) < FIRST_PSEUDO_REGISTER76)
1369 offset = subreg_regno_offset (REGNO (SUBREG_REG (op))(rhs_regno((((op)->u.fld[0]).rt_rtx))),
1370 GET_MODE (SUBREG_REG (op))((machine_mode) ((((op)->u.fld[0]).rt_rtx))->mode),
1371 SUBREG_BYTE (op)(((op)->u.fld[1]).rt_subreg),
1372 GET_MODE (op)((machine_mode) (op)->mode));
1373 op = SUBREG_REG (op)(((op)->u.fld[0]).rt_rtx);
1374 }
1375
1376 if (!(REG_P (op)(((enum rtx_code) (op)->code) == REG) && HARD_REGISTER_P (op)((((rhs_regno(op))) < 76))))
1377 continue;
1378
1379 op_alt = recog_op_alt;
1380
1381 /* Operand has no constraints, anything is OK. */
1382 win = !n_alternatives;
1383
1384 alternative_mask preferred = get_preferred_alternatives (insn);
1385 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
1386 {
1387 if (!TEST_BIT (preferred, j)(((preferred) >> (j)) & 1))
1388 continue;
1389 if (op_alt[i].anything_ok
1390 || (op_alt[i].matches != -1
1391 && operands_match_p
1392 (recog_data.operand[i],
1393 recog_data.operand[op_alt[i].matches]))
1394 || reg_fits_class_p (op, op_alt[i].cl, offset, mode))
1395 {
1396 win = true;
1397 break;
1398 }
1399 }
1400
1401 if (!win)
1402 return false;
1403 }
1404
1405 return true;
1406}
1407
1408/* Implement the TARGET_ASAN_SHADOW_OFFSET hook. */
1409
1410static unsigned HOST_WIDE_INTlong
1411ix86_asan_shadow_offset (void)
1412{
1413 return SUBTARGET_SHADOW_OFFSET(((global_options.x_ix86_isa_flags & (1UL << 4)) !=
0) ? 0x7fff8000L : 1L << 29)
;
1414}
1415
1416/* Argument support functions. */
1417
1418/* Return true when register may be used to pass function parameters. */
1419bool
1420ix86_function_arg_regno_p (int regno)
1421{
1422 int i;
1423 enum calling_abi call_abi;
1424 const int *parm_regs;
1425
1426 if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
&& SSE_REGNO_P (regno)(((unsigned long) ((regno)) - (unsigned long) (20) <= (unsigned
long) (27) - (unsigned long) (20)) || ((unsigned long) ((regno
)) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned
long) (44)) || ((unsigned long) ((regno)) - (unsigned long) (
52) <= (unsigned long) (67) - (unsigned long) (52)))
1427 && regno < FIRST_SSE_REG20 + SSE_REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? ((((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) && ix86_cfun_abi () == MS_ABI) ? 4 : 8) : (
((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0) ? (0 ? 4 : 3) : 0))
)
1428 return true;
1429
1430 if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
1431 return (regno < REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? ((((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) && ix86_cfun_abi () == MS_ABI) ? 4 : 6) : 3
)
1432 || (TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) !=
0)
&& MMX_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (28) <= (unsigned
long) (35) - (unsigned long) (28))
1433 && regno < FIRST_MMX_REG28 + MMX_REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 0 : (((global_options.x_ix86_isa_flags & (1UL <<
36)) != 0) ? (0 ? 0 : 3) : 0))
));
1434
1435	 /* TODO: The function should depend on the current function ABI, but
1436	 builtins.c would need updating then. Therefore we use the
1437	 default ABI. */
1438 call_abi = ix86_cfun_abi ();
1439
1440 /* RAX is used as hidden argument to va_arg functions. */
1441 if (call_abi == SYSV_ABI && regno == AX_REG0)
1442 return true;
1443
1444 if (call_abi == MS_ABI)
1445 parm_regs = x86_64_ms_abi_int_parameter_registers;
1446 else
1447 parm_regs = x86_64_int_parameter_registers;
1448
1449 for (i = 0; i < (call_abi == MS_ABI
1450 ? X86_64_MS_REGPARM_MAX4 : X86_64_REGPARM_MAX6); i++)
1451 if (regno == parm_regs[i])
1452 return true;
1453 return false;
1454}
1455
1456/* Return true if we do not know how to pass ARG solely in registers. */
1457
1458static bool
1459ix86_must_pass_in_stack (const function_arg_info &arg)
1460{
1461 if (must_pass_in_stack_var_size_or_pad (arg))
1462 return true;
1463
1464 /* For 32-bit, we want TImode aggregates to go on the stack. But watch out!
1465 The layout_type routine is crafty and tries to trick us into passing
1466 currently unsupported vector types on the stack by using TImode. */
1467 return (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
&& arg.mode == TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode))
1468 && arg.type && TREE_CODE (arg.type)((enum tree_code) (arg.type)->base.code) != VECTOR_TYPE);
1469}
1470
1471/* Return the size, in bytes, of the area reserved for arguments passed
1472	 in registers for the function represented by FNDECL, depending on the
1473	 ABI format used. */
1474int
1475ix86_reg_parm_stack_space (const_tree fndecl)
1476{
1477 enum calling_abi call_abi = SYSV_ABI;
1478 if (fndecl != NULL_TREE(tree) __null && TREE_CODE (fndecl)((enum tree_code) (fndecl)->base.code) == FUNCTION_DECL)
1479 call_abi = ix86_function_abi (fndecl);
1480 else
1481 call_abi = ix86_function_type_abi (fndecl);
1482 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
&& call_abi == MS_ABI)
1483 return 32;
1484 return 0;
1485}
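/* A minimal, illustrative sketch (not part of i386.c): the 32 bytes returned
   above for 64-bit MS-ABI calls are the register-parameter "home area" the
   caller reserves on the stack, even when fewer than four arguments are
   passed.  The function name is hypothetical. */

int __attribute__((ms_abi))
example_ms_fn (int a, int b);   /* caller still reserves 4 * 8 = 32 bytes of home space */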
1486
1487/* We add this as a workaround in order to use the libc_has_function
1488	 hook in i386.md. */
1489bool
1490ix86_libc_has_function (enum function_class fn_class)
1491{
1492 return targetm.libc_has_function (fn_class, NULL_TREE(tree) __null);
1493}
1494
1495/* Return SYSV_ABI or MS_ABI, depending on FNTYPE,
1496	 specifying the call ABI used. */
1497enum calling_abi
1498ix86_function_type_abi (const_tree fntype)
1499{
1500 enum calling_abi abi = ix86_abiglobal_options.x_ix86_abi;
1501
1502 if (fntype == NULL_TREE(tree) __null || TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1502, __FUNCTION__))->type_common.attributes)
== NULL_TREE(tree) __null)
1503 return abi;
1504
1505 if (abi == SYSV_ABI
1506 && lookup_attribute ("ms_abi", TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1506, __FUNCTION__))->type_common.attributes)
))
1507 {
1508 static int warned;
1509 if (TARGET_X32((global_options.x_ix86_isa_flags & (1UL << 58)) !=
0)
&& !warned)
1510 {
1511 error ("X32 does not support %<ms_abi%> attribute");
1512 warned = 1;
1513 }
1514
1515 abi = MS_ABI;
1516 }
1517 else if (abi == MS_ABI
1518 && lookup_attribute ("sysv_abi", TYPE_ATTRIBUTES (fntype)((tree_class_check ((fntype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1518, __FUNCTION__))->type_common.attributes)
))
1519 abi = SYSV_ABI;
1520
1521 return abi;
1522}
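/* A minimal, illustrative sketch (not part of i386.c): the per-function ABI
   override resolved above.  With a SYSV default, ms_abi switches a single
   function to the Microsoft x86-64 convention, and sysv_abi does the reverse
   on an MS default.  Function names are hypothetical. */

long __attribute__((ms_abi))   example_win_call (long a, long b);   /* a in %rcx, b in %rdx */
long __attribute__((sysv_abi)) example_sysv_call (long a, long b);  /* a in %rdi, b in %rsi */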
1523
1524enum calling_abi
1525ix86_function_abi (const_tree fndecl)
1526{
1527 return fndecl ? ix86_function_type_abi (TREE_TYPE (fndecl)((contains_struct_check ((fndecl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1527, __FUNCTION__))->typed.type)
) : ix86_abiglobal_options.x_ix86_abi;
1528}
1529
1530/* Return SYSV_ABI or MS_ABI, depending on cfun,
1531	 specifying the call ABI used. */
1532enum calling_abi
1533ix86_cfun_abi (void)
1534{
1535 return cfun(cfun + 0) ? cfun(cfun + 0)->machine->call_abi : ix86_abiglobal_options.x_ix86_abi;
1536}
1537
1538bool
1539ix86_function_ms_hook_prologue (const_tree fn)
1540{
1541 if (fn && lookup_attribute ("ms_hook_prologue", DECL_ATTRIBUTES (fn)((contains_struct_check ((fn), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1541, __FUNCTION__))->decl_common.attributes)
))
1542 {
1543 if (decl_function_context (fn) != NULL_TREE(tree) __null)
1544 error_at (DECL_SOURCE_LOCATION (fn)((contains_struct_check ((fn), (TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1544, __FUNCTION__))->decl_minimal.locus)
,
1545 "%<ms_hook_prologue%> attribute is not compatible "
1546 "with nested function");
1547 else
1548 return true;
1549 }
1550 return false;
1551}
1552
1553bool
1554ix86_function_naked (const_tree fn)
1555{
1556 if (fn && lookup_attribute ("naked", DECL_ATTRIBUTES (fn)((contains_struct_check ((fn), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1556, __FUNCTION__))->decl_common.attributes)
))
1557 return true;
1558
1559 return false;
1560}
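/* A minimal, illustrative sketch (not part of i386.c): a function carrying
   the "naked" attribute detected above gets no compiler-generated prologue
   or epilogue, so its body is normally limited to basic asm.  The function
   name is hypothetical. */

void __attribute__((naked))
example_raw_entry (void)
{
  __asm__ ("ret");   /* the function manages the stack and return by itself */
}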
1561
1562/* Write the extra assembler code needed to declare a function properly. */
1563
1564void
1565ix86_asm_output_function_label (FILE *asm_out_file, const char *fname,
1566 tree decl)
1567{
1568 bool is_ms_hook = ix86_function_ms_hook_prologue (decl);
1569
1570 if (cfun(cfun + 0))
1571 cfun(cfun + 0)->machine->function_label_emitted = true;
1572
1573 if (is_ms_hook)
1574 {
1575 int i, filler_count = (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
? 32 : 16);
1576 unsigned int filler_cc = 0xcccccccc;
1577
1578 for (i = 0; i < filler_count; i += 4)
1579 fprintf (asm_out_file, ASM_LONG"\t.long\t" " %#x\n", filler_cc);
1580 }
1581
1582#ifdef SUBTARGET_ASM_UNWIND_INIT
1583 SUBTARGET_ASM_UNWIND_INIT (asm_out_file);
1584#endif
1585
1586 ASM_OUTPUT_LABEL (asm_out_file, fname)do { assemble_name ((asm_out_file), (fname)); fputs (":\n", (
asm_out_file)); } while (0)
;
1587
1588 /* Output magic byte marker, if hot-patch attribute is set. */
1589 if (is_ms_hook)
1590 {
1591 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
1592 {
1593 /* leaq [%rsp + 0], %rsp */
1594 fputs (ASM_BYTE"\t.byte\t" "0x48, 0x8d, 0xa4, 0x24, 0x00, 0x00, 0x00, 0x00\n",
1595 asm_out_file);
1596 }
1597 else
1598 {
1599 /* movl.s %edi, %edi
1600 push %ebp
1601 movl.s %esp, %ebp */
1602 fputs (ASM_BYTE"\t.byte\t" "0x8b, 0xff, 0x55, 0x8b, 0xec\n", asm_out_file);
1603 }
1604 }
1605}
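/* A minimal, illustrative sketch (not part of i386.c): a function marked for
   hot patching as handled above.  GCC emits the 0xCC filler bytes before the
   label and the mov %edi,%edi / push %ebp / mov %esp,%ebp marker (or the
   64-bit lea form) right after it.  The function name is hypothetical. */

void __attribute__((ms_hook_prologue))
example_patchable_entry (void);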
1606
1607/* Implementation of the call ABI switching target hook. The call
1608	 register sets specific to FNDECL are selected. See also
1609	 ix86_conditional_register_usage for more details. */
1610void
1611ix86_call_abi_override (const_tree fndecl)
1612{
1613 cfun(cfun + 0)->machine->call_abi = ix86_function_abi (fndecl);
1614}
1615
1616/* Return true if a pseudo register should be created and used to hold
1617	 the GOT address for PIC code. */
1618bool
1619ix86_use_pseudo_pic_reg (void)
1620{
1621 if ((TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
1622 && (ix86_cmodelglobal_options.x_ix86_cmodel == CM_SMALL_PIC
1623 || TARGET_PECOFF0))
1624 || !flag_picglobal_options.x_flag_pic)
1625 return false;
1626 return true;
1627}
1628
1629/* Initialize large model PIC register. */
1630
1631static void
1632ix86_init_large_pic_reg (unsigned int tmp_regno)
1633{
1634 rtx_code_label *label;
1635 rtx tmp_reg;
1636
1637 gcc_assert (Pmode == DImode)((void)(!((global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode
((scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode (
(scalar_int_mode::from_int) E_SImode))) == (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1637, __FUNCTION__), 0 : 0))
;
1638 label = gen_label_rtx ();
1639 emit_label (label);
1640 LABEL_PRESERVE_P (label)(__extension__ ({ __typeof ((label)) const _rtx = ((label)); if
(((enum rtx_code) (_rtx)->code) != CODE_LABEL && (
(enum rtx_code) (_rtx)->code) != NOTE) rtl_check_failed_flag
("LABEL_PRESERVE_P",_rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1640, __FUNCTION__); _rtx; })->in_struct)
= 1;
1641 tmp_reg = gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, tmp_regno);
1642 gcc_assert (REGNO (pic_offset_table_rtx) != tmp_regno)((void)(!((rhs_regno((this_target_rtl->x_pic_offset_table_rtx
))) != tmp_regno) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1642, __FUNCTION__), 0 : 0))
;
1643 emit_insn (gen_set_rip_rex64 (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx),
1644 label));
1645 emit_insn (gen_set_got_offset_rex64 (tmp_reg, label));
1646 emit_insn (gen_add2_insn (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx), tmp_reg));
1647 const char *name = LABEL_NAME (label)(((label)->u.fld[6]).rt_str);
1648 PUT_CODE (label, NOTE)((label)->code = (NOTE));
1649 NOTE_KIND (label)(((label)->u.fld[4]).rt_int) = NOTE_INSN_DELETED_LABEL;
1650 NOTE_DELETED_LABEL_NAME (label)(((label)->u.fld[3]).rt_str) = name;
1651}
1652
1653/* Create and initialize PIC register if required. */
1654static void
1655ix86_init_pic_reg (void)
1656{
1657 edge entry_edge;
1658 rtx_insn *seq;
1659
1660 if (!ix86_use_pseudo_pic_reg ())
1661 return;
1662
1663 start_sequence ();
1664
1665 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
1666 {
1667 if (ix86_cmodelglobal_options.x_ix86_cmodel == CM_LARGE_PIC)
1668 ix86_init_large_pic_reg (R11_REG39);
1669 else
1670 emit_insn (gen_set_got_rex64 (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx)));
1671 }
1672 else
1673 {
1674	 /* If there is a future mcount call in the function, it is more profitable
1675	 to emit SET_GOT into the ABI-defined REAL_PIC_OFFSET_TABLE_REGNUM. */
1676 rtx reg = crtl(&x_rtl)->profile
1677 ? gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, REAL_PIC_OFFSET_TABLE_REGNUM(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 43 : 3)
)
1678 : pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx);
1679 rtx_insn *insn = emit_insn (gen_set_got (reg));
1680 RTX_FRAME_RELATED_P (insn)(__extension__ ({ __typeof ((insn)) const _rtx = ((insn)); if
(((enum rtx_code) (_rtx)->code) != DEBUG_INSN && (
(enum rtx_code) (_rtx)->code) != INSN && ((enum rtx_code
) (_rtx)->code) != CALL_INSN && ((enum rtx_code) (
_rtx)->code) != JUMP_INSN && ((enum rtx_code) (_rtx
)->code) != BARRIER && ((enum rtx_code) (_rtx)->
code) != SET) rtl_check_failed_flag ("RTX_FRAME_RELATED_P",_rtx
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1680, __FUNCTION__); _rtx; })->frame_related)
= 1;
1681 if (crtl(&x_rtl)->profile)
1682 emit_move_insn (pic_offset_table_rtx(this_target_rtl->x_pic_offset_table_rtx), reg);
1683 add_reg_note (insn, REG_CFA_FLUSH_QUEUE, NULL_RTX(rtx) 0);
1684 }
1685
1686 seq = get_insns ();
1687 end_sequence ();
1688
1689 entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)(((cfun + 0))->cfg->x_entry_block_ptr));
1690 insert_insn_on_edge (seq, entry_edge);
1691 commit_one_edge_insertion (entry_edge);
1692}
1693
1694/* Initialize a variable CUM of type CUMULATIVE_ARGS
1695 for a call to a function whose data type is FNTYPE.
1696 For a library call, FNTYPE is 0. */
1697
1698void
1699init_cumulative_args (CUMULATIVE_ARGS *cum, /* Argument info to initialize */
1700 tree fntype, /* tree ptr for function decl */
1701 rtx libname, /* SYMBOL_REF of library name or 0 */
1702 tree fndecl,
1703 int caller)
1704{
1705 struct cgraph_node *local_info_node = NULL__null;
1706 struct cgraph_node *target = NULL__null;
1707
1708 memset (cum, 0, sizeof (*cum));
1709
1710 if (fndecl)
1711 {
1712 target = cgraph_node::get (fndecl);
1713 if (target)
1714 {
1715 target = target->function_symbol ();
1716 local_info_node = cgraph_node::local_info_node (target->decl);
1717 cum->call_abi = ix86_function_abi (target->decl);
1718 }
1719 else
1720 cum->call_abi = ix86_function_abi (fndecl);
1721 }
1722 else
1723 cum->call_abi = ix86_function_type_abi (fntype);
1724
1725 cum->caller = caller;
1726
1727 /* Set up the number of registers to use for passing arguments. */
1728 cum->nregs = ix86_regparmglobal_options.x_ix86_regparm;
1729 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
1730 {
1731 cum->nregs = (cum->call_abi == SYSV_ABI
1732 ? X86_64_REGPARM_MAX6
1733 : X86_64_MS_REGPARM_MAX4);
1734 }
1735 if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
)
1736 {
1737 cum->sse_nregs = SSE_REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? ((((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) && ix86_cfun_abi () == MS_ABI) ? 4 : 8) : (
((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0) ? (0 ? 4 : 3) : 0))
;
1738 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
1739 {
1740 cum->sse_nregs = (cum->call_abi == SYSV_ABI
1741 ? X86_64_SSE_REGPARM_MAX8
1742 : X86_64_MS_SSE_REGPARM_MAX4);
1743 }
1744 }
1745 if (TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) !=
0)
)
1746 cum->mmx_nregs = MMX_REGPARM_MAX(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 0 : (((global_options.x_ix86_isa_flags & (1UL <<
36)) != 0) ? (0 ? 0 : 3) : 0))
;
1747 cum->warn_avx512f = true;
1748 cum->warn_avx = true;
1749 cum->warn_sse = true;
1750 cum->warn_mmx = true;
1751
1752	 /* Because the type might mismatch between caller and callee, we need to
1753	 use the actual type of the function for local calls.
1754	 FIXME: cgraph_analyze can be told to actually record whether a function
1755	 uses va_start, so for local functions maybe_vaarg can be made more
1756	 aggressive, helping K&R code.
1757	 FIXME: once the type system is fixed, we won't need this code anymore. */
1758 if (local_info_node && local_info_node->local
1759 && local_info_node->can_change_signature)
1760 fntype = TREE_TYPE (target->decl)((contains_struct_check ((target->decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1760, __FUNCTION__))->typed.type)
;
1761 cum->stdarg = stdarg_p (fntype);
1762 cum->maybe_vaarg = (fntype
1763 ? (!prototype_p (fntype) || stdarg_p (fntype))
1764 : !libname);
1765
1766 cum->decl = fndecl;
1767
1768 cum->warn_empty = !warn_abiglobal_options.x_warn_abi || cum->stdarg;
1769 if (!cum->warn_empty && fntype)
1770 {
1771 function_args_iterator iter;
1772 tree argtype;
1773 bool seen_empty_type = false;
1774 FOREACH_FUNCTION_ARGS (fntype, argtype, iter)for (function_args_iter_init (&(iter), (fntype)); (argtype
= function_args_iter_cond (&(iter))) != (tree) __null; function_args_iter_next
(&(iter)))
1775 {
1776 if (argtype == error_mark_nodeglobal_trees[TI_ERROR_MARK] || VOID_TYPE_P (argtype)(((enum tree_code) (argtype)->base.code) == VOID_TYPE))
1777 break;
1778 if (TYPE_EMPTY_P (argtype)((tree_class_check ((argtype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1778, __FUNCTION__))->type_common.empty_flag)
)
1779 seen_empty_type = true;
1780 else if (seen_empty_type)
1781 {
1782 cum->warn_empty = true;
1783 break;
1784 }
1785 }
1786 }
1787
1788 if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
1789 {
1790 /* If there are variable arguments, then we won't pass anything
1791 in registers in 32-bit mode. */
1792 if (stdarg_p (fntype))
1793 {
1794 cum->nregs = 0;
1795	 /* Since in 32-bit mode variable arguments are always passed on
1796	 the stack, there is a scratch register available for an indirect
1797	 sibcall. */
1798 cfun(cfun + 0)->machine->arg_reg_available = true;
1799 cum->sse_nregs = 0;
1800 cum->mmx_nregs = 0;
1801 cum->warn_avx512f = false;
1802 cum->warn_avx = false;
1803 cum->warn_sse = false;
1804 cum->warn_mmx = false;
1805 return;
1806 }
1807
1808 /* Use ecx and edx registers if function has fastcall attribute,
1809 else look for regparm information. */
1810 if (fntype)
1811 {
1812 unsigned int ccvt = ix86_get_callcvt (fntype);
1813 if ((ccvt & IX86_CALLCVT_THISCALL0x8) != 0)
1814 {
1815 cum->nregs = 1;
1816 cum->fastcall = 1; /* Same first register as in fastcall. */
1817 }
1818 else if ((ccvt & IX86_CALLCVT_FASTCALL0x4) != 0)
1819 {
1820 cum->nregs = 2;
1821 cum->fastcall = 1;
1822 }
1823 else
1824 cum->nregs = ix86_function_regparm (fntype, fndecl);
1825 }
1826
1827 /* Set up the number of SSE registers used for passing SFmode
1828 and DFmode arguments. Warn for mismatching ABI. */
1829 cum->float_in_sse = ix86_function_sseregparm (fntype, fndecl, true);
1830 }
1831
1832 cfun(cfun + 0)->machine->arg_reg_available = (cum->nregs > 0);
1833}
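/* A minimal, illustrative sketch (not part of i386.c): the 32-bit fastcall
   path above sets nregs to 2, so the first two integer arguments travel in
   ECX and EDX; thiscall uses only ECX.  The function name is hypothetical. */

int __attribute__((fastcall))
example_fast_add (int a, int b);   /* a in %ecx, b in %edx, further args on the stack */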
1834
1835/* Return the "natural" mode for TYPE. In most cases, this is just TYPE_MODE.
1836 But in the case of vector types, it is some vector mode.
1837
1838 When we have only some of our vector isa extensions enabled, then there
1839 are some modes for which vector_mode_supported_p is false. For these
1840 modes, the generic vector support in gcc will choose some non-vector mode
1841 in order to implement the type. By computing the natural mode, we'll
1842 select the proper ABI location for the operand and not depend on whatever
1843 the middle-end decides to do with these vector types.
1844
1845	 The middle-end can't deal with vector types > 16 bytes. In that
1846	 case, we return the original mode and warn about the ABI change if CUM
1847	 isn't NULL.
1848
1849	 If IN_RETURN is true, warn about the ABI change if the vector mode isn't
1850	 available for the function return value. */
1851
1852static machine_mode
1853type_natural_mode (const_tree type, const CUMULATIVE_ARGS *cum,
1854 bool in_return)
1855{
1856 machine_mode mode = TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1856, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
;
1857
1858 if (TREE_CODE (type)((enum tree_code) (type)->base.code) == VECTOR_TYPE && !VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
)
1859 {
1860 HOST_WIDE_INTlong size = int_size_in_bytes (type);
1861 if ((size == 8 || size == 16 || size == 32 || size == 64)
1862 /* ??? Generic code allows us to create width 1 vectors. Ignore. */
1863 && TYPE_VECTOR_SUBPARTS (type) > 1)
1864 {
1865 machine_mode innermode = TYPE_MODE (TREE_TYPE (type))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1865, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1865, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1865, __FUNCTION__))->typed.type)) : (((contains_struct_check
((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1865, __FUNCTION__))->typed.type))->type_common.mode)
;
1866
1867 /* There are no XFmode vector modes. */
1868 if (innermode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)))
1869 return mode;
1870
1871 if (TREE_CODE (TREE_TYPE (type))((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1871, __FUNCTION__))->typed.type))->base.code)
== REAL_TYPE)
1872 mode = MIN_MODE_VECTOR_FLOAT;
1873 else
1874 mode = MIN_MODE_VECTOR_INT;
1875
1876 /* Get the mode which has this inner mode and number of units. */
1877 FOR_EACH_MODE_FROM (mode, mode)for ((mode) = (mode); mode_iterator::iterate_p (&(mode));
mode_iterator::get_wider (&(mode)))
1878 if (GET_MODE_NUNITS (mode)(mode_to_nunits (mode).coeffs[0]) == TYPE_VECTOR_SUBPARTS (type)
1879 && GET_MODE_INNER (mode)(mode_to_inner (mode)) == innermode)
1880 {
1881 if (size == 64 && !TARGET_AVX512F((global_options.x_ix86_isa_flags & (1UL << 15)) !=
0)
&& !TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
1882 {
1883 static bool warnedavx512f;
1884 static bool warnedavx512f_ret;
1885
1886 if (cum && cum->warn_avx512f && !warnedavx512f)
1887 {
1888 if (warning (OPT_Wpsabi, "AVX512F vector argument "
1889 "without AVX512F enabled changes the ABI"))
1890 warnedavx512f = true;
1891 }
1892 else if (in_return && !warnedavx512f_ret)
1893 {
1894 if (warning (OPT_Wpsabi, "AVX512F vector return "
1895 "without AVX512F enabled changes the ABI"))
1896 warnedavx512f_ret = true;
1897 }
1898
1899 return TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1899, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
;
1900 }
1901 else if (size == 32 && !TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0
)
&& !TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
1902 {
1903 static bool warnedavx;
1904 static bool warnedavx_ret;
1905
1906 if (cum && cum->warn_avx && !warnedavx)
1907 {
1908 if (warning (OPT_Wpsabi, "AVX vector argument "
1909 "without AVX enabled changes the ABI"))
1910 warnedavx = true;
1911 }
1912 else if (in_return && !warnedavx_ret)
1913 {
1914 if (warning (OPT_Wpsabi, "AVX vector return "
1915 "without AVX enabled changes the ABI"))
1916 warnedavx_ret = true;
1917 }
1918
1919 return TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1919, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
;
1920 }
1921 else if (((size == 8 && TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
) || size == 16)
1922 && !TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
1923 && !TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
1924 {
1925 static bool warnedsse;
1926 static bool warnedsse_ret;
1927
1928 if (cum && cum->warn_sse && !warnedsse)
1929 {
1930 if (warning (OPT_Wpsabi, "SSE vector argument "
1931 "without SSE enabled changes the ABI"))
1932 warnedsse = true;
1933 }
1934 else if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
&& in_return && !warnedsse_ret)
1935 {
1936 if (warning (OPT_Wpsabi, "SSE vector return "
1937 "without SSE enabled changes the ABI"))
1938 warnedsse_ret = true;
1939 }
1940 }
1941 else if ((size == 8 && !TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
1942 && (!cfun(cfun + 0)
1943 || cfun(cfun + 0)->machine->func_type == TYPE_NORMAL)
1944 && !TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) !=
0)
1945 && !TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
1946 {
1947 static bool warnedmmx;
1948 static bool warnedmmx_ret;
1949
1950 if (cum && cum->warn_mmx && !warnedmmx)
1951 {
1952 if (warning (OPT_Wpsabi, "MMX vector argument "
1953 "without MMX enabled changes the ABI"))
1954 warnedmmx = true;
1955 }
1956 else if (in_return && !warnedmmx_ret)
1957 {
1958 if (warning (OPT_Wpsabi, "MMX vector return "
1959 "without MMX enabled changes the ABI"))
1960 warnedmmx_ret = true;
1961 }
1962 }
1963 return mode;
1964 }
1965
1966 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 1966, __FUNCTION__))
;
1967 }
1968 }
1969
1970 return mode;
1971}
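/* A minimal, illustrative sketch (not part of i386.c): the kind of type that
   trips the -Wpsabi notes above.  A 32-byte vector passed by value while AVX
   is disabled keeps TYPE_MODE and the ABI-change warning is emitted.  Type
   and function names are hypothetical. */

typedef int example_v8si __attribute__((vector_size (32)));

example_v8si example_add_v8si (example_v8si a, example_v8si b);   /* warns without AVX enabled */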
1972
1973/* We want to pass a value in REGNO whose "natural" mode is MODE. However,
1974 this may not agree with the mode that the type system has chosen for the
1975 register, which is ORIG_MODE. If ORIG_MODE is not BLKmode, then we can
1976 go ahead and use it. Otherwise we have to build a PARALLEL instead. */
1977
1978static rtx
1979gen_reg_or_parallel (machine_mode mode, machine_mode orig_mode,
1980 unsigned int regno)
1981{
1982 rtx tmp;
1983
1984 if (orig_mode != BLKmode((void) 0, E_BLKmode))
1985 tmp = gen_rtx_REG (orig_mode, regno);
1986 else
1987 {
1988 tmp = gen_rtx_REG (mode, regno);
1989 tmp = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode), tmp, const0_rtx(const_int_rtx[64]));
1990 tmp = gen_rtx_PARALLEL (orig_mode, gen_rtvec (1, tmp))gen_rtx_fmt_E_stat ((PARALLEL), ((orig_mode)), ((gen_rtvec (1
, tmp))) )
;
1991 }
1992
1993 return tmp;
1994}
1995
1996/* x86-64 register passing implementation. See the x86-64 ABI for details.
1997	 The goal of this code is to classify each eightbyte of an incoming argument
1998	 by register class and assign registers accordingly. */
1999
2000/* Return the union class of CLASS1 and CLASS2.
2001 See the x86-64 PS ABI for details. */
2002
2003static enum x86_64_reg_class
2004merge_classes (enum x86_64_reg_class class1, enum x86_64_reg_class class2)
2005{
2006 /* Rule #1: If both classes are equal, this is the resulting class. */
2007 if (class1 == class2)
2008 return class1;
2009
2010 /* Rule #2: If one of the classes is NO_CLASS, the resulting class is
2011 the other class. */
2012 if (class1 == X86_64_NO_CLASS)
2013 return class2;
2014 if (class2 == X86_64_NO_CLASS)
2015 return class1;
2016
2017 /* Rule #3: If one of the classes is MEMORY, the result is MEMORY. */
2018 if (class1 == X86_64_MEMORY_CLASS || class2 == X86_64_MEMORY_CLASS)
2019 return X86_64_MEMORY_CLASS;
2020
2021 /* Rule #4: If one of the classes is INTEGER, the result is INTEGER. */
2022 if ((class1 == X86_64_INTEGERSI_CLASS && class2 == X86_64_SSESF_CLASS)
2023 || (class2 == X86_64_INTEGERSI_CLASS && class1 == X86_64_SSESF_CLASS))
2024 return X86_64_INTEGERSI_CLASS;
2025 if (class1 == X86_64_INTEGER_CLASS || class1 == X86_64_INTEGERSI_CLASS
2026 || class2 == X86_64_INTEGER_CLASS || class2 == X86_64_INTEGERSI_CLASS)
2027 return X86_64_INTEGER_CLASS;
2028
2029 /* Rule #5: If one of the classes is X87, X87UP, or COMPLEX_X87 class,
2030 MEMORY is used. */
2031 if (class1 == X86_64_X87_CLASS
2032 || class1 == X86_64_X87UP_CLASS
2033 || class1 == X86_64_COMPLEX_X87_CLASS
2034 || class2 == X86_64_X87_CLASS
2035 || class2 == X86_64_X87UP_CLASS
2036 || class2 == X86_64_COMPLEX_X87_CLASS)
2037 return X86_64_MEMORY_CLASS;
2038
2039 /* Rule #6: Otherwise class SSE is used. */
2040 return X86_64_SSE_CLASS;
2041}
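/* A minimal, illustrative worked example (not part of i386.c) of the merge
   rules above.  Both fields of the struct share the first eightbyte: the int
   classifies as INTEGERSI, the float as SSESF, and rule #4 merges them into
   an integer class, so the whole struct is passed in one integer register.
   The struct name is hypothetical. */

struct example_mixed { int i; float f; };   /* 8 bytes: passed in %rdi as a first argument */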
2042
2043/* Classify the argument of type TYPE and mode MODE.
2044 CLASSES will be filled by the register class used to pass each word
2045 of the operand. The number of words is returned. In case the parameter
2046 should be passed in memory, 0 is returned. As a special case for zero
2047 sized containers, classes[0] will be NO_CLASS and 1 is returned.
2048
2049	 BIT_OFFSET is used internally for handling records and specifies the
2050	 offset in bits modulo 512 to avoid overflow cases.
2051
2052 See the x86-64 PS ABI for details.
2053*/
2054
2055static int
2056classify_argument (machine_mode mode, const_tree type,
2057 enum x86_64_reg_class classes[MAX_CLASSES8], int bit_offset)
2058{
2059 HOST_WIDE_INTlong bytes
2060 = mode == BLKmode((void) 0, E_BLKmode) ? int_size_in_bytes (type) : (int) GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]);
2061 int words = CEIL (bytes + (bit_offset % 64) / 8, UNITS_PER_WORD)(((bytes + (bit_offset % 64) / 8) + ((((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4)) - 1) / ((((global_options
.x_ix86_isa_flags & (1UL << 1)) != 0) ? 8 : 4)))
;
2062
2063 /* Variable sized entities are always passed/returned in memory. */
2064 if (bytes < 0)
2065 return 0;
2066
2067 if (mode != VOIDmode((void) 0, E_VOIDmode))
2068 {
2069 /* The value of "named" doesn't matter. */
2070 function_arg_info arg (const_cast<tree> (type), mode, /*named=*/true);
2071 if (targetm.calls.must_pass_in_stack (arg))
2072 return 0;
2073 }
2074
2075 if (type && AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
)
2076 {
2077 int i;
2078 tree field;
2079 enum x86_64_reg_class subclasses[MAX_CLASSES8];
2080
2081 /* On x86-64 we pass structures larger than 64 bytes on the stack. */
2082 if (bytes > 64)
2083 return 0;
2084
2085 for (i = 0; i < words; i++)
2086 classes[i] = X86_64_NO_CLASS;
2087
2088	 /* Zero-sized arrays or structures are NO_CLASS. We return 0 to
2089	 signal the memory class, so handle this as a special case. */
2090 if (!words)
2091 {
2092 classes[0] = X86_64_NO_CLASS;
2093 return 1;
2094 }
2095
2096 /* Classify each field of record and merge classes. */
2097 switch (TREE_CODE (type)((enum tree_code) (type)->base.code))
2098 {
2099 case RECORD_TYPE:
2100 /* And now merge the fields of structure. */
2101 for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2101, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
; field; field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2101, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2101, __FUNCTION__))->common.chain))
)
2102 {
2103 if (TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL)
2104 {
2105 int num;
2106
2107 if (TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2107, __FUNCTION__))->typed.type)
== error_mark_nodeglobal_trees[TI_ERROR_MARK])
2108 continue;
2109
2110 /* Bitfields are always classified as integer. Handle them
2111 early, since later code would consider them to be
2112 misaligned integers. */
2113 if (DECL_BIT_FIELD (field)((tree_check ((field), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2113, __FUNCTION__, (FIELD_DECL)))->decl_common.decl_flag_1
)
)
2114 {
2115 for (i = (int_bit_position (field)
2116 + (bit_offset % 64)) / 8 / 8;
2117 i < ((int_bit_position (field) + (bit_offset % 64))
2118 + tree_to_shwi (DECL_SIZE (field)((contains_struct_check ((field), (TS_DECL_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2118, __FUNCTION__))->decl_common.size)
)
2119 + 63) / 8 / 8; i++)
2120 classes[i]
2121 = merge_classes (X86_64_INTEGER_CLASS, classes[i]);
2122 }
2123 else
2124 {
2125 int pos;
2126
2127 type = TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2127, __FUNCTION__))->typed.type)
;
2128
2129 /* Flexible array member is ignored. */
2130 if (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2130, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
== BLKmode((void) 0, E_BLKmode)
2131 && TREE_CODE (type)((enum tree_code) (type)->base.code) == ARRAY_TYPE
2132 && TYPE_SIZE (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2132, __FUNCTION__))->type_common.size)
== NULL_TREE(tree) __null
2133 && TYPE_DOMAIN (type)((tree_check ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2133, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values
)
!= NULL_TREE(tree) __null
2134 && (TYPE_MAX_VALUE (TYPE_DOMAIN (type))((tree_check5 ((((tree_check ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2134, __FUNCTION__, (ARRAY_TYPE)))->type_non_common.values
)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2134, __FUNCTION__, (INTEGER_TYPE), (ENUMERAL_TYPE), (BOOLEAN_TYPE
), (REAL_TYPE), (FIXED_POINT_TYPE)))->type_non_common.maxval
)
2135 == NULL_TREE(tree) __null))
2136 {
2137 static bool warned;
2138
2139 if (!warned && warn_psabiglobal_options.x_warn_psabi)
2140 {
2141 warned = true;
2142 inform (input_location,
2143 "the ABI of passing struct with"
2144 " a flexible array member has"
2145 " changed in GCC 4.4");
2146 }
2147 continue;
2148 }
2149 num = classify_argument (TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2149, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
, type,
2150 subclasses,
2151 (int_bit_position (field)
2152 + bit_offset) % 512);
2153 if (!num)
2154 return 0;
2155 pos = (int_bit_position (field)
2156 + (bit_offset % 64)) / 8 / 8;
2157 for (i = 0; i < num && (i + pos) < words; i++)
2158 classes[i + pos]
2159 = merge_classes (subclasses[i], classes[i + pos]);
2160 }
2161 }
2162 }
2163 break;
2164
2165 case ARRAY_TYPE:
2166 /* Arrays are handled as small records. */
2167 {
2168 int num;
2169 num = classify_argument (TYPE_MODE (TREE_TYPE (type))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2169, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2169, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2169, __FUNCTION__))->typed.type)) : (((contains_struct_check
((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2169, __FUNCTION__))->typed.type))->type_common.mode)
,
2170 TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2170, __FUNCTION__))->typed.type)
, subclasses, bit_offset);
2171 if (!num)
2172 return 0;
2173
2174 /* The partial classes are now full classes. */
2175 if (subclasses[0] == X86_64_SSESF_CLASS && bytes != 4)
2176 subclasses[0] = X86_64_SSE_CLASS;
2177 if (subclasses[0] == X86_64_INTEGERSI_CLASS
2178 && !((bit_offset % 64) == 0 && bytes == 4))
2179 subclasses[0] = X86_64_INTEGER_CLASS;
2180
2181 for (i = 0; i < words; i++)
2182 classes[i] = subclasses[i % num];
2183
2184 break;
2185 }
2186 case UNION_TYPE:
2187 case QUAL_UNION_TYPE:
2188 /* Unions are similar to RECORD_TYPE but offset is always 0.
2189 */
2190 for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2190, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
; field; field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2190, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2190, __FUNCTION__))->common.chain))
)
2191 {
2192 if (TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL)
2193 {
2194 int num;
2195
2196 if (TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2196, __FUNCTION__))->typed.type)
== error_mark_nodeglobal_trees[TI_ERROR_MARK])
2197 continue;
2198
2199 num = classify_argument (TYPE_MODE (TREE_TYPE (field))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((field), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2199, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2199, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((field), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2199, __FUNCTION__))->typed.type)) : (((contains_struct_check
((field), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2199, __FUNCTION__))->typed.type))->type_common.mode)
,
2200 TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2200, __FUNCTION__))->typed.type)
, subclasses,
2201 bit_offset);
2202 if (!num)
2203 return 0;
2204 for (i = 0; i < num && i < words; i++)
2205 classes[i] = merge_classes (subclasses[i], classes[i]);
2206 }
2207 }
2208 break;
2209
2210 default:
2211 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2211, __FUNCTION__))
;
2212 }
2213
2214 if (words > 2)
2215 {
2216	 /* When the size is > 16 bytes, if the first class isn't
2217	 X86_64_SSE_CLASS or any of the other classes isn't
2218	 X86_64_SSEUP_CLASS, everything should be passed in
2219	 memory. */
2220 if (classes[0] != X86_64_SSE_CLASS)
2221 return 0;
2222
2223 for (i = 1; i < words; i++)
2224 if (classes[i] != X86_64_SSEUP_CLASS)
2225 return 0;
2226 }
2227
2228 /* Final merger cleanup. */
2229 for (i = 0; i < words; i++)
2230 {
2231 /* If one class is MEMORY, everything should be passed in
2232 memory. */
2233 if (classes[i] == X86_64_MEMORY_CLASS)
2234 return 0;
2235
2236	 /* The X86_64_SSEUP_CLASS should always be preceded by
2237 X86_64_SSE_CLASS or X86_64_SSEUP_CLASS. */
2238 if (classes[i] == X86_64_SSEUP_CLASS
2239 && classes[i - 1] != X86_64_SSE_CLASS
2240 && classes[i - 1] != X86_64_SSEUP_CLASS)
2241 {
2242 /* The first one should never be X86_64_SSEUP_CLASS. */
2243 gcc_assert (i != 0)((void)(!(i != 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2243, __FUNCTION__), 0 : 0))
;
2244 classes[i] = X86_64_SSE_CLASS;
2245 }
2246
2247 /* If X86_64_X87UP_CLASS isn't preceded by X86_64_X87_CLASS,
2248 everything should be passed in memory. */
2249 if (classes[i] == X86_64_X87UP_CLASS
2250 && (classes[i - 1] != X86_64_X87_CLASS))
2251 {
2252 static bool warned;
2253
2254 /* The first one should never be X86_64_X87UP_CLASS. */
2255 gcc_assert (i != 0)((void)(!(i != 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2255, __FUNCTION__), 0 : 0))
;
2256 if (!warned && warn_psabiglobal_options.x_warn_psabi)
2257 {
2258 warned = true;
2259 inform (input_location,
2260 "the ABI of passing union with %<long double%>"
2261 " has changed in GCC 4.4");
2262 }
2263 return 0;
2264 }
2265 }
2266 return words;
2267 }
2268
2269	 /* Compute the alignment needed. We align all types to natural boundaries,
2270	 with the exception of XFmode, which is aligned to 64 bits. */
2271 if (mode != VOIDmode((void) 0, E_VOIDmode) && mode != BLKmode((void) 0, E_BLKmode))
2272 {
2273 int mode_alignment = GET_MODE_BITSIZE (mode)((unsigned short) mode_to_bits (mode).coeffs[0]);
2274
2275 if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)))
2276 mode_alignment = 128;
2277 else if (mode == XCmode(complex_mode ((complex_mode::from_int) E_XCmode)))
2278 mode_alignment = 256;
2279 if (COMPLEX_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_COMPLEX_INT || (
(enum mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT)
)
2280 mode_alignment /= 2;
2281 /* Misaligned fields are always returned in memory. */
2282 if (bit_offset % mode_alignment)
2283 return 0;
2284 }
2285
2286	 /* For V1xx modes, just use the base mode. */
2287 if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
&& mode != V1DImode((void) 0, E_V1DImode) && mode != V1TImode((void) 0, E_V1TImode)
2288 && GET_MODE_UNIT_SIZE (mode)mode_to_unit_size (mode) == bytes)
2289 mode = GET_MODE_INNER (mode)(mode_to_inner (mode));
2290
2291 /* Classification of atomic types. */
2292 switch (mode)
2293 {
2294 case E_SDmode:
2295 case E_DDmode:
2296 classes[0] = X86_64_SSE_CLASS;
2297 return 1;
2298 case E_TDmode:
2299 classes[0] = X86_64_SSE_CLASS;
2300 classes[1] = X86_64_SSEUP_CLASS;
2301 return 2;
2302 case E_DImode:
2303 case E_SImode:
2304 case E_HImode:
2305 case E_QImode:
2306 case E_CSImode:
2307 case E_CHImode:
2308 case E_CQImode:
2309 {
2310 int size = bit_offset + (int) GET_MODE_BITSIZE (mode)((unsigned short) mode_to_bits (mode).coeffs[0]);
2311
2312 /* Analyze last 128 bits only. */
2313 size = (size - 1) & 0x7f;
2314
2315 if (size < 32)
2316 {
2317 classes[0] = X86_64_INTEGERSI_CLASS;
2318 return 1;
2319 }
2320 else if (size < 64)
2321 {
2322 classes[0] = X86_64_INTEGER_CLASS;
2323 return 1;
2324 }
2325 else if (size < 64+32)
2326 {
2327 classes[0] = X86_64_INTEGER_CLASS;
2328 classes[1] = X86_64_INTEGERSI_CLASS;
2329 return 2;
2330 }
2331 else if (size < 64+64)
2332 {
2333 classes[0] = classes[1] = X86_64_INTEGER_CLASS;
2334 return 2;
2335 }
2336 else
2337 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2337, __FUNCTION__))
;
2338 }
2339 case E_CDImode:
2340 case E_TImode:
2341 classes[0] = classes[1] = X86_64_INTEGER_CLASS;
2342 return 2;
2343 case E_COImode:
2344 case E_OImode:
2345 /* OImode shouldn't be used directly. */
2346 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2346, __FUNCTION__))
;
2347 case E_CTImode:
2348 return 0;
2349 case E_SFmode:
2350 if (!(bit_offset % 64))
2351 classes[0] = X86_64_SSESF_CLASS;
2352 else
2353 classes[0] = X86_64_SSE_CLASS;
2354 return 1;
2355 case E_DFmode:
2356 classes[0] = X86_64_SSEDF_CLASS;
2357 return 1;
2358 case E_XFmode:
2359 classes[0] = X86_64_X87_CLASS;
2360 classes[1] = X86_64_X87UP_CLASS;
2361 return 2;
2362 case E_TFmode:
2363 classes[0] = X86_64_SSE_CLASS;
2364 classes[1] = X86_64_SSEUP_CLASS;
2365 return 2;
2366 case E_SCmode:
2367 classes[0] = X86_64_SSE_CLASS;
2368 if (!(bit_offset % 64))
2369 return 1;
2370 else
2371 {
2372 static bool warned;
2373
2374 if (!warned && warn_psabiglobal_options.x_warn_psabi)
2375 {
2376 warned = true;
2377 inform (input_location,
2378 "the ABI of passing structure with %<complex float%>"
2379 " member has changed in GCC 4.4");
2380 }
2381 classes[1] = X86_64_SSESF_CLASS;
2382 return 2;
2383 }
2384 case E_DCmode:
2385 classes[0] = X86_64_SSEDF_CLASS;
2386 classes[1] = X86_64_SSEDF_CLASS;
2387 return 2;
2388 case E_XCmode:
2389 classes[0] = X86_64_COMPLEX_X87_CLASS;
2390 return 1;
2391 case E_TCmode:
2392      /* This mode is larger than 16 bytes. */
2393 return 0;
2394 case E_V8SFmode:
2395 case E_V8SImode:
2396 case E_V32QImode:
2397 case E_V16HImode:
2398 case E_V4DFmode:
2399 case E_V4DImode:
2400 classes[0] = X86_64_SSE_CLASS;
2401 classes[1] = X86_64_SSEUP_CLASS;
2402 classes[2] = X86_64_SSEUP_CLASS;
2403 classes[3] = X86_64_SSEUP_CLASS;
2404 return 4;
2405 case E_V8DFmode:
2406 case E_V16SFmode:
2407 case E_V8DImode:
2408 case E_V16SImode:
2409 case E_V32HImode:
2410 case E_V64QImode:
2411 classes[0] = X86_64_SSE_CLASS;
2412 classes[1] = X86_64_SSEUP_CLASS;
2413 classes[2] = X86_64_SSEUP_CLASS;
2414 classes[3] = X86_64_SSEUP_CLASS;
2415 classes[4] = X86_64_SSEUP_CLASS;
2416 classes[5] = X86_64_SSEUP_CLASS;
2417 classes[6] = X86_64_SSEUP_CLASS;
2418 classes[7] = X86_64_SSEUP_CLASS;
2419 return 8;
2420 case E_V4SFmode:
2421 case E_V4SImode:
2422 case E_V16QImode:
2423 case E_V8HImode:
2424 case E_V2DFmode:
2425 case E_V2DImode:
2426 classes[0] = X86_64_SSE_CLASS;
2427 classes[1] = X86_64_SSEUP_CLASS;
2428 return 2;
2429 case E_V1TImode:
2430 case E_V1DImode:
2431 case E_V2SFmode:
2432 case E_V2SImode:
2433 case E_V4HImode:
2434 case E_V8QImode:
2435 classes[0] = X86_64_SSE_CLASS;
2436 return 1;
2437 case E_BLKmode:
2438 case E_VOIDmode:
2439 return 0;
2440 default:
2441 gcc_assert (VECTOR_MODE_P (mode))((void)(!((((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL
|| ((enum mode_class) mode_class[mode]) == MODE_VECTOR_INT ||
((enum mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT ||
((enum mode_class) mode_class[mode]) == MODE_VECTOR_FRACT ||
((enum mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT ||
((enum mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM ||
((enum mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM))
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2441, __FUNCTION__), 0 : 0))
;
2442
2443 if (bytes > 16)
2444 return 0;
2445
2446 gcc_assert (GET_MODE_CLASS (GET_MODE_INNER (mode)) == MODE_INT)((void)(!(((enum mode_class) mode_class[(mode_to_inner (mode)
)]) == MODE_INT) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2446, __FUNCTION__), 0 : 0))
;
2447
2448 if (bit_offset + GET_MODE_BITSIZE (mode)((unsigned short) mode_to_bits (mode).coeffs[0]) <= 32)
2449 classes[0] = X86_64_INTEGERSI_CLASS;
2450 else
2451 classes[0] = X86_64_INTEGER_CLASS;
2452 classes[1] = X86_64_INTEGER_CLASS;
2453 return 1 + (bytes > 8);
2454 }
2455}
2456
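The integer cases above bucket an argument by where its last bit lands inside the current 16-byte chunk: below 32 bits gives X86_64_INTEGERSI_CLASS, below 64 bits X86_64_INTEGER_CLASS, and anything spilling into the second eightbyte gets a second class. A minimal standalone sketch of that bucketing, with illustrative names that are not part of i386.c:

    #include <stdio.h>

    enum reg_class_sketch { INTEGERSI, INTEGER };

    /* Classify an integer argument of BITSIZE bits placed at BIT_OFFSET,
       mirroring the "size = (size - 1) & 0x7f" bucketing above.  Returns
       the number of eightbytes written to CLASSES.  */
    static int
    classify_int_sketch (int bit_offset, int bitsize,
                         enum reg_class_sketch *classes)
    {
      int size = (bit_offset + bitsize - 1) & 0x7f;  /* last 128 bits only */

      if (size < 32)
        {
          classes[0] = INTEGERSI;
          return 1;
        }
      else if (size < 64)
        {
          classes[0] = INTEGER;
          return 1;
        }
      else if (size < 64 + 32)
        {
          classes[0] = INTEGER;
          classes[1] = INTEGERSI;
          return 2;
        }
      else if (size < 64 + 64)
        {
          classes[0] = classes[1] = INTEGER;
          return 2;
        }
      return 0;  /* not reached: the mask keeps SIZE below 128 */
    }

    int
    main (void)
    {
      enum reg_class_sketch c[2];
      printf ("32-bit int at offset 0:   %d eightbyte(s)\n",
              classify_int_sketch (0, 32, c));
      printf ("64-bit value at offset 64: %d eightbyte(s)\n",
              classify_int_sketch (64, 64, c));
      return 0;
    }

Running it prints one eightbyte for the 32-bit int at offset 0 and two for the value whose last bit lands at bit 127.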
2457/* Examine the argument and return the number of registers required in each
2458   class.  Return true iff the parameter should be passed in memory. */
2459
2460static bool
2461examine_argument (machine_mode mode, const_tree type, int in_return,
2462 int *int_nregs, int *sse_nregs)
2463{
2464 enum x86_64_reg_class regclass[MAX_CLASSES8];
2465 int n = classify_argument (mode, type, regclass, 0);
2466
2467 *int_nregs = 0;
2468 *sse_nregs = 0;
2469
2470 if (!n)
2471 return true;
2472 for (n--; n >= 0; n--)
2473 switch (regclass[n])
2474 {
2475 case X86_64_INTEGER_CLASS:
2476 case X86_64_INTEGERSI_CLASS:
2477 (*int_nregs)++;
2478 break;
2479 case X86_64_SSE_CLASS:
2480 case X86_64_SSESF_CLASS:
2481 case X86_64_SSEDF_CLASS:
2482 (*sse_nregs)++;
2483 break;
2484 case X86_64_NO_CLASS:
2485 case X86_64_SSEUP_CLASS:
2486 break;
2487 case X86_64_X87_CLASS:
2488 case X86_64_X87UP_CLASS:
2489 case X86_64_COMPLEX_X87_CLASS:
2490 if (!in_return)
2491 return true;
2492 break;
2493 case X86_64_MEMORY_CLASS:
2494 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2494, __FUNCTION__))
;
2495 }
2496
2497 return false;
2498}
2499
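A hedged sketch of the tally examine_argument performs, highlighting that x87 classes are acceptable only for return values (nonzero IN_RETURN) and force an argument into memory; the enum and function below are illustrative stand-ins, not GCC code:

    #include <stdbool.h>
    #include <stdio.h>

    enum cls { NO_CLASS, INT_CLASS, SSE_CLASS, X87_CLASS, X87UP_CLASS };

    /* Count registers per kind; return true when the value must live in
       memory, mirroring the loop above.  */
    static bool
    needs_memory (const enum cls *c, int n, bool in_return,
                  int *int_nregs, int *sse_nregs)
    {
      *int_nregs = *sse_nregs = 0;
      if (n == 0)
        return true;
      for (int i = 0; i < n; i++)
        switch (c[i])
          {
          case INT_CLASS: ++*int_nregs; break;
          case SSE_CLASS: ++*sse_nregs; break;
          case X87_CLASS:
          case X87UP_CLASS:
            if (!in_return)
              return true;        /* x87 is only usable for return values */
            break;
          default: break;
          }
      return false;
    }

    int
    main (void)
    {
      enum cls ld[2] = { X87_CLASS, X87UP_CLASS };   /* e.g. long double */
      int ir, sr;
      printf ("as argument: memory=%d\n", needs_memory (ld, 2, false, &ir, &sr));
      printf ("as return:   memory=%d\n", needs_memory (ld, 2, true, &ir, &sr));
      return 0;
    }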
2500/* Construct container for the argument used by GCC interface. See
2501 FUNCTION_ARG for the detailed description. */
2502
2503static rtx
2504construct_container (machine_mode mode, machine_mode orig_mode,
2505 const_tree type, int in_return, int nintregs, int nsseregs,
2506 const int *intreg, int sse_regno)
2507{
2508 /* The following variables hold the static issued_error state. */
2509 static bool issued_sse_arg_error;
2510 static bool issued_sse_ret_error;
2511 static bool issued_x87_ret_error;
2512
2513 machine_mode tmpmode;
2514 int bytes
2515 = mode == BLKmode((void) 0, E_BLKmode) ? int_size_in_bytes (type) : (int) GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]);
2516 enum x86_64_reg_class regclass[MAX_CLASSES8];
2517 int n;
2518 int i;
2519 int nexps = 0;
2520 int needed_sseregs, needed_intregs;
2521 rtx exp[MAX_CLASSES8];
2522 rtx ret;
2523
2524 n = classify_argument (mode, type, regclass, 0);
2525 if (!n)
2526 return NULL__null;
2527 if (examine_argument (mode, type, in_return, &needed_intregs,
2528 &needed_sseregs))
2529 return NULL__null;
2530 if (needed_intregs > nintregs || needed_sseregs > nsseregs)
2531 return NULL__null;
2532
2533 /* We allowed the user to turn off SSE for kernel mode. Don't crash if
2534 some less clueful developer tries to use floating-point anyway. */
2535 if (needed_sseregs && !TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
)
2536 {
2537 if (in_return)
2538 {
2539 if (!issued_sse_ret_error)
2540 {
2541 error ("SSE register return with SSE disabled");
2542 issued_sse_ret_error = true;
2543 }
2544 }
2545 else if (!issued_sse_arg_error)
2546 {
2547 error ("SSE register argument with SSE disabled");
2548 issued_sse_arg_error = true;
2549 }
2550 return NULL__null;
2551 }
2552
2553 /* Likewise, error if the ABI requires us to return values in the
2554 x87 registers and the user specified -mno-80387. */
2555 if (!TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0
) && ((global_options.x_target_flags & (1U <<
1)) != 0) && !((global_options.x_target_flags & (
1U << 12)) != 0))
&& in_return)
2556 for (i = 0; i < n; i++)
2557 if (regclass[i] == X86_64_X87_CLASS
2558 || regclass[i] == X86_64_X87UP_CLASS
2559 || regclass[i] == X86_64_COMPLEX_X87_CLASS)
2560 {
2561 if (!issued_x87_ret_error)
2562 {
2563 error ("x87 register return with x87 disabled");
2564 issued_x87_ret_error = true;
2565 }
2566 return NULL__null;
2567 }
2568
2569 /* First construct simple cases. Avoid SCmode, since we want to use
2570     a single register to pass this type. */
2571 if (n == 1 && mode != SCmode(complex_mode ((complex_mode::from_int) E_SCmode)))
2572 switch (regclass[0])
2573 {
2574 case X86_64_INTEGER_CLASS:
2575 case X86_64_INTEGERSI_CLASS:
2576 return gen_rtx_REG (mode, intreg[0]);
2577 case X86_64_SSE_CLASS:
2578 case X86_64_SSESF_CLASS:
2579 case X86_64_SSEDF_CLASS:
2580 if (mode != BLKmode((void) 0, E_BLKmode))
2581 return gen_reg_or_parallel (mode, orig_mode,
2582 GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ?
44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)
);
2583 break;
2584 case X86_64_X87_CLASS:
2585 case X86_64_COMPLEX_X87_CLASS:
2586 return gen_rtx_REG (mode, FIRST_STACK_REG8);
2587 case X86_64_NO_CLASS:
2588 /* Zero sized array, struct or class. */
2589 return NULL__null;
2590 default:
2591 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2591, __FUNCTION__))
;
2592 }
2593 if (n == 2
2594 && regclass[0] == X86_64_SSE_CLASS
2595 && regclass[1] == X86_64_SSEUP_CLASS
2596 && mode != BLKmode((void) 0, E_BLKmode))
2597 return gen_reg_or_parallel (mode, orig_mode,
2598 GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ?
44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)
);
2599 if (n == 4
2600 && regclass[0] == X86_64_SSE_CLASS
2601 && regclass[1] == X86_64_SSEUP_CLASS
2602 && regclass[2] == X86_64_SSEUP_CLASS
2603 && regclass[3] == X86_64_SSEUP_CLASS
2604 && mode != BLKmode((void) 0, E_BLKmode))
2605 return gen_reg_or_parallel (mode, orig_mode,
2606 GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ?
44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)
);
2607 if (n == 8
2608 && regclass[0] == X86_64_SSE_CLASS
2609 && regclass[1] == X86_64_SSEUP_CLASS
2610 && regclass[2] == X86_64_SSEUP_CLASS
2611 && regclass[3] == X86_64_SSEUP_CLASS
2612 && regclass[4] == X86_64_SSEUP_CLASS
2613 && regclass[5] == X86_64_SSEUP_CLASS
2614 && regclass[6] == X86_64_SSEUP_CLASS
2615 && regclass[7] == X86_64_SSEUP_CLASS
2616 && mode != BLKmode((void) 0, E_BLKmode))
2617 return gen_reg_or_parallel (mode, orig_mode,
2618 GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ?
44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)
);
2619 if (n == 2
2620 && regclass[0] == X86_64_X87_CLASS
2621 && regclass[1] == X86_64_X87UP_CLASS)
2622 return gen_rtx_REG (XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)), FIRST_STACK_REG8);
2623
2624 if (n == 2
2625 && regclass[0] == X86_64_INTEGER_CLASS
2626 && regclass[1] == X86_64_INTEGER_CLASS
2627 && (mode == CDImode(complex_mode ((complex_mode::from_int) E_CDImode)) || mode == TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode)) || mode == BLKmode((void) 0, E_BLKmode))
2628 && intreg[0] + 1 == intreg[1])
2629 {
2630 if (mode == BLKmode((void) 0, E_BLKmode))
2631 {
2632 /* Use TImode for BLKmode values in 2 integer registers. */
2633 exp[0] = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2634 gen_rtx_REG (TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode)), intreg[0]),
2635 GEN_INT (0)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (0)));
2636 ret = gen_rtx_PARALLEL (mode, rtvec_alloc (1))gen_rtx_fmt_E_stat ((PARALLEL), ((mode)), ((rtvec_alloc (1)))
)
;
2637 XVECEXP (ret, 0, 0)(((((ret)->u.fld[0]).rt_rtvec))->elem[0]) = exp[0];
2638 return ret;
2639 }
2640 else
2641 return gen_rtx_REG (mode, intreg[0]);
2642 }
2643
2644 /* Otherwise figure out the entries of the PARALLEL. */
2645 for (i = 0; i < n; i++)
2646 {
2647 int pos;
2648
2649 switch (regclass[i])
2650 {
2651 case X86_64_NO_CLASS:
2652 break;
2653 case X86_64_INTEGER_CLASS:
2654 case X86_64_INTEGERSI_CLASS:
2655 /* Merge TImodes on aligned occasions here too. */
2656 if (i * 8 + 8 > bytes)
2657 {
2658 unsigned int tmpbits = (bytes - i * 8) * BITS_PER_UNIT(8);
2659 if (!int_mode_for_size (tmpbits, 0).exists (&tmpmode))
2660                /* We've requested 24 bytes, which we
2661                   don't have a mode for.  Use DImode. */
2662 tmpmode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode));
2663 }
2664 else if (regclass[i] == X86_64_INTEGERSI_CLASS)
2665 tmpmode = SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode));
2666 else
2667 tmpmode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode));
2668 exp [nexps++]
2669 = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2670 gen_rtx_REG (tmpmode, *intreg),
2671 GEN_INT (i*8)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (i*8)));
2672 intreg++;
2673 break;
2674 case X86_64_SSESF_CLASS:
2675 exp [nexps++]
2676 = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2677 gen_rtx_REG (SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)),
2678 GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ?
44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)
),
2679 GEN_INT (i*8)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (i*8)));
2680 sse_regno++;
2681 break;
2682 case X86_64_SSEDF_CLASS:
2683 exp [nexps++]
2684 = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2685 gen_rtx_REG (DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode)),
2686 GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ?
44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)
),
2687 GEN_INT (i*8)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (i*8)));
2688 sse_regno++;
2689 break;
2690 case X86_64_SSE_CLASS:
2691 pos = i;
2692 switch (n)
2693 {
2694 case 1:
2695 tmpmode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode));
2696 break;
2697 case 2:
2698 if (i == 0 && regclass[1] == X86_64_SSEUP_CLASS)
2699 {
2700 tmpmode = TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode));
2701 i++;
2702 }
2703 else
2704 tmpmode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode));
2705 break;
2706 case 4:
2707 gcc_assert (i == 0((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2710, __FUNCTION__), 0 : 0))
2708 && regclass[1] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2710, __FUNCTION__), 0 : 0))
2709 && regclass[2] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2710, __FUNCTION__), 0 : 0))
2710 && regclass[3] == X86_64_SSEUP_CLASS)((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2710, __FUNCTION__), 0 : 0))
;
2711 tmpmode = OImode(scalar_int_mode ((scalar_int_mode::from_int) E_OImode));
2712 i += 3;
2713 break;
2714 case 8:
2715 gcc_assert (i == 0((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS
&& regclass[5] == X86_64_SSEUP_CLASS && regclass
[6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2722, __FUNCTION__), 0 : 0))
2716 && regclass[1] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS
&& regclass[5] == X86_64_SSEUP_CLASS && regclass
[6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2722, __FUNCTION__), 0 : 0))
2717 && regclass[2] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS
&& regclass[5] == X86_64_SSEUP_CLASS && regclass
[6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2722, __FUNCTION__), 0 : 0))
2718 && regclass[3] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS
&& regclass[5] == X86_64_SSEUP_CLASS && regclass
[6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2722, __FUNCTION__), 0 : 0))
2719 && regclass[4] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS
&& regclass[5] == X86_64_SSEUP_CLASS && regclass
[6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2722, __FUNCTION__), 0 : 0))
2720 && regclass[5] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS
&& regclass[5] == X86_64_SSEUP_CLASS && regclass
[6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2722, __FUNCTION__), 0 : 0))
2721 && regclass[6] == X86_64_SSEUP_CLASS((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS
&& regclass[5] == X86_64_SSEUP_CLASS && regclass
[6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2722, __FUNCTION__), 0 : 0))
2722 && regclass[7] == X86_64_SSEUP_CLASS)((void)(!(i == 0 && regclass[1] == X86_64_SSEUP_CLASS
&& regclass[2] == X86_64_SSEUP_CLASS && regclass
[3] == X86_64_SSEUP_CLASS && regclass[4] == X86_64_SSEUP_CLASS
&& regclass[5] == X86_64_SSEUP_CLASS && regclass
[6] == X86_64_SSEUP_CLASS && regclass[7] == X86_64_SSEUP_CLASS
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2722, __FUNCTION__), 0 : 0))
;
2723 tmpmode = XImode(scalar_int_mode ((scalar_int_mode::from_int) E_XImode));
2724 i += 7;
2725 break;
2726 default:
2727 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2727, __FUNCTION__))
;
2728 }
2729 exp [nexps++]
2730 = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode),
2731 gen_rtx_REG (tmpmode,
2732 GET_SSE_REGNO (sse_regno)((sse_regno) < 8 ? 20 + (sse_regno) : (sse_regno) < 16 ?
44 + (sse_regno) - 8 : 52 + (sse_regno) - 16)
),
2733 GEN_INT (pos*8)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (pos*8)));
2734 sse_regno++;
2735 break;
2736 default:
2737 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2737, __FUNCTION__))
;
2738 }
2739 }
2740
2741 /* Empty aligned struct, union or class. */
2742 if (nexps == 0)
2743 return NULL__null;
2744
2745 ret = gen_rtx_PARALLEL (mode, rtvec_alloc (nexps))gen_rtx_fmt_E_stat ((PARALLEL), ((mode)), ((rtvec_alloc (nexps
))) )
;
2746 for (i = 0; i < nexps; i++)
2747 XVECEXP (ret, 0, i)(((((ret)->u.fld[0]).rt_rtvec))->elem[i]) = exp [i];
2748 return ret;
2749}
2750
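The PARALLEL built above describes how an aggregate is split across register files. A small compile-and-inspect example (not part of i386.c; the register assignment is the expected SysV x86-64 behaviour, visible only in the generated assembly):

    #include <stdio.h>

    struct mixed { long i; double d; };

    /* Per the SysV x86-64 psABI, S is expected to arrive split: s.i in
       %rdi and s.d in %xmm0 -- exactly the layout a two-entry PARALLEL
       from construct_container encodes.  Build with "gcc -O2 -S" and
       inspect the call site.  */
    __attribute__ ((noinline)) static double
    use (struct mixed s)
    {
      return (double) s.i + s.d;
    }

    int
    main (void)
    {
      struct mixed s = { 41, 1.0 };
      printf ("%f\n", use (s));
      return 0;
    }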
2751/* Update the data in CUM to advance over an argument of mode MODE
2752 and data type TYPE. (TYPE is null for libcalls where that information
2753 may not be available.)
2754
2755   Return the number of integer registers advanced over. */
2756
2757static int
2758function_arg_advance_32 (CUMULATIVE_ARGS *cum, machine_mode mode,
2759 const_tree type, HOST_WIDE_INTlong bytes,
2760 HOST_WIDE_INTlong words)
2761{
2762 int res = 0;
2763 bool error_p = false;
2764
2765 if (TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
2766 {
2767 /* Intel MCU psABI passes scalars and aggregates no larger than 8
2768 bytes in registers. */
2769 if (!VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
&& bytes <= 8)
2770 goto pass_in_reg;
2771 return res;
2772 }
2773
2774 switch (mode)
2775 {
2776 default:
2777 break;
2778
2779 case E_BLKmode:
2780 if (bytes < 0)
2781 break;
2782 /* FALLTHRU */
2783
2784 case E_DImode:
2785 case E_SImode:
2786 case E_HImode:
2787 case E_QImode:
2788pass_in_reg:
2789 cum->words += words;
2790 cum->nregs -= words;
2791 cum->regno += words;
2792 if (cum->nregs >= 0)
2793 res = words;
2794 if (cum->nregs <= 0)
2795 {
2796 cum->nregs = 0;
2797 cfun(cfun + 0)->machine->arg_reg_available = false;
2798 cum->regno = 0;
2799 }
2800 break;
2801
2802 case E_OImode:
2803 /* OImode shouldn't be used directly. */
2804 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2804, __FUNCTION__))
;
2805
2806 case E_DFmode:
2807 if (cum->float_in_sse == -1)
2808 error_p = true;
2809 if (cum->float_in_sse < 2)
2810 break;
2811 /* FALLTHRU */
2812 case E_SFmode:
2813 if (cum->float_in_sse == -1)
2814 error_p = true;
2815 if (cum->float_in_sse < 1)
2816 break;
2817 /* FALLTHRU */
2818
2819 case E_V8SFmode:
2820 case E_V8SImode:
2821 case E_V64QImode:
2822 case E_V32HImode:
2823 case E_V16SImode:
2824 case E_V8DImode:
2825 case E_V16SFmode:
2826 case E_V8DFmode:
2827 case E_V32QImode:
2828 case E_V16HImode:
2829 case E_V4DFmode:
2830 case E_V4DImode:
2831 case E_TImode:
2832 case E_V16QImode:
2833 case E_V8HImode:
2834 case E_V4SImode:
2835 case E_V2DImode:
2836 case E_V4SFmode:
2837 case E_V2DFmode:
2838 if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
)
2839 {
2840 cum->sse_words += words;
2841 cum->sse_nregs -= 1;
2842 cum->sse_regno += 1;
2843 if (cum->sse_nregs <= 0)
2844 {
2845 cum->sse_nregs = 0;
2846 cum->sse_regno = 0;
2847 }
2848 }
2849 break;
2850
2851 case E_V8QImode:
2852 case E_V4HImode:
2853 case E_V2SImode:
2854 case E_V2SFmode:
2855 case E_V1TImode:
2856 case E_V1DImode:
2857 if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
)
2858 {
2859 cum->mmx_words += words;
2860 cum->mmx_nregs -= 1;
2861 cum->mmx_regno += 1;
2862 if (cum->mmx_nregs <= 0)
2863 {
2864 cum->mmx_nregs = 0;
2865 cum->mmx_regno = 0;
2866 }
2867 }
2868 break;
2869 }
2870 if (error_p)
2871 {
2872 cum->float_in_sse = 0;
2873 error ("calling %qD with SSE calling convention without "
2874 "SSE/SSE2 enabled", cum->decl);
2875 sorry ("this is a GCC bug that can be worked around by adding "
2876	     "attribute used to the called function");
2877 }
2878
2879 return res;
2880}
2881
2882static int
2883function_arg_advance_64 (CUMULATIVE_ARGS *cum, machine_mode mode,
2884 const_tree type, HOST_WIDE_INTlong words, bool named)
2885{
2886 int int_nregs, sse_nregs;
2887
2888 /* Unnamed 512 and 256bit vector mode parameters are passed on stack. */
2889 if (!named && (VALID_AVX512F_REG_MODE (mode)((mode) == ((void) 0, E_V8DImode) || (mode) == ((void) 0, E_V8DFmode
) || (mode) == ((void) 0, E_V64QImode) || (mode) == ((void) 0
, E_V16SImode) || (mode) == ((void) 0, E_V16SFmode) || (mode)
== ((void) 0, E_V32HImode) || (mode) == ((void) 0, E_V4TImode
))
2890 || VALID_AVX256_REG_MODE (mode)((mode) == ((void) 0, E_V32QImode) || (mode) == ((void) 0, E_V16HImode
) || (mode) == ((void) 0, E_V8SImode) || (mode) == ((void) 0,
E_V4DImode) || (mode) == ((void) 0, E_V2TImode) || (mode) ==
((void) 0, E_V8SFmode) || (mode) == ((void) 0, E_V4DFmode))
))
2891 return 0;
2892
2893 if (!examine_argument (mode, type, 0, &int_nregs, &sse_nregs)
2894 && sse_nregs <= cum->sse_nregs && int_nregs <= cum->nregs)
2895 {
2896 cum->nregs -= int_nregs;
2897 cum->sse_nregs -= sse_nregs;
2898 cum->regno += int_nregs;
2899 cum->sse_regno += sse_nregs;
2900 return int_nregs;
2901 }
2902 else
2903 {
2904 int align = ix86_function_arg_boundary (mode, type) / BITS_PER_WORD((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
;
2905 cum->words = ROUND_UP (cum->words, align)(((cum->words) + (align) - 1) & ~((align) - 1));
2906 cum->words += words;
2907 return 0;
2908 }
2909}
2910
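The else branch above is the stack fallback: once either register pool is exhausted, the running word offset is rounded up to the argument's alignment and then advanced by its size. A purely illustrative standalone sketch of that bookkeeping (the names and the 6/8 register limits are stand-ins for the SysV integer and SSE parameter registers, not the GCC data structures):

    #include <stdio.h>

    #define ROUND_UP(n, align) (((n) + (align) - 1) & ~((align) - 1))

    struct cum_sketch { int nregs, sse_nregs, words; };

    /* Consume registers while they last, otherwise align the running word
       count and push the argument onto the stack.  */
    static void
    advance_sketch (struct cum_sketch *cum, int int_needed, int sse_needed,
                    int words, int align_words)
    {
      if (int_needed <= cum->nregs && sse_needed <= cum->sse_nregs)
        {
          cum->nregs -= int_needed;
          cum->sse_nregs -= sse_needed;
        }
      else
        {
          cum->words = ROUND_UP (cum->words, align_words);
          cum->words += words;
        }
    }

    int
    main (void)
    {
      struct cum_sketch cum = { 6, 8, 0 };
      for (int i = 0; i < 8; i++)            /* eight integer arguments */
        advance_sketch (&cum, 1, 0, 1, 1);
      printf ("stack words used: %d\n", cum.words);   /* the last two spill */
      return 0;
    }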
2911static int
2912function_arg_advance_ms_64 (CUMULATIVE_ARGS *cum, HOST_WIDE_INTlong bytes,
2913 HOST_WIDE_INTlong words)
2914{
2915  /* Otherwise, this should be passed indirectly. */
2916 gcc_assert (bytes == 1 || bytes == 2 || bytes == 4 || bytes == 8)((void)(!(bytes == 1 || bytes == 2 || bytes == 4 || bytes == 8
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 2916, __FUNCTION__), 0 : 0))
;
2917
2918 cum->words += words;
2919 if (cum->nregs > 0)
2920 {
2921 cum->nregs -= 1;
2922 cum->regno += 1;
2923 return 1;
2924 }
2925 return 0;
2926}
2927
2928/* Update the data in CUM to advance over argument ARG. */
2929
2930static void
2931ix86_function_arg_advance (cumulative_args_t cum_v,
2932 const function_arg_info &arg)
2933{
2934 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2935 machine_mode mode = arg.mode;
2936 HOST_WIDE_INTlong bytes, words;
2937 int nregs;
2938
2939  /* The argument of an interrupt handler is a special case and is
2940 handled in ix86_function_arg. */
2941 if (!cum->caller && cfun(cfun + 0)->machine->func_type != TYPE_NORMAL)
2942 return;
2943
2944 bytes = arg.promoted_size_in_bytes ();
2945 words = CEIL (bytes, UNITS_PER_WORD)(((bytes) + ((((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4)) - 1) / ((((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4)))
;
2946
2947 if (arg.type)
2948 mode = type_natural_mode (arg.type, NULL__null, false);
2949
2950 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
2951 {
2952 enum calling_abi call_abi = cum ? cum->call_abi : ix86_abiglobal_options.x_ix86_abi;
2953
2954 if (call_abi == MS_ABI)
2955 nregs = function_arg_advance_ms_64 (cum, bytes, words);
2956 else
2957 nregs = function_arg_advance_64 (cum, mode, arg.type, words,
2958 arg.named);
2959 }
2960 else
2961 nregs = function_arg_advance_32 (cum, mode, arg.type, bytes, words);
2962
2963 if (!nregs)
2964 {
2965      /* Track whether there are outgoing arguments on the stack. */
2966 if (cum->caller)
2967 cfun(cfun + 0)->machine->outgoing_args_on_stack = true;
2968 }
2969}
2970
2971/* Define where to put the arguments to a function.
2972 Value is zero to push the argument on the stack,
2973 or a hard register in which to store the argument.
2974
2975 MODE is the argument's machine mode.
2976 TYPE is the data type of the argument (as a tree).
2977 This is null for libcalls where that information may
2978 not be available.
2979 CUM is a variable of type CUMULATIVE_ARGS which gives info about
2980 the preceding args and about the function being called.
2981 NAMED is nonzero if this argument is a named parameter
2982 (otherwise it is an extra parameter matching an ellipsis). */
2983
2984static rtx
2985function_arg_32 (CUMULATIVE_ARGS *cum, machine_mode mode,
2986 machine_mode orig_mode, const_tree type,
2987 HOST_WIDE_INTlong bytes, HOST_WIDE_INTlong words)
2988{
2989 bool error_p = false;
2990
2991 /* Avoid the AL settings for the Unix64 ABI. */
2992 if (mode == VOIDmode((void) 0, E_VOIDmode))
2993 return constm1_rtx(const_int_rtx[64 -1]);
2994
2995 if (TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
2996 {
2997 /* Intel MCU psABI passes scalars and aggregates no larger than 8
2998 bytes in registers. */
2999 if (!VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
&& bytes <= 8)
3000 goto pass_in_reg;
3001 return NULL_RTX(rtx) 0;
3002 }
3003
3004 switch (mode)
3005 {
3006 default:
3007 break;
3008
3009 case E_BLKmode:
3010 if (bytes < 0)
3011 break;
3012 /* FALLTHRU */
3013 case E_DImode:
3014 case E_SImode:
3015 case E_HImode:
3016 case E_QImode:
3017pass_in_reg:
3018 if (words <= cum->nregs)
3019 {
3020 int regno = cum->regno;
3021
3022 /* Fastcall allocates the first two DWORD (SImode) or
3023	     smaller arguments to ECX and EDX if the argument isn't an
3024	     aggregate type. */
3025 if (cum->fastcall)
3026 {
3027 if (mode == BLKmode((void) 0, E_BLKmode)
3028 || mode == DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode))
3029 || (type && AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
))
3030 break;
3031
3032	  /* ECX, not EAX, is the first allocated register. */
3033 if (regno == AX_REG0)
3034 regno = CX_REG2;
3035 }
3036 return gen_rtx_REG (mode, regno);
3037 }
3038 break;
3039
3040 case E_DFmode:
3041 if (cum->float_in_sse == -1)
3042 error_p = true;
3043 if (cum->float_in_sse < 2)
3044 break;
3045 /* FALLTHRU */
3046 case E_SFmode:
3047 if (cum->float_in_sse == -1)
3048 error_p = true;
3049 if (cum->float_in_sse < 1)
3050 break;
3051 /* FALLTHRU */
3052 case E_TImode:
3053 /* In 32bit, we pass TImode in xmm registers. */
3054 case E_V16QImode:
3055 case E_V8HImode:
3056 case E_V4SImode:
3057 case E_V2DImode:
3058 case E_V4SFmode:
3059 case E_V2DFmode:
3060 if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
)
3061 {
3062 if (cum->sse_nregs)
3063 return gen_reg_or_parallel (mode, orig_mode,
3064 cum->sse_regno + FIRST_SSE_REG20);
3065 }
3066 break;
3067
3068 case E_OImode:
3069 case E_XImode:
3070 /* OImode and XImode shouldn't be used directly. */
3071 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3071, __FUNCTION__))
;
3072
3073 case E_V64QImode:
3074 case E_V32HImode:
3075 case E_V16SImode:
3076 case E_V8DImode:
3077 case E_V16SFmode:
3078 case E_V8DFmode:
3079 case E_V8SFmode:
3080 case E_V8SImode:
3081 case E_V32QImode:
3082 case E_V16HImode:
3083 case E_V4DFmode:
3084 case E_V4DImode:
3085 if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
)
3086 {
3087 if (cum->sse_nregs)
3088 return gen_reg_or_parallel (mode, orig_mode,
3089 cum->sse_regno + FIRST_SSE_REG20);
3090 }
3091 break;
3092
3093 case E_V8QImode:
3094 case E_V4HImode:
3095 case E_V2SImode:
3096 case E_V2SFmode:
3097 case E_V1TImode:
3098 case E_V1DImode:
3099 if (!type || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
)
3100 {
3101 if (cum->mmx_nregs)
3102 return gen_reg_or_parallel (mode, orig_mode,
3103 cum->mmx_regno + FIRST_MMX_REG28);
3104 }
3105 break;
3106 }
3107 if (error_p)
3108 {
3109 cum->float_in_sse = 0;
3110 error ("calling %qD with SSE calling convention without "
3111 "SSE/SSE2 enabled", cum->decl);
3112 sorry ("this is a GCC bug that can be worked around by adding "
3113	     "attribute used to the called function");
3114 }
3115
3116 return NULL_RTX(rtx) 0;
3117}
3118
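The fastcall handling in the pass_in_reg path above allocates the first two word-sized, non-aggregate arguments to ECX and EDX. A hedged example; it is only meaningful when built for 32-bit x86 (e.g. with -m32), and on other targets the attribute is simply ignored with a warning:

    #include <stdio.h>

    /* With -m32, A is expected in %ecx and B in %edx at the call site.  */
    __attribute__ ((fastcall)) static int
    add2 (int a, int b)
    {
      return a + b;
    }

    int
    main (void)
    {
      printf ("%d\n", add2 (40, 2));
      return 0;
    }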
3119static rtx
3120function_arg_64 (const CUMULATIVE_ARGS *cum, machine_mode mode,
3121 machine_mode orig_mode, const_tree type, bool named)
3122{
3123 /* Handle a hidden AL argument containing number of registers
3124 for varargs x86-64 functions. */
3125 if (mode == VOIDmode((void) 0, E_VOIDmode))
3126 return GEN_INT (cum->maybe_vaarggen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg
? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1))
3127 ? (cum->sse_nregs < 0gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg
? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1))
3128 ? X86_64_SSE_REGPARM_MAXgen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg
? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1))
3129 : cum->sse_regno)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg
? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1))
3130 : -1)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (cum->maybe_vaarg
? (cum->sse_nregs < 0 ? 8 : cum->sse_regno) : -1))
;
3131
3132 switch (mode)
3133 {
3134 default:
3135 break;
3136
3137 case E_V8SFmode:
3138 case E_V8SImode:
3139 case E_V32QImode:
3140 case E_V16HImode:
3141 case E_V4DFmode:
3142 case E_V4DImode:
3143 case E_V16SFmode:
3144 case E_V16SImode:
3145 case E_V64QImode:
3146 case E_V32HImode:
3147 case E_V8DFmode:
3148 case E_V8DImode:
3149 /* Unnamed 256 and 512bit vector mode parameters are passed on stack. */
3150 if (!named)
3151 return NULL__null;
3152 break;
3153 }
3154
3155 return construct_container (mode, orig_mode, type, 0, cum->nregs,
3156 cum->sse_nregs,
3157 &x86_64_int_parameter_registers [cum->regno],
3158 cum->sse_regno);
3159}
3160
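The VOIDmode case above is the hidden AL argument: for a varargs call the caller loads into %al the number of SSE registers actually used (capped at X86_64_SSE_REGPARM_MAX), so the callee knows how many vector registers its register-save area must spill. A small illustration; the %al detail is SysV x86-64 behaviour and is visible only in the generated assembly:

    /* Compile with "gcc -O2 -S" on an x86-64 SysV target and look at the
       two printf call sites: before the first, %al is zeroed (no vector
       arguments); before the second it is set to 1, because one SSE
       register (xmm0) carries the double.  */

    #include <stdio.h>

    int
    main (void)
    {
      printf ("no FP args: %d\n", 42);
      printf ("one FP arg: %f\n", 3.14);
      return 0;
    }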
3161static rtx
3162function_arg_ms_64 (const CUMULATIVE_ARGS *cum, machine_mode mode,
3163 machine_mode orig_mode, bool named, const_tree type,
3164 HOST_WIDE_INTlong bytes)
3165{
3166 unsigned int regno;
3167
3168  /* We need to add a clobber for MS_ABI->SYSV ABI calls in expand_call.
3169     We use the value -2 to specify that the current function call is MS ABI. */
3170 if (mode == VOIDmode((void) 0, E_VOIDmode))
3171 return GEN_INT (-2)gen_rtx_CONST_INT (((void) 0, E_VOIDmode), (-2));
3172
3173 /* If we've run out of registers, it goes on the stack. */
3174 if (cum->nregs == 0)
3175 return NULL_RTX(rtx) 0;
3176
3177 regno = x86_64_ms_abi_int_parameter_registers[cum->regno];
3178
3179 /* Only floating point modes are passed in anything but integer regs. */
3180 if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
&& (mode == SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)) || mode == DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode))))
3181 {
3182 if (named)
3183 {
3184 if (type == NULL_TREE(tree) __null || !AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
)
3185 regno = cum->regno + FIRST_SSE_REG20;
3186 }
3187 else
3188 {
3189 rtx t1, t2;
3190
3191 /* Unnamed floating parameters are passed in both the
3192 SSE and integer registers. */
3193 t1 = gen_rtx_REG (mode, cum->regno + FIRST_SSE_REG20);
3194 t2 = gen_rtx_REG (mode, regno);
3195 t1 = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode), t1, const0_rtx(const_int_rtx[64]));
3196 t2 = gen_rtx_EXPR_LIST (VOIDmode((void) 0, E_VOIDmode), t2, const0_rtx(const_int_rtx[64]));
3197 return gen_rtx_PARALLEL (mode, gen_rtvec (2, t1, t2))gen_rtx_fmt_E_stat ((PARALLEL), ((mode)), ((gen_rtvec (2, t1,
t2))) )
;
3198 }
3199 }
3200  /* Handle aggregate types passed in registers. */
3201 if (orig_mode == BLKmode((void) 0, E_BLKmode))
3202 {
3203 if (bytes > 0 && bytes <= 8)
3204 mode = (bytes > 4 ? DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode)) : SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)));
3205 if (mode == BLKmode((void) 0, E_BLKmode))
3206 mode = DImode(scalar_int_mode ((scalar_int_mode::from_int) E_DImode));
3207 }
3208
3209 return gen_reg_or_parallel (mode, orig_mode, regno);
3210}
3211
3212/* Return where to put the arguments to a function.
3213 Return zero to push the argument on the stack, or a hard register in which to store the argument.
3214
3215 ARG describes the argument while CUM gives information about the
3216 preceding args and about the function being called. */
3217
3218static rtx
3219ix86_function_arg (cumulative_args_t cum_v, const function_arg_info &arg)
3220{
3221 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3222 machine_mode mode = arg.mode;
3223 HOST_WIDE_INTlong bytes, words;
3224 rtx reg;
3225
3226 if (!cum->caller && cfun(cfun + 0)->machine->func_type != TYPE_NORMAL)
3227 {
3228 gcc_assert (arg.type != NULL_TREE)((void)(!(arg.type != (tree) __null) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3228, __FUNCTION__), 0 : 0))
;
3229 if (POINTER_TYPE_P (arg.type)(((enum tree_code) (arg.type)->base.code) == POINTER_TYPE ||
((enum tree_code) (arg.type)->base.code) == REFERENCE_TYPE
)
)
3230 {
3231 /* This is the pointer argument. */
3232 gcc_assert (TYPE_MODE (arg.type) == Pmode)((void)(!(((((enum tree_code) ((tree_class_check ((arg.type),
(tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3232, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(arg.type) : (arg.type)->type_common.mode) == (global_options
.x_ix86_pmode == PMODE_DI ? (scalar_int_mode ((scalar_int_mode
::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode::
from_int) E_SImode)))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3232, __FUNCTION__), 0 : 0))
;
3233 /* It is at -WORD(AP) in the current frame in interrupt and
3234 exception handlers. */
3235 reg = plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, arg_pointer_rtx((this_target_rtl->x_global_rtl)[GR_ARG_POINTER]), -UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
);
3236 }
3237 else
3238 {
3239 gcc_assert (cfun->machine->func_type == TYPE_EXCEPTION((void)(!((cfun + 0)->machine->func_type == TYPE_EXCEPTION
&& ((enum tree_code) (arg.type)->base.code) == INTEGER_TYPE
&& ((((enum tree_code) ((tree_class_check ((arg.type
), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3241, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(arg.type) : (arg.type)->type_common.mode) == word_mode) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3241, __FUNCTION__), 0 : 0))
3240 && TREE_CODE (arg.type) == INTEGER_TYPE((void)(!((cfun + 0)->machine->func_type == TYPE_EXCEPTION
&& ((enum tree_code) (arg.type)->base.code) == INTEGER_TYPE
&& ((((enum tree_code) ((tree_class_check ((arg.type
), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3241, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(arg.type) : (arg.type)->type_common.mode) == word_mode) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3241, __FUNCTION__), 0 : 0))
3241 && TYPE_MODE (arg.type) == word_mode)((void)(!((cfun + 0)->machine->func_type == TYPE_EXCEPTION
&& ((enum tree_code) (arg.type)->base.code) == INTEGER_TYPE
&& ((((enum tree_code) ((tree_class_check ((arg.type
), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3241, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(arg.type) : (arg.type)->type_common.mode) == word_mode) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3241, __FUNCTION__), 0 : 0))
;
3242 /* The error code is the word-mode integer argument at
3243 -2 * WORD(AP) in the current frame of the exception
3244 handler. */
3245 reg = gen_rtx_MEM (word_mode,
3246 plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
,
3247 arg_pointer_rtx((this_target_rtl->x_global_rtl)[GR_ARG_POINTER]),
3248 -2 * UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
));
3249 }
3250 return reg;
3251 }
3252
3253 bytes = arg.promoted_size_in_bytes ();
3254 words = CEIL (bytes, UNITS_PER_WORD)(((bytes) + ((((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4)) - 1) / ((((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4)))
;
3255
3256 /* To simplify the code below, represent vector types with a vector mode
3257 even if MMX/SSE are not active. */
3258 if (arg.type && TREE_CODE (arg.type)((enum tree_code) (arg.type)->base.code) == VECTOR_TYPE)
3259 mode = type_natural_mode (arg.type, cum, false);
3260
3261 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
3262 {
3263 enum calling_abi call_abi = cum ? cum->call_abi : ix86_abiglobal_options.x_ix86_abi;
3264
3265 if (call_abi == MS_ABI)
3266 reg = function_arg_ms_64 (cum, mode, arg.mode, arg.named,
3267 arg.type, bytes);
3268 else
3269 reg = function_arg_64 (cum, mode, arg.mode, arg.type, arg.named);
3270 }
3271 else
3272 reg = function_arg_32 (cum, mode, arg.mode, arg.type, bytes, words);
3273
3274  /* Track whether there are outgoing arguments on the stack. */
3275 if (reg == NULL_RTX(rtx) 0 && cum->caller)
3276 cfun(cfun + 0)->machine->outgoing_args_on_stack = true;
3277
3278 return reg;
3279}
3280
3281/* A C expression that indicates when an argument must be passed by
3282 reference. If nonzero for an argument, a copy of that argument is
3283 made in memory and a pointer to the argument is passed instead of
3284 the argument itself. The pointer is passed in whatever way is
3285 appropriate for passing a pointer to that type. */
3286
3287static bool
3288ix86_pass_by_reference (cumulative_args_t cum_v, const function_arg_info &arg)
3289{
3290 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
3291
3292 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
3293 {
3294 enum calling_abi call_abi = cum ? cum->call_abi : ix86_abiglobal_options.x_ix86_abi;
3295
3296 /* See Windows x64 Software Convention. */
3297 if (call_abi == MS_ABI)
3298 {
3299 HOST_WIDE_INTlong msize = GET_MODE_SIZE (arg.mode)((unsigned short) mode_to_bytes (arg.mode).coeffs[0]);
3300
3301 if (tree type = arg.type)
3302 {
3303 /* Arrays are passed by reference. */
3304 if (TREE_CODE (type)((enum tree_code) (type)->base.code) == ARRAY_TYPE)
3305 return true;
3306
3307 if (RECORD_OR_UNION_TYPE_P (type)(((enum tree_code) (type)->base.code) == RECORD_TYPE || ((
enum tree_code) (type)->base.code) == UNION_TYPE || ((enum
tree_code) (type)->base.code) == QUAL_UNION_TYPE)
)
3308 {
3309 /* Structs/unions of sizes other than 8, 16, 32, or 64 bits
3310 are passed by reference. */
3311 msize = int_size_in_bytes (type);
3312 }
3313 }
3314
3315 /* __m128 is passed by reference. */
3316 return msize != 1 && msize != 2 && msize != 4 && msize != 8;
3317 }
3318 else if (arg.type && int_size_in_bytes (arg.type) == -1)
3319 return true;
3320 }
3321
3322 return false;
3323}
3324
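The MS_ABI branch above boils down to a size test: only objects of exactly 1, 2, 4 or 8 bytes travel by value; everything else (arrays, odd-sized structs, __m128) is copied to memory and a pointer is passed instead. A standalone restatement of that predicate, illustrative only and not the GCC code:

    #include <stdbool.h>
    #include <stdio.h>

    /* Windows x64 calling convention: pass by value only if the object is
       exactly 1, 2, 4 or 8 bytes; otherwise pass a pointer to a copy.  */
    static bool
    ms_abi_passed_by_reference (long long size)
    {
      return size != 1 && size != 2 && size != 4 && size != 8;
    }

    struct s8  { int a, b; };          /* 8 bytes  -> by value      */
    struct s12 { int a, b, c; };       /* 12 bytes -> by reference  */

    int
    main (void)
    {
      printf ("struct s8:  by reference = %d\n",
              ms_abi_passed_by_reference ((long long) sizeof (struct s8)));
      printf ("struct s12: by reference = %d\n",
              ms_abi_passed_by_reference ((long long) sizeof (struct s12)));
      return 0;
    }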
3325/* Return true when TYPE should be 128bit aligned for 32bit argument
3326 passing ABI. XXX: This function is obsolete and is only used for
3327 checking psABI compatibility with previous versions of GCC. */
3328
3329static bool
3330ix86_compat_aligned_value_p (const_tree type)
3331{
3332 machine_mode mode = TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3332, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
;
3333 if (((TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
&& SSE_REG_MODE_P (mode)((mode) == ((void) 0, E_V1TImode) || (mode) == (scalar_int_mode
((scalar_int_mode::from_int) E_TImode)) || (mode) == ((void)
0, E_V16QImode) || (mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_TFmode)) || (mode) == ((void) 0, E_V8HImode) ||
(mode) == ((void) 0, E_V2DFmode) || (mode) == ((void) 0, E_V2DImode
) || (mode) == ((void) 0, E_V4SFmode) || (mode) == ((void) 0,
E_V4SImode) || (mode) == ((void) 0, E_V32QImode) || (mode) ==
((void) 0, E_V16HImode) || (mode) == ((void) 0, E_V8SImode) ||
(mode) == ((void) 0, E_V4DImode) || (mode) == ((void) 0, E_V8SFmode
) || (mode) == ((void) 0, E_V4DFmode) || (mode) == ((void) 0,
E_V2TImode) || (mode) == ((void) 0, E_V8DImode) || (mode) ==
((void) 0, E_V64QImode) || (mode) == ((void) 0, E_V16SImode)
|| (mode) == ((void) 0, E_V32HImode) || (mode) == ((void) 0,
E_V8DFmode) || (mode) == ((void) 0, E_V16SFmode))
)
3334 || mode == TDmode(scalar_float_mode ((scalar_float_mode::from_int) E_TDmode))
3335 || mode == TFmode(scalar_float_mode ((scalar_float_mode::from_int) E_TFmode))
3336 || mode == TCmode(complex_mode ((complex_mode::from_int) E_TCmode)))
3337 && (!TYPE_USER_ALIGN (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3337, __FUNCTION__))->base.u.bits.user_align)
|| TYPE_ALIGN (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3337, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((type)->type_common.align - 1) : 0)
> 128))
3338 return true;
3339 if (TYPE_ALIGN (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3339, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((type)->type_common.align - 1) : 0)
< 128)
3340 return false;
3341
3342 if (AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
)
3343 {
3344 /* Walk the aggregates recursively. */
3345 switch (TREE_CODE (type)((enum tree_code) (type)->base.code))
3346 {
3347 case RECORD_TYPE:
3348 case UNION_TYPE:
3349 case QUAL_UNION_TYPE:
3350 {
3351 tree field;
3352
3353 /* Walk all the structure fields. */
3354 for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3354, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
; field; field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3354, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3354, __FUNCTION__))->common.chain))
)
3355 {
3356 if (TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL
3357 && ix86_compat_aligned_value_p (TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3357, __FUNCTION__))->typed.type)
))
3358 return true;
3359 }
3360 break;
3361 }
3362
3363 case ARRAY_TYPE:
3364	  /* Just for use if some languages pass arrays by value. */
3365 if (ix86_compat_aligned_value_p (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3365, __FUNCTION__))->typed.type)
))
3366 return true;
3367 break;
3368
3369 default:
3370 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3370, __FUNCTION__))
;
3371 }
3372 }
3373 return false;
3374}
3375
3376/* Return the alignment boundary for MODE and TYPE with alignment ALIGN.
3377 XXX: This function is obsolete and is only used for checking psABI
3378 compatibility with previous versions of GCC. */
3379
3380static unsigned int
3381ix86_compat_function_arg_boundary (machine_mode mode,
3382 const_tree type, unsigned int align)
3383{
3384 /* In 32bit, only _Decimal128 and __float128 are aligned to their
3385 natural boundaries. */
3386 if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
&& mode != TDmode(scalar_float_mode ((scalar_float_mode::from_int) E_TDmode)) && mode != TFmode(scalar_float_mode ((scalar_float_mode::from_int) E_TFmode)))
3387 {
3388 /* i386 ABI defines all arguments to be 4 byte aligned. We have to
3389 make an exception for SSE modes since these require 128bit
3390 alignment.
3391
3392 The handling here differs from field_alignment. ICC aligns MMX
3393 arguments to 4 byte boundaries, while structure fields are aligned
3394 to 8 byte boundaries. */
3395 if (!type)
3396 {
3397 if (!(TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
&& SSE_REG_MODE_P (mode)((mode) == ((void) 0, E_V1TImode) || (mode) == (scalar_int_mode
((scalar_int_mode::from_int) E_TImode)) || (mode) == ((void)
0, E_V16QImode) || (mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_TFmode)) || (mode) == ((void) 0, E_V8HImode) ||
(mode) == ((void) 0, E_V2DFmode) || (mode) == ((void) 0, E_V2DImode
) || (mode) == ((void) 0, E_V4SFmode) || (mode) == ((void) 0,
E_V4SImode) || (mode) == ((void) 0, E_V32QImode) || (mode) ==
((void) 0, E_V16HImode) || (mode) == ((void) 0, E_V8SImode) ||
(mode) == ((void) 0, E_V4DImode) || (mode) == ((void) 0, E_V8SFmode
) || (mode) == ((void) 0, E_V4DFmode) || (mode) == ((void) 0,
E_V2TImode) || (mode) == ((void) 0, E_V8DImode) || (mode) ==
((void) 0, E_V64QImode) || (mode) == ((void) 0, E_V16SImode)
|| (mode) == ((void) 0, E_V32HImode) || (mode) == ((void) 0,
E_V8DFmode) || (mode) == ((void) 0, E_V16SFmode))
))
3398 align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
;
3399 }
3400 else
3401 {
3402 if (!ix86_compat_aligned_value_p (type))
3403 align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
;
3404 }
3405 }
3406 if (align > BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
)
3407 align = BIGGEST_ALIGNMENT(((global_options.x_target_flags & (1U << 12)) != 0
) ? 32 : (((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0) ? 512 : (((global_options.x_ix86_isa_flags & (
1UL << 8)) != 0) ? 256 : 128)))
;
3408 return align;
3409}
3410
3411/* Return true when TYPE should be 128bit aligned for 32bit argument
3412 passing ABI. */
3413
3414static bool
3415ix86_contains_aligned_value_p (const_tree type)
3416{
3417 machine_mode mode = TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3417, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
;
3418
3419 if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)) || mode == XCmode(complex_mode ((complex_mode::from_int) E_XCmode)))
3420 return false;
3421
3422 if (TYPE_ALIGN (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3422, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((type)->type_common.align - 1) : 0)
< 128)
3423 return false;
3424
3425 if (AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
)
3426 {
3427 /* Walk the aggregates recursively. */
3428 switch (TREE_CODE (type)((enum tree_code) (type)->base.code))
3429 {
3430 case RECORD_TYPE:
3431 case UNION_TYPE:
3432 case QUAL_UNION_TYPE:
3433 {
3434 tree field;
3435
3436 /* Walk all the structure fields. */
3437 for (field = TYPE_FIELDS (type)((tree_check3 ((type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3437, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
;
3438 field;
3439 field = DECL_CHAIN (field)(((contains_struct_check (((contains_struct_check ((field), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3439, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3439, __FUNCTION__))->common.chain))
)
3440 {
3441 if (TREE_CODE (field)((enum tree_code) (field)->base.code) == FIELD_DECL
3442 && ix86_contains_aligned_value_p (TREE_TYPE (field)((contains_struct_check ((field), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3442, __FUNCTION__))->typed.type)
))
3443 return true;
3444 }
3445 break;
3446 }
3447
3448 case ARRAY_TYPE:
3449	  /* Just for use if some languages pass arrays by value. */
3450 if (ix86_contains_aligned_value_p (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3450, __FUNCTION__))->typed.type)
))
3451 return true;
3452 break;
3453
3454 default:
3455 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3455, __FUNCTION__))
;
3456 }
3457 }
3458 else
3459 return TYPE_ALIGN (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3459, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((type)->type_common.align - 1) : 0)
>= 128;
3460
3461 return false;
3462}
3463
3464/* Gives the alignment boundary, in bits, of an argument with the
3465 specified mode and type. */
3466
3467static unsigned int
3468ix86_function_arg_boundary (machine_mode mode, const_tree type)
3469{
3470 unsigned int align;
3471 if (type)
3472 {
3473      /* Since the main variant type is used for the call, convert TYPE
3474	 to its main variant. */
3475 type = TYPE_MAIN_VARIANT (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3475, __FUNCTION__))->type_common.main_variant)
;
3476 align = TYPE_ALIGN (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3476, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((type)->type_common.align - 1) : 0)
;
3477 if (TYPE_EMPTY_P (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3477, __FUNCTION__))->type_common.empty_flag)
)
3478 return PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
;
3479 }
3480 else
3481 align = GET_MODE_ALIGNMENT (mode)get_mode_alignment (mode);
3482 if (align < PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
)
3483 align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
;
3484 else
3485 {
3486 static bool warned;
3487 unsigned int saved_align = align;
3488
3489 if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
3490 {
3491 /* i386 ABI defines XFmode arguments to be 4 byte aligned. */
3492 if (!type)
3493 {
3494 if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)) || mode == XCmode(complex_mode ((complex_mode::from_int) E_XCmode)))
3495 align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
;
3496 }
3497 else if (!ix86_contains_aligned_value_p (type))
3498 align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
;
3499
3500 if (align < 128)
3501 align = PARM_BOUNDARY((8) * (((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4))
;
3502 }
3503
3504 if (warn_psabiglobal_options.x_warn_psabi
3505 && !warned
3506 && align != ix86_compat_function_arg_boundary (mode, type,
3507 saved_align))
3508 {
3509 warned = true;
3510 inform (input_location,
3511 "the ABI for passing parameters with %d-byte"
3512 " alignment has changed in GCC 4.6",
3513 align / BITS_PER_UNIT(8));
3514 }
3515 }
3516
3517 return align;
3518}
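The boundary computed here is what the caller must honor when laying out stack arguments. As a hypothetical ia32 illustration (not part of the analyzed source, and assuming -m32 -msse), a long double (XFmode) argument keeps the 4-byte alignment mandated by the i386 ABI note above, while a 16-byte vector argument keeps its 16-byte alignment, which is the GCC 4.6 psABI change the inform () call above warns about:

  typedef float v4sf __attribute__ ((vector_size (16)));

  /* Passed on the stack with 4-byte alignment on ia32 (the XFmode case above).  */
  long double take_ld (int a, long double x) { return x; }

  /* Keeps its natural 16-byte alignment (the >= 128-bit branch above).  */
  v4sf take_v4 (int a, v4sf v) { return v; }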
3519
3520/* Return true if N is a possible register number of function value. */
3521
3522static bool
3523ix86_function_value_regno_p (const unsigned int regno)
3524{
3525 switch (regno)
3526 {
3527 case AX_REG0:
3528 return true;
3529 case DX_REG1:
3530 return (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
|| ix86_cfun_abi () != MS_ABI);
3531 case DI_REG5:
3532 case SI_REG4:
3533 return TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
&& ix86_cfun_abi () != MS_ABI;
3534
3535 /* Complex values are returned in %st(0)/%st(1) pair. */
3536 case ST0_REG8:
3537 case ST1_REG9:
3538 /* TODO: The function should depend on current function ABI but
3539 builtins.c would need updating then. Therefore we use the
3540 default ABI. */
3541 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
&& ix86_cfun_abi () == MS_ABI)
3542 return false;
3543 return TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0
) && ((global_options.x_target_flags & (1U <<
1)) != 0) && !((global_options.x_target_flags & (
1U << 12)) != 0))
;
3544
3545 /* Complex values are returned in %xmm0/%xmm1 pair. */
3546 case XMM0_REG20:
3547 case XMM1_REG21:
3548 return TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
;
3549
3550 case MM0_REG28:
3551 if (TARGET_MACHO0 || TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
3552 return false;
3553 return TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) !=
0)
;
3554 }
3555
3556 return false;
3557}
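For reference, a few hypothetical C declarations (not from the analyzed source) showing where the register numbers accepted above actually carry return values under default flags:

  int ret_int (void) { return 1; }           /* AX_REG: %eax / %rax                 */
  double ret_dbl (void) { return 1.0; }      /* ia32: %st(0); x86-64 SysV: %xmm0    */
  _Complex long double ret_cxld (void)       /* x86-64 SysV: the %st(0)/%st(1) pair */
  { return 1.0L; }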
3558
3559/* Check whether the register REGNO should be zeroed on X86.
3560 When ALL_SSE_ZEROED is true, all SSE registers have been zeroed
3561 together, no need to zero it again.
3562 When NEED_ZERO_MMX is true, MMX registers should be cleared. */
3563
3564static bool
3565zero_call_used_regno_p (const unsigned int regno,
3566 bool all_sse_zeroed,
3567 bool need_zero_mmx)
3568{
3569 return GENERAL_REGNO_P (regno)((((unsigned long) ((regno)) - (unsigned long) (0) <= (unsigned
long) (7) - (unsigned long) (0))) || ((unsigned long) ((regno
)) - (unsigned long) (36) <= (unsigned long) (43) - (unsigned
long) (36)))
3570 || (!all_sse_zeroed && SSE_REGNO_P (regno)(((unsigned long) ((regno)) - (unsigned long) (20) <= (unsigned
long) (27) - (unsigned long) (20)) || ((unsigned long) ((regno
)) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned
long) (44)) || ((unsigned long) ((regno)) - (unsigned long) (
52) <= (unsigned long) (67) - (unsigned long) (52)))
)
3571 || MASK_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (68) <= (unsigned
long) (75) - (unsigned long) (68))
3572 || (need_zero_mmx && MMX_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (28) <= (unsigned
long) (35) - (unsigned long) (28))
);
3573}
3574
3575/* Return the machine_mode that is used to zero register REGNO. */
3576
3577static machine_mode
3578zero_call_used_regno_mode (const unsigned int regno)
3579{
3580 /* NB: We only need to zero the lower 32 bits for integer registers
3581     and the lower 128 bits for vector registers since destinations are
3582 zero-extended to the full register width. */
3583 if (GENERAL_REGNO_P (regno)((((unsigned long) ((regno)) - (unsigned long) (0) <= (unsigned
long) (7) - (unsigned long) (0))) || ((unsigned long) ((regno
)) - (unsigned long) (36) <= (unsigned long) (43) - (unsigned
long) (36)))
)
3584 return SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode));
3585 else if (SSE_REGNO_P (regno)(((unsigned long) ((regno)) - (unsigned long) (20) <= (unsigned
long) (27) - (unsigned long) (20)) || ((unsigned long) ((regno
)) - (unsigned long) (44) <= (unsigned long) (51) - (unsigned
long) (44)) || ((unsigned long) ((regno)) - (unsigned long) (
52) <= (unsigned long) (67) - (unsigned long) (52)))
)
3586 return V4SFmode((void) 0, E_V4SFmode);
3587 else if (MASK_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (68) <= (unsigned
long) (75) - (unsigned long) (68))
)
3588 return HImode(scalar_int_mode ((scalar_int_mode::from_int) E_HImode));
3589 else if (MMX_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (28) <= (unsigned
long) (35) - (unsigned long) (28))
)
3590 return V4HImode((void) 0, E_V4HImode);
3591 else
3592 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3592, __FUNCTION__))
;
3593}
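A compact model of the regno-to-mode mapping above, with the kind of instruction each SET typically expands to noted in comments (illustrative sketch only; the enum and function names are invented, and exact mnemonics depend on the enabled ISA):

  enum zero_class { ZC_GPR, ZC_SSE, ZC_MASK, ZC_MMX };

  const char *
  zero_mode_for (enum zero_class c)
  {
    switch (c)
      {
      case ZC_GPR:  return "SImode";    /* xorl %eax,%eax style; zero-extends to 64 bits */
      case ZC_SSE:  return "V4SFmode";  /* (v)xorps; VEX/EVEX encoding zeroes upper bits */
      case ZC_MASK: return "HImode";    /* kxorw on the mask register                    */
      case ZC_MMX:  return "V4HImode";  /* pxor %mmN,%mmN                                */
      default:      return "unreachable";
      }
  }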
3594
3595/* Generate a rtx to zero all vector registers together if possible,
3596 otherwise, return NULL. */
3597
3598static rtx
3599zero_all_vector_registers (HARD_REG_SET need_zeroed_hardregs)
3600{
3601 if (!TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0
)
)
3602 return NULL__null;
3603
3604 for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
3605 if ((IN_RANGE (regno, FIRST_SSE_REG, LAST_SSE_REG)((unsigned long) (regno) - (unsigned long) (20) <= (unsigned
long) (27) - (unsigned long) (20))
3606 || (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
3607 && (REX_SSE_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (44) <= (unsigned
long) (51) - (unsigned long) (44))
3608 || (TARGET_AVX512F((global_options.x_ix86_isa_flags & (1UL << 15)) !=
0)
&& EXT_REX_SSE_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (52) <= (unsigned
long) (67) - (unsigned long) (52))
))))
3609 && !TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
3610 return NULL__null;
3611
3612 return gen_avx_vzeroall ();
3613}
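In user terms (hypothetical example, not part of the analyzed source): when AVX is enabled and every vector register that must be cleared is in NEED_ZEROED_HARDREGS, a single vzeroall covers them all. A function such as the following, built with -mavx, is therefore expected to get one vzeroall in its epilogue rather than per-register clears:

  int __attribute__ ((zero_call_used_regs ("all")))
  leaf (int x)
  {
    return x * 3;
  }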
3614
3615/* Generate insns to zero all st registers together.
3616 Return true when zeroing instructions are generated.
3617 Assume the number of st registers that are zeroed is num_of_st,
3618 we will emit the following sequence to zero them together:
3619 fldz; \
3620 fldz; \
3621 ...
3622 fldz; \
3623 fstp %%st(0); \
3624 fstp %%st(0); \
3625 ...
3626 fstp %%st(0);
3627   i.e., num_of_st fldz followed by num_of_st fstp to clear the stack
3628   and mark the stack slots empty.
3629
3630 How to compute the num_of_st:
3631 There is no direct mapping from stack registers to hard register
3632 numbers. If one stack register needs to be cleared, we don't know
3633 where in the stack the value remains. So, if any stack register
3634 needs to be cleared, the whole stack should be cleared. However,
3635 x87 stack registers that hold the return value should be excluded.
3636 x87 returns in the top (two for complex values) register, so
3637 num_of_st should be 7/6 when x87 returns, otherwise it will be 8. */
3638
3639
3640static bool
3641zero_all_st_registers (HARD_REG_SET need_zeroed_hardregs)
3642{
3643
3644 /* If the FPU is disabled, no need to zero all st registers. */
3645 if (! (TARGET_80387((global_options.x_target_flags & (1U << 1)) != 0) || TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0
) && ((global_options.x_target_flags & (1U <<
1)) != 0) && !((global_options.x_target_flags & (
1U << 12)) != 0))
))
3646 return false;
3647
3648 unsigned int num_of_st = 0;
3649 for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
3650 if ((STACK_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (8) <= (unsigned
long) (15) - (unsigned long) (8))
|| MMX_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (28) <= (unsigned
long) (35) - (unsigned long) (28))
)
3651 && TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
3652 {
3653 num_of_st++;
3654 break;
3655 }
3656
3657 if (num_of_st == 0)
3658 return false;
3659
3660 bool return_with_x87 = false;
3661 return_with_x87 = (crtl(&x_rtl)->return_rtx
3662 && (STACK_REG_P (crtl->return_rtx)((((enum rtx_code) ((&x_rtl)->return_rtx)->code) ==
REG) && ((unsigned long) (((rhs_regno((&x_rtl)->
return_rtx)))) - (unsigned long) (8) <= (unsigned long) (15
) - (unsigned long) (8)))
));
3663
3664 bool complex_return = false;
3665 complex_return = (crtl(&x_rtl)->return_rtx
3666 && COMPLEX_MODE_P (GET_MODE (crtl->return_rtx))(((enum mode_class) mode_class[((machine_mode) ((&x_rtl)->
return_rtx)->mode)]) == MODE_COMPLEX_INT || ((enum mode_class
) mode_class[((machine_mode) ((&x_rtl)->return_rtx)->
mode)]) == MODE_COMPLEX_FLOAT)
);
3667
3668 if (return_with_x87)
3669 if (complex_return)
3670 num_of_st = 6;
3671 else
3672 num_of_st = 7;
3673 else
3674 num_of_st = 8;
3675
3676 rtx st_reg = gen_rtx_REG (XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)), FIRST_STACK_REG8);
3677 for (unsigned int i = 0; i < num_of_st; i++)
3678 emit_insn (gen_rtx_SET (st_reg, CONST0_RTX (XFmode))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((st_reg
)), (((const_tiny_rtx[0][(int) ((scalar_float_mode ((scalar_float_mode
::from_int) E_XFmode)))]))) )
);
3679
3680 for (unsigned int i = 0; i < num_of_st; i++)
3681 {
3682 rtx insn;
3683 insn = emit_insn (gen_rtx_SET (st_reg, st_reg)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((st_reg
)), ((st_reg)) )
);
3684 add_reg_note (insn, REG_DEAD, st_reg);
3685 }
3686 return true;
3687}
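A minimal model of the num_of_st computation described in the comment above (illustrative only, not the analyzed source); the emitted sequence is then num_of_st fldz instructions followed by num_of_st fstp %st(0) instructions:

  unsigned int
  num_of_st_model (int return_with_x87, int complex_return)
  {
    if (!return_with_x87)
      return 8;                     /* clear the whole x87 stack            */
    return complex_return ? 6       /* %st(0)/%st(1) hold the return value  */
                          : 7;      /* %st(0) holds the return value        */
  }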
3688
3689
3690/* When the routine exit in MMX mode, if any ST register needs
3691 to be zeroed, we should clear all MMX registers except the
3692 RET_MMX_REGNO that holds the return value. */
3693static bool
3694zero_all_mm_registers (HARD_REG_SET need_zeroed_hardregs,
3695 unsigned int ret_mmx_regno)
3696{
3697 bool need_zero_all_mm = false;
3698 for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
3699 if (STACK_REGNO_P (regno)((unsigned long) ((regno)) - (unsigned long) (8) <= (unsigned
long) (15) - (unsigned long) (8))
3700 && TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
3701 {
3702 need_zero_all_mm = true;
3703 break;
3704 }
3705
3706 if (!need_zero_all_mm)
3707 return false;
3708
3709 rtx zero_mmx = NULL_RTX(rtx) 0;
3710 machine_mode mode = V4HImode((void) 0, E_V4HImode);
3711 for (unsigned int regno = FIRST_MMX_REG28; regno <= LAST_MMX_REG35; regno++)
3712 if (regno != ret_mmx_regno)
3713 {
3714 rtx reg = gen_rtx_REG (mode, regno);
3715 if (zero_mmx == NULL_RTX(rtx) 0)
3716 {
3717 zero_mmx = reg;
3718 emit_insn (gen_rtx_SET (reg, CONST0_RTX (mode))gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg))
, (((const_tiny_rtx[0][(int) (mode)]))) )
);
3719 }
3720 else
3721 emit_move_insn (reg, zero_mmx);
3722 }
3723 return true;
3724}
3725
3726/* TARGET_ZERO_CALL_USED_REGS. */
3727/* Generate a sequence of instructions that zero registers specified by
3728 NEED_ZEROED_HARDREGS. Return the ZEROED_HARDREGS that are actually
3729 zeroed. */
3730static HARD_REG_SET
3731ix86_zero_call_used_regs (HARD_REG_SET need_zeroed_hardregs)
3732{
3733 HARD_REG_SET zeroed_hardregs;
3734 bool all_sse_zeroed = false;
3735 bool all_st_zeroed = false;
3736 bool all_mm_zeroed = false;
3737
3738 CLEAR_HARD_REG_SET (zeroed_hardregs);
3739
3740 /* first, let's see whether we can zero all vector registers together. */
3741 rtx zero_all_vec_insn = zero_all_vector_registers (need_zeroed_hardregs);
3742 if (zero_all_vec_insn)
3743 {
3744 emit_insn (zero_all_vec_insn);
3745 all_sse_zeroed = true;
3746 }
3747
3748   /* mm/st registers share the same register set; we should follow these
3749      rules to clear them:
3750 MMX exit mode x87 exit mode
3751 -------------|----------------------|---------------
3752 uses x87 reg | clear all MMX | clear all x87
3753 uses MMX reg | clear individual MMX | clear all x87
3754 x87 + MMX | clear all MMX | clear all x87
3755
3756      First, we should decide which mode (MMX mode or x87 mode) the function
3757      exits with.  */
3758
3759 bool exit_with_mmx_mode = (crtl(&x_rtl)->return_rtx
3760 && (MMX_REG_P (crtl->return_rtx)((((enum rtx_code) ((&x_rtl)->return_rtx)->code) ==
REG) && ((unsigned long) (((rhs_regno((&x_rtl)->
return_rtx)))) - (unsigned long) (28) <= (unsigned long) (
35) - (unsigned long) (28)))
));
3761
3762 if (!exit_with_mmx_mode)
3763 /* x87 exit mode, we should zero all st registers together. */
3764 {
3765 all_st_zeroed = zero_all_st_registers (need_zeroed_hardregs);
3766 if (all_st_zeroed)
3767 SET_HARD_REG_BIT (zeroed_hardregs, FIRST_STACK_REG8);
3768 }
3769 else
3770 /* MMX exit mode, check whether we can zero all mm registers. */
3771 {
3772 unsigned int exit_mmx_regno = REGNO (crtl->return_rtx)(rhs_regno((&x_rtl)->return_rtx));
3773 all_mm_zeroed = zero_all_mm_registers (need_zeroed_hardregs,
3774 exit_mmx_regno);
3775 if (all_mm_zeroed)
3776 for (unsigned int regno = FIRST_MMX_REG28; regno <= LAST_MMX_REG35; regno++)
3777 if (regno != exit_mmx_regno)
3778 SET_HARD_REG_BIT (zeroed_hardregs, regno);
3779 }
3780
3781 /* Now, generate instructions to zero all the other registers. */
3782
3783 rtx zero_gpr = NULL_RTX(rtx) 0;
3784 rtx zero_vector = NULL_RTX(rtx) 0;
3785 rtx zero_mask = NULL_RTX(rtx) 0;
3786 rtx zero_mmx = NULL_RTX(rtx) 0;
3787
3788 for (unsigned int regno = 0; regno < FIRST_PSEUDO_REGISTER76; regno++)
3789 {
3790 if (!TEST_HARD_REG_BIT (need_zeroed_hardregs, regno))
3791 continue;
3792 if (!zero_call_used_regno_p (regno, all_sse_zeroed,
3793 exit_with_mmx_mode && !all_mm_zeroed))
3794 continue;
3795
3796 SET_HARD_REG_BIT (zeroed_hardregs, regno);
3797
3798 rtx reg, tmp, zero_rtx;
3799 machine_mode mode = zero_call_used_regno_mode (regno);
3800
3801 reg = gen_rtx_REG (mode, regno);
3802 zero_rtx = CONST0_RTX (mode)(const_tiny_rtx[0][(int) (mode)]);
3803
3804 if (mode == SImode(scalar_int_mode ((scalar_int_mode::from_int) E_SImode)))
3805 if (zero_gpr == NULL_RTX(rtx) 0)
3806 {
3807 zero_gpr = reg;
3808 tmp = gen_rtx_SET (reg, zero_rtx)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg))
, ((zero_rtx)) )
;
3809 if (!TARGET_USE_MOV0ix86_tune_features[X86_TUNE_USE_MOV0] || optimize_insn_for_size_p ())
3810 {
3811 rtx clob = gen_rtx_CLOBBER (VOIDmode,gen_rtx_fmt_e_stat ((CLOBBER), ((((void) 0, E_VOIDmode))), ((
gen_rtx_REG (((void) 0, E_CCmode), 17))) )
3812 gen_rtx_REG (CCmode,gen_rtx_fmt_e_stat ((CLOBBER), ((((void) 0, E_VOIDmode))), ((
gen_rtx_REG (((void) 0, E_CCmode), 17))) )
3813 FLAGS_REG))gen_rtx_fmt_e_stat ((CLOBBER), ((((void) 0, E_VOIDmode))), ((
gen_rtx_REG (((void) 0, E_CCmode), 17))) )
;
3814 tmp = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, tmp, clob))) )
3815 tmp,gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, tmp, clob))) )
3816 clob))gen_rtx_fmt_E_stat ((PARALLEL), ((((void) 0, E_VOIDmode))), (
(gen_rtvec (2, tmp, clob))) )
;
3817 }
3818 emit_insn (tmp);
3819 }
3820 else
3821 emit_move_insn (reg, zero_gpr);
3822 else if (mode == V4SFmode((void) 0, E_V4SFmode))
3823 if (zero_vector == NULL_RTX(rtx) 0)
3824 {
3825 zero_vector = reg;
3826 tmp = gen_rtx_SET (reg, zero_rtx)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg))
, ((zero_rtx)) )
;
3827 emit_insn (tmp);
3828 }
3829 else
3830 emit_move_insn (reg, zero_vector);
3831 else if (mode == HImode(scalar_int_mode ((scalar_int_mode::from_int) E_HImode)))
3832 if (zero_mask == NULL_RTX(rtx) 0)
3833 {
3834 zero_mask = reg;
3835 tmp = gen_rtx_SET (reg, zero_rtx)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg))
, ((zero_rtx)) )
;
3836 emit_insn (tmp);
3837 }
3838 else
3839 emit_move_insn (reg, zero_mask);
3840 else if (mode == V4HImode((void) 0, E_V4HImode))
3841 if (zero_mmx == NULL_RTX(rtx) 0)
3842 {
3843 zero_mmx = reg;
3844 tmp = gen_rtx_SET (reg, zero_rtx)gen_rtx_fmt_ee_stat ((SET), (((void) 0, E_VOIDmode)), ((reg))
, ((zero_rtx)) )
;
3845 emit_insn (tmp);
3846 }
3847 else
3848 emit_move_insn (reg, zero_mmx);
3849 else
3850 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3850, __FUNCTION__))
;
3851 }
3852 return zeroed_hardregs;
3853}
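The mm/st policy table above can be restated as a small decision helper; this is a hedged sketch of the rules only (the names are invented, not part of the analyzed source):

  const char *
  mm_st_policy (int exit_with_mmx_mode, int uses_x87, int uses_mmx)
  {
    if (!exit_with_mmx_mode)                    /* x87 exit mode */
      return (uses_x87 || uses_mmx) ? "clear all x87" : "nothing to clear";
    if (uses_x87)                               /* MMX exit mode */
      return "clear all MMX except the return register";
    if (uses_mmx)
      return "clear individual MMX registers";
    return "nothing to clear";
  }

At the user level this hook is driven by -fzero-call-used-regs= or the zero_call_used_regs function attribute.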
3854
3855/* Define how to find the value returned by a function.
3856 VALTYPE is the data type of the value (as a tree).
3857 If the precise function being called is known, FUNC is its FUNCTION_DECL;
3858 otherwise, FUNC is 0. */
3859
3860static rtx
3861function_value_32 (machine_mode orig_mode, machine_mode mode,
3862 const_tree fntype, const_tree fn)
3863{
3864 unsigned int regno;
3865
3866 /* 8-byte vector modes in %mm0. See ix86_return_in_memory for where
3867 we normally prevent this case when mmx is not available. However
3868 some ABIs may require the result to be returned like DImode. */
3869 if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
&& GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 8)
3870 regno = FIRST_MMX_REG28;
3871
3872 /* 16-byte vector modes in %xmm0. See ix86_return_in_memory for where
3873 we prevent this case when sse is not available. However some ABIs
3874 may require the result to be returned like integer TImode. */
3875 else if (mode == TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode))
3876 || (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
&& GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 16))
3877 regno = FIRST_SSE_REG20;
3878
3879 /* 32-byte vector modes in %ymm0. */
3880 else if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
&& GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 32)
3881 regno = FIRST_SSE_REG20;
3882
3883 /* 64-byte vector modes in %zmm0. */
3884 else if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
&& GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 64)
3885 regno = FIRST_SSE_REG20;
3886
3887 /* Floating point return values in %st(0) (unless -mno-fp-ret-in-387). */
3888 else if (X87_FLOAT_MODE_P (mode)(((global_options.x_target_flags & (1U << 1)) != 0)
&& ((mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_SFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_DFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_XFmode))))
&& TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0
) && ((global_options.x_target_flags & (1U <<
1)) != 0) && !((global_options.x_target_flags & (
1U << 12)) != 0))
)
3889 regno = FIRST_FLOAT_REG8;
3890 else
3891 /* Most things go in %eax. */
3892 regno = AX_REG0;
3893
3894 /* Override FP return register with %xmm0 for local functions when
3895 SSE math is enabled or for functions with sseregparm attribute. */
3896 if ((fn || fntype) && (mode == SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)) || mode == DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode))))
3897 {
3898 int sse_level = ix86_function_sseregparm (fntype, fn, false);
3899 if (sse_level == -1)
3900 {
3901 error ("calling %qD with SSE calling convention without "
3902 "SSE/SSE2 enabled", fn);
3903 sorry ("this is a GCC bug that can be worked around by adding "
3904 "attribute used to function called");
3905 }
3906 else if ((sse_level >= 1 && mode == SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)))
3907 || (sse_level == 2 && mode == DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode))))
3908 regno = FIRST_SSE_REG20;
3909 }
3910
3911 /* OImode shouldn't be used directly. */
3912 gcc_assert (mode != OImode)((void)(!(mode != (scalar_int_mode ((scalar_int_mode::from_int
) E_OImode))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 3912, __FUNCTION__), 0 : 0))
;
3913
3914 return gen_rtx_REG (orig_mode, regno);
3915}
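Hypothetical ia32 examples of the selection above (not from the analyzed source; assumes -m32 with default FP flags, plus -msse for the vector case):

  typedef float v4sf __attribute__ ((vector_size (16)));

  int    r_eax  (void) { return 0; }                    /* AX_REG: %eax           */
  double r_st0  (void) { return 0.0; }                  /* x87 return: %st(0)     */
  v4sf   r_xmm0 (void) { return (v4sf) {0, 0, 0, 0}; }  /* 16-byte vector: %xmm0  */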
3916
3917static rtx
3918function_value_64 (machine_mode orig_mode, machine_mode mode,
3919 const_tree valtype)
3920{
3921 rtx ret;
3922
3923 /* Handle libcalls, which don't provide a type node. */
3924 if (valtype == NULL__null)
3925 {
3926 unsigned int regno;
3927
3928 switch (mode)
3929 {
3930 case E_SFmode:
3931 case E_SCmode:
3932 case E_DFmode:
3933 case E_DCmode:
3934 case E_TFmode:
3935 case E_SDmode:
3936 case E_DDmode:
3937 case E_TDmode:
3938 regno = FIRST_SSE_REG20;
3939 break;
3940 case E_XFmode:
3941 case E_XCmode:
3942 regno = FIRST_FLOAT_REG8;
3943 break;
3944 case E_TCmode:
3945 return NULL__null;
3946 default:
3947 regno = AX_REG0;
3948 }
3949
3950 return gen_rtx_REG (mode, regno);
3951 }
3952 else if (POINTER_TYPE_P (valtype)(((enum tree_code) (valtype)->base.code) == POINTER_TYPE ||
((enum tree_code) (valtype)->base.code) == REFERENCE_TYPE
)
)
3953 {
3954 /* Pointers are always returned in word_mode. */
3955 mode = word_mode;
3956 }
3957
3958 ret = construct_container (mode, orig_mode, valtype, 1,
3959 X86_64_REGPARM_MAX6, X86_64_SSE_REGPARM_MAX8,
3960 x86_64_int_return_registers, 0);
3961
3962 /* For zero sized structures, construct_container returns NULL, but we
3963      need to keep the rest of the compiler happy by returning a meaningful value.  */
3964 if (!ret)
3965 ret = gen_rtx_REG (orig_mode, AX_REG0);
3966
3967 return ret;
3968}
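Hypothetical x86-64 SysV examples of the mapping above (not from the analyzed source):

  double      r_xmm0 (void) { return 0.0; }   /* SFmode/DFmode class: %xmm0            */
  long double r_st0  (void) { return 0.0L; }  /* XFmode class: %st(0)                  */
  __int128    r_pair (void) { return 0; }     /* %rax:%rdx via construct_container     */
  void       *r_rax  (void) { return 0; }     /* pointer, promoted to word_mode, %rax  */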
3969
3970static rtx
3971function_value_ms_32 (machine_mode orig_mode, machine_mode mode,
3972 const_tree fntype, const_tree fn, const_tree valtype)
3973{
3974 unsigned int regno;
3975
3976 /* Floating point return values in %st(0)
3977 (unless -mno-fp-ret-in-387 or aggregate type of up to 8 bytes). */
3978 if (X87_FLOAT_MODE_P (mode)(((global_options.x_target_flags & (1U << 1)) != 0)
&& ((mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_SFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_DFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_XFmode))))
&& TARGET_FLOAT_RETURNS_IN_80387(((global_options.x_target_flags & (1U << 11)) != 0
) && ((global_options.x_target_flags & (1U <<
1)) != 0) && !((global_options.x_target_flags & (
1U << 12)) != 0))
3979 && (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) > 8
3980 || valtype == NULL_TREE(tree) __null || !AGGREGATE_TYPE_P (valtype)(((enum tree_code) (valtype)->base.code) == ARRAY_TYPE || (
((enum tree_code) (valtype)->base.code) == RECORD_TYPE || (
(enum tree_code) (valtype)->base.code) == UNION_TYPE || ((
enum tree_code) (valtype)->base.code) == QUAL_UNION_TYPE))
))
3981 {
3982 regno = FIRST_FLOAT_REG8;
3983 return gen_rtx_REG (orig_mode, regno);
3984 }
3985 else
3986 return function_value_32(orig_mode, mode, fntype,fn);
3987}
3988
3989static rtx
3990function_value_ms_64 (machine_mode orig_mode, machine_mode mode,
3991 const_tree valtype)
3992{
3993 unsigned int regno = AX_REG0;
3994
3995 if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
)
3996 {
3997 switch (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]))
3998 {
3999 case 16:
4000 if (valtype != NULL_TREE(tree) __null
4001 && !VECTOR_INTEGER_TYPE_P (valtype)((((enum tree_code) (valtype)->base.code) == VECTOR_TYPE) &&
((enum tree_code) (((contains_struct_check ((valtype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4001, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
4002 && !VECTOR_INTEGER_TYPE_P (valtype)((((enum tree_code) (valtype)->base.code) == VECTOR_TYPE) &&
((enum tree_code) (((contains_struct_check ((valtype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4002, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
4003 && !INTEGRAL_TYPE_P (valtype)(((enum tree_code) (valtype)->base.code) == ENUMERAL_TYPE ||
((enum tree_code) (valtype)->base.code) == BOOLEAN_TYPE ||
((enum tree_code) (valtype)->base.code) == INTEGER_TYPE)
4004 && !VECTOR_FLOAT_TYPE_P (valtype)((((enum tree_code) (valtype)->base.code) == VECTOR_TYPE) &&
((enum tree_code) (((contains_struct_check ((valtype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4004, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)
)
4005 break;
4006 if ((SCALAR_INT_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_INT || ((enum mode_class
) mode_class[mode]) == MODE_PARTIAL_INT)
|| VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
)
4007 && !COMPLEX_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_COMPLEX_INT || (
(enum mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT)
)
4008 regno = FIRST_SSE_REG20;
4009 break;
4010 case 8:
4011 case 4:
4012 if (valtype != NULL_TREE(tree) __null && AGGREGATE_TYPE_P (valtype)(((enum tree_code) (valtype)->base.code) == ARRAY_TYPE || (
((enum tree_code) (valtype)->base.code) == RECORD_TYPE || (
(enum tree_code) (valtype)->base.code) == UNION_TYPE || ((
enum tree_code) (valtype)->base.code) == QUAL_UNION_TYPE))
)
4013 break;
4014 if (mode == SFmode(scalar_float_mode ((scalar_float_mode::from_int) E_SFmode)) || mode == DFmode(scalar_float_mode ((scalar_float_mode::from_int) E_DFmode)))
4015 regno = FIRST_SSE_REG20;
4016 break;
4017 default:
4018 break;
4019 }
4020 }
4021 return gen_rtx_REG (orig_mode, regno);
4022}
4023
4024static rtx
4025ix86_function_value_1 (const_tree valtype, const_tree fntype_or_decl,
4026 machine_mode orig_mode, machine_mode mode)
4027{
4028 const_tree fn, fntype;
4029
4030 fn = NULL_TREE(tree) __null;
4031 if (fntype_or_decl && DECL_P (fntype_or_decl)(tree_code_type[(int) (((enum tree_code) (fntype_or_decl)->
base.code))] == tcc_declaration)
)
4032 fn = fntype_or_decl;
4033 fntype = fn ? TREE_TYPE (fn)((contains_struct_check ((fn), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4033, __FUNCTION__))->typed.type)
: fntype_or_decl;
4034
4035 if (ix86_function_type_abi (fntype) == MS_ABI)
4036 {
4037 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
4038 return function_value_ms_64 (orig_mode, mode, valtype);
4039 else
4040 return function_value_ms_32 (orig_mode, mode, fntype, fn, valtype);
4041 }
4042 else if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
4043 return function_value_64 (orig_mode, mode, valtype);
4044 else
4045 return function_value_32 (orig_mode, mode, fntype, fn);
4046}
4047
4048static rtx
4049ix86_function_value (const_tree valtype, const_tree fntype_or_decl, bool)
4050{
4051 machine_mode mode, orig_mode;
4052
4053 orig_mode = TYPE_MODE (valtype)((((enum tree_code) ((tree_class_check ((valtype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4053, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(valtype) : (valtype)->type_common.mode)
;
4054 mode = type_natural_mode (valtype, NULL__null, true);
4055 return ix86_function_value_1 (valtype, fntype_or_decl, orig_mode, mode);
4056}
4057
4058/* Pointer function arguments and return values are promoted to
4059 word_mode for normal functions. */
4060
4061static machine_mode
4062ix86_promote_function_mode (const_tree type, machine_mode mode,
4063 int *punsignedp, const_tree fntype,
4064 int for_return)
4065{
4066 if (cfun(cfun + 0)->machine->func_type == TYPE_NORMAL
4067 && type != NULL_TREE(tree) __null
4068 && POINTER_TYPE_P (type)(((enum tree_code) (type)->base.code) == POINTER_TYPE || (
(enum tree_code) (type)->base.code) == REFERENCE_TYPE)
)
4069 {
4070 *punsignedp = POINTERS_EXTEND_UNSIGNED1;
4071 return word_mode;
4072 }
4073 return default_promote_function_mode (type, mode, punsignedp, fntype,
4074 for_return);
4075}
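This matters most for 32-bit-pointer configurations on a 64-bit target, e.g. -mx32. A hypothetical sketch, not part of the analyzed source:

  /* Under -mx32, P is a 32-bit pointer but arrives zero-extended in a
     64-bit register (word_mode, POINTERS_EXTEND_UNSIGNED above), and the
     returned pointer is widened the same way.  */
  char *
  advance (char *p)
  {
    return p + 1;
  }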
4076
4077/* Return true if a structure, union or array with MODE containing FIELD
4078 should be accessed using BLKmode. */
4079
4080static bool
4081ix86_member_type_forces_blk (const_tree field, machine_mode mode)
4082{
4083 /* Union with XFmode must be in BLKmode. */
4084 return (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode))
4085 && (TREE_CODE (DECL_FIELD_CONTEXT (field))((enum tree_code) (((tree_check ((field), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4085, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context
))->base.code)
== UNION_TYPE
4086 || TREE_CODE (DECL_FIELD_CONTEXT (field))((enum tree_code) (((tree_check ((field), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4086, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context
))->base.code)
== QUAL_UNION_TYPE));
4087}
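A hypothetical example of the case the hook singles out (not from the analyzed source):

  /* XFmode member inside a union: the union is forced into BLKmode.  */
  union u_xf
  {
    long double x;
    int i;
  };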
4088
4089rtx
4090ix86_libcall_value (machine_mode mode)
4091{
4092 return ix86_function_value_1 (NULL__null, NULL__null, mode, mode);
4093}
4094
4095/* Return true iff type is returned in memory. */
4096
4097static bool
4098ix86_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED__attribute__ ((__unused__)))
4099{
4100 const machine_mode mode = type_natural_mode (type, NULL__null, true);
4101 HOST_WIDE_INTlong size;
4102
4103 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
4104 {
4105 if (ix86_function_type_abi (fntype) == MS_ABI)
4106 {
4107 size = int_size_in_bytes (type);
4108
4109 /* __m128 is returned in xmm0. */
4110 if ((!type || VECTOR_INTEGER_TYPE_P (type)((((enum tree_code) (type)->base.code) == VECTOR_TYPE) &&
((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4110, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
4111 || INTEGRAL_TYPE_P (type)(((enum tree_code) (type)->base.code) == ENUMERAL_TYPE || (
(enum tree_code) (type)->base.code) == BOOLEAN_TYPE || ((enum
tree_code) (type)->base.code) == INTEGER_TYPE)
4112 || VECTOR_FLOAT_TYPE_P (type)((((enum tree_code) (type)->base.code) == VECTOR_TYPE) &&
((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4112, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)
)
4113 && (SCALAR_INT_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_INT || ((enum mode_class
) mode_class[mode]) == MODE_PARTIAL_INT)
|| VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
)
4114 && !COMPLEX_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_COMPLEX_INT || (
(enum mode_class) mode_class[mode]) == MODE_COMPLEX_FLOAT)
4115 && (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) == 16 || size == 16))
4116 return false;
4117
4118 /* Otherwise, the size must be exactly in [1248]. */
4119 return size != 1 && size != 2 && size != 4 && size != 8;
4120 }
4121 else
4122 {
4123 int needed_intregs, needed_sseregs;
4124
4125 return examine_argument (mode, type, 1,
4126 &needed_intregs, &needed_sseregs);
4127 }
4128 }
4129 else
4130 {
4131 size = int_size_in_bytes (type);
4132
4133 /* Intel MCU psABI returns scalars and aggregates no larger than 8
4134 bytes in registers. */
4135 if (TARGET_IAMCU((global_options.x_target_flags & (1U << 12)) != 0))
4136 return VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
|| size < 0 || size > 8;
4137
4138 if (mode == BLKmode((void) 0, E_BLKmode))
4139 return true;
4140
4141 if (MS_AGGREGATE_RETURN0 && AGGREGATE_TYPE_P (type)(((enum tree_code) (type)->base.code) == ARRAY_TYPE || (((
enum tree_code) (type)->base.code) == RECORD_TYPE || ((enum
tree_code) (type)->base.code) == UNION_TYPE || ((enum tree_code
) (type)->base.code) == QUAL_UNION_TYPE))
&& size <= 8)
4142 return false;
4143
4144 if (VECTOR_MODE_P (mode)(((enum mode_class) mode_class[mode]) == MODE_VECTOR_BOOL || (
(enum mode_class) mode_class[mode]) == MODE_VECTOR_INT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FLOAT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_FRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UFRACT || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_ACCUM || ((enum
mode_class) mode_class[mode]) == MODE_VECTOR_UACCUM)
|| mode == TImode(scalar_int_mode ((scalar_int_mode::from_int) E_TImode)))
4145 {
4146 /* User-created vectors small enough to fit in EAX. */
4147 if (size < 8)
4148 return false;
4149
4150	  /* Unless the ABI prescribes otherwise,
4151 MMX/3dNow values are returned in MM0 if available. */
4152
4153 if (size == 8)
4154 return TARGET_VECT8_RETURNS((global_options.x_target_flags & (1U << 28)) != 0) || !TARGET_MMX((global_options.x_ix86_isa_flags & (1UL << 36)) !=
0)
;
4155
4156 /* SSE values are returned in XMM0 if available. */
4157 if (size == 16)
4158 return !TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
;
4159
4160 /* AVX values are returned in YMM0 if available. */
4161 if (size == 32)
4162 return !TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0
)
;
4163
4164 /* AVX512F values are returned in ZMM0 if available. */
4165 if (size == 64)
4166 return !TARGET_AVX512F((global_options.x_ix86_isa_flags & (1UL << 15)) !=
0)
;
4167 }
4168
4169 if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)))
4170 return false;
4171
4172 if (size > 12)
4173 return true;
4174
4175 /* OImode shouldn't be used directly. */
4176 gcc_assert (mode != OImode)((void)(!(mode != (scalar_int_mode ((scalar_int_mode::from_int
) E_OImode))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4176, __FUNCTION__), 0 : 0))
;
4177
4178 return false;
4179 }
4180}
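Hypothetical illustrations of the 64-bit MS ABI branch above (not from the analyzed source): only aggregates whose size is exactly 1, 2, 4 or 8 bytes come back in a register; anything else is returned through a hidden pointer.

  struct s8  { int a, b; };          /*  8 bytes */
  struct s16 { int a, b, c, d; };    /* 16 bytes */

  struct s8  r8  (void) { return (struct s8)  {0, 0}; }        /* MS x64: in %rax     */
  struct s16 r16 (void) { return (struct s16) {0, 0, 0, 0}; }  /* MS x64: via memory  */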
4181
4182
4183/* Create the va_list data type. */
4184
4185static tree
4186ix86_build_builtin_va_list_64 (void)
4187{
4188 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl;
4189
4190 record = lang_hooks.types.make_type (RECORD_TYPE);
4191 type_decl = build_decl (BUILTINS_LOCATION((location_t) 1),
4192 TYPE_DECL, get_identifier ("__va_list_tag")(__builtin_constant_p ("__va_list_tag") ? get_identifier_with_length
(("__va_list_tag"), strlen ("__va_list_tag")) : get_identifier
("__va_list_tag"))
, record);
4193
4194 f_gpr = build_decl (BUILTINS_LOCATION((location_t) 1),
4195 FIELD_DECL, get_identifier ("gp_offset")(__builtin_constant_p ("gp_offset") ? get_identifier_with_length
(("gp_offset"), strlen ("gp_offset")) : get_identifier ("gp_offset"
))
,
4196 unsigned_type_nodeinteger_types[itk_unsigned_int]);
4197 f_fpr = build_decl (BUILTINS_LOCATION((location_t) 1),
4198 FIELD_DECL, get_identifier ("fp_offset")(__builtin_constant_p ("fp_offset") ? get_identifier_with_length
(("fp_offset"), strlen ("fp_offset")) : get_identifier ("fp_offset"
))
,
4199 unsigned_type_nodeinteger_types[itk_unsigned_int]);
4200 f_ovf = build_decl (BUILTINS_LOCATION((location_t) 1),
4201 FIELD_DECL, get_identifier ("overflow_arg_area")(__builtin_constant_p ("overflow_arg_area") ? get_identifier_with_length
(("overflow_arg_area"), strlen ("overflow_arg_area")) : get_identifier
("overflow_arg_area"))
,
4202 ptr_type_nodeglobal_trees[TI_PTR_TYPE]);
4203 f_sav = build_decl (BUILTINS_LOCATION((location_t) 1),
4204 FIELD_DECL, get_identifier ("reg_save_area")(__builtin_constant_p ("reg_save_area") ? get_identifier_with_length
(("reg_save_area"), strlen ("reg_save_area")) : get_identifier
("reg_save_area"))
,
4205 ptr_type_nodeglobal_trees[TI_PTR_TYPE]);
4206
4207 va_list_gpr_counter_fieldglobal_trees[TI_VA_LIST_GPR_COUNTER_FIELD] = f_gpr;
4208 va_list_fpr_counter_fieldglobal_trees[TI_VA_LIST_FPR_COUNTER_FIELD] = f_fpr;
4209
4210 DECL_FIELD_CONTEXT (f_gpr)((tree_check ((f_gpr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4210, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context
)
= record;
4211 DECL_FIELD_CONTEXT (f_fpr)((tree_check ((f_fpr), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4211, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context
)
= record;
4212 DECL_FIELD_CONTEXT (f_ovf)((tree_check ((f_ovf), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4212, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context
)
= record;
4213 DECL_FIELD_CONTEXT (f_sav)((tree_check ((f_sav), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4213, __FUNCTION__, (FIELD_DECL)))->decl_minimal.context
)
= record;
4214
4215 TYPE_STUB_DECL (record)(((contains_struct_check (((tree_class_check ((record), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4215, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4215, __FUNCTION__))->common.chain))
= type_decl;
4216 TYPE_NAME (record)((tree_class_check ((record), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4216, __FUNCTION__))->type_common.name)
= type_decl;
4217 TYPE_FIELDS (record)((tree_check3 ((record), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4217, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
= f_gpr;
4218 DECL_CHAIN (f_gpr)(((contains_struct_check (((contains_struct_check ((f_gpr), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4218, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4218, __FUNCTION__))->common.chain))
= f_fpr;
4219 DECL_CHAIN (f_fpr)(((contains_struct_check (((contains_struct_check ((f_fpr), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4219, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4219, __FUNCTION__))->common.chain))
= f_ovf;
4220 DECL_CHAIN (f_ovf)(((contains_struct_check (((contains_struct_check ((f_ovf), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4220, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4220, __FUNCTION__))->common.chain))
= f_sav;
4221
4222 layout_type (record);
4223
4224 TYPE_ATTRIBUTES (record)((tree_class_check ((record), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4224, __FUNCTION__))->type_common.attributes)
= tree_cons (get_identifier ("sysv_abi va_list")(__builtin_constant_p ("sysv_abi va_list") ? get_identifier_with_length
(("sysv_abi va_list"), strlen ("sysv_abi va_list")) : get_identifier
("sysv_abi va_list"))
,
4225 NULL_TREE(tree) __null, TYPE_ATTRIBUTES (record)((tree_class_check ((record), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4225, __FUNCTION__))->type_common.attributes)
);
4226
4227 /* The correct type is an array type of one element. */
4228 return build_array_type (record, build_index_type (size_zero_nodeglobal_trees[TI_SIZE_ZERO]));
4229}
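The record built here corresponds to the familiar SysV x86-64 va_list layout; a C-level equivalent, shown for illustration only (the typedef names are invented):

  typedef struct __va_list_tag_model
  {
    unsigned int gp_offset;      /* offset of the next GPR slot in reg_save_area  */
    unsigned int fp_offset;      /* offset of the next SSE slot in reg_save_area  */
    void *overflow_arg_area;     /* next stack-passed argument                    */
    void *reg_save_area;         /* base of the register save area                */
  } __va_list_tag_model;

  typedef __va_list_tag_model va_list_model[1];   /* array of one element, as above */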
4230
4231/* Setup the builtin va_list data type and for 64-bit the additional
4232 calling convention specific va_list data types. */
4233
4234static tree
4235ix86_build_builtin_va_list (void)
4236{
4237 if (TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
4238 {
4239 /* Initialize ABI specific va_list builtin types.
4240
4241 In lto1, we can encounter two va_list types:
4242 - one as a result of the type-merge across TUs, and
4243 - the one constructed here.
4244 These two types will not have the same TYPE_MAIN_VARIANT, and therefore
4245 a type identity check in canonical_va_list_type based on
4246 TYPE_MAIN_VARIANT (which we used to have) will not work.
4247 Instead, we tag each va_list_type_node with its unique attribute, and
4248 look for the attribute in the type identity check in
4249 canonical_va_list_type.
4250
4251 Tagging sysv_va_list_type_node directly with the attribute is
4252       problematic since it's an array of one record, which will degrade into a
4253 pointer to record when used as parameter (see build_va_arg comments for
4254 an example), dropping the attribute in the process. So we tag the
4255 record instead. */
4256
4257 /* For SYSV_ABI we use an array of one record. */
4258 sysv_va_list_type_node = ix86_build_builtin_va_list_64 ();
4259
4260 /* For MS_ABI we use plain pointer to argument area. */
4261 tree char_ptr_type = build_pointer_type (char_type_nodeinteger_types[itk_char]);
4262 tree attr = tree_cons (get_identifier ("ms_abi va_list")(__builtin_constant_p ("ms_abi va_list") ? get_identifier_with_length
(("ms_abi va_list"), strlen ("ms_abi va_list")) : get_identifier
("ms_abi va_list"))
, NULL_TREE(tree) __null,
4263 TYPE_ATTRIBUTES (char_ptr_type)((tree_class_check ((char_ptr_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4263, __FUNCTION__))->type_common.attributes)
);
4264 ms_va_list_type_node = build_type_attribute_variant (char_ptr_type, attr);
4265
4266 return ((ix86_abiglobal_options.x_ix86_abi == MS_ABI)
4267 ? ms_va_list_type_node
4268 : sysv_va_list_type_node);
4269 }
4270 else
4271 {
4272 /* For i386 we use plain pointer to argument area. */
4273 return build_pointer_type (char_type_nodeinteger_types[itk_char]);
4274 }
4275}
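On x86-64 both flavors are reachable from C through GCC's cross-ABI builtins; a hedged sketch (assuming the __builtin_ms_va_* builtins and the ms_abi attribute, which are not shown in this file):

  int __attribute__ ((ms_abi))
  sum_ms (int n, ...)
  {
    __builtin_ms_va_list ap;            /* plain char * under the hood           */
    int s = 0;
    __builtin_ms_va_start (ap, n);
    for (int i = 0; i < n; i++)
      s += __builtin_va_arg (ap, int);  /* walks the argument area directly      */
    __builtin_ms_va_end (ap);
    return s;
  }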
4276
4277/* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
4278
4279static void
4280setup_incoming_varargs_64 (CUMULATIVE_ARGS *cum)
4281{
4282 rtx save_area, mem;
4283 alias_set_type set;
4284 int i, max;
4285
4286 /* GPR size of varargs save area. */
4287 if (cfun(cfun + 0)->va_list_gpr_size)
4288 ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) = X86_64_REGPARM_MAX6 * UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
;
4289 else
4290 ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) = 0;
4291
4292 /* FPR size of varargs save area. We don't need it if we don't pass
4293 anything in SSE registers. */
4294 if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
&& cfun(cfun + 0)->va_list_fpr_size)
4295 ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size) = X86_64_SSE_REGPARM_MAX8 * 16;
4296 else
4297 ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size) = 0;
4298
4299 if (! ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) && ! ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size))
4300 return;
4301
4302 save_area = frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]);
4303 set = get_varargs_alias_set ();
4304
4305 max = cum->regno + cfun(cfun + 0)->va_list_gpr_size / UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
;
4306 if (max > X86_64_REGPARM_MAX6)
4307 max = X86_64_REGPARM_MAX6;
4308
4309 for (i = cum->regno; i < max; i++)
4310 {
4311 mem = gen_rtx_MEM (word_mode,
4312 plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, save_area, i * UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
));
4313 MEM_NOTRAP_P (mem)(__extension__ ({ __typeof ((mem)) const _rtx = ((mem)); if (
((enum rtx_code) (_rtx)->code) != MEM) rtl_check_failed_flag
("MEM_NOTRAP_P", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4313, __FUNCTION__); _rtx; })->call)
= 1;
4314 set_mem_alias_set (mem, set);
4315 emit_move_insn (mem,
4316 gen_rtx_REG (word_mode,
4317 x86_64_int_parameter_registers[i]));
4318 }
4319
4320 if (ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size))
4321 {
4322 machine_mode smode;
4323 rtx_code_label *label;
4324 rtx test;
4325
4326      /* Now emit code to save SSE registers.  The AX parameter contains the number
4327 of SSE parameter registers used to call this function, though all we
4328 actually check here is the zero/non-zero status. */
4329
4330 label = gen_label_rtx ();
4331 test = gen_rtx_EQ (VOIDmode, gen_rtx_REG (QImode, AX_REG), const0_rtx)gen_rtx_fmt_ee_stat ((EQ), ((((void) 0, E_VOIDmode))), ((gen_rtx_REG
((scalar_int_mode ((scalar_int_mode::from_int) E_QImode)), 0
))), (((const_int_rtx[64]))) )
;
4332 emit_jump_insn (gen_cbranchqi4 (test, XEXP (test, 0)(((test)->u.fld[0]).rt_rtx), XEXP (test, 1)(((test)->u.fld[1]).rt_rtx),
4333 label));
4334
4335 /* ??? If !TARGET_SSE_TYPELESS_STORES, would we perform better if
4336 we used movdqa (i.e. TImode) instead? Perhaps even better would
4337 be if we could determine the real mode of the data, via a hook
4338 into pass_stdarg. Ignore all that for now. */
4339 smode = V4SFmode((void) 0, E_V4SFmode);
4340 if (crtl(&x_rtl)->stack_alignment_needed < GET_MODE_ALIGNMENT (smode)get_mode_alignment (smode))
4341 crtl(&x_rtl)->stack_alignment_needed = GET_MODE_ALIGNMENT (smode)get_mode_alignment (smode);
4342
4343 max = cum->sse_regno + cfun(cfun + 0)->va_list_fpr_size / 16;
4344 if (max > X86_64_SSE_REGPARM_MAX8)
4345 max = X86_64_SSE_REGPARM_MAX8;
4346
4347 for (i = cum->sse_regno; i < max; ++i)
4348 {
4349 mem = plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, save_area,
4350 i * 16 + ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size));
4351 mem = gen_rtx_MEM (smode, mem);
4352 MEM_NOTRAP_P (mem)(__extension__ ({ __typeof ((mem)) const _rtx = ((mem)); if (
((enum rtx_code) (_rtx)->code) != MEM) rtl_check_failed_flag
("MEM_NOTRAP_P", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4352, __FUNCTION__); _rtx; })->call)
= 1;
4353 set_mem_alias_set (mem, set);
4354 set_mem_align (mem, GET_MODE_ALIGNMENT (smode)get_mode_alignment (smode));
4355
4356 emit_move_insn (mem, gen_rtx_REG (smode, GET_SSE_REGNO (i)((i) < 8 ? 20 + (i) : (i) < 16 ? 44 + (i) - 8 : 52 + (i
) - 16)
));
4357 }
4358
4359 emit_label (label);
4360 }
4361}
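What this buys at the user level (hypothetical example, not from the analyzed source): the prologue of a SysV x86-64 variadic function dumps the unconsumed argument registers (up to X86_64_REGPARM_MAX GPRs and, guarded by the %al check above, X86_64_SSE_REGPARM_MAX vector registers) into the save area that va_arg later reads:

  #include <stdarg.h>

  double
  avg (int n, ...)
  {
    va_list ap;
    double s = 0.0;
    va_start (ap, n);                 /* reads the save area built above */
    for (int i = 0; i < n; i++)
      s += va_arg (ap, double);
    va_end (ap);
    return s / (n ? n : 1);
  }

  /* A caller such as avg (2, 1.0, 2.0) sets %al to the number of vector
     registers used; the code above only tests it for zero/non-zero before
     spilling the SSE registers.  */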
4362
4363static void
4364setup_incoming_varargs_ms_64 (CUMULATIVE_ARGS *cum)
4365{
4366 alias_set_type set = get_varargs_alias_set ();
4367 int i;
4368
4369   /* Reset to zero, as there might be a sysv va_arg used
4370 before. */
4371 ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) = 0;
4372 ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size) = 0;
4373
4374 for (i = cum->regno; i < X86_64_MS_REGPARM_MAX4; i++)
4375 {
4376 rtx reg, mem;
4377
4378 mem = gen_rtx_MEM (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
,
4379 plus_constant (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, virtual_incoming_args_rtx((this_target_rtl->x_global_rtl)[GR_VIRTUAL_INCOMING_ARGS]
)
,
4380 i * UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
));
4381 MEM_NOTRAP_P (mem)(__extension__ ({ __typeof ((mem)) const _rtx = ((mem)); if (
((enum rtx_code) (_rtx)->code) != MEM) rtl_check_failed_flag
("MEM_NOTRAP_P", _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4381, __FUNCTION__); _rtx; })->call)
= 1;
4382 set_mem_alias_set (mem, set);
4383
4384 reg = gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, x86_64_ms_abi_int_parameter_registers[i]);
4385 emit_move_insn (mem, reg);
4386 }
4387}
4388
4389static void
4390ix86_setup_incoming_varargs (cumulative_args_t cum_v,
4391 const function_arg_info &arg,
4392 int *, int no_rtl)
4393{
4394 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
4395 CUMULATIVE_ARGS next_cum;
4396 tree fntype;
4397
4398   /* This argument doesn't appear to be used anymore, which is good,
4399 because the old code here didn't suppress rtl generation. */
4400 gcc_assert (!no_rtl)((void)(!(!no_rtl) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4400, __FUNCTION__), 0 : 0))
;
4401
4402 if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
4403 return;
4404
4405 fntype = TREE_TYPE (current_function_decl)((contains_struct_check ((current_function_decl), (TS_TYPED),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4405, __FUNCTION__))->typed.type)
;
4406
4407 /* For varargs, we do not want to skip the dummy va_dcl argument.
4408 For stdargs, we do want to skip the last named argument. */
4409 next_cum = *cum;
4410 if (stdarg_p (fntype))
4411 ix86_function_arg_advance (pack_cumulative_args (&next_cum), arg);
4412
4413 if (cum->call_abi == MS_ABI)
4414 setup_incoming_varargs_ms_64 (&next_cum);
4415 else
4416 setup_incoming_varargs_64 (&next_cum);
4417}
4418
4419/* Checks if TYPE is of kind va_list char *. */
4420
4421static bool
4422is_va_list_char_pointer (tree type)
4423{
4424 tree canonic;
4425
4426 /* For 32-bit it is always true. */
4427 if (!TARGET_64BIT((global_options.x_ix86_isa_flags & (1UL << 1)) != 0
)
)
4428 return true;
4429 canonic = ix86_canonical_va_list_type (type);
4430 return (canonic == ms_va_list_type_node
4431 || (ix86_abiglobal_options.x_ix86_abi == MS_ABI && canonic == va_list_type_nodeglobal_trees[TI_VA_LIST_TYPE]));
4432}
4433
4434/* Implement va_start. */
4435
4436static void
4437ix86_va_start (tree valist, rtx nextarg)
4438{
4439 HOST_WIDE_INTlong words, n_gpr, n_fpr;
4440 tree f_gpr, f_fpr, f_ovf, f_sav;
4441 tree gpr, fpr, ovf, sav, t;
4442 tree type;
4443 rtx ovf_rtx;
4444
4445 if (flag_split_stackglobal_options.x_flag_split_stack
4446 && cfun(cfun + 0)->machine->split_stack_varargs_pointer == NULL_RTX(rtx) 0)
4447 {
4448 unsigned int scratch_regno;
4449
4450 /* When we are splitting the stack, we can't refer to the stack
4451 arguments using internal_arg_pointer, because they may be on
4452 the old stack. The split stack prologue will arrange to
4453 leave a pointer to the old stack arguments in a scratch
4454 register, which we here copy to a pseudo-register. The split
4455 stack prologue can't set the pseudo-register directly because
4456 it (the prologue) runs before any registers have been saved. */
4457
4458 scratch_regno = split_stack_prologue_scratch_regno ();
4459 if (scratch_regno != INVALID_REGNUM(~(unsigned int) 0))
4460 {
4461 rtx reg;
4462 rtx_insn *seq;
4463
4464 reg = gen_reg_rtx (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
);
4465 cfun(cfun + 0)->machine->split_stack_varargs_pointer = reg;
4466
4467 start_sequence ();
4468 emit_move_insn (reg, gen_rtx_REG (Pmode(global_options.x_ix86_pmode == PMODE_DI ? (scalar_int_mode (
(scalar_int_mode::from_int) E_DImode)) : (scalar_int_mode ((scalar_int_mode
::from_int) E_SImode)))
, scratch_regno));
4469 seq = get_insns ();
4470 end_sequence ();
4471
4472 push_topmost_sequence ();
4473 emit_insn_after (seq, entry_of_function ());
4474 pop_topmost_sequence ();
4475 }
4476 }
4477
4478 /* Only 64bit target needs something special. */
4479 if (is_va_list_char_pointer (TREE_TYPE (valist)((contains_struct_check ((valist), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4479, __FUNCTION__))->typed.type)
))
4480 {
4481 if (cfun(cfun + 0)->machine->split_stack_varargs_pointer == NULL_RTX(rtx) 0)
4482 std_expand_builtin_va_start (valist, nextarg);
4483 else
4484 {
4485 rtx va_r, next;
4486
4487 va_r = expand_expr (valist, NULL_RTX(rtx) 0, VOIDmode((void) 0, E_VOIDmode), EXPAND_WRITE);
4488 next = expand_binop (ptr_mode, add_optab,
4489 cfun(cfun + 0)->machine->split_stack_varargs_pointer,
4490 crtl(&x_rtl)->args.arg_offset_rtx,
4491 NULL_RTX(rtx) 0, 0, OPTAB_LIB_WIDEN);
4492 convert_move (va_r, next, 0);
4493 }
4494 return;
4495 }
4496
4497 f_gpr = TYPE_FIELDS (TREE_TYPE (sysv_va_list_type_node))((tree_check3 ((((contains_struct_check ((sysv_va_list_type_node
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4497, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4497, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
;
4498 f_fpr = DECL_CHAIN (f_gpr)(((contains_struct_check (((contains_struct_check ((f_gpr), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4498, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4498, __FUNCTION__))->common.chain))
;
4499 f_ovf = DECL_CHAIN (f_fpr)(((contains_struct_check (((contains_struct_check ((f_fpr), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4499, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4499, __FUNCTION__))->common.chain))
;
4500 f_sav = DECL_CHAIN (f_ovf)(((contains_struct_check (((contains_struct_check ((f_ovf), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4500, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4500, __FUNCTION__))->common.chain))
;
4501
4502 valist = build_simple_mem_ref (valist)build_simple_mem_ref_loc (((location_t) 0), valist);
4503 TREE_TYPE (valist)((contains_struct_check ((valist), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4503, __FUNCTION__))->typed.type)
= TREE_TYPE (sysv_va_list_type_node)((contains_struct_check ((sysv_va_list_type_node), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4503, __FUNCTION__))->typed.type)
;
4504 /* The following should be folded into the MEM_REF offset. */
4505 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr)((contains_struct_check ((f_gpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4505, __FUNCTION__))->typed.type)
, unshare_expr (valist),
4506 f_gpr, NULL_TREE(tree) __null);
4507 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr)((contains_struct_check ((f_fpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4507, __FUNCTION__))->typed.type)
, unshare_expr (valist),
4508 f_fpr, NULL_TREE(tree) __null);
4509 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf)((contains_struct_check ((f_ovf), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4509, __FUNCTION__))->typed.type)
, unshare_expr (valist),
4510 f_ovf, NULL_TREE(tree) __null);
4511 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav)((contains_struct_check ((f_sav), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4511, __FUNCTION__))->typed.type)
, unshare_expr (valist),
4512 f_sav, NULL_TREE(tree) __null);
4513
4514 /* Count number of gp and fp argument registers used. */
4515 words = crtl(&x_rtl)->args.info.words;
4516 n_gpr = crtl(&x_rtl)->args.info.regno;
4517 n_fpr = crtl(&x_rtl)->args.info.sse_regno;
4518
4519 if (cfun(cfun + 0)->va_list_gpr_size)
4520 {
4521 type = TREE_TYPE (gpr)((contains_struct_check ((gpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4521, __FUNCTION__))->typed.type)
;
4522 t = build2 (MODIFY_EXPR, type,
4523 gpr, build_int_cst (type, n_gpr * 8));
4524 TREE_SIDE_EFFECTS (t)((non_type_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4524, __FUNCTION__))->base.side_effects_flag)
= 1;
4525 expand_expr (t, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4526 }
4527
4528 if (TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
&& cfun(cfun + 0)->va_list_fpr_size)
4529 {
4530 type = TREE_TYPE (fpr)((contains_struct_check ((fpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4530, __FUNCTION__))->typed.type)
;
4531 t = build2 (MODIFY_EXPR, type, fpr,
4532 build_int_cst (type, n_fpr * 16 + 8*X86_64_REGPARM_MAX6));
4533 TREE_SIDE_EFFECTS (t)((non_type_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4533, __FUNCTION__))->base.side_effects_flag)
= 1;
4534 expand_expr (t, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4535 }
4536
4537 /* Find the overflow area. */
4538 type = TREE_TYPE (ovf)((contains_struct_check ((ovf), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4538, __FUNCTION__))->typed.type)
;
4539 if (cfun(cfun + 0)->machine->split_stack_varargs_pointer == NULL_RTX(rtx) 0)
4540 ovf_rtx = crtl(&x_rtl)->args.internal_arg_pointer;
4541 else
4542 ovf_rtx = cfun(cfun + 0)->machine->split_stack_varargs_pointer;
4543 t = make_tree (type, ovf_rtx);
4544 if (words != 0)
4545 t = fold_build_pointer_plus_hwi (t, words * UNITS_PER_WORD)fold_build_pointer_plus_hwi_loc (((location_t) 0), t, words *
(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4))
;
4546
4547 t = build2 (MODIFY_EXPR, type, ovf, t);
4548 TREE_SIDE_EFFECTS (t)((non_type_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4548, __FUNCTION__))->base.side_effects_flag)
= 1;
4549 expand_expr (t, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4550
4551 if (ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size) || ix86_varargs_fpr_size((cfun + 0)->machine->varargs_fpr_size))
4552 {
 4553 /* Find the register save area.
 4554 The function prologue saves it right above the stack frame. */
4555 type = TREE_TYPE (sav)((contains_struct_check ((sav), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4555, __FUNCTION__))->typed.type)
;
4556 t = make_tree (type, frame_pointer_rtx((this_target_rtl->x_global_rtl)[GR_FRAME_POINTER]));
4557 if (!ix86_varargs_gpr_size((cfun + 0)->machine->varargs_gpr_size))
4558 t = fold_build_pointer_plus_hwi (t, -8 * X86_64_REGPARM_MAX)fold_build_pointer_plus_hwi_loc (((location_t) 0), t, -8 * 6);
4559
4560 t = build2 (MODIFY_EXPR, type, sav, t);
4561 TREE_SIDE_EFFECTS (t)((non_type_check ((t), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4561, __FUNCTION__))->base.side_effects_flag)
= 1;
4562 expand_expr (t, const0_rtx(const_int_rtx[64]), VOIDmode((void) 0, E_VOIDmode), EXPAND_NORMAL);
4563 }
4564}
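/* Illustrative sketch, not part of i386.c: the SysV AMD64 va_list record
   whose four fields the COMPONENT_REFs built above (gpr, fpr, ovf, sav)
   refer to.  The layout and field names are fixed by the psABI; only the
   typedef name below is made up.  */

typedef struct
{
  unsigned int gp_offset;      /* byte offset into reg_save_area of the next
                                  general-purpose register argument; set by
                                  ix86_va_start to n_gpr * 8 */
  unsigned int fp_offset;      /* byte offset of the next SSE argument; set
                                  to n_fpr * 16 + 8 * X86_64_REGPARM_MAX */
  void *overflow_arg_area;     /* next stack-passed argument, i.e. the
                                  incoming arg pointer plus the named words */
  void *reg_save_area;         /* area the prologue saved: 6 * 8 bytes of
                                  GPRs followed by 8 * 16 bytes of SSE regs */
} sysv_va_list_sketch;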
4565
4566/* Implement va_arg. */
4567
4568static tree
4569ix86_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
4570 gimple_seq *post_p)
4571{
4572 static const int intreg[6] = { 0, 1, 2, 3, 4, 5 };
4573 tree f_gpr, f_fpr, f_ovf, f_sav;
4574 tree gpr, fpr, ovf, sav, t;
4575 int size, rsize;
4576 tree lab_false, lab_over = NULL_TREE(tree) __null;
4577 tree addr, t2;
4578 rtx container;
4579 int indirect_p = 0;
4580 tree ptrtype;
4581 machine_mode nat_mode;
4582 unsigned int arg_boundary;
4583 unsigned int type_align;
4584
 4585 /* Only the 64-bit target needs something special. */
4586 if (is_va_list_char_pointer (TREE_TYPE (valist)((contains_struct_check ((valist), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4586, __FUNCTION__))->typed.type)
))
4587 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
4588
4589 f_gpr = TYPE_FIELDS (TREE_TYPE (sysv_va_list_type_node))((tree_check3 ((((contains_struct_check ((sysv_va_list_type_node
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4589, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4589, __FUNCTION__, (RECORD_TYPE), (UNION_TYPE), (QUAL_UNION_TYPE
)))->type_non_common.values)
;
4590 f_fpr = DECL_CHAIN (f_gpr)(((contains_struct_check (((contains_struct_check ((f_gpr), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4590, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4590, __FUNCTION__))->common.chain))
;
4591 f_ovf = DECL_CHAIN (f_fpr)(((contains_struct_check (((contains_struct_check ((f_fpr), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4591, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4591, __FUNCTION__))->common.chain))
;
4592 f_sav = DECL_CHAIN (f_ovf)(((contains_struct_check (((contains_struct_check ((f_ovf), (
TS_DECL_MINIMAL), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4592, __FUNCTION__))), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4592, __FUNCTION__))->common.chain))
;
4593
4594 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr)((contains_struct_check ((f_gpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4594, __FUNCTION__))->typed.type)
,
4595 valist, f_gpr, NULL_TREE(tree) __null);
4596
4597 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr)((contains_struct_check ((f_fpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4597, __FUNCTION__))->typed.type)
, valist, f_fpr, NULL_TREE(tree) __null);
4598 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf)((contains_struct_check ((f_ovf), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4598, __FUNCTION__))->typed.type)
, valist, f_ovf, NULL_TREE(tree) __null);
4599 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav)((contains_struct_check ((f_sav), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4599, __FUNCTION__))->typed.type)
, valist, f_sav, NULL_TREE(tree) __null);
4600
4601 indirect_p = pass_va_arg_by_reference (type);
4602 if (indirect_p)
4603 type = build_pointer_type (type);
4604 size = arg_int_size_in_bytes (type);
4605 rsize = CEIL (size, UNITS_PER_WORD)(((size) + ((((global_options.x_ix86_isa_flags & (1UL <<
1)) != 0) ? 8 : 4)) - 1) / ((((global_options.x_ix86_isa_flags
& (1UL << 1)) != 0) ? 8 : 4)))
;
4606
4607 nat_mode = type_natural_mode (type, NULL__null, false);
4608 switch (nat_mode)
4609 {
4610 case E_V8SFmode:
4611 case E_V8SImode:
4612 case E_V32QImode:
4613 case E_V16HImode:
4614 case E_V4DFmode:
4615 case E_V4DImode:
4616 case E_V16SFmode:
4617 case E_V16SImode:
4618 case E_V64QImode:
4619 case E_V32HImode:
4620 case E_V8DFmode:
4621 case E_V8DImode:
 4622 /* Unnamed 256-bit and 512-bit vector mode parameters are passed on the stack. */
4623 if (!TARGET_64BIT_MS_ABI(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) && ix86_cfun_abi () == MS_ABI)
)
4624 {
4625 container = NULL__null;
4626 break;
4627 }
4628 /* FALLTHRU */
4629
4630 default:
4631 container = construct_container (nat_mode, TYPE_MODE (type)((((enum tree_code) ((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4631, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(type) : (type)->type_common.mode)
,
4632 type, 0, X86_64_REGPARM_MAX6,
4633 X86_64_SSE_REGPARM_MAX8, intreg,
4634 0);
4635 break;
4636 }
4637
4638 /* Pull the value out of the saved registers. */
4639
4640 addr = create_tmp_var (ptr_type_nodeglobal_trees[TI_PTR_TYPE], "addr");
4641 type_align = TYPE_ALIGN (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4641, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((type)->type_common.align - 1) : 0)
;
4642
4643 if (container)
4644 {
4645 int needed_intregs, needed_sseregs;
4646 bool need_temp;
4647 tree int_addr, sse_addr;
4648
4649 lab_false = create_artificial_label (UNKNOWN_LOCATION((location_t) 0));
4650 lab_over = create_artificial_label (UNKNOWN_LOCATION((location_t) 0));
4651
4652 examine_argument (nat_mode, type, 0, &needed_intregs, &needed_sseregs);
4653
4654 need_temp = (!REG_P (container)(((enum rtx_code) (container)->code) == REG)
4655 && ((needed_intregs && TYPE_ALIGN (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4655, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((type)->type_common.align - 1) : 0)
> 64)
4656 || TYPE_ALIGN (type)((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4656, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((type)->type_common.align - 1) : 0)
> 128));
4657
 4658 /* In case we are passing a structure, verify that it occupies a consecutive block
 4659 in the register save area. If not, we need to do moves. */
4660 if (!need_temp && !REG_P (container)(((enum rtx_code) (container)->code) == REG))
4661 {
4662 /* Verify that all registers are strictly consecutive */
4663 if (SSE_REGNO_P (REGNO (XEXP (XVECEXP (container, 0, 0), 0)))(((unsigned long) (((rhs_regno(((((((((container)->u.fld[0
]).rt_rtvec))->elem[0]))->u.fld[0]).rt_rtx))))) - (unsigned
long) (20) <= (unsigned long) (27) - (unsigned long) (20)
) || ((unsigned long) (((rhs_regno(((((((((container)->u.fld
[0]).rt_rtvec))->elem[0]))->u.fld[0]).rt_rtx))))) - (unsigned
long) (44) <= (unsigned long) (51) - (unsigned long) (44)
) || ((unsigned long) (((rhs_regno(((((((((container)->u.fld
[0]).rt_rtvec))->elem[0]))->u.fld[0]).rt_rtx))))) - (unsigned
long) (52) <= (unsigned long) (67) - (unsigned long) (52)
))
)
4664 {
4665 int i;
4666
4667 for (i = 0; i < XVECLEN (container, 0)(((((container)->u.fld[0]).rt_rtvec))->num_elem) && !need_temp; i++)
4668 {
4669 rtx slot = XVECEXP (container, 0, i)(((((container)->u.fld[0]).rt_rtvec))->elem[i]);
4670 if (REGNO (XEXP (slot, 0))(rhs_regno((((slot)->u.fld[0]).rt_rtx))) != FIRST_SSE_REG20 + (unsigned int) i
4671 || INTVAL (XEXP (slot, 1))(((((slot)->u.fld[1]).rt_rtx))->u.hwint[0]) != i * 16)
4672 need_temp = true;
4673 }
4674 }
4675 else
4676 {
4677 int i;
4678
4679 for (i = 0; i < XVECLEN (container, 0)(((((container)->u.fld[0]).rt_rtvec))->num_elem) && !need_temp; i++)
4680 {
4681 rtx slot = XVECEXP (container, 0, i)(((((container)->u.fld[0]).rt_rtvec))->elem[i]);
4682 if (REGNO (XEXP (slot, 0))(rhs_regno((((slot)->u.fld[0]).rt_rtx))) != (unsigned int) i
4683 || INTVAL (XEXP (slot, 1))(((((slot)->u.fld[1]).rt_rtx))->u.hwint[0]) != i * 8)
4684 need_temp = true;
4685 }
4686 }
4687 }
4688 if (!need_temp)
4689 {
4690 int_addr = addr;
4691 sse_addr = addr;
4692 }
4693 else
4694 {
4695 int_addr = create_tmp_var (ptr_type_nodeglobal_trees[TI_PTR_TYPE], "int_addr");
4696 sse_addr = create_tmp_var (ptr_type_nodeglobal_trees[TI_PTR_TYPE], "sse_addr");
4697 }
4698
4699 /* First ensure that we fit completely in registers. */
4700 if (needed_intregs)
4701 {
4702 t = build_int_cst (TREE_TYPE (gpr)((contains_struct_check ((gpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4702, __FUNCTION__))->typed.type)
,
4703 (X86_64_REGPARM_MAX6 - needed_intregs + 1) * 8);
4704 t = build2 (GE_EXPR, boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE], gpr, t);
4705 t2 = build1 (GOTO_EXPR, void_type_nodeglobal_trees[TI_VOID_TYPE], lab_false);
4706 t = build3 (COND_EXPR, void_type_nodeglobal_trees[TI_VOID_TYPE], t, t2, NULL_TREE(tree) __null);
4707 gimplify_and_add (t, pre_p);
4708 }
4709 if (needed_sseregs)
4710 {
4711 t = build_int_cst (TREE_TYPE (fpr)((contains_struct_check ((fpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4711, __FUNCTION__))->typed.type)
,
4712 (X86_64_SSE_REGPARM_MAX8 - needed_sseregs + 1) * 16
4713 + X86_64_REGPARM_MAX6 * 8);
4714 t = build2 (GE_EXPR, boolean_type_nodeglobal_trees[TI_BOOLEAN_TYPE], fpr, t);
4715 t2 = build1 (GOTO_EXPR, void_type_nodeglobal_trees[TI_VOID_TYPE], lab_false);
4716 t = build3 (COND_EXPR, void_type_nodeglobal_trees[TI_VOID_TYPE], t, t2, NULL_TREE(tree) __null);
4717 gimplify_and_add (t, pre_p);
4718 }
4719
4720 /* Compute index to start of area used for integer regs. */
4721 if (needed_intregs)
4722 {
4723 /* int_addr = gpr + sav; */
4724 t = fold_build_pointer_plus (sav, gpr)fold_build_pointer_plus_loc (((location_t) 0), sav, gpr);
4725 gimplify_assign (int_addr, t, pre_p);
4726 }
4727 if (needed_sseregs)
4728 {
4729 /* sse_addr = fpr + sav; */
4730 t = fold_build_pointer_plus (sav, fpr)fold_build_pointer_plus_loc (((location_t) 0), sav, fpr);
4731 gimplify_assign (sse_addr, t, pre_p);
4732 }
4733 if (need_temp)
4734 {
4735 int i, prev_size = 0;
4736 tree temp = create_tmp_var (type, "va_arg_tmp");
4737
4738 /* addr = &temp; */
4739 t = build1 (ADDR_EXPR, build_pointer_type (type), temp);
4740 gimplify_assign (addr, t, pre_p);
4741
4742 for (i = 0; i < XVECLEN (container, 0)(((((container)->u.fld[0]).rt_rtvec))->num_elem); i++)
4743 {
4744 rtx slot = XVECEXP (container, 0, i)(((((container)->u.fld[0]).rt_rtvec))->elem[i]);
4745 rtx reg = XEXP (slot, 0)(((slot)->u.fld[0]).rt_rtx);
4746 machine_mode mode = GET_MODE (reg)((machine_mode) (reg)->mode);
4747 tree piece_type;
4748 tree addr_type;
4749 tree daddr_type;
4750 tree src_addr, src;
4751 int src_offset;
4752 tree dest_addr, dest;
4753 int cur_size = GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]);
4754
4755 gcc_assert (prev_size <= INTVAL (XEXP (slot, 1)))((void)(!(prev_size <= (((((slot)->u.fld[1]).rt_rtx))->
u.hwint[0])) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4755, __FUNCTION__), 0 : 0))
;
4756 prev_size = INTVAL (XEXP (slot, 1))(((((slot)->u.fld[1]).rt_rtx))->u.hwint[0]);
4757 if (prev_size + cur_size > size)
4758 {
4759 cur_size = size - prev_size;
4760 unsigned int nbits = cur_size * BITS_PER_UNIT(8);
4761 if (!int_mode_for_size (nbits, 1).exists (&mode))
4762 mode = QImode(scalar_int_mode ((scalar_int_mode::from_int) E_QImode));
4763 }
4764 piece_type = lang_hooks.types.type_for_mode (mode, 1);
4765 if (mode == GET_MODE (reg)((machine_mode) (reg)->mode))
4766 addr_type = build_pointer_type (piece_type);
4767 else
4768 addr_type = build_pointer_type_for_mode (piece_type, ptr_mode,
4769 true);
4770 daddr_type = build_pointer_type_for_mode (piece_type, ptr_mode,
4771 true);
4772
4773 if (SSE_REGNO_P (REGNO (reg))(((unsigned long) (((rhs_regno(reg)))) - (unsigned long) (20)
<= (unsigned long) (27) - (unsigned long) (20)) || ((unsigned
long) (((rhs_regno(reg)))) - (unsigned long) (44) <= (unsigned
long) (51) - (unsigned long) (44)) || ((unsigned long) (((rhs_regno
(reg)))) - (unsigned long) (52) <= (unsigned long) (67) - (
unsigned long) (52)))
)
4774 {
4775 src_addr = sse_addr;
4776 src_offset = (REGNO (reg)(rhs_regno(reg)) - FIRST_SSE_REG20) * 16;
4777 }
4778 else
4779 {
4780 src_addr = int_addr;
4781 src_offset = REGNO (reg)(rhs_regno(reg)) * 8;
4782 }
4783 src_addr = fold_convert (addr_type, src_addr)fold_convert_loc (((location_t) 0), addr_type, src_addr);
4784 src_addr = fold_build_pointer_plus_hwi (src_addr, src_offset)fold_build_pointer_plus_hwi_loc (((location_t) 0), src_addr, src_offset
)
;
4785
4786 dest_addr = fold_convert (daddr_type, addr)fold_convert_loc (((location_t) 0), daddr_type, addr);
4787 dest_addr = fold_build_pointer_plus_hwi (dest_addr, prev_size)fold_build_pointer_plus_hwi_loc (((location_t) 0), dest_addr,
prev_size)
;
4788 if (cur_size == GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]))
4789 {
4790 src = build_va_arg_indirect_ref (src_addr);
4791 dest = build_va_arg_indirect_ref (dest_addr);
4792
4793 gimplify_assign (dest, src, pre_p);
4794 }
4795 else
4796 {
4797 tree copy
4798 = build_call_expr (builtin_decl_implicit (BUILT_IN_MEMCPY),
4799 3, dest_addr, src_addr,
4800 size_int (cur_size)size_int_kind (cur_size, stk_sizetype));
4801 gimplify_and_add (copy, pre_p);
4802 }
4803 prev_size += cur_size;
4804 }
4805 }
4806
4807 if (needed_intregs)
4808 {
4809 t = build2 (PLUS_EXPR, TREE_TYPE (gpr)((contains_struct_check ((gpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4809, __FUNCTION__))->typed.type)
, gpr,
4810 build_int_cst (TREE_TYPE (gpr)((contains_struct_check ((gpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4810, __FUNCTION__))->typed.type)
, needed_intregs * 8));
4811 gimplify_assign (gpr, t, pre_p);
4812 /* The GPR save area guarantees only 8-byte alignment. */
4813 if (!need_temp)
4814 type_align = MIN (type_align, 64)((type_align) < (64) ? (type_align) : (64));
4815 }
4816
4817 if (needed_sseregs)
4818 {
4819 t = build2 (PLUS_EXPR, TREE_TYPE (fpr)((contains_struct_check ((fpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4819, __FUNCTION__))->typed.type)
, fpr,
4820 build_int_cst (TREE_TYPE (fpr)((contains_struct_check ((fpr), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4820, __FUNCTION__))->typed.type)
, needed_sseregs * 16));
4821 gimplify_assign (unshare_expr (fpr), t, pre_p);
4822 }
4823
4824 gimple_seq_add_stmt (pre_p, gimple_build_goto (lab_over));
4825
4826 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_false));
4827 }
4828
4829 /* ... otherwise out of the overflow area. */
4830
 4831 /* When we align a parameter on the stack for the caller, if the parameter's
 4832 alignment is beyond MAX_SUPPORTED_STACK_ALIGNMENT, it will be
 4833 aligned at MAX_SUPPORTED_STACK_ALIGNMENT. Match the callee here
 4834 with the caller. */
4835 arg_boundary = ix86_function_arg_boundary (VOIDmode((void) 0, E_VOIDmode), type);
4836 if ((unsigned int) arg_boundary > MAX_SUPPORTED_STACK_ALIGNMENT(((unsigned int) 1 << 28) * 8))
4837 arg_boundary = MAX_SUPPORTED_STACK_ALIGNMENT(((unsigned int) 1 << 28) * 8);
4838
4839 /* Care for on-stack alignment if needed. */
4840 if (arg_boundary <= 64 || size == 0)
4841 t = ovf;
4842 else
4843 {
4844 HOST_WIDE_INTlong align = arg_boundary / 8;
4845 t = fold_build_pointer_plus_hwi (ovf, align - 1)fold_build_pointer_plus_hwi_loc (((location_t) 0), ovf, align
- 1)
;
4846 t = build2 (BIT_AND_EXPR, TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4846, __FUNCTION__))->typed.type)
, t,
4847 build_int_cst (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4847, __FUNCTION__))->typed.type)
, -align));
4848 }
4849
4850 gimplify_expr (&t, pre_p, NULL__null, is_gimple_val, fb_rvalue);
4851 gimplify_assign (addr, t, pre_p);
4852
4853 t = fold_build_pointer_plus_hwi (t, rsize * UNITS_PER_WORD)fold_build_pointer_plus_hwi_loc (((location_t) 0), t, rsize *
(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4))
;
4854 gimplify_assign (unshare_expr (ovf), t, pre_p);
4855
4856 if (container)
4857 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over));
4858
4859 type = build_aligned_type (type, type_align);
4860 ptrtype = build_pointer_type_for_mode (type, ptr_mode, true);
4861 addr = fold_convert (ptrtype, addr)fold_convert_loc (((location_t) 0), ptrtype, addr);
4862
4863 if (indirect_p)
4864 addr = build_va_arg_indirect_ref (addr);
4865 return build_va_arg_indirect_ref (addr);
4866}
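/* Illustrative sketch, not part of i386.c: the control flow the function
   above gimplifies for va_arg (ap, int) on x86-64 SysV, written as plain C
   against the sysv_va_list_sketch record shown after ix86_va_start.  */

static int
va_arg_int_sketch (sysv_va_list_sketch *ap)
{
  void *addr;

  if (ap->gp_offset < 6 * 8)   /* a general-purpose register slot is left */
    {
      addr = (char *) ap->reg_save_area + ap->gp_offset;
      ap->gp_offset += 8;
    }
  else                         /* ... otherwise out of the overflow area */
    {
      addr = ap->overflow_arg_area;
      ap->overflow_arg_area = (char *) ap->overflow_arg_area + 8;
    }
  return *(int *) addr;
}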
4867
4868/* Return true if OPNUM's MEM should be matched
4869 in movabs* patterns. */
4870
4871bool
4872ix86_check_movabs (rtx insn, int opnum)
4873{
4874 rtx set, mem;
4875
4876 set = PATTERN (insn);
4877 if (GET_CODE (set)((enum rtx_code) (set)->code) == PARALLEL)
4878 set = XVECEXP (set, 0, 0)(((((set)->u.fld[0]).rt_rtvec))->elem[0]);
4879 gcc_assert (GET_CODE (set) == SET)((void)(!(((enum rtx_code) (set)->code) == SET) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4879, __FUNCTION__), 0 : 0))
;
4880 mem = XEXP (set, opnum)(((set)->u.fld[opnum]).rt_rtx);
4881 while (SUBREG_P (mem)(((enum rtx_code) (mem)->code) == SUBREG))
4882 mem = SUBREG_REG (mem)(((mem)->u.fld[0]).rt_rtx);
4883 gcc_assert (MEM_P (mem))((void)(!((((enum rtx_code) (mem)->code) == MEM)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4883, __FUNCTION__), 0 : 0))
;
4884 return volatile_ok || !MEM_VOLATILE_P (mem)(__extension__ ({ __typeof ((mem)) const _rtx = ((mem)); if (
((enum rtx_code) (_rtx)->code) != MEM && ((enum rtx_code
) (_rtx)->code) != ASM_OPERANDS && ((enum rtx_code
) (_rtx)->code) != ASM_INPUT) rtl_check_failed_flag ("MEM_VOLATILE_P"
, _rtx, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4884, __FUNCTION__); _rtx; })->volatil)
;
4885}
4886
4887/* Return false if INSN contains a MEM with a non-default address space. */
4888bool
4889ix86_check_no_addr_space (rtx insn)
4890{
4891 subrtx_var_iterator::array_type array;
4892 FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), ALL)for (subrtx_var_iterator iter (array, PATTERN (insn), rtx_all_subrtx_bounds
); !iter.at_end (); iter.next ())
4893 {
4894 rtx x = *iter;
4895 if (MEM_P (x)(((enum rtx_code) (x)->code) == MEM) && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))(((get_mem_attrs (x)->addrspace)) == 0))
4896 return false;
4897 }
4898 return true;
4899}
4900
4901/* Initialize the table of extra 80387 mathematical constants. */
4902
4903static void
4904init_ext_80387_constants (void)
4905{
4906 static const char * cst[5] =
4907 {
4908 "0.3010299956639811952256464283594894482", /* 0: fldlg2 */
4909 "0.6931471805599453094286904741849753009", /* 1: fldln2 */
4910 "1.4426950408889634073876517827983434472", /* 2: fldl2e */
4911 "3.3219280948873623478083405569094566090", /* 3: fldl2t */
4912 "3.1415926535897932385128089594061862044", /* 4: fldpi */
4913 };
4914 int i;
4915
4916 for (i = 0; i < 5; i++)
4917 {
4918 real_from_string (&ext_80387_constants_table[i], cst[i]);
4919 /* Ensure each constant is rounded to XFmode precision. */
4920 real_convert (&ext_80387_constants_table[i],
4921 XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)), &ext_80387_constants_table[i]);
4922 }
4923
4924 ext_80387_constants_init = 1;
4925}
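/* For reference, not part of i386.c: the five strings above are, in order,
   log10(2), ln(2), log2(e), log2(10) and pi, the values pushed by the x87
   fldlg2, fldln2, fldl2e, fldl2t and fldpi instructions named in the
   comments.  A quick stand-alone check (illustrative only):  */

#include <math.h>
#include <stdio.h>

int
main (void)
{
  printf ("%.19f\n%.19f\n%.19f\n%.19f\n%.19f\n",
          log10 (2.0),       /* fldlg2 */
          log (2.0),         /* fldln2 */
          log2 (exp (1.0)),  /* fldl2e */
          log2 (10.0),       /* fldl2t */
          acos (-1.0));      /* fldpi  */
  return 0;
}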
4926
4927/* Return non-zero if the constant is something that
4928 can be loaded with a special instruction. */
4929
4930int
4931standard_80387_constant_p (rtx x)
4932{
4933 machine_mode mode = GET_MODE (x)((machine_mode) (x)->mode);
4934
4935 const REAL_VALUE_TYPEstruct real_value *r;
4936
4937 if (!(CONST_DOUBLE_P (x)(((enum rtx_code) (x)->code) == CONST_DOUBLE) && X87_FLOAT_MODE_P (mode)(((global_options.x_target_flags & (1U << 1)) != 0)
&& ((mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_SFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_DFmode)) || (mode) == (scalar_float_mode ((scalar_float_mode
::from_int) E_XFmode))))
))
4938 return -1;
4939
4940 if (x == CONST0_RTX (mode)(const_tiny_rtx[0][(int) (mode)]))
4941 return 1;
4942 if (x == CONST1_RTX (mode)(const_tiny_rtx[1][(int) (mode)]))
4943 return 2;
4944
4945 r = CONST_DOUBLE_REAL_VALUE (x)((const struct real_value *) (&(x)->u.rv));
4946
4947 /* For XFmode constants, try to find a special 80387 instruction when
4948 optimizing for size or on those CPUs that benefit from them. */
4949 if (mode == XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode))
4950 && (optimize_function_for_size_p (cfun(cfun + 0)) || TARGET_EXT_80387_CONSTANTSix86_tune_features[X86_TUNE_EXT_80387_CONSTANTS]))
4951 {
4952 int i;
4953
4954 if (! ext_80387_constants_init)
4955 init_ext_80387_constants ();
4956
4957 for (i = 0; i < 5; i++)
4958 if (real_identical (r, &ext_80387_constants_table[i]))
4959 return i + 3;
4960 }
4961
4962 /* Load of the constant -0.0 or -1.0 will be split as
4963 fldz;fchs or fld1;fchs sequence. */
4964 if (real_isnegzero (r))
4965 return 8;
4966 if (real_identical (r, &dconstm1))
4967 return 9;
4968
4969 return 0;
4970}
4971
4972/* Return the opcode of the special instruction to be used to load
4973 the constant X. */
4974
4975const char *
4976standard_80387_constant_opcode (rtx x)
4977{
4978 switch (standard_80387_constant_p (x))
4979 {
4980 case 1:
4981 return "fldz";
4982 case 2:
4983 return "fld1";
4984 case 3:
4985 return "fldlg2";
4986 case 4:
4987 return "fldln2";
4988 case 5:
4989 return "fldl2e";
4990 case 6:
4991 return "fldl2t";
4992 case 7:
4993 return "fldpi";
4994 case 8:
4995 case 9:
4996 return "#";
4997 default:
4998 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 4998, __FUNCTION__))
;
4999 }
5000}
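/* Illustrative sketch, not part of i386.c: how the two routines above are
   meant to be paired by a caller.  The function name is hypothetical; the
   return-value mapping (1 -> fldz, 2 -> fld1, 3..7 -> the extended constant
   table, 8/9 -> "#", split later into fldz/fld1 followed by fchs) is the
   one encoded in the switch above.  */

static const char *
emit_x87_constant_load_sketch (rtx x)
{
  int idx = standard_80387_constant_p (x);

  if (idx <= 0)   /* not loadable with a single special instruction */
    return NULL;

  return standard_80387_constant_opcode (x);
}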
5001
5002/* Return the CONST_DOUBLE representing the 80387 constant that is
5003 loaded by the specified special instruction. The argument IDX
5004 matches the return value from standard_80387_constant_p. */
5005
5006rtx
5007standard_80387_constant_rtx (int idx)
5008{
5009 int i;
5010
5011 if (! ext_80387_constants_init)
5012 init_ext_80387_constants ();
5013
5014 switch (idx)
5015 {
5016 case 3:
5017 case 4:
5018 case 5:
5019 case 6:
5020 case 7:
5021 i = idx - 3;
5022 break;
5023
5024 default:
5025 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5025, __FUNCTION__))
;
5026 }
5027
5028 return const_double_from_real_value (ext_80387_constants_table[i],
5029 XFmode(scalar_float_mode ((scalar_float_mode::from_int) E_XFmode)));
5030}
5031
5032/* Return 1 if X is all bits 0 and 2 if X is all bits 1
 5033 in a supported SSE/AVX vector mode. */
5034
5035int
5036standard_sse_constant_p (rtx x, machine_mode pred_mode)
5037{
5038 machine_mode mode;
5039
5040 if (!TARGET_SSE((global_options.x_ix86_isa_flags & (1UL << 50)) !=
0)
)
5041 return 0;
5042
5043 mode = GET_MODE (x)((machine_mode) (x)->mode);
5044
5045 if (x == const0_rtx(const_int_rtx[64]) || const0_operand (x, mode))
5046 return 1;
5047
5048 if (x == constm1_rtx(const_int_rtx[64 -1]) || vector_all_ones_operand (x, mode))
5049 {
5050 /* VOIDmode integer constant, get mode from the predicate. */
5051 if (mode == VOIDmode((void) 0, E_VOIDmode))
5052 mode = pred_mode;
5053
5054 switch (GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]))
5055 {
5056 case 64:
5057 if (TARGET_AVX512F((global_options.x_ix86_isa_flags & (1UL << 15)) !=
0)
)
5058 return 2;
5059 break;
5060 case 32:
5061 if (TARGET_AVX2((global_options.x_ix86_isa_flags & (1UL << 9)) != 0
)
)
5062 return 2;
5063 break;
5064 case 16:
5065 if (TARGET_SSE2((global_options.x_ix86_isa_flags & (1UL << 51)) !=
0)
)
5066 return 2;
5067 break;
5068 case 0:
5069 /* VOIDmode */
5070 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5070, __FUNCTION__))
;
5071 default:
5072 break;
5073 }
5074 }
5075
5076 return 0;
5077}
5078
5079/* Return the opcode of the special instruction to be used to load
5080 the constant operands[1] into operands[0]. */
5081
5082const char *
5083standard_sse_constant_opcode (rtx_insn *insn, rtx *operands)
5084{
5085 machine_mode mode;
5086 rtx x = operands[1];
5087
5088 gcc_assert (TARGET_SSE)((void)(!(((global_options.x_ix86_isa_flags & (1UL <<
50)) != 0)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5088, __FUNCTION__), 0 : 0))
;
5089
5090 mode = GET_MODE (x)((machine_mode) (x)->mode);
5091
5092 if (x == const0_rtx(const_int_rtx[64]) || const0_operand (x, mode))
5093 {
5094 switch (get_attr_mode (insn))
5095 {
5096 case MODE_TI:
5097 if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) &&
((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long
) (52) <= (unsigned long) (67) - (unsigned long) (52)))
)
5098 return "%vpxor\t%0, %d0";
5099 /* FALLTHRU */
5100 case MODE_XI:
5101 case MODE_OI:
5102 if (EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) &&
((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long
) (52) <= (unsigned long) (67) - (unsigned long) (52)))
)
5103 return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) !=
0)
5104 ? "vpxord\t%x0, %x0, %x0"
5105 : "vpxord\t%g0, %g0, %g0");
5106 return "vpxor\t%x0, %x0, %x0";
5107
5108 case MODE_V2DF:
5109 if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) &&
((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long
) (52) <= (unsigned long) (67) - (unsigned long) (52)))
)
5110 return "%vxorpd\t%0, %d0";
5111 /* FALLTHRU */
5112 case MODE_V8DF:
5113 case MODE_V4DF:
5114 if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) &&
((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long
) (52) <= (unsigned long) (67) - (unsigned long) (52)))
)
5115 return "vxorpd\t%x0, %x0, %x0";
5116 else if (TARGET_AVX512DQ((global_options.x_ix86_isa_flags & (1UL << 13)) !=
0)
)
5117 return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) !=
0)
5118 ? "vxorpd\t%x0, %x0, %x0"
5119 : "vxorpd\t%g0, %g0, %g0");
5120 else
5121 return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) !=
0)
5122 ? "vpxorq\t%x0, %x0, %x0"
5123 : "vpxorq\t%g0, %g0, %g0");
5124
5125 case MODE_V4SF:
5126 if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) &&
((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long
) (52) <= (unsigned long) (67) - (unsigned long) (52)))
)
5127 return "%vxorps\t%0, %d0";
5128 /* FALLTHRU */
5129 case MODE_V16SF:
5130 case MODE_V8SF:
5131 if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) &&
((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long
) (52) <= (unsigned long) (67) - (unsigned long) (52)))
)
5132 return "vxorps\t%x0, %x0, %x0";
5133 else if (TARGET_AVX512DQ((global_options.x_ix86_isa_flags & (1UL << 13)) !=
0)
)
5134 return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) !=
0)
5135 ? "vxorps\t%x0, %x0, %x0"
5136 : "vxorps\t%g0, %g0, %g0");
5137 else
5138 return (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) !=
0)
5139 ? "vpxord\t%x0, %x0, %x0"
5140 : "vpxord\t%g0, %g0, %g0");
5141
5142 default:
5143 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5143, __FUNCTION__))
;
5144 }
5145 }
5146 else if (x == constm1_rtx(const_int_rtx[64 -1]) || vector_all_ones_operand (x, mode))
5147 {
5148 enum attr_mode insn_mode = get_attr_mode (insn);
5149
5150 switch (insn_mode)
5151 {
5152 case MODE_XI:
5153 case MODE_V8DF:
5154 case MODE_V16SF:
5155 gcc_assert (TARGET_AVX512F)((void)(!(((global_options.x_ix86_isa_flags & (1UL <<
15)) != 0)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5155, __FUNCTION__), 0 : 0))
;
5156 return "vpternlogd\t{$0xFF, %g0, %g0, %g0|%g0, %g0, %g0, 0xFF}";
5157
5158 case MODE_OI:
5159 case MODE_V4DF:
5160 case MODE_V8SF:
5161 gcc_assert (TARGET_AVX2)((void)(!(((global_options.x_ix86_isa_flags & (1UL <<
9)) != 0)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5161, __FUNCTION__), 0 : 0))
;
5162 /* FALLTHRU */
5163 case MODE_TI:
5164 case MODE_V2DF:
5165 case MODE_V4SF:
5166 gcc_assert (TARGET_SSE2)((void)(!(((global_options.x_ix86_isa_flags & (1UL <<
51)) != 0)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5166, __FUNCTION__), 0 : 0))
;
5167 if (!EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) &&
((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long
) (52) <= (unsigned long) (67) - (unsigned long) (52)))
)
5168 return (TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0
)
5169 ? "vpcmpeqd\t%0, %0, %0"
5170 : "pcmpeqd\t%0, %0");
5171 else if (TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) !=
0)
)
5172 return "vpternlogd\t{$0xFF, %0, %0, %0|%0, %0, %0, 0xFF}";
5173 else
5174 return "vpternlogd\t{$0xFF, %g0, %g0, %g0|%g0, %g0, %g0, 0xFF}";
5175
5176 default:
5177 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5177, __FUNCTION__))
;
5178 }
5179 }
5180
5181 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5181, __FUNCTION__))
;
5182}
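/* Illustrative sketch, not part of i386.c: the same two idioms the function
   above emits, expressed with SSE2 intrinsics.  A register is zeroed by
   XOR-ing it with itself and set to all-ones by a compare-equal with itself
   (or by vpternlogd with an all-ones immediate for the EVEX-only registers);
   typical compilers lower the two helpers below to exactly those forms.  */

#include <emmintrin.h>

static inline __m128i
sse_zero_sketch (void)
{
  return _mm_setzero_si128 ();  /* usually assembles to pxor/vpxor x,x */
}

static inline __m128i
sse_all_ones_sketch (void)
{
  return _mm_set1_epi32 (-1);   /* usually assembles to pcmpeqd x,x */
}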
5183
5184/* Returns true if INSN can be transformed from a memory load
5185 to a supported FP constant load. */
5186
5187bool
5188ix86_standard_x87sse_constant_load_p (const rtx_insn *insn, rtx dst)
5189{
5190 rtx src = find_constant_src (insn);
5191
5192 gcc_assert (REG_P (dst))((void)(!((((enum rtx_code) (dst)->code) == REG)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5192, __FUNCTION__), 0 : 0))
;
5193
5194 if (src == NULL__null
5195 || (SSE_REGNO_P (REGNO (dst))(((unsigned long) (((rhs_regno(dst)))) - (unsigned long) (20)
<= (unsigned long) (27) - (unsigned long) (20)) || ((unsigned
long) (((rhs_regno(dst)))) - (unsigned long) (44) <= (unsigned
long) (51) - (unsigned long) (44)) || ((unsigned long) (((rhs_regno
(dst)))) - (unsigned long) (52) <= (unsigned long) (67) - (
unsigned long) (52)))
5196 && standard_sse_constant_p (src, GET_MODE (dst)((machine_mode) (dst)->mode)) != 1)
5197 || (STACK_REGNO_P (REGNO (dst))((unsigned long) (((rhs_regno(dst)))) - (unsigned long) (8) <=
(unsigned long) (15) - (unsigned long) (8))
5198 && standard_80387_constant_p (src) < 1))
5199 return false;
5200
5201 return true;
5202}
5203
5204/* Predicate for pre-reload splitters with associated instructions,
5205 which can match any time before the split1 pass (usually combine),
5206 then are unconditionally split in that pass and should not be
5207 matched again afterwards. */
5208
5209bool
5210ix86_pre_reload_split (void)
5211{
5212 return (can_create_pseudo_p ()(!reload_in_progress && !reload_completed)
5213 && !(cfun(cfun + 0)->curr_properties & PROP_rtl_split_insns(1 << 17)));
5214}
5215
5216/* Return the opcode of the TYPE_SSEMOV instruction. To move from
5217 or to xmm16-xmm31/ymm16-ymm31 registers, we either require
 5218 TARGET_AVX512VL or the move must be a register-to-register move,
 5219 which can be done with a zmm register move. */
5220
5221static const char *
5222ix86_get_ssemov (rtx *operands, unsigned size,
5223 enum attr_mode insn_mode, machine_mode mode)
5224{
5225 char buf[128];
5226 bool misaligned_p = (misaligned_operand (operands[0], mode)
5227 || misaligned_operand (operands[1], mode));
5228 bool evex_reg_p = (size == 64
5229 || EXT_REX_SSE_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) &&
((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long
) (52) <= (unsigned long) (67) - (unsigned long) (52)))
5230 || EXT_REX_SSE_REG_P (operands[1])((((enum rtx_code) (operands[1])->code) == REG) &&
((unsigned long) (((rhs_regno(operands[1])))) - (unsigned long
) (52) <= (unsigned long) (67) - (unsigned long) (52)))
);
5231 machine_mode scalar_mode;
5232
5233 const char *opcode = NULL__null;
5234 enum
5235 {
5236 opcode_int,
5237 opcode_float,
5238 opcode_double
5239 } type = opcode_int;
5240
5241 switch (insn_mode)
5242 {
5243 case MODE_V16SF:
5244 case MODE_V8SF:
5245 case MODE_V4SF:
5246 scalar_mode = E_SFmode;
5247 type = opcode_float;
5248 break;
5249 case MODE_V8DF:
5250 case MODE_V4DF:
5251 case MODE_V2DF:
5252 scalar_mode = E_DFmode;
5253 type = opcode_double;
5254 break;
5255 case MODE_XI:
5256 case MODE_OI:
5257 case MODE_TI:
5258 scalar_mode = GET_MODE_INNER (mode)(mode_to_inner (mode));
5259 break;
5260 default:
5261 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5261, __FUNCTION__))
;
5262 }
5263
5264 /* NB: To move xmm16-xmm31/ymm16-ymm31 registers without AVX512VL,
 5265 we can only use a zmm register move with no memory operand. */
5266 if (evex_reg_p
5267 && !TARGET_AVX512VL((global_options.x_ix86_isa_flags & (1UL << 20)) !=
0)
5268 && GET_MODE_SIZE (mode)((unsigned short) mode_to_bytes (mode).coeffs[0]) < 64)
5269 {
5270 /* NB: Even though ix86_hard_regno_mode_ok doesn't allow
 5271 xmm16-xmm31 or ymm16-ymm31 in 128/256 bit modes when
5272 AVX512VL is disabled, LRA can still generate reg to
5273 reg moves with xmm16-xmm31 and ymm16-ymm31 in 128/256 bit
5274 modes. */
5275 if (memory_operand (operands[0], mode)
5276 || memory_operand (operands[1], mode))
5277 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5277, __FUNCTION__))
;
5278 size = 64;
5279 switch (type)
5280 {
5281 case opcode_int:
5282 opcode = misaligned_p ? "vmovdqu32" : "vmovdqa32";
5283 break;
5284 case opcode_float:
5285 opcode = misaligned_p ? "vmovups" : "vmovaps";
5286 break;
5287 case opcode_double:
5288 opcode = misaligned_p ? "vmovupd" : "vmovapd";
5289 break;
5290 }
5291 }
5292 else if (SCALAR_FLOAT_MODE_P (scalar_mode)(((enum mode_class) mode_class[scalar_mode]) == MODE_FLOAT ||
((enum mode_class) mode_class[scalar_mode]) == MODE_DECIMAL_FLOAT
)
)
5293 {
5294 switch (scalar_mode)
5295 {
5296 case E_SFmode:
5297 opcode = misaligned_p ? "%vmovups" : "%vmovaps";
5298 break;
5299 case E_DFmode:
5300 opcode = misaligned_p ? "%vmovupd" : "%vmovapd";
5301 break;
5302 case E_TFmode:
5303 if (evex_reg_p)
5304 opcode = misaligned_p ? "vmovdqu64" : "vmovdqa64";
5305 else
5306 opcode = misaligned_p ? "%vmovdqu" : "%vmovdqa";
5307 break;
5308 default:
5309 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5309, __FUNCTION__))
;
5310 }
5311 }
5312 else if (SCALAR_INT_MODE_P (scalar_mode)(((enum mode_class) mode_class[scalar_mode]) == MODE_INT || (
(enum mode_class) mode_class[scalar_mode]) == MODE_PARTIAL_INT
)
)
5313 {
5314 switch (scalar_mode)
5315 {
5316 case E_QImode:
5317 if (evex_reg_p)
5318 opcode = (misaligned_p
5319 ? (TARGET_AVX512BW((global_options.x_ix86_isa_flags & (1UL << 11)) !=
0)
5320 ? "vmovdqu8"
5321 : "vmovdqu64")
5322 : "vmovdqa64");
5323 else
5324 opcode = (misaligned_p
5325 ? (TARGET_AVX512BW((global_options.x_ix86_isa_flags & (1UL << 11)) !=
0)
5326 ? "vmovdqu8"
5327 : "%vmovdqu")
5328 : "%vmovdqa");
5329 break;
5330 case E_HImode:
5331 if (evex_reg_p)
5332 opcode = (misaligned_p
5333 ? (TARGET_AVX512BW((global_options.x_ix86_isa_flags & (1UL << 11)) !=
0)
5334 ? "vmovdqu16"
5335 : "vmovdqu64")
5336 : "vmovdqa64");
5337 else
5338 opcode = (misaligned_p
5339 ? (TARGET_AVX512BW((global_options.x_ix86_isa_flags & (1UL << 11)) !=
0)
5340 ? "vmovdqu16"
5341 : "%vmovdqu")
5342 : "%vmovdqa");
5343 break;
5344 case E_SImode:
5345 if (evex_reg_p)
5346 opcode = misaligned_p ? "vmovdqu32" : "vmovdqa32";
5347 else
5348 opcode = misaligned_p ? "%vmovdqu" : "%vmovdqa";
5349 break;
5350 case E_DImode:
5351 case E_TImode:
5352 case E_OImode:
5353 if (evex_reg_p)
5354 opcode = misaligned_p ? "vmovdqu64" : "vmovdqa64";
5355 else
5356 opcode = misaligned_p ? "%vmovdqu" : "%vmovdqa";
5357 break;
5358 case E_XImode:
5359 opcode = misaligned_p ? "vmovdqu64" : "vmovdqa64";
5360 break;
5361 default:
5362 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5362, __FUNCTION__))
;
5363 }
5364 }
5365 else
5366 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5366, __FUNCTION__))
;
5367
5368 switch (size)
5369 {
5370 case 64:
5371 snprintf (buf, sizeof (buf), "%s\t{%%g1, %%g0|%%g0, %%g1}",
5372 opcode);
5373 break;
5374 case 32:
5375 snprintf (buf, sizeof (buf), "%s\t{%%t1, %%t0|%%t0, %%t1}",
5376 opcode);
5377 break;
5378 case 16:
5379 snprintf (buf, sizeof (buf), "%s\t{%%x1, %%x0|%%x0, %%x1}",
5380 opcode);
5381 break;
5382 default:
5383 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5383, __FUNCTION__))
;
5384 }
5385 output_asm_insn (buf, operands);
5386 return "";
5387}
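/* Worked example, not part of i386.c: for an aligned register-to-register
   V4SF move (insn_mode == MODE_V4SF, size == 16, neither operand in
   xmm16-xmm31), the switches above select opcode "%vmovaps" and the 16-byte
   operand template, so output_asm_insn is handed

     %vmovaps\t{%x1, %x0|%x0, %x1}

   where "%v" prints the "v" prefix only when AVX is enabled and the
   {...|...} braces select the AT&T or Intel operand order.  */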
5388
5389/* Return the template of the TYPE_SSEMOV instruction to move
5390 operands[1] into operands[0]. */
5391
5392const char *
5393ix86_output_ssemov (rtx_insn *insn, rtx *operands)
5394{
5395 machine_mode mode = GET_MODE (operands[0])((machine_mode) (operands[0])->mode);
5396 if (get_attr_type (insn) != TYPE_SSEMOV
5397 || mode != GET_MODE (operands[1])((machine_mode) (operands[1])->mode))
5398 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5398, __FUNCTION__))
;
5399
5400 enum attr_mode insn_mode = get_attr_mode (insn);
5401
5402 switch (insn_mode)
5403 {
5404 case MODE_XI:
5405 case MODE_V8DF:
5406 case MODE_V16SF:
5407 return ix86_get_ssemov (operands, 64, insn_mode, mode);
5408
5409 case MODE_OI:
5410 case MODE_V4DF:
5411 case MODE_V8SF:
5412 return ix86_get_ssemov (operands, 32, insn_mode, mode);
5413
5414 case MODE_TI:
5415 case MODE_V2DF:
5416 case MODE_V4SF:
5417 return ix86_get_ssemov (operands, 16, insn_mode, mode);
5418
5419 case MODE_DI:
5420 /* Handle broken assemblers that require movd instead of movq. */
5421 if (!HAVE_AS_IX86_INTERUNIT_MOVQ1
5422 && (GENERAL_REG_P (operands[0])((((enum rtx_code) (operands[0])->code) == REG) &&
((((unsigned long) (((rhs_regno(operands[0])))) - (unsigned long
) (0) <= (unsigned long) (7) - (unsigned long) (0))) || ((
unsigned long) (((rhs_regno(operands[0])))) - (unsigned long)
(36) <= (unsigned long) (43) - (unsigned long) (36))))
5423 || GENERAL_REG_P (operands[1])((((enum rtx_code) (operands[1])->code) == REG) &&
((((unsigned long) (((rhs_regno(operands[1])))) - (unsigned long
) (0) <= (unsigned long) (7) - (unsigned long) (0))) || ((
unsigned long) (((rhs_regno(operands[1])))) - (unsigned long)
(36) <= (unsigned long) (43) - (unsigned long) (36))))
))
5424 return "%vmovd\t{%1, %0|%0, %1}";
5425 else
5426 return "%vmovq\t{%1, %0|%0, %1}";
5427
5428 case MODE_SI:
5429 return "%vmovd\t{%1, %0|%0, %1}";
5430
5431 case MODE_DF:
5432 if (TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0
)
&& REG_P (operands[0])(((enum rtx_code) (operands[0])->code) == REG) && REG_P (operands[1])(((enum rtx_code) (operands[1])->code) == REG))
5433 return "vmovsd\t{%d1, %0|%0, %d1}";
5434 else
5435 return "%vmovsd\t{%1, %0|%0, %1}";
5436
5437 case MODE_SF:
5438 if (TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0
)
&& REG_P (operands[0])(((enum rtx_code) (operands[0])->code) == REG) && REG_P (operands[1])(((enum rtx_code) (operands[1])->code) == REG))
5439 return "vmovss\t{%d1, %0|%0, %d1}";
5440 else
5441 return "%vmovss\t{%1, %0|%0, %1}";
5442
5443 case MODE_V1DF:
5444 gcc_assert (!TARGET_AVX)((void)(!(!((global_options.x_ix86_isa_flags & (1UL <<
8)) != 0)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/config/i386/i386.c"
, 5444, __FUNCTION__), 0 : 0))
;
5445 return "movlpd\t{%1, %0|%0, %1}";
5446
5447 case MODE_V2SF:
5448 if (TARGET_AVX((global_options.x_ix86_isa_flags & (1UL << 8)) != 0
)
&& REG_P (operands[0])(((enum rtx_code) (operands[0])->code) == REG))
5449 return "vmovlps\t{%1, %d0|%d0, %1}";
5450 else
5451 return "%vmovlps\t{%1, %0|%0, %1}";
5452