Bug Summary

File: build/gcc/tree-vect-stmts.c
Warning: line 7419, column 13
Although the value stored to 'vec_mask' is used in the enclosing expression, the value is never actually read from 'vec_mask'

Annotated Source Code

Press '?' to see keyboard shortcuts

clang -cc1 -cc1 -triple x86_64-unknown-linux-gnu -analyze -disable-free -disable-llvm-verifier -discard-value-names -main-file-name tree-vect-stmts.c -analyzer-store=region -analyzer-opt-analyze-nested-blocks -analyzer-checker=core -analyzer-checker=apiModeling -analyzer-checker=unix -analyzer-checker=deadcode -analyzer-checker=cplusplus -analyzer-checker=security.insecureAPI.UncheckedReturn -analyzer-checker=security.insecureAPI.getpw -analyzer-checker=security.insecureAPI.gets -analyzer-checker=security.insecureAPI.mktemp -analyzer-checker=security.insecureAPI.mkstemp -analyzer-checker=security.insecureAPI.vfork -analyzer-checker=nullability.NullPassedToNonnull -analyzer-checker=nullability.NullReturnedFromNonnull -analyzer-output plist -w -setup-static-analyzer -analyzer-config-compatibility-mode=true -mrelocation-model static -mframe-pointer=none -fmath-errno -fno-rounding-math -mconstructor-aliases -munwind-tables -target-cpu x86-64 -fno-split-dwarf-inlining -debugger-tuning=gdb -resource-dir /usr/lib64/clang/11.0.0 -D IN_GCC -D HAVE_CONFIG_H -I . -I . -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/. 
-I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcpp/include -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libcody -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libdecnumber/bid -I ../libdecnumber -I /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/../libbacktrace -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10 -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/x86_64-suse-linux -internal-isystem /usr/bin/../lib64/gcc/x86_64-suse-linux/10/../../../../include/c++/10/backward -internal-isystem /usr/local/include -internal-isystem /usr/lib64/clang/11.0.0/include -internal-externc-isystem /include -internal-externc-isystem /usr/include -O2 -Wno-narrowing -Wwrite-strings -Wno-error=format-diag -Wno-long-long -Wno-variadic-macros -Wno-overlength-strings -fdeprecated-macro -fdebug-compilation-dir /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/gcc -ferror-limit 19 -fno-rtti -fgnuc-version=4.2.1 -vectorize-loops -vectorize-slp -analyzer-output=plist-html -analyzer-config silence-checkers=core.NullDereference -faddrsig -o /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/objdir/clang-static-analyzer/2021-01-16-135054-17580-1/report-CmKtJD.plist -x c++ /home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c
1/* Statement Analysis and Transformation for Vectorization
2 Copyright (C) 2003-2021 Free Software Foundation, Inc.
3 Contributed by Dorit Naishlos <dorit@il.ibm.com>
4 and Ira Rosen <irar@il.ibm.com>
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "backend.h"
26#include "target.h"
27#include "rtl.h"
28#include "tree.h"
29#include "gimple.h"
30#include "ssa.h"
31#include "optabs-tree.h"
32#include "insn-config.h"
33#include "recog.h" /* FIXME: for insn_data */
34#include "cgraph.h"
35#include "dumpfile.h"
36#include "alias.h"
37#include "fold-const.h"
38#include "stor-layout.h"
39#include "tree-eh.h"
40#include "gimplify.h"
41#include "gimple-iterator.h"
42#include "gimplify-me.h"
43#include "tree-cfg.h"
44#include "tree-ssa-loop-manip.h"
45#include "cfgloop.h"
46#include "explow.h"
47#include "tree-ssa-loop.h"
48#include "tree-scalar-evolution.h"
49#include "tree-vectorizer.h"
50#include "builtins.h"
51#include "internal-fn.h"
52#include "tree-vector-builder.h"
53#include "vec-perm-indices.h"
54#include "tree-ssa-loop-niter.h"
55#include "gimple-fold.h"
56#include "regs.h"
57#include "attribs.h"
58
59/* For lang_hooks.types.type_for_mode. */
60#include "langhooks.h"
61
62/* Return the vectorized type for the given statement. */
63
64tree
65stmt_vectype (class _stmt_vec_info *stmt_info)
66{
67 return STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
68}
69
70/* Return TRUE iff the given statement is in an inner loop relative to
71 the loop being vectorized. */
72bool
73stmt_in_inner_loop_p (vec_info *vinfo, class _stmt_vec_info *stmt_info)
74{
75 gimple *stmt = STMT_VINFO_STMT (stmt_info)(stmt_info)->stmt;
76 basic_block bb = gimple_bb (stmt);
77 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
78 class loop* loop;
79
80 if (!loop_vinfo)
81 return false;
82
83 loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
84
85 return (bb->loop_father == loop->inner);
86}
87
88/* Record the cost of a statement, either by directly informing the
89 target model or by saving it in a vector for later processing.
90 Return a preliminary estimate of the statement's cost. */
91
92unsigned
93record_stmt_cost (stmt_vector_for_cost *body_cost_vec, int count,
94 enum vect_cost_for_stmt kind, stmt_vec_info stmt_info,
95 tree vectype, int misalign,
96 enum vect_cost_model_location where)
97{
98 if ((kind == vector_load || kind == unaligned_load)
99 && (stmt_info && STMT_VINFO_GATHER_SCATTER_P (stmt_info)(stmt_info)->gather_scatter_p))
100 kind = vector_gather_load;
101 if ((kind == vector_store || kind == unaligned_store)
102 && (stmt_info && STMT_VINFO_GATHER_SCATTER_P (stmt_info)(stmt_info)->gather_scatter_p))
103 kind = vector_scatter_store;
104
105 stmt_info_for_cost si = { count, kind, where, stmt_info, vectype, misalign };
106 body_cost_vec->safe_push (si);
107
108 return (unsigned)
109 (builtin_vectorization_cost (kind, vectype, misalign) * count);
110}
111
112/* Return a variable of type ELEM_TYPE[NELEMS]. */
113
114static tree
115create_vector_array (tree elem_type, unsigned HOST_WIDE_INTlong nelems)
116{
117 return create_tmp_var (build_array_type_nelts (elem_type, nelems),
118 "vect_array");
119}
120
121/* ARRAY is an array of vectors created by create_vector_array.
122 Return an SSA_NAME for the vector in index N. The reference
123 is part of the vectorization of STMT_INFO and the vector is associated
124 with scalar destination SCALAR_DEST. */
125
126static tree
127read_vector_array (vec_info *vinfo,
128 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
129 tree scalar_dest, tree array, unsigned HOST_WIDE_INTlong n)
130{
131 tree vect_type, vect, vect_name, array_ref;
132 gimple *new_stmt;
133
134 gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE)((void)(!(((enum tree_code) (((contains_struct_check ((array)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 134, __FUNCTION__))->typed.type))->base.code) == ARRAY_TYPE
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 134, __FUNCTION__), 0 : 0))
;
135 vect_type = TREE_TYPE (TREE_TYPE (array))((contains_struct_check ((((contains_struct_check ((array), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 135, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 135, __FUNCTION__))->typed.type)
;
136 vect = vect_create_destination_var (scalar_dest, vect_type);
137 array_ref = build4 (ARRAY_REF, vect_type, array,
138 build_int_cst (size_type_nodeglobal_trees[TI_SIZE_TYPE], n),
139 NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr);
140
141 new_stmt = gimple_build_assign (vect, array_ref);
142 vect_name = make_ssa_name (vect, new_stmt);
143 gimple_assign_set_lhs (new_stmt, vect_name);
144 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
145
146 return vect_name;
147}
148
149/* ARRAY is an array of vectors created by create_vector_array.
150 Emit code to store SSA_NAME VECT in index N of the array.
151 The store is part of the vectorization of STMT_INFO. */
152
153static void
154write_vector_array (vec_info *vinfo,
155 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
156 tree vect, tree array, unsigned HOST_WIDE_INTlong n)
157{
158 tree array_ref;
159 gimple *new_stmt;
160
161 array_ref = build4 (ARRAY_REF, TREE_TYPE (vect)((contains_struct_check ((vect), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 161, __FUNCTION__))->typed.type)
, array,
162 build_int_cst (size_type_nodeglobal_trees[TI_SIZE_TYPE], n),
163 NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr);
164
165 new_stmt = gimple_build_assign (array_ref, vect);
166 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
167}
168
169/* PTR is a pointer to an array of type TYPE. Return a representation
170 of *PTR. The memory reference replaces those in FIRST_DR
171 (and its group). */
172
173static tree
174create_array_ref (tree type, tree ptr, tree alias_ptr_type)
175{
176 tree mem_ref;
177
178 mem_ref = build2 (MEM_REF, type, ptr, build_int_cst (alias_ptr_type, 0));
179 /* Arrays have the same alignment as their type. */
180 set_ptr_info_alignment (get_ptr_info (ptr), TYPE_ALIGN_UNIT (type)(((tree_class_check ((type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 180, __FUNCTION__))->type_common.align ? ((unsigned)1) <<
((type)->type_common.align - 1) : 0) / (8))
, 0);
181 return mem_ref;
182}
183
184/* Add a clobber of variable VAR to the vectorization of STMT_INFO.
185 Emit the clobber before *GSI. */
186
187static void
188vect_clobber_variable (vec_info *vinfo, stmt_vec_info stmt_info,
189 gimple_stmt_iterator *gsi, tree var)
190{
191 tree clobber = build_clobber (TREE_TYPE (var)((contains_struct_check ((var), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 191, __FUNCTION__))->typed.type)
);
192 gimple *new_stmt = gimple_build_assign (var, clobber);
193 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
194}
195
196/* Utility functions used by vect_mark_stmts_to_be_vectorized. */
197
198/* Function vect_mark_relevant.
199
200 Mark STMT_INFO as "relevant for vectorization" and add it to WORKLIST. */
201
202static void
203vect_mark_relevant (vec<stmt_vec_info> *worklist, stmt_vec_info stmt_info,
204 enum vect_relevant relevant, bool live_p)
205{
206 enum vect_relevant save_relevant = STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant;
207 bool save_live_p = STMT_VINFO_LIVE_P (stmt_info)(stmt_info)->live;
208
209 if (dump_enabled_p ())
210 dump_printf_loc (MSG_NOTE, vect_location,
211 "mark relevant %d, live %d: %G", relevant, live_p,
212 stmt_info->stmt);
213
214 /* If this stmt is an original stmt in a pattern, we might need to mark its
215 related pattern stmt instead of the original stmt. However, such stmts
216 may have their own uses that are not in any pattern, in such cases the
217 stmt itself should be marked. */
218 if (STMT_VINFO_IN_PATTERN_P (stmt_info)(stmt_info)->in_pattern_p)
219 {
220 /* This is the last stmt in a sequence that was detected as a
221 pattern that can potentially be vectorized. Don't mark the stmt
222 as relevant/live because it's not going to be vectorized.
223 Instead mark the pattern-stmt that replaces it. */
224
225 if (dump_enabled_p ())
226 dump_printf_loc (MSG_NOTE, vect_location,
227 "last stmt in pattern. don't mark"
228 " relevant/live.\n");
229 stmt_vec_info old_stmt_info = stmt_info;
230 stmt_info = STMT_VINFO_RELATED_STMT (stmt_info)(stmt_info)->related_stmt;
231 gcc_assert (STMT_VINFO_RELATED_STMT (stmt_info) == old_stmt_info)((void)(!((stmt_info)->related_stmt == old_stmt_info) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 231, __FUNCTION__), 0 : 0))
;
232 save_relevant = STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant;
233 save_live_p = STMT_VINFO_LIVE_P (stmt_info)(stmt_info)->live;
234 }
235
236 STMT_VINFO_LIVE_P (stmt_info)(stmt_info)->live |= live_p;
237 if (relevant > STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant)
238 STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant = relevant;
239
240 if (STMT_VINFO_RELEVANT (stmt_info)(stmt_info)->relevant == save_relevant
241 && STMT_VINFO_LIVE_P (stmt_info)(stmt_info)->live == save_live_p)
242 {
243 if (dump_enabled_p ())
244 dump_printf_loc (MSG_NOTE, vect_location,
245 "already marked relevant/live.\n");
246 return;
247 }
248
249 worklist->safe_push (stmt_info);
250}
251
252
253/* Function is_simple_and_all_uses_invariant
254
255 Return true if STMT_INFO is simple and all uses of it are invariant. */
256
257bool
258is_simple_and_all_uses_invariant (stmt_vec_info stmt_info,
259 loop_vec_info loop_vinfo)
260{
261 tree op;
262 ssa_op_iter iter;
263
264 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
265 if (!stmt)
266 return false;
267
268 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)for (op = op_iter_init_tree (&(iter), stmt, 0x01); !op_iter_done
(&(iter)); (void) (op = op_iter_next_tree (&(iter)))
)
269 {
270 enum vect_def_type dt = vect_uninitialized_def;
271
272 if (!vect_is_simple_use (op, loop_vinfo, &dt))
273 {
274 if (dump_enabled_p ())
275 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
276 "use not simple.\n");
277 return false;
278 }
279
280 if (dt != vect_external_def && dt != vect_constant_def)
281 return false;
282 }
283 return true;
284}
285
286/* Function vect_stmt_relevant_p.
287
288 Return true if STMT_INFO, in the loop that is represented by LOOP_VINFO,
289 is "relevant for vectorization".
290
291 A stmt is considered "relevant for vectorization" if:
292 - it has uses outside the loop.
293 - it has vdefs (it alters memory).
294 - control stmts in the loop (except for the exit condition).
295
296 CHECKME: what other side effects would the vectorizer allow? */
297
298static bool
299vect_stmt_relevant_p (stmt_vec_info stmt_info, loop_vec_info loop_vinfo,
300 enum vect_relevant *relevant, bool *live_p)
301{
302 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
303 ssa_op_iter op_iter;
304 imm_use_iterator imm_iter;
305 use_operand_p use_p;
306 def_operand_p def_p;
307
308 *relevant = vect_unused_in_scope;
309 *live_p = false;
310
311 /* cond stmt other than loop exit cond. */
312 if (is_ctrl_stmt (stmt_info->stmt)
313 && STMT_VINFO_TYPE (stmt_info)(stmt_info)->type != loop_exit_ctrl_vec_info_type)
314 *relevant = vect_used_in_scope;
315
316 /* changing memory. */
317 if (gimple_code (stmt_info->stmt) != GIMPLE_PHI)
318 if (gimple_vdef (stmt_info->stmt)
319 && !gimple_clobber_p (stmt_info->stmt))
320 {
321 if (dump_enabled_p ())
322 dump_printf_loc (MSG_NOTE, vect_location,
323 "vec_stmt_relevant_p: stmt has vdefs.\n");
324 *relevant = vect_used_in_scope;
325 }
326
327 /* uses outside the loop. */
328 FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt_info->stmt, op_iter, SSA_OP_DEF)for ((def_p) = (gimple_code (stmt_info->stmt) == GIMPLE_PHI
? op_iter_init_phidef (&(op_iter), as_a <gphi *> (
stmt_info->stmt), 0x02) : op_iter_init_def (&(op_iter)
, stmt_info->stmt, 0x02)); !op_iter_done (&(op_iter));
(def_p) = op_iter_next_def (&(op_iter)))
329 {
330 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, DEF_FROM_PTR (def_p))for ((use_p) = first_readonly_imm_use (&(imm_iter), (get_def_from_ptr
(def_p))); !end_readonly_imm_use_p (&(imm_iter)); (void)
((use_p) = next_readonly_imm_use (&(imm_iter))))
331 {
332 basic_block bb = gimple_bb (USE_STMT (use_p)(use_p)->loc.stmt);
333 if (!flow_bb_inside_loop_p (loop, bb))
334 {
335 if (is_gimple_debug (USE_STMT (use_p)(use_p)->loc.stmt))
336 continue;
337
338 if (dump_enabled_p ())
339 dump_printf_loc (MSG_NOTE, vect_location,
340 "vec_stmt_relevant_p: used out of loop.\n");
341
342 /* We expect all such uses to be in the loop exit phis
343 (because of loop closed form) */
344 gcc_assert (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI)((void)(!(gimple_code ((use_p)->loc.stmt) == GIMPLE_PHI) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 344, __FUNCTION__), 0 : 0))
;
345 gcc_assert (bb == single_exit (loop)->dest)((void)(!(bb == single_exit (loop)->dest) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 345, __FUNCTION__), 0 : 0))
;
346
347 *live_p = true;
348 }
349 }
350 }
351
352 if (*live_p && *relevant == vect_unused_in_scope
353 && !is_simple_and_all_uses_invariant (stmt_info, loop_vinfo))
354 {
355 if (dump_enabled_p ())
356 dump_printf_loc (MSG_NOTE, vect_location,
357 "vec_stmt_relevant_p: stmt live but not relevant.\n");
358 *relevant = vect_used_only_live;
359 }
360
361 return (*live_p || *relevant);
362}
363
364
365/* Function exist_non_indexing_operands_for_use_p
366
367 USE is one of the uses attached to STMT_INFO. Check if USE is
368 used in STMT_INFO for anything other than indexing an array. */
369
370static bool
371exist_non_indexing_operands_for_use_p (tree use, stmt_vec_info stmt_info)
372{
373 tree operand;
374
375 /* USE corresponds to some operand in STMT. If there is no data
376 reference in STMT, then any operand that corresponds to USE
377 is not indexing an array. */
378 if (!STMT_VINFO_DATA_REF (stmt_info)((stmt_info)->dr_aux.dr + 0))
379 return true;
380
381 /* STMT has a data_ref. FORNOW this means that its of one of
382 the following forms:
383 -1- ARRAY_REF = var
384 -2- var = ARRAY_REF
385 (This should have been verified in analyze_data_refs).
386
387 'var' in the second case corresponds to a def, not a use,
388 so USE cannot correspond to any operands that are not used
389 for array indexing.
390
391 Therefore, all we need to check is if STMT falls into the
392 first case, and whether var corresponds to USE. */
393
394 gassign *assign = dyn_cast <gassign *> (stmt_info->stmt);
395 if (!assign || !gimple_assign_copy_p (assign))
396 {
397 gcall *call = dyn_cast <gcall *> (stmt_info->stmt);
398 if (call && gimple_call_internal_p (call))
399 {
400 internal_fn ifn = gimple_call_internal_fn (call);
401 int mask_index = internal_fn_mask_index (ifn);
402 if (mask_index >= 0
403 && use == gimple_call_arg (call, mask_index))
404 return true;
405 int stored_value_index = internal_fn_stored_value_index (ifn);
406 if (stored_value_index >= 0
407 && use == gimple_call_arg (call, stored_value_index))
408 return true;
409 if (internal_gather_scatter_fn_p (ifn)
410 && use == gimple_call_arg (call, 1))
411 return true;
412 }
413 return false;
414 }
415
416 if (TREE_CODE (gimple_assign_lhs (assign))((enum tree_code) (gimple_assign_lhs (assign))->base.code) == SSA_NAME)
417 return false;
418 operand = gimple_assign_rhs1 (assign);
419 if (TREE_CODE (operand)((enum tree_code) (operand)->base.code) != SSA_NAME)
420 return false;
421
422 if (operand == use)
423 return true;
424
425 return false;
426}
427
428
429/*
430 Function process_use.
431
432 Inputs:
433 - a USE in STMT_VINFO in a loop represented by LOOP_VINFO
434 - RELEVANT - enum value to be set in the STMT_VINFO of the stmt
435 that defined USE. This is done by calling mark_relevant and passing it
436 the WORKLIST (to add DEF_STMT to the WORKLIST in case it is relevant).
437 - FORCE is true if exist_non_indexing_operands_for_use_p check shouldn't
438 be performed.
439
440 Outputs:
441 Generally, LIVE_P and RELEVANT are used to define the liveness and
442 relevance info of the DEF_STMT of this USE:
443 STMT_VINFO_LIVE_P (DEF_stmt_vinfo) <-- live_p
444 STMT_VINFO_RELEVANT (DEF_stmt_vinfo) <-- relevant
445 Exceptions:
446 - case 1: If USE is used only for address computations (e.g. array indexing),
447 which does not need to be directly vectorized, then the liveness/relevance
448 of the respective DEF_STMT is left unchanged.
449 - case 2: If STMT_VINFO is a reduction phi and DEF_STMT is a reduction stmt,
450 we skip DEF_STMT cause it had already been processed.
451 - case 3: If DEF_STMT and STMT_VINFO are in different nests, then
452 "relevant" will be modified accordingly.
453
454 Return true if everything is as expected. Return false otherwise. */
455
456static opt_result
457process_use (stmt_vec_info stmt_vinfo, tree use, loop_vec_info loop_vinfo,
458 enum vect_relevant relevant, vec<stmt_vec_info> *worklist,
459 bool force)
460{
461 stmt_vec_info dstmt_vinfo;
462 enum vect_def_type dt;
463
464 /* case 1: we are only interested in uses that need to be vectorized. Uses
465 that are used for address computation are not considered relevant. */
466 if (!force && !exist_non_indexing_operands_for_use_p (use, stmt_vinfo))
467 return opt_result::success ();
468
469 if (!vect_is_simple_use (use, loop_vinfo, &dt, &dstmt_vinfo))
470 return opt_result::failure_at (stmt_vinfo->stmt,
471 "not vectorized:"
472 " unsupported use in stmt.\n");
473
474 if (!dstmt_vinfo)
475 return opt_result::success ();
476
477 basic_block def_bb = gimple_bb (dstmt_vinfo->stmt);
478 basic_block bb = gimple_bb (stmt_vinfo->stmt);
479
480 /* case 2: A reduction phi (STMT) defined by a reduction stmt (DSTMT_VINFO).
481 We have to force the stmt live since the epilogue loop needs it to
482 continue computing the reduction. */
483 if (gimple_code (stmt_vinfo->stmt) == GIMPLE_PHI
484 && STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_reduction_def
485 && gimple_code (dstmt_vinfo->stmt) != GIMPLE_PHI
486 && STMT_VINFO_DEF_TYPE (dstmt_vinfo)(dstmt_vinfo)->def_type == vect_reduction_def
487 && bb->loop_father == def_bb->loop_father)
488 {
489 if (dump_enabled_p ())
490 dump_printf_loc (MSG_NOTE, vect_location,
491 "reduc-stmt defining reduc-phi in the same nest.\n");
492 vect_mark_relevant (worklist, dstmt_vinfo, relevant, true);
493 return opt_result::success ();
494 }
495
496 /* case 3a: outer-loop stmt defining an inner-loop stmt:
497 outer-loop-header-bb:
498 d = dstmt_vinfo
499 inner-loop:
500 stmt # use (d)
501 outer-loop-tail-bb:
502 ... */
503 if (flow_loop_nested_p (def_bb->loop_father, bb->loop_father))
504 {
505 if (dump_enabled_p ())
506 dump_printf_loc (MSG_NOTE, vect_location,
507 "outer-loop def-stmt defining inner-loop stmt.\n");
508
509 switch (relevant)
510 {
511 case vect_unused_in_scope:
512 relevant = (STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_nested_cycle) ?
513 vect_used_in_scope : vect_unused_in_scope;
514 break;
515
516 case vect_used_in_outer_by_reduction:
517 gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)((void)(!((stmt_vinfo)->def_type != vect_reduction_def) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 517, __FUNCTION__), 0 : 0))
;
518 relevant = vect_used_by_reduction;
519 break;
520
521 case vect_used_in_outer:
522 gcc_assert (STMT_VINFO_DEF_TYPE (stmt_vinfo) != vect_reduction_def)((void)(!((stmt_vinfo)->def_type != vect_reduction_def) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 522, __FUNCTION__), 0 : 0))
;
523 relevant = vect_used_in_scope;
524 break;
525
526 case vect_used_in_scope:
527 break;
528
529 default:
530 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 530, __FUNCTION__))
;
531 }
532 }
533
534 /* case 3b: inner-loop stmt defining an outer-loop stmt:
535 outer-loop-header-bb:
536 ...
537 inner-loop:
538 d = dstmt_vinfo
539 outer-loop-tail-bb (or outer-loop-exit-bb in double reduction):
540 stmt # use (d) */
541 else if (flow_loop_nested_p (bb->loop_father, def_bb->loop_father))
542 {
543 if (dump_enabled_p ())
544 dump_printf_loc (MSG_NOTE, vect_location,
545 "inner-loop def-stmt defining outer-loop stmt.\n");
546
547 switch (relevant)
548 {
549 case vect_unused_in_scope:
550 relevant = (STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_reduction_def
551 || STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_double_reduction_def) ?
552 vect_used_in_outer_by_reduction : vect_unused_in_scope;
553 break;
554
555 case vect_used_by_reduction:
556 case vect_used_only_live:
557 relevant = vect_used_in_outer_by_reduction;
558 break;
559
560 case vect_used_in_scope:
561 relevant = vect_used_in_outer;
562 break;
563
564 default:
565 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 565, __FUNCTION__))
;
566 }
567 }
568 /* We are also not interested in uses on loop PHI backedges that are
569 inductions. Otherwise we'll needlessly vectorize the IV increment
570 and cause hybrid SLP for SLP inductions. Unless the PHI is live
571 of course. */
572 else if (gimple_code (stmt_vinfo->stmt) == GIMPLE_PHI
573 && STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type == vect_induction_def
574 && ! STMT_VINFO_LIVE_P (stmt_vinfo)(stmt_vinfo)->live
575 && (PHI_ARG_DEF_FROM_EDGE (stmt_vinfo->stmt,gimple_phi_arg_def (((stmt_vinfo->stmt)), ((loop_latch_edge
(bb->loop_father))->dest_idx))
576 loop_latch_edge (bb->loop_father))gimple_phi_arg_def (((stmt_vinfo->stmt)), ((loop_latch_edge
(bb->loop_father))->dest_idx))
577 == use))
578 {
579 if (dump_enabled_p ())
580 dump_printf_loc (MSG_NOTE, vect_location,
581 "induction value on backedge.\n");
582 return opt_result::success ();
583 }
584
585
586 vect_mark_relevant (worklist, dstmt_vinfo, relevant, false);
587 return opt_result::success ();
588}
589
590
591/* Function vect_mark_stmts_to_be_vectorized.
592
593 Not all stmts in the loop need to be vectorized. For example:
594
595 for i...
596 for j...
597 1. T0 = i + j
598 2. T1 = a[T0]
599
600 3. j = j + 1
601
602 Stmt 1 and 3 do not need to be vectorized, because loop control and
603 addressing of vectorized data-refs are handled differently.
604
605 This pass detects such stmts. */
606
607opt_result
608vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo, bool *fatal)
609{
610 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
611 basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo)(loop_vinfo)->bbs;
612 unsigned int nbbs = loop->num_nodes;
613 gimple_stmt_iterator si;
614 unsigned int i;
615 basic_block bb;
616 bool live_p;
617 enum vect_relevant relevant;
618
619 DUMP_VECT_SCOPE ("vect_mark_stmts_to_be_vectorized")auto_dump_scope scope ("vect_mark_stmts_to_be_vectorized", vect_location
)
;
620
621 auto_vec<stmt_vec_info, 64> worklist;
622
623 /* 1. Init worklist. */
624 for (i = 0; i < nbbs; i++)
625 {
626 bb = bbs[i];
627 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
628 {
629 stmt_vec_info phi_info = loop_vinfo->lookup_stmt (gsi_stmt (si));
630 if (dump_enabled_p ())
631 dump_printf_loc (MSG_NOTE, vect_location, "init: phi relevant? %G",
632 phi_info->stmt);
633
634 if (vect_stmt_relevant_p (phi_info, loop_vinfo, &relevant, &live_p))
635 vect_mark_relevant (&worklist, phi_info, relevant, live_p);
636 }
637 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
638 {
639 if (is_gimple_debug (gsi_stmt (si)))
640 continue;
641 stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (gsi_stmt (si));
642 if (dump_enabled_p ())
643 dump_printf_loc (MSG_NOTE, vect_location,
644 "init: stmt relevant? %G", stmt_info->stmt);
645
646 if (vect_stmt_relevant_p (stmt_info, loop_vinfo, &relevant, &live_p))
647 vect_mark_relevant (&worklist, stmt_info, relevant, live_p);
648 }
649 }
650
651 /* 2. Process_worklist */
652 while (worklist.length () > 0)
653 {
654 use_operand_p use_p;
655 ssa_op_iter iter;
656
657 stmt_vec_info stmt_vinfo = worklist.pop ();
658 if (dump_enabled_p ())
659 dump_printf_loc (MSG_NOTE, vect_location,
660 "worklist: examine stmt: %G", stmt_vinfo->stmt);
661
662 /* Examine the USEs of STMT. For each USE, mark the stmt that defines it
663 (DEF_STMT) as relevant/irrelevant according to the relevance property
664 of STMT. */
665 relevant = STMT_VINFO_RELEVANT (stmt_vinfo)(stmt_vinfo)->relevant;
666
667 /* Generally, the relevance property of STMT (in STMT_VINFO_RELEVANT) is
668 propagated as is to the DEF_STMTs of its USEs.
669
670 One exception is when STMT has been identified as defining a reduction
671 variable; in this case we set the relevance to vect_used_by_reduction.
672 This is because we distinguish between two kinds of relevant stmts -
673 those that are used by a reduction computation, and those that are
674 (also) used by a regular computation. This allows us later on to
675 identify stmts that are used solely by a reduction, and therefore the
676 order of the results that they produce does not have to be kept. */
677
678 switch (STMT_VINFO_DEF_TYPE (stmt_vinfo)(stmt_vinfo)->def_type)
679 {
680 case vect_reduction_def:
681 gcc_assert (relevant != vect_unused_in_scope)((void)(!(relevant != vect_unused_in_scope) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 681, __FUNCTION__), 0 : 0))
;
682 if (relevant != vect_unused_in_scope
683 && relevant != vect_used_in_scope
684 && relevant != vect_used_by_reduction
685 && relevant != vect_used_only_live)
686 return opt_result::failure_at
687 (stmt_vinfo->stmt, "unsupported use of reduction.\n");
688 break;
689
690 case vect_nested_cycle:
691 if (relevant != vect_unused_in_scope
692 && relevant != vect_used_in_outer_by_reduction
693 && relevant != vect_used_in_outer)
694 return opt_result::failure_at
695 (stmt_vinfo->stmt, "unsupported use of nested cycle.\n");
696 break;
697
698 case vect_double_reduction_def:
699 if (relevant != vect_unused_in_scope
700 && relevant != vect_used_by_reduction
701 && relevant != vect_used_only_live)
702 return opt_result::failure_at
703 (stmt_vinfo->stmt, "unsupported use of double reduction.\n");
704 break;
705
706 default:
707 break;
708 }
709
710 if (is_pattern_stmt_p (stmt_vinfo))
711 {
712 /* Pattern statements are not inserted into the code, so
713 FOR_EACH_PHI_OR_STMT_USE optimizes their operands out, and we
714 have to scan the RHS or function arguments instead. */
715 if (gassign *assign = dyn_cast <gassign *> (stmt_vinfo->stmt))
716 {
717 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
718 tree op = gimple_assign_rhs1 (assign);
719
720 i = 1;
721 if (rhs_code == COND_EXPR && COMPARISON_CLASS_P (op)(tree_code_type[(int) (((enum tree_code) (op)->base.code))
] == tcc_comparison)
)
722 {
723 opt_result res
724 = process_use (stmt_vinfo, TREE_OPERAND (op, 0)(*((const_cast<tree*> (tree_operand_check ((op), (0), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 724, __FUNCTION__)))))
,
725 loop_vinfo, relevant, &worklist, false);
726 if (!res)
727 return res;
728 res = process_use (stmt_vinfo, TREE_OPERAND (op, 1)(*((const_cast<tree*> (tree_operand_check ((op), (1), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 728, __FUNCTION__)))))
,
729 loop_vinfo, relevant, &worklist, false);
730 if (!res)
731 return res;
732 i = 2;
733 }
734 for (; i < gimple_num_ops (assign); i++)
735 {
736 op = gimple_op (assign, i);
737 if (TREE_CODE (op)((enum tree_code) (op)->base.code) == SSA_NAME)
738 {
739 opt_result res
740 = process_use (stmt_vinfo, op, loop_vinfo, relevant,
741 &worklist, false);
742 if (!res)
743 return res;
744 }
745 }
746 }
747 else if (gcall *call = dyn_cast <gcall *> (stmt_vinfo->stmt))
748 {
749 for (i = 0; i < gimple_call_num_args (call); i++)
750 {
751 tree arg = gimple_call_arg (call, i);
752 opt_result res
753 = process_use (stmt_vinfo, arg, loop_vinfo, relevant,
754 &worklist, false);
755 if (!res)
756 return res;
757 }
758 }
759 }
760 else
761 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt_vinfo->stmt, iter, SSA_OP_USE)for ((use_p) = (gimple_code (stmt_vinfo->stmt) == GIMPLE_PHI
? op_iter_init_phiuse (&(iter), as_a <gphi *> (stmt_vinfo
->stmt), 0x01) : op_iter_init_use (&(iter), stmt_vinfo
->stmt, 0x01)); !op_iter_done (&(iter)); (use_p) = op_iter_next_use
(&(iter)))
762 {
763 tree op = USE_FROM_PTR (use_p)get_use_from_ptr (use_p);
764 opt_result res
765 = process_use (stmt_vinfo, op, loop_vinfo, relevant,
766 &worklist, false);
767 if (!res)
768 return res;
769 }
770
771 if (STMT_VINFO_GATHER_SCATTER_P (stmt_vinfo)(stmt_vinfo)->gather_scatter_p)
772 {
773 gather_scatter_info gs_info;
774 if (!vect_check_gather_scatter (stmt_vinfo, loop_vinfo, &gs_info))
775 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 775, __FUNCTION__))
;
776 opt_result res
777 = process_use (stmt_vinfo, gs_info.offset, loop_vinfo, relevant,
778 &worklist, true);
779 if (!res)
780 {
781 if (fatal)
782 *fatal = false;
783 return res;
784 }
785 }
786 } /* while worklist */
787
788 return opt_result::success ();
789}
790
791/* Function vect_model_simple_cost.
792
793 Models cost for simple operations, i.e. those that only emit ncopies of a
794 single op. Right now, this does not account for multiple insns that could
795 be generated for the single vector op. We will handle that shortly. */
796
797static void
798vect_model_simple_cost (vec_info *,
799 stmt_vec_info stmt_info, int ncopies,
800 enum vect_def_type *dt,
801 int ndts,
802 slp_tree node,
803 stmt_vector_for_cost *cost_vec,
804 vect_cost_for_stmt kind = vector_stmt)
805{
806 int inside_cost = 0, prologue_cost = 0;
807
808 gcc_assert (cost_vec != NULL)((void)(!(cost_vec != nullptr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 808, __FUNCTION__), 0 : 0))
;
809
810 /* ??? Somehow we need to fix this at the callers. */
811 if (node)
812 ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (node)(node)->vec_stmts_size;
813
814 if (!node)
815 /* Cost the "broadcast" of a scalar operand in to a vector operand.
816 Use scalar_to_vec to cost the broadcast, as elsewhere in the vector
817 cost model. */
818 for (int i = 0; i < ndts; i++)
819 if (dt[i] == vect_constant_def || dt[i] == vect_external_def)
820 prologue_cost += record_stmt_cost (cost_vec, 1, scalar_to_vec,
821 stmt_info, 0, vect_prologue);
822
823 /* Pass the inside-of-loop statements to the target-specific cost model. */
824 inside_cost += record_stmt_cost (cost_vec, ncopies, kind,
825 stmt_info, 0, vect_body);
826
827 if (dump_enabled_p ())
828 dump_printf_loc (MSG_NOTE, vect_location,
829 "vect_model_simple_cost: inside_cost = %d, "
830 "prologue_cost = %d .\n", inside_cost, prologue_cost);
831}
832
833
834/* Model cost for type demotion and promotion operations. PWR is
835 normally zero for single-step promotions and demotions. It will be
836 one if two-step promotion/demotion is required, and so on. NCOPIES
837 is the number of vector results (and thus number of instructions)
838 for the narrowest end of the operation chain. Each additional
839 step doubles the number of instructions required. */
840
841static void
842vect_model_promotion_demotion_cost (stmt_vec_info stmt_info,
843 enum vect_def_type *dt,
844 unsigned int ncopies, int pwr,
845 stmt_vector_for_cost *cost_vec)
846{
847 int i;
848 int inside_cost = 0, prologue_cost = 0;
849
850 for (i = 0; i < pwr + 1; i++)
851 {
852 inside_cost += record_stmt_cost (cost_vec, ncopies, vec_promote_demote,
853 stmt_info, 0, vect_body);
854 ncopies *= 2;
855 }
856
857 /* FORNOW: Assuming maximum 2 args per stmts. */
858 for (i = 0; i < 2; i++)
859 if (dt[i] == vect_constant_def || dt[i] == vect_external_def)
860 prologue_cost += record_stmt_cost (cost_vec, 1, vector_stmt,
861 stmt_info, 0, vect_prologue);
862
863 if (dump_enabled_p ())
864 dump_printf_loc (MSG_NOTE, vect_location,
865 "vect_model_promotion_demotion_cost: inside_cost = %d, "
866 "prologue_cost = %d .\n", inside_cost, prologue_cost);
867}
868
869/* Returns true if the current function returns DECL. */
870
871static bool
872cfun_returns (tree decl)
873{
874 edge_iterator ei;
875 edge e;
876 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)for ((ei) = ei_start_1 (&(((((cfun + 0))->cfg->x_exit_block_ptr
)->preds))); ei_cond ((ei), &(e)); ei_next (&(ei))
)
877 {
878 greturn *ret = safe_dyn_cast <greturn *> (last_stmt (e->src));
879 if (!ret)
880 continue;
881 if (gimple_return_retval (ret) == decl)
882 return true;
883 /* We often end up with an aggregate copy to the result decl,
884 handle that case as well. First skip intermediate clobbers
885 though. */
886 gimple *def = ret;
887 do
888 {
889 def = SSA_NAME_DEF_STMT (gimple_vuse (def))(tree_check ((gimple_vuse (def)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 889, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
890 }
891 while (gimple_clobber_p (def));
892 if (is_a <gassign *> (def)
893 && gimple_assign_lhs (def) == gimple_return_retval (ret)
894 && gimple_assign_rhs1 (def) == decl)
895 return true;
896 }
897 return false;
898}
899
900/* Function vect_model_store_cost
901
902 Models cost for stores. In the case of grouped accesses, one access
903 has the overhead of the grouped access attributed to it. */
904
905static void
906vect_model_store_cost (vec_info *vinfo, stmt_vec_info stmt_info, int ncopies,
907 vect_memory_access_type memory_access_type,
908 vec_load_store_type vls_type, slp_tree slp_node,
909 stmt_vector_for_cost *cost_vec)
910{
911 unsigned int inside_cost = 0, prologue_cost = 0;
912 stmt_vec_info first_stmt_info = stmt_info;
913 bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info)((stmt_info)->dr_aux.dr && (((void)(!((stmt_info)->
dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 913, __FUNCTION__), 0 : 0)), (stmt_info)->first_element)
)
;
914
915 /* ??? Somehow we need to fix this at the callers. */
916 if (slp_node)
917 ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size;
918
919 if (vls_type == VLS_STORE_INVARIANT)
920 {
921 if (!slp_node)
922 prologue_cost += record_stmt_cost (cost_vec, 1, scalar_to_vec,
923 stmt_info, 0, vect_prologue);
924 }
925
926 /* Grouped stores update all elements in the group at once,
927 so we want the DR for the first statement. */
928 if (!slp_node && grouped_access_p)
929 first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 929, __FUNCTION__), 0 : 0)), (stmt_info)->first_element)
;
930
931 /* True if we should include any once-per-group costs as well as
932 the cost of the statement itself. For SLP we only get called
933 once per group anyhow. */
934 bool first_stmt_p = (first_stmt_info == stmt_info);
935
936 /* We assume that the cost of a single store-lanes instruction is
937 equivalent to the cost of DR_GROUP_SIZE separate stores. If a grouped
938 access is instead being provided by a permute-and-store operation,
939 include the cost of the permutes. */
940 if (first_stmt_p
941 && memory_access_type == VMAT_CONTIGUOUS_PERMUTE)
942 {
943 /* Uses a high and low interleave or shuffle operations for each
944 needed permute. */
945 int group_size = DR_GROUP_SIZE (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 945, __FUNCTION__), 0 : 0)), (first_stmt_info)->size)
;
946 int nstmts = ncopies * ceil_log2 (group_size) * group_size;
947 inside_cost = record_stmt_cost (cost_vec, nstmts, vec_perm,
948 stmt_info, 0, vect_body);
949
950 if (dump_enabled_p ())
951 dump_printf_loc (MSG_NOTE, vect_location,
952 "vect_model_store_cost: strided group_size = %d .\n",
953 group_size);
954 }
955
956 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
957 /* Costs of the stores. */
958 if (memory_access_type == VMAT_ELEMENTWISE
959 || memory_access_type == VMAT_GATHER_SCATTER)
960 {
961 /* N scalar stores plus extracting the elements. */
962 unsigned int assumed_nunits = vect_nunits_for_cost (vectype);
963 inside_cost += record_stmt_cost (cost_vec,
964 ncopies * assumed_nunits,
965 scalar_store, stmt_info, 0, vect_body);
966 }
967 else
968 vect_get_store_cost (vinfo, stmt_info, ncopies, &inside_cost, cost_vec);
969
970 if (memory_access_type == VMAT_ELEMENTWISE
971 || memory_access_type == VMAT_STRIDED_SLP)
972 {
973 /* N scalar stores plus extracting the elements. */
974 unsigned int assumed_nunits = vect_nunits_for_cost (vectype);
975 inside_cost += record_stmt_cost (cost_vec,
976 ncopies * assumed_nunits,
977 vec_to_scalar, stmt_info, 0, vect_body);
978 }
979
980 /* When vectorizing a store into the function result assign
981 a penalty if the function returns in a multi-register location.
982 In this case we assume we'll end up with having to spill the
983 vector result and do piecewise loads as a conservative estimate. */
984 tree base = get_base_address (STMT_VINFO_DATA_REF (stmt_info)((stmt_info)->dr_aux.dr + 0)->ref);
985 if (base
986 && (TREE_CODE (base)((enum tree_code) (base)->base.code) == RESULT_DECL
987 || (DECL_P (base)(tree_code_type[(int) (((enum tree_code) (base)->base.code
))] == tcc_declaration)
&& cfun_returns (base)))
988 && !aggregate_value_p (base, cfun(cfun + 0)->decl))
989 {
990 rtx reg = hard_function_value (TREE_TYPE (base)((contains_struct_check ((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 990, __FUNCTION__))->typed.type)
, cfun(cfun + 0)->decl, 0, 1);
991 /* ??? Handle PARALLEL in some way. */
992 if (REG_P (reg)(((enum rtx_code) (reg)->code) == REG))
993 {
994 int nregs = hard_regno_nregs (REGNO (reg)(rhs_regno(reg)), GET_MODE (reg)((machine_mode) (reg)->mode));
995 /* Assume that a single reg-reg move is possible and cheap,
996 do not account for vector to gp register move cost. */
997 if (nregs > 1)
998 {
999 /* Spill. */
1000 prologue_cost += record_stmt_cost (cost_vec, ncopies,
1001 vector_store,
1002 stmt_info, 0, vect_epilogue);
1003 /* Loads. */
1004 prologue_cost += record_stmt_cost (cost_vec, ncopies * nregs,
1005 scalar_load,
1006 stmt_info, 0, vect_epilogue);
1007 }
1008 }
1009 }
1010
1011 if (dump_enabled_p ())
1012 dump_printf_loc (MSG_NOTE, vect_location,
1013 "vect_model_store_cost: inside_cost = %d, "
1014 "prologue_cost = %d .\n", inside_cost, prologue_cost);
1015}
1016
1017
1018/* Calculate cost of DR's memory access. */
1019void
1020vect_get_store_cost (vec_info *vinfo, stmt_vec_info stmt_info, int ncopies,
1021 unsigned int *inside_cost,
1022 stmt_vector_for_cost *body_cost_vec)
1023{
1024 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1024, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
;
1025 int alignment_support_scheme
1026 = vect_supportable_dr_alignment (vinfo, dr_info, false);
1027
1028 switch (alignment_support_scheme)
1029 {
1030 case dr_aligned:
1031 {
1032 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1033 vector_store, stmt_info, 0,
1034 vect_body);
1035
1036 if (dump_enabled_p ())
1037 dump_printf_loc (MSG_NOTE, vect_location,
1038 "vect_model_store_cost: aligned.\n");
1039 break;
1040 }
1041
1042 case dr_unaligned_supported:
1043 {
1044 /* Here, we assign an additional cost for the unaligned store. */
1045 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1046 unaligned_store, stmt_info,
1047 DR_MISALIGNMENT (dr_info)dr_misalignment (dr_info),
1048 vect_body);
1049 if (dump_enabled_p ())
1050 dump_printf_loc (MSG_NOTE, vect_location,
1051 "vect_model_store_cost: unaligned supported by "
1052 "hardware.\n");
1053 break;
1054 }
1055
1056 case dr_unaligned_unsupported:
1057 {
1058 *inside_cost = VECT_MAX_COST1000;
1059
1060 if (dump_enabled_p ())
1061 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1062 "vect_model_store_cost: unsupported access.\n");
1063 break;
1064 }
1065
1066 default:
1067 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1067, __FUNCTION__))
;
1068 }
1069}
1070
1071
1072/* Function vect_model_load_cost
1073
1074 Models cost for loads. In the case of grouped accesses, one access has
1075 the overhead of the grouped access attributed to it. Since unaligned
1076 accesses are supported for loads, we also account for the costs of the
1077 access scheme chosen. */
1078
1079static void
1080vect_model_load_cost (vec_info *vinfo,
1081 stmt_vec_info stmt_info, unsigned ncopies, poly_uint64 vf,
1082 vect_memory_access_type memory_access_type,
1083 slp_tree slp_node,
1084 stmt_vector_for_cost *cost_vec)
1085{
1086 unsigned int inside_cost = 0, prologue_cost = 0;
1087 bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info)((stmt_info)->dr_aux.dr && (((void)(!((stmt_info)->
dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1087, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
))
;
1088
1089 gcc_assert (cost_vec)((void)(!(cost_vec) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1089, __FUNCTION__), 0 : 0))
;
1090
1091 /* ??? Somehow we need to fix this at the callers. */
1092 if (slp_node)
1093 ncopies = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size;
1094
1095 if (slp_node && SLP_TREE_LOAD_PERMUTATION (slp_node)(slp_node)->load_permutation.exists ())
1096 {
1097 /* If the load is permuted then the alignment is determined by
1098 the first group element not by the first scalar stmt DR. */
1099 stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1099, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
;
1100 /* Record the cost for the permutation. */
1101 unsigned n_perms, n_loads;
1102 vect_transform_slp_perm_load (vinfo, slp_node, vNULL, NULLnullptr,
1103 vf, true, &n_perms, &n_loads);
1104 inside_cost += record_stmt_cost (cost_vec, n_perms, vec_perm,
1105 first_stmt_info, 0, vect_body);
1106
1107 /* And adjust the number of loads performed. This handles
1108 redundancies as well as loads that are later dead. */
1109 ncopies = n_loads;
1110 }
1111
1112 /* Grouped loads read all elements in the group at once,
1113 so we want the DR for the first statement. */
1114 stmt_vec_info first_stmt_info = stmt_info;
1115 if (!slp_node && grouped_access_p)
1116 first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1116, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
;
1117
1118 /* True if we should include any once-per-group costs as well as
1119 the cost of the statement itself. For SLP we only get called
1120 once per group anyhow. */
1121 bool first_stmt_p = (first_stmt_info == stmt_info);
1122
1123 /* An IFN_LOAD_LANES will load all its vector results, regardless of which
1124 ones we actually need. Account for the cost of unused results. */
1125 if (first_stmt_p && !slp_node && memory_access_type == VMAT_LOAD_STORE_LANES)
1126 {
1127 unsigned int gaps = DR_GROUP_SIZE (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1127, __FUNCTION__), 0 : 0)), (first_stmt_info)->size)
;
1128 stmt_vec_info next_stmt_info = first_stmt_info;
1129 do
1130 {
1131 gaps -= 1;
1132 next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info)(((void)(!((next_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1132, __FUNCTION__), 0 : 0)), (next_stmt_info)->next_element
)
;
1133 }
1134 while (next_stmt_info);
1135 if (gaps)
1136 {
1137 if (dump_enabled_p ())
1138 dump_printf_loc (MSG_NOTE, vect_location,
1139 "vect_model_load_cost: %d unused vectors.\n",
1140 gaps);
1141 vect_get_load_cost (vinfo, stmt_info, ncopies * gaps, false,
1142 &inside_cost, &prologue_cost,
1143 cost_vec, cost_vec, true);
1144 }
1145 }
1146
1147 /* We assume that the cost of a single load-lanes instruction is
1148 equivalent to the cost of DR_GROUP_SIZE separate loads. If a grouped
1149 access is instead being provided by a load-and-permute operation,
1150 include the cost of the permutes. */
1151 if (first_stmt_p
1152 && memory_access_type == VMAT_CONTIGUOUS_PERMUTE)
1153 {
1154 /* Uses an even and odd extract operations or shuffle operations
1155 for each needed permute. */
1156 int group_size = DR_GROUP_SIZE (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1156, __FUNCTION__), 0 : 0)), (first_stmt_info)->size)
;
1157 int nstmts = ncopies * ceil_log2 (group_size) * group_size;
1158 inside_cost += record_stmt_cost (cost_vec, nstmts, vec_perm,
1159 stmt_info, 0, vect_body);
1160
1161 if (dump_enabled_p ())
1162 dump_printf_loc (MSG_NOTE, vect_location,
1163 "vect_model_load_cost: strided group_size = %d .\n",
1164 group_size);
1165 }
1166
1167 /* The loads themselves. */
1168 if (memory_access_type == VMAT_ELEMENTWISE
1169 || memory_access_type == VMAT_GATHER_SCATTER)
1170 {
1171 /* N scalar loads plus gathering them into a vector. */
1172 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
1173 unsigned int assumed_nunits = vect_nunits_for_cost (vectype);
1174 inside_cost += record_stmt_cost (cost_vec,
1175 ncopies * assumed_nunits,
1176 scalar_load, stmt_info, 0, vect_body);
1177 }
1178 else
1179 vect_get_load_cost (vinfo, stmt_info, ncopies, first_stmt_p,
1180 &inside_cost, &prologue_cost,
1181 cost_vec, cost_vec, true);
1182 if (memory_access_type == VMAT_ELEMENTWISE
1183 || memory_access_type == VMAT_STRIDED_SLP)
1184 inside_cost += record_stmt_cost (cost_vec, ncopies, vec_construct,
1185 stmt_info, 0, vect_body);
1186
1187 if (dump_enabled_p ())
1188 dump_printf_loc (MSG_NOTE, vect_location,
1189 "vect_model_load_cost: inside_cost = %d, "
1190 "prologue_cost = %d .\n", inside_cost, prologue_cost);
1191}
1192
1193
1194/* Calculate cost of DR's memory access. */
1195void
1196vect_get_load_cost (vec_info *vinfo, stmt_vec_info stmt_info, int ncopies,
1197 bool add_realign_cost, unsigned int *inside_cost,
1198 unsigned int *prologue_cost,
1199 stmt_vector_for_cost *prologue_cost_vec,
1200 stmt_vector_for_cost *body_cost_vec,
1201 bool record_prologue_costs)
1202{
1203 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1203, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
;
1204 int alignment_support_scheme
1205 = vect_supportable_dr_alignment (vinfo, dr_info, false);
1206
1207 switch (alignment_support_scheme)
1208 {
1209 case dr_aligned:
1210 {
1211 *inside_cost += record_stmt_cost (body_cost_vec, ncopies, vector_load,
1212 stmt_info, 0, vect_body);
1213
1214 if (dump_enabled_p ())
1215 dump_printf_loc (MSG_NOTE, vect_location,
1216 "vect_model_load_cost: aligned.\n");
1217
1218 break;
1219 }
1220 case dr_unaligned_supported:
1221 {
1222 /* Here, we assign an additional cost for the unaligned load. */
1223 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1224 unaligned_load, stmt_info,
1225 DR_MISALIGNMENT (dr_info)dr_misalignment (dr_info),
1226 vect_body);
1227
1228 if (dump_enabled_p ())
1229 dump_printf_loc (MSG_NOTE, vect_location,
1230 "vect_model_load_cost: unaligned supported by "
1231 "hardware.\n");
1232
1233 break;
1234 }
1235 case dr_explicit_realign:
1236 {
1237 *inside_cost += record_stmt_cost (body_cost_vec, ncopies * 2,
1238 vector_load, stmt_info, 0, vect_body);
1239 *inside_cost += record_stmt_cost (body_cost_vec, ncopies,
1240 vec_perm, stmt_info, 0, vect_body);
1241
1242 /* FIXME: If the misalignment remains fixed across the iterations of
1243 the containing loop, the following cost should be added to the
1244 prologue costs. */
1245 if (targetm.vectorize.builtin_mask_for_load)
1246 *inside_cost += record_stmt_cost (body_cost_vec, 1, vector_stmt,
1247 stmt_info, 0, vect_body);
1248
1249 if (dump_enabled_p ())
1250 dump_printf_loc (MSG_NOTE, vect_location,
1251 "vect_model_load_cost: explicit realign\n");
1252
1253 break;
1254 }
1255 case dr_explicit_realign_optimized:
1256 {
1257 if (dump_enabled_p ())
1258 dump_printf_loc (MSG_NOTE, vect_location,
1259 "vect_model_load_cost: unaligned software "
1260 "pipelined.\n");
1261
1262 /* Unaligned software pipeline has a load of an address, an initial
1263 load, and possibly a mask operation to "prime" the loop. However,
1264 if this is an access in a group of loads, which provide grouped
1265 access, then the above cost should only be considered for one
1266 access in the group. Inside the loop, there is a load op
1267 and a realignment op. */
1268
1269 if (add_realign_cost && record_prologue_costs)
1270 {
1271 *prologue_cost += record_stmt_cost (prologue_cost_vec, 2,
1272 vector_stmt, stmt_info,
1273 0, vect_prologue);
1274 if (targetm.vectorize.builtin_mask_for_load)
1275 *prologue_cost += record_stmt_cost (prologue_cost_vec, 1,
1276 vector_stmt, stmt_info,
1277 0, vect_prologue);
1278 }
1279
1280 *inside_cost += record_stmt_cost (body_cost_vec, ncopies, vector_load,
1281 stmt_info, 0, vect_body);
1282 *inside_cost += record_stmt_cost (body_cost_vec, ncopies, vec_perm,
1283 stmt_info, 0, vect_body);
1284
1285 if (dump_enabled_p ())
1286 dump_printf_loc (MSG_NOTE, vect_location,
1287 "vect_model_load_cost: explicit realign optimized"
1288 "\n");
1289
1290 break;
1291 }
1292
1293 case dr_unaligned_unsupported:
1294 {
1295 *inside_cost = VECT_MAX_COST1000;
1296
1297 if (dump_enabled_p ())
1298 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1299 "vect_model_load_cost: unsupported access.\n");
1300 break;
1301 }
1302
1303 default:
1304 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1304, __FUNCTION__))
;
1305 }
1306}
1307
1308/* Insert the new stmt NEW_STMT at *GSI or at the appropriate place in
1309 the loop preheader for the vectorized stmt STMT_VINFO. */
1310
1311static void
1312vect_init_vector_1 (vec_info *vinfo, stmt_vec_info stmt_vinfo, gimple *new_stmt,
1313 gimple_stmt_iterator *gsi)
1314{
1315 if (gsi)
1316 vect_finish_stmt_generation (vinfo, stmt_vinfo, new_stmt, gsi);
1317 else
1318 vinfo->insert_on_entry (stmt_vinfo, new_stmt);
1319
1320 if (dump_enabled_p ())
1321 dump_printf_loc (MSG_NOTE, vect_location,
1322 "created new init_stmt: %G", new_stmt);
1323}
1324
1325/* Function vect_init_vector.
1326
1327 Insert a new stmt (INIT_STMT) that initializes a new variable of type
1328 TYPE with the value VAL. If TYPE is a vector type and VAL does not have
1329 vector type a vector with all elements equal to VAL is created first.
1330 Place the initialization at GSI if it is not NULL. Otherwise, place the
1331 initialization at the loop preheader.
1332 Return the DEF of INIT_STMT.
1333 It will be used in the vectorization of STMT_INFO. */
1334
1335tree
1336vect_init_vector (vec_info *vinfo, stmt_vec_info stmt_info, tree val, tree type,
1337 gimple_stmt_iterator *gsi)
1338{
1339 gimple *init_stmt;
1340 tree new_temp;
1341
1342 /* We abuse this function to push sth to a SSA name with initial 'val'. */
1343 if (! useless_type_conversion_p (type, TREE_TYPE (val)((contains_struct_check ((val), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1343, __FUNCTION__))->typed.type)
))
1344 {
1345 gcc_assert (TREE_CODE (type) == VECTOR_TYPE)((void)(!(((enum tree_code) (type)->base.code) == VECTOR_TYPE
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1345, __FUNCTION__), 0 : 0))
;
1346 if (! types_compatible_p (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1346, __FUNCTION__))->typed.type)
, TREE_TYPE (val)((contains_struct_check ((val), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1346, __FUNCTION__))->typed.type)
))
1347 {
1348 /* Scalar boolean value should be transformed into
1349 all zeros or all ones value before building a vector. */
1350 if (VECTOR_BOOLEAN_TYPE_P (type)(((enum tree_code) (type)->base.code) == VECTOR_TYPE &&
((enum tree_code) (((contains_struct_check ((type), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1350, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
1351 {
1352 tree true_val = build_all_ones_cst (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1352, __FUNCTION__))->typed.type)
);
1353 tree false_val = build_zero_cst (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1353, __FUNCTION__))->typed.type)
);
1354
1355 if (CONSTANT_CLASS_P (val)(tree_code_type[(int) (((enum tree_code) (val)->base.code)
)] == tcc_constant)
)
1356 val = integer_zerop (val) ? false_val : true_val;
1357 else
1358 {
1359 new_temp = make_ssa_name (TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1359, __FUNCTION__))->typed.type)
);
1360 init_stmt = gimple_build_assign (new_temp, COND_EXPR,
1361 val, true_val, false_val);
1362 vect_init_vector_1 (vinfo, stmt_info, init_stmt, gsi);
1363 val = new_temp;
1364 }
1365 }
1366 else
1367 {
1368 gimple_seq stmts = NULLnullptr;
1369 if (! INTEGRAL_TYPE_P (TREE_TYPE (val))(((enum tree_code) (((contains_struct_check ((val), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1369, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((val), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1369, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((val), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1369, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
)
1370 val = gimple_build (&stmts, VIEW_CONVERT_EXPR,
1371 TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1371, __FUNCTION__))->typed.type)
, val);
1372 else
1373 /* ??? Condition vectorization expects us to do
1374 promotion of invariant/external defs. */
1375 val = gimple_convert (&stmts, TREE_TYPE (type)((contains_struct_check ((type), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1375, __FUNCTION__))->typed.type)
, val);
1376 for (gimple_stmt_iterator gsi2 = gsi_start (stmts)gsi_start_1 (&(stmts));
1377 !gsi_end_p (gsi2); )
1378 {
1379 init_stmt = gsi_stmt (gsi2);
1380 gsi_remove (&gsi2, false);
1381 vect_init_vector_1 (vinfo, stmt_info, init_stmt, gsi);
1382 }
1383 }
1384 }
1385 val = build_vector_from_val (type, val);
1386 }
1387
1388 new_temp = vect_get_new_ssa_name (type, vect_simple_var, "cst_");
1389 init_stmt = gimple_build_assign (new_temp, val);
1390 vect_init_vector_1 (vinfo, stmt_info, init_stmt, gsi);
1391 return new_temp;
1392}
1393
1394
1395/* Function vect_get_vec_defs_for_operand.
1396
1397 OP is an operand in STMT_VINFO. This function returns a vector of
1398 NCOPIES defs that will be used in the vectorized stmts for STMT_VINFO.
1399
1400 In the case that OP is an SSA_NAME which is defined in the loop, then
1401 STMT_VINFO_VEC_STMTS of the defining stmt holds the relevant defs.
1402
1403 In case OP is an invariant or constant, a new stmt that creates a vector def
1404 needs to be introduced. VECTYPE may be used to specify a required type for
1405 vector invariant. */
1406
1407void
1408vect_get_vec_defs_for_operand (vec_info *vinfo, stmt_vec_info stmt_vinfo,
1409 unsigned ncopies,
1410 tree op, vec<tree> *vec_oprnds, tree vectype)
1411{
1412 gimple *def_stmt;
1413 enum vect_def_type dt;
1414 bool is_simple_use;
1415 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
1416
1417 if (dump_enabled_p ())
1418 dump_printf_loc (MSG_NOTE, vect_location,
1419 "vect_get_vec_defs_for_operand: %T\n", op);
1420
1421 stmt_vec_info def_stmt_info;
1422 is_simple_use = vect_is_simple_use (op, loop_vinfo, &dt,
1423 &def_stmt_info, &def_stmt);
1424 gcc_assert (is_simple_use)((void)(!(is_simple_use) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1424, __FUNCTION__), 0 : 0))
;
1425 if (def_stmt && dump_enabled_p ())
1426 dump_printf_loc (MSG_NOTE, vect_location, " def_stmt = %G", def_stmt);
1427
1428 vec_oprnds->create (ncopies);
1429 if (dt == vect_constant_def || dt == vect_external_def)
1430 {
1431 tree stmt_vectype = STMT_VINFO_VECTYPE (stmt_vinfo)(stmt_vinfo)->vectype;
1432 tree vector_type;
1433
1434 if (vectype)
1435 vector_type = vectype;
1436 else if (VECT_SCALAR_BOOLEAN_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1436, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1436, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1436, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
) && ((tree_class_check ((((contains_struct_check ((op
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1436, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1436, __FUNCTION__))->type_common.precision) == 1 &&
((tree_class_check ((((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1436, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1436, __FUNCTION__))->base.u.bits.unsigned_flag)))
1437 && VECTOR_BOOLEAN_TYPE_P (stmt_vectype)(((enum tree_code) (stmt_vectype)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((stmt_vectype
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1437, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
1438 vector_type = truth_type_for (stmt_vectype);
1439 else
1440 vector_type = get_vectype_for_scalar_type (loop_vinfo, TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1440, __FUNCTION__))->typed.type)
);
1441
1442 gcc_assert (vector_type)((void)(!(vector_type) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1442, __FUNCTION__), 0 : 0))
;
1443 tree vop = vect_init_vector (vinfo, stmt_vinfo, op, vector_type, NULLnullptr);
1444 while (ncopies--)
1445 vec_oprnds->quick_push (vop);
1446 }
1447 else
1448 {
1449 def_stmt_info = vect_stmt_to_vectorize (def_stmt_info);
1450 gcc_assert (STMT_VINFO_VEC_STMTS (def_stmt_info).length () == ncopies)((void)(!((def_stmt_info)->vec_stmts.length () == ncopies)
? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1450, __FUNCTION__), 0 : 0))
;
1451 for (unsigned i = 0; i < ncopies; ++i)
1452 vec_oprnds->quick_push (gimple_get_lhs
1453 (STMT_VINFO_VEC_STMTS (def_stmt_info)(def_stmt_info)->vec_stmts[i]));
1454 }
1455}
1456
1457
1458/* Get vectorized definitions for OP0 and OP1. */
1459
1460void
1461vect_get_vec_defs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node,
1462 unsigned ncopies,
1463 tree op0, vec<tree> *vec_oprnds0, tree vectype0,
1464 tree op1, vec<tree> *vec_oprnds1, tree vectype1,
1465 tree op2, vec<tree> *vec_oprnds2, tree vectype2,
1466 tree op3, vec<tree> *vec_oprnds3, tree vectype3)
1467{
1468 if (slp_node)
1469 {
1470 if (op0)
1471 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)(slp_node)->children[0], vec_oprnds0);
1472 if (op1)
1473 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)(slp_node)->children[1], vec_oprnds1);
1474 if (op2)
1475 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)(slp_node)->children[2], vec_oprnds2);
1476 if (op3)
1477 vect_get_slp_defs (SLP_TREE_CHILDREN (slp_node)(slp_node)->children[3], vec_oprnds3);
1478 }
1479 else
1480 {
1481 if (op0)
1482 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1483 op0, vec_oprnds0, vectype0);
1484 if (op1)
1485 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1486 op1, vec_oprnds1, vectype1);
1487 if (op2)
1488 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1489 op2, vec_oprnds2, vectype2);
1490 if (op3)
1491 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
1492 op3, vec_oprnds3, vectype3);
1493 }
1494}
1495
1496void
1497vect_get_vec_defs (vec_info *vinfo, stmt_vec_info stmt_info, slp_tree slp_node,
1498 unsigned ncopies,
1499 tree op0, vec<tree> *vec_oprnds0,
1500 tree op1, vec<tree> *vec_oprnds1,
1501 tree op2, vec<tree> *vec_oprnds2,
1502 tree op3, vec<tree> *vec_oprnds3)
1503{
1504 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
1505 op0, vec_oprnds0, NULL_TREE(tree) nullptr,
1506 op1, vec_oprnds1, NULL_TREE(tree) nullptr,
1507 op2, vec_oprnds2, NULL_TREE(tree) nullptr,
1508 op3, vec_oprnds3, NULL_TREE(tree) nullptr);
1509}
1510
1511/* Helper function called by vect_finish_replace_stmt and
1512 vect_finish_stmt_generation. Set the location of the new
1513 statement and create and return a stmt_vec_info for it. */
1514
1515static void
1516vect_finish_stmt_generation_1 (vec_info *,
1517 stmt_vec_info stmt_info, gimple *vec_stmt)
1518{
1519 if (dump_enabled_p ())
1520 dump_printf_loc (MSG_NOTE, vect_location, "add new stmt: %G", vec_stmt);
1521
1522 if (stmt_info)
1523 {
1524 gimple_set_location (vec_stmt, gimple_location (stmt_info->stmt));
1525
1526 /* While EH edges will generally prevent vectorization, stmt might
1527 e.g. be in a must-not-throw region. Ensure newly created stmts
1528 that could throw are part of the same region. */
1529 int lp_nr = lookup_stmt_eh_lp (stmt_info->stmt);
1530 if (lp_nr != 0 && stmt_could_throw_p (cfun(cfun + 0), vec_stmt))
1531 add_stmt_to_eh_lp (vec_stmt, lp_nr);
1532 }
1533 else
1534 gcc_assert (!stmt_could_throw_p (cfun, vec_stmt))((void)(!(!stmt_could_throw_p ((cfun + 0), vec_stmt)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1534, __FUNCTION__), 0 : 0))
;
1535}
1536
1537/* Replace the scalar statement STMT_INFO with a new vector statement VEC_STMT,
1538 which sets the same scalar result as STMT_INFO did. Create and return a
1539 stmt_vec_info for VEC_STMT. */
1540
1541void
1542vect_finish_replace_stmt (vec_info *vinfo,
1543 stmt_vec_info stmt_info, gimple *vec_stmt)
1544{
1545 gimple *scalar_stmt = vect_orig_stmt (stmt_info)->stmt;
1546 gcc_assert (gimple_get_lhs (scalar_stmt) == gimple_get_lhs (vec_stmt))((void)(!(gimple_get_lhs (scalar_stmt) == gimple_get_lhs (vec_stmt
)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1546, __FUNCTION__), 0 : 0))
;
1547
1548 gimple_stmt_iterator gsi = gsi_for_stmt (scalar_stmt);
1549 gsi_replace (&gsi, vec_stmt, true);
1550
1551 vect_finish_stmt_generation_1 (vinfo, stmt_info, vec_stmt);
1552}
1553
1554/* Add VEC_STMT to the vectorized implementation of STMT_INFO and insert it
1555 before *GSI. Create and return a stmt_vec_info for VEC_STMT. */
1556
1557void
1558vect_finish_stmt_generation (vec_info *vinfo,
1559 stmt_vec_info stmt_info, gimple *vec_stmt,
1560 gimple_stmt_iterator *gsi)
1561{
1562 gcc_assert (!stmt_info || gimple_code (stmt_info->stmt) != GIMPLE_LABEL)((void)(!(!stmt_info || gimple_code (stmt_info->stmt) != GIMPLE_LABEL
) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1562, __FUNCTION__), 0 : 0))
;
1563
1564 if (!gsi_end_p (*gsi)
1565 && gimple_has_mem_ops (vec_stmt))
1566 {
1567 gimple *at_stmt = gsi_stmt (*gsi);
1568 tree vuse = gimple_vuse (at_stmt);
1569 if (vuse && TREE_CODE (vuse)((enum tree_code) (vuse)->base.code) == SSA_NAME)
1570 {
1571 tree vdef = gimple_vdef (at_stmt);
1572 gimple_set_vuse (vec_stmt, gimple_vuse (at_stmt));
1573 gimple_set_modified (vec_stmt, true);
1574 /* If we have an SSA vuse and insert a store, update virtual
1575 SSA form to avoid triggering the renamer. Do so only
1576 if we can easily see all uses - which is what almost always
1577 happens with the way vectorized stmts are inserted. */
1578 if ((vdef && TREE_CODE (vdef)((enum tree_code) (vdef)->base.code) == SSA_NAME)
1579 && ((is_gimple_assign (vec_stmt)
1580 && !is_gimple_reg (gimple_assign_lhs (vec_stmt)))
1581 || (is_gimple_call (vec_stmt)
1582 && !(gimple_call_flags (vec_stmt)
1583 & (ECF_CONST(1 << 0)|ECF_PURE(1 << 1)|ECF_NOVOPS(1 << 9))))))
1584 {
1585 tree new_vdef = copy_ssa_name (vuse, vec_stmt);
1586 gimple_set_vdef (vec_stmt, new_vdef);
1587 SET_USE (gimple_vuse_op (at_stmt), new_vdef)set_ssa_use_from_ptr (gimple_vuse_op (at_stmt), new_vdef);
1588 }
1589 }
1590 }
1591 gsi_insert_before (gsi, vec_stmt, GSI_SAME_STMT);
1592 vect_finish_stmt_generation_1 (vinfo, stmt_info, vec_stmt);
1593}
1594
1595/* We want to vectorize a call to combined function CFN with function
1596 decl FNDECL, using VECTYPE_OUT as the type of the output and VECTYPE_IN
1597 as the types of all inputs. Check whether this is possible using
1598 an internal function, returning its code if so or IFN_LAST if not. */
1599
1600static internal_fn
1601vectorizable_internal_function (combined_fn cfn, tree fndecl,
1602 tree vectype_out, tree vectype_in)
1603{
1604 internal_fn ifn;
1605 if (internal_fn_p (cfn))
1606 ifn = as_internal_fn (cfn);
1607 else
1608 ifn = associated_internal_fn (fndecl);
1609 if (ifn != IFN_LAST && direct_internal_fn_p (ifn))
1610 {
1611 const direct_internal_fn_info &info = direct_internal_fn (ifn);
1612 if (info.vectorizable)
1613 {
1614 tree type0 = (info.type0 < 0 ? vectype_out : vectype_in);
1615 tree type1 = (info.type1 < 0 ? vectype_out : vectype_in);
1616 if (direct_internal_fn_supported_p (ifn, tree_pair (type0, type1),
1617 OPTIMIZE_FOR_SPEED))
1618 return ifn;
1619 }
1620 }
1621 return IFN_LAST;
1622}
1623
1624
1625static tree permute_vec_elements (vec_info *, tree, tree, tree, stmt_vec_info,
1626 gimple_stmt_iterator *);
1627
1628/* Check whether a load or store statement in the loop described by
1629 LOOP_VINFO is possible in a loop using partial vectors. This is
1630 testing whether the vectorizer pass has the appropriate support,
1631 as well as whether the target does.
1632
1633 VLS_TYPE says whether the statement is a load or store and VECTYPE
1634 is the type of the vector being loaded or stored. MEMORY_ACCESS_TYPE
1635 says how the load or store is going to be implemented and GROUP_SIZE
1636 is the number of load or store statements in the containing group.
1637 If the access is a gather load or scatter store, GS_INFO describes
1638 its arguments. If the load or store is conditional, SCALAR_MASK is the
1639 condition under which it occurs.
1640
1641 Clear LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P if a loop using partial
1642 vectors is not supported, otherwise record the required rgroup control
1643 types. */
1644
1645static void
1646check_load_store_for_partial_vectors (loop_vec_info loop_vinfo, tree vectype,
1647 vec_load_store_type vls_type,
1648 int group_size,
1649 vect_memory_access_type
1650 memory_access_type,
1651 gather_scatter_info *gs_info,
1652 tree scalar_mask)
1653{
1654 /* Invariant loads need no special support. */
1655 if (memory_access_type == VMAT_INVARIANT)
1656 return;
1657
1658 vec_loop_masks *masks = &LOOP_VINFO_MASKS (loop_vinfo)(loop_vinfo)->masks;
1659 machine_mode vecmode = TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1659, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
;
1660 bool is_load = (vls_type == VLS_LOAD);
1661 if (memory_access_type == VMAT_LOAD_STORE_LANES)
1662 {
1663 if (is_load
1664 ? !vect_load_lanes_supported (vectype, group_size, true)
1665 : !vect_store_lanes_supported (vectype, group_size, true))
1666 {
1667 if (dump_enabled_p ())
1668 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1669 "can't operate on partial vectors because"
1670 " the target doesn't have an appropriate"
1671 " load/store-lanes instruction.\n");
1672 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1673 return;
1674 }
1675 unsigned int ncopies = vect_get_num_copies (loop_vinfo, vectype);
1676 vect_record_loop_mask (loop_vinfo, masks, ncopies, vectype, scalar_mask);
1677 return;
1678 }
1679
1680 if (memory_access_type == VMAT_GATHER_SCATTER)
1681 {
1682 internal_fn ifn = (is_load
1683 ? IFN_MASK_GATHER_LOAD
1684 : IFN_MASK_SCATTER_STORE);
1685 if (!internal_gather_scatter_fn_supported_p (ifn, vectype,
1686 gs_info->memory_type,
1687 gs_info->offset_vectype,
1688 gs_info->scale))
1689 {
1690 if (dump_enabled_p ())
1691 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1692 "can't operate on partial vectors because"
1693 " the target doesn't have an appropriate"
1694 " gather load or scatter store instruction.\n");
1695 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1696 return;
1697 }
1698 unsigned int ncopies = vect_get_num_copies (loop_vinfo, vectype);
1699 vect_record_loop_mask (loop_vinfo, masks, ncopies, vectype, scalar_mask);
1700 return;
1701 }
1702
1703 if (memory_access_type != VMAT_CONTIGUOUS
1704 && memory_access_type != VMAT_CONTIGUOUS_PERMUTE)
1705 {
1706 /* Element X of the data must come from iteration i * VF + X of the
1707 scalar loop. We need more work to support other mappings. */
1708 if (dump_enabled_p ())
1709 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1710 "can't operate on partial vectors because an"
1711 " access isn't contiguous.\n");
1712 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1713 return;
1714 }
1715
1716 if (!VECTOR_MODE_P (vecmode)(((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_BOOL ||
((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_INT ||
((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_FLOAT
|| ((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_FRACT
|| ((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_UFRACT
|| ((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_ACCUM
|| ((enum mode_class) mode_class[vecmode]) == MODE_VECTOR_UACCUM
)
)
1717 {
1718 if (dump_enabled_p ())
1719 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1720 "can't operate on partial vectors when emulating"
1721 " vector operations.\n");
1722 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1723 return;
1724 }
1725
1726 /* We might load more scalars than we need for permuting SLP loads.
1727 We checked in get_group_load_store_type that the extra elements
1728 don't leak into a new vector. */
1729 auto get_valid_nvectors = [] (poly_uint64 size, poly_uint64 nunits)
1730 {
1731 unsigned int nvectors;
1732 if (can_div_away_from_zero_p (size, nunits, &nvectors))
1733 return nvectors;
1734 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1734, __FUNCTION__))
;
1735 };
1736
1737 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
1738 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo)(loop_vinfo)->vectorization_factor;
1739 machine_mode mask_mode;
1740 bool using_partial_vectors_p = false;
1741 if (targetm.vectorize.get_mask_mode (vecmode).exists (&mask_mode)
1742 && can_vec_mask_load_store_p (vecmode, mask_mode, is_load))
1743 {
1744 unsigned int nvectors = get_valid_nvectors (group_size * vf, nunits);
1745 vect_record_loop_mask (loop_vinfo, masks, nvectors, vectype, scalar_mask);
1746 using_partial_vectors_p = true;
1747 }
1748
1749 machine_mode vmode;
1750 if (get_len_load_store_mode (vecmode, is_load).exists (&vmode))
1751 {
1752 unsigned int nvectors = get_valid_nvectors (group_size * vf, nunits);
1753 vec_loop_lens *lens = &LOOP_VINFO_LENS (loop_vinfo)(loop_vinfo)->lens;
1754 unsigned factor = (vecmode == vmode) ? 1 : GET_MODE_UNIT_SIZE (vecmode)mode_to_unit_size (vecmode);
1755 vect_record_loop_len (loop_vinfo, lens, nvectors, vectype, factor);
1756 using_partial_vectors_p = true;
1757 }
1758
1759 if (!using_partial_vectors_p)
1760 {
1761 if (dump_enabled_p ())
1762 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1763 "can't operate on partial vectors because the"
1764 " target doesn't have the appropriate partial"
1765 " vectorization load or store.\n");
1766 LOOP_VINFO_CAN_USE_PARTIAL_VECTORS_P (loop_vinfo)(loop_vinfo)->can_use_partial_vectors_p = false;
1767 }
1768}
1769
1770/* Return the mask input to a masked load or store. VEC_MASK is the vectorized
1771 form of the scalar mask condition and LOOP_MASK, if nonnull, is the mask
1772 that needs to be applied to all loads and stores in a vectorized loop.
1773 Return VEC_MASK if LOOP_MASK is null, otherwise return VEC_MASK & LOOP_MASK.
1774
1775 MASK_TYPE is the type of both masks. If new statements are needed,
1776 insert them before GSI. */
1777
1778static tree
1779prepare_load_store_mask (tree mask_type, tree loop_mask, tree vec_mask,
1780 gimple_stmt_iterator *gsi)
1781{
1782 gcc_assert (useless_type_conversion_p (mask_type, TREE_TYPE (vec_mask)))((void)(!(useless_type_conversion_p (mask_type, ((contains_struct_check
((vec_mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1782, __FUNCTION__))->typed.type))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1782, __FUNCTION__), 0 : 0))
;
1783 if (!loop_mask)
1784 return vec_mask;
1785
1786 gcc_assert (TREE_TYPE (loop_mask) == mask_type)((void)(!(((contains_struct_check ((loop_mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1786, __FUNCTION__))->typed.type) == mask_type) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1786, __FUNCTION__), 0 : 0))
;
1787 tree and_res = make_temp_ssa_name (mask_type, NULLnullptr, "vec_mask_and");
1788 gimple *and_stmt = gimple_build_assign (and_res, BIT_AND_EXPR,
1789 vec_mask, loop_mask);
1790 gsi_insert_before (gsi, and_stmt, GSI_SAME_STMT);
1791 return and_res;
1792}
1793
1794/* Determine whether we can use a gather load or scatter store to vectorize
1795 strided load or store STMT_INFO by truncating the current offset to a
1796 smaller width. We need to be able to construct an offset vector:
1797
1798 { 0, X, X*2, X*3, ... }
1799
1800 without loss of precision, where X is STMT_INFO's DR_STEP.
1801
1802 Return true if this is possible, describing the gather load or scatter
1803 store in GS_INFO. MASKED_P is true if the load or store is conditional. */
1804
1805static bool
1806vect_truncate_gather_scatter_offset (stmt_vec_info stmt_info,
1807 loop_vec_info loop_vinfo, bool masked_p,
1808 gather_scatter_info *gs_info)
1809{
1810 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1810, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
;
1811 data_reference *dr = dr_info->dr;
1812 tree step = DR_STEP (dr)(dr)->innermost.step;
1813 if (TREE_CODE (step)((enum tree_code) (step)->base.code) != INTEGER_CST)
1814 {
1815 /* ??? Perhaps we could use range information here? */
1816 if (dump_enabled_p ())
1817 dump_printf_loc (MSG_NOTE, vect_location,
1818 "cannot truncate variable step.\n");
1819 return false;
1820 }
1821
1822 /* Get the number of bits in an element. */
1823 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
1824 scalar_mode element_mode = SCALAR_TYPE_MODE (TREE_TYPE (vectype))(as_a <scalar_mode> ((tree_class_check ((((contains_struct_check
((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1824, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1824, __FUNCTION__))->type_common.mode))
;
1825 unsigned int element_bits = GET_MODE_BITSIZE (element_mode);
1826
1827 /* Set COUNT to the upper limit on the number of elements - 1.
1828 Start with the maximum vectorization factor. */
1829 unsigned HOST_WIDE_INTlong count = vect_max_vf (loop_vinfo) - 1;
1830
1831 /* Try lowering COUNT to the number of scalar latch iterations. */
1832 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
1833 widest_int max_iters;
1834 if (max_loop_iterations (loop, &max_iters)
1835 && max_iters < count)
1836 count = max_iters.to_shwi ();
1837
1838 /* Try scales of 1 and the element size. */
1839 int scales[] = { 1, vect_get_scalar_dr_size (dr_info) };
1840 wi::overflow_type overflow = wi::OVF_NONE;
1841 for (int i = 0; i < 2; ++i)
1842 {
1843 int scale = scales[i];
1844 widest_int factor;
1845 if (!wi::multiple_of_p (wi::to_widest (step), scale, SIGNED, &factor))
1846 continue;
1847
1848 /* Determine the minimum precision of (COUNT - 1) * STEP / SCALE. */
1849 widest_int range = wi::mul (count, factor, SIGNED, &overflow);
1850 if (overflow)
1851 continue;
1852 signop sign = range >= 0 ? UNSIGNED : SIGNED;
1853 unsigned int min_offset_bits = wi::min_precision (range, sign);
1854
1855 /* Find the narrowest viable offset type. */
1856 unsigned int offset_bits = 1U << ceil_log2 (min_offset_bits);
1857 tree offset_type = build_nonstandard_integer_type (offset_bits,
1858 sign == UNSIGNED);
1859
1860 /* See whether the target supports the operation with an offset
1861 no narrower than OFFSET_TYPE. */
1862 tree memory_type = TREE_TYPE (DR_REF (dr))((contains_struct_check (((dr)->ref), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1862, __FUNCTION__))->typed.type)
;
1863 if (!vect_gather_scatter_fn_p (loop_vinfo, DR_IS_READ (dr)(dr)->is_read, masked_p,
1864 vectype, memory_type, offset_type, scale,
1865 &gs_info->ifn, &gs_info->offset_vectype))
1866 continue;
1867
1868 gs_info->decl = NULL_TREE(tree) nullptr;
1869 /* Logically the sum of DR_BASE_ADDRESS, DR_INIT and DR_OFFSET,
1870 but we don't need to store that here. */
1871 gs_info->base = NULL_TREE(tree) nullptr;
1872 gs_info->element_type = TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1872, __FUNCTION__))->typed.type)
;
1873 gs_info->offset = fold_convert (offset_type, step)fold_convert_loc (((location_t) 0), offset_type, step);
1874 gs_info->offset_dt = vect_constant_def;
1875 gs_info->scale = scale;
1876 gs_info->memory_type = memory_type;
1877 return true;
1878 }
1879
1880 if (overflow && dump_enabled_p ())
1881 dump_printf_loc (MSG_NOTE, vect_location,
1882 "truncating gather/scatter offset to %d bits"
1883 " might change its value.\n", element_bits);
1884
1885 return false;
1886}
1887
1888/* Return true if we can use gather/scatter internal functions to
1889 vectorize STMT_INFO, which is a grouped or strided load or store.
1890 MASKED_P is true if load or store is conditional. When returning
1891 true, fill in GS_INFO with the information required to perform the
1892 operation. */
1893
1894static bool
1895vect_use_strided_gather_scatters_p (stmt_vec_info stmt_info,
1896 loop_vec_info loop_vinfo, bool masked_p,
1897 gather_scatter_info *gs_info)
1898{
1899 if (!vect_check_gather_scatter (stmt_info, loop_vinfo, gs_info)
1900 || gs_info->decl)
1901 return vect_truncate_gather_scatter_offset (stmt_info, loop_vinfo,
1902 masked_p, gs_info);
1903
1904 tree old_offset_type = TREE_TYPE (gs_info->offset)((contains_struct_check ((gs_info->offset), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1904, __FUNCTION__))->typed.type)
;
1905 tree new_offset_type = TREE_TYPE (gs_info->offset_vectype)((contains_struct_check ((gs_info->offset_vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1905, __FUNCTION__))->typed.type)
;
1906
1907 gcc_assert (TYPE_PRECISION (new_offset_type)((void)(!(((tree_class_check ((new_offset_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1907, __FUNCTION__))->type_common.precision) >= ((tree_class_check
((old_offset_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1908, __FUNCTION__))->type_common.precision)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1908, __FUNCTION__), 0 : 0))
1908 >= TYPE_PRECISION (old_offset_type))((void)(!(((tree_class_check ((new_offset_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1907, __FUNCTION__))->type_common.precision) >= ((tree_class_check
((old_offset_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1908, __FUNCTION__))->type_common.precision)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1908, __FUNCTION__), 0 : 0))
;
1909 gs_info->offset = fold_convert (new_offset_type, gs_info->offset)fold_convert_loc (((location_t) 0), new_offset_type, gs_info->
offset)
;
1910
1911 if (dump_enabled_p ())
1912 dump_printf_loc (MSG_NOTE, vect_location,
1913 "using gather/scatter for strided/grouped access,"
1914 " scale = %d\n", gs_info->scale);
1915
1916 return true;
1917}
1918
1919/* STMT_INFO is a non-strided load or store, meaning that it accesses
1920 elements with a known constant step. Return -1 if that step
1921 is negative, 0 if it is zero, and 1 if it is greater than zero. */
1922
1923static int
1924compare_step_with_zero (vec_info *vinfo, stmt_vec_info stmt_info)
1925{
1926 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1926, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
;
1927 return tree_int_cst_compare (vect_dr_behavior (vinfo, dr_info)->step,
1928 size_zero_nodeglobal_trees[TI_SIZE_ZERO]);
1929}
1930
1931/* If the target supports a permute mask that reverses the elements in
1932 a vector of type VECTYPE, return that mask, otherwise return null. */
1933
1934static tree
1935perm_mask_for_reverse (tree vectype)
1936{
1937 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
1938
1939 /* The encoding has a single stepped pattern. */
1940 vec_perm_builder sel (nunits, 1, 3);
1941 for (int i = 0; i < 3; ++i)
1942 sel.quick_push (nunits - 1 - i);
1943
1944 vec_perm_indices indices (sel, 1, nunits);
1945 if (!can_vec_perm_const_p (TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1945, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
, indices))
1946 return NULL_TREE(tree) nullptr;
1947 return vect_gen_perm_mask_checked (vectype, indices);
1948}
1949
1950/* A subroutine of get_load_store_type, with a subset of the same
1951 arguments. Handle the case where STMT_INFO is a load or store that
1952 accesses consecutive elements with a negative step. */
1953
1954static vect_memory_access_type
1955get_negative_load_store_type (vec_info *vinfo,
1956 stmt_vec_info stmt_info, tree vectype,
1957 vec_load_store_type vls_type,
1958 unsigned int ncopies)
1959{
1960 dr_vec_info *dr_info = STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 1960, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
;
1961 dr_alignment_support alignment_support_scheme;
1962
1963 if (ncopies > 1)
1964 {
1965 if (dump_enabled_p ())
1966 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1967 "multiple types with negative step.\n");
1968 return VMAT_ELEMENTWISE;
1969 }
1970
1971 alignment_support_scheme = vect_supportable_dr_alignment (vinfo,
1972 dr_info, false);
1973 if (alignment_support_scheme != dr_aligned
1974 && alignment_support_scheme != dr_unaligned_supported)
1975 {
1976 if (dump_enabled_p ())
1977 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1978 "negative step but alignment required.\n");
1979 return VMAT_ELEMENTWISE;
1980 }
1981
1982 if (vls_type == VLS_STORE_INVARIANT)
1983 {
1984 if (dump_enabled_p ())
1985 dump_printf_loc (MSG_NOTE, vect_location,
1986 "negative step with invariant source;"
1987 " no permute needed.\n");
1988 return VMAT_CONTIGUOUS_DOWN;
1989 }
1990
1991 if (!perm_mask_for_reverse (vectype))
1992 {
1993 if (dump_enabled_p ())
1994 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
1995 "negative step and reversing not supported.\n");
1996 return VMAT_ELEMENTWISE;
1997 }
1998
1999 return VMAT_CONTIGUOUS_REVERSE;
2000}
2001
2002/* STMT_INFO is either a masked or unconditional store. Return the value
2003 being stored. */
2004
2005tree
2006vect_get_store_rhs (stmt_vec_info stmt_info)
2007{
2008 if (gassign *assign = dyn_cast <gassign *> (stmt_info->stmt))
2009 {
2010 gcc_assert (gimple_assign_single_p (assign))((void)(!(gimple_assign_single_p (assign)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2010, __FUNCTION__), 0 : 0))
;
2011 return gimple_assign_rhs1 (assign);
2012 }
2013 if (gcall *call = dyn_cast <gcall *> (stmt_info->stmt))
2014 {
2015 internal_fn ifn = gimple_call_internal_fn (call);
2016 int index = internal_fn_stored_value_index (ifn);
2017 gcc_assert (index >= 0)((void)(!(index >= 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2017, __FUNCTION__), 0 : 0))
;
2018 return gimple_call_arg (call, index);
2019 }
2020 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2020, __FUNCTION__))
;
2021}
2022
2023/* Function VECTOR_VECTOR_COMPOSITION_TYPE
2024
2025 This function returns a vector type which can be composed with NETLS pieces,
2026 whose type is recorded in PTYPE. VTYPE should be a vector type, and has the
2027 same vector size as the return vector. It checks target whether supports
2028 pieces-size vector mode for construction firstly, if target fails to, check
2029 pieces-size scalar mode for construction further. It returns NULL_TREE if
2030 fails to find the available composition.
2031
2032 For example, for (vtype=V16QI, nelts=4), we can probably get:
2033 - V16QI with PTYPE V4QI.
2034 - V4SI with PTYPE SI.
2035 - NULL_TREE. */
2036
2037static tree
2038vector_vector_composition_type (tree vtype, poly_uint64 nelts, tree *ptype)
2039{
2040 gcc_assert (VECTOR_TYPE_P (vtype))((void)(!((((enum tree_code) (vtype)->base.code) == VECTOR_TYPE
)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2040, __FUNCTION__), 0 : 0))
;
2041 gcc_assert (known_gt (nelts, 0U))((void)(!((!maybe_le (nelts, 0U))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2041, __FUNCTION__), 0 : 0))
;
2042
2043 machine_mode vmode = TYPE_MODE (vtype)((((enum tree_code) ((tree_class_check ((vtype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2043, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vtype) : (vtype)->type_common.mode)
;
2044 if (!VECTOR_MODE_P (vmode)(((enum mode_class) mode_class[vmode]) == MODE_VECTOR_BOOL ||
((enum mode_class) mode_class[vmode]) == MODE_VECTOR_INT || (
(enum mode_class) mode_class[vmode]) == MODE_VECTOR_FLOAT || (
(enum mode_class) mode_class[vmode]) == MODE_VECTOR_FRACT || (
(enum mode_class) mode_class[vmode]) == MODE_VECTOR_UFRACT ||
((enum mode_class) mode_class[vmode]) == MODE_VECTOR_ACCUM ||
((enum mode_class) mode_class[vmode]) == MODE_VECTOR_UACCUM)
)
2045 return NULL_TREE(tree) nullptr;
2046
2047 poly_uint64 vbsize = GET_MODE_BITSIZE (vmode);
2048 unsigned int pbsize;
2049 if (constant_multiple_p (vbsize, nelts, &pbsize))
2050 {
2051 /* First check if vec_init optab supports construction from
2052 vector pieces directly. */
2053 scalar_mode elmode = SCALAR_TYPE_MODE (TREE_TYPE (vtype))(as_a <scalar_mode> ((tree_class_check ((((contains_struct_check
((vtype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2053, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2053, __FUNCTION__))->type_common.mode))
;
2054 poly_uint64 inelts = pbsize / GET_MODE_BITSIZE (elmode);
2055 machine_mode rmode;
2056 if (related_vector_mode (vmode, elmode, inelts).exists (&rmode)
2057 && (convert_optab_handler (vec_init_optab, vmode, rmode)
2058 != CODE_FOR_nothing))
2059 {
2060 *ptype = build_vector_type (TREE_TYPE (vtype)((contains_struct_check ((vtype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2060, __FUNCTION__))->typed.type)
, inelts);
2061 return vtype;
2062 }
2063
2064 /* Otherwise check if exists an integer type of the same piece size and
2065 if vec_init optab supports construction from it directly. */
2066 if (int_mode_for_size (pbsize, 0).exists (&elmode)
2067 && related_vector_mode (vmode, elmode, nelts).exists (&rmode)
2068 && (convert_optab_handler (vec_init_optab, rmode, elmode)
2069 != CODE_FOR_nothing))
2070 {
2071 *ptype = build_nonstandard_integer_type (pbsize, 1);
2072 return build_vector_type (*ptype, nelts);
2073 }
2074 }
2075
2076 return NULL_TREE(tree) nullptr;
2077}
2078
2079/* A subroutine of get_load_store_type, with a subset of the same
2080 arguments. Handle the case where STMT_INFO is part of a grouped load
2081 or store.
2082
2083 For stores, the statements in the group are all consecutive
2084 and there is no gap at the end. For loads, the statements in the
2085 group might not be consecutive; there can be gaps between statements
2086 as well as at the end. */
2087
2088static bool
2089get_group_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info,
2090 tree vectype, slp_tree slp_node,
2091 bool masked_p, vec_load_store_type vls_type,
2092 vect_memory_access_type *memory_access_type,
2093 dr_alignment_support *alignment_support_scheme,
2094 gather_scatter_info *gs_info)
2095{
2096 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2097 class loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop : NULLnullptr;
2098 stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2098, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
;
2099 dr_vec_info *first_dr_info = STMT_VINFO_DR_INFO (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.stmt == (first_stmt_info
)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2099, __FUNCTION__), 0 : 0)), &(first_stmt_info)->dr_aux
)
;
2100 unsigned int group_size = DR_GROUP_SIZE (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2100, __FUNCTION__), 0 : 0)), (first_stmt_info)->size)
;
2101 bool single_element_p = (stmt_info == first_stmt_info
2102 && !DR_GROUP_NEXT_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2102, __FUNCTION__), 0 : 0)), (stmt_info)->next_element)
);
2103 unsigned HOST_WIDE_INTlong gap = DR_GROUP_GAP (first_stmt_info)(((void)(!((first_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2103, __FUNCTION__), 0 : 0)), (first_stmt_info)->gap)
;
2104 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2105
2106 /* True if the vectorized statements would access beyond the last
2107 statement in the group. */
2108 bool overrun_p = false;
2109
2110 /* True if we can cope with such overrun by peeling for gaps, so that
2111 there is at least one final scalar iteration after the vector loop. */
2112 bool can_overrun_p = (!masked_p
2113 && vls_type == VLS_LOAD
2114 && loop_vinfo
2115 && !loop->inner);
2116
2117 /* There can only be a gap at the end of the group if the stride is
2118 known at compile time. */
2119 gcc_assert (!STMT_VINFO_STRIDED_P (first_stmt_info) || gap == 0)((void)(!(!(first_stmt_info)->strided_p || gap == 0) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2119, __FUNCTION__), 0 : 0))
;
2120
2121 /* Stores can't yet have gaps. */
2122 gcc_assert (slp_node || vls_type == VLS_LOAD || gap == 0)((void)(!(slp_node || vls_type == VLS_LOAD || gap == 0) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2122, __FUNCTION__), 0 : 0))
;
2123
2124 if (slp_node)
2125 {
2126 /* For SLP vectorization we directly vectorize a subchain
2127 without permutation. */
2128 if (! SLP_TREE_LOAD_PERMUTATION (slp_node)(slp_node)->load_permutation.exists ())
2129 first_dr_info
2130 = STMT_VINFO_DR_INFO (SLP_TREE_SCALAR_STMTS (slp_node)[0])(((void)(!(((slp_node)->stmts[0])->dr_aux.stmt == ((slp_node
)->stmts[0])) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2130, __FUNCTION__), 0 : 0)), &((slp_node)->stmts[0]
)->dr_aux)
;
2131 if (STMT_VINFO_STRIDED_P (first_stmt_info)(first_stmt_info)->strided_p)
2132 {
2133 /* Try to use consecutive accesses of DR_GROUP_SIZE elements,
2134 separated by the stride, until we have a complete vector.
2135 Fall back to scalar accesses if that isn't possible. */
2136 if (multiple_p (nunits, group_size))
2137 *memory_access_type = VMAT_STRIDED_SLP;
2138 else
2139 *memory_access_type = VMAT_ELEMENTWISE;
2140 }
2141 else
2142 {
2143 overrun_p = loop_vinfo && gap != 0;
2144 if (overrun_p && vls_type != VLS_LOAD)
2145 {
2146 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2147 "Grouped store with gaps requires"
2148 " non-consecutive accesses\n");
2149 return false;
2150 }
2151 /* An overrun is fine if the trailing elements are smaller
2152 than the alignment boundary B. Every vector access will
2153 be a multiple of B and so we are guaranteed to access a
2154 non-gap element in the same B-sized block. */
2155 if (overrun_p
2156 && gap < (vect_known_alignment_in_bytes (first_dr_info)
2157 / vect_get_scalar_dr_size (first_dr_info)))
2158 overrun_p = false;
2159
2160 /* If the gap splits the vector in half and the target
2161 can do half-vector operations avoid the epilogue peeling
2162 by simply loading half of the vector only. Usually
2163 the construction with an upper zero half will be elided. */
2164 dr_alignment_support alignment_support_scheme;
2165 tree half_vtype;
2166 if (overrun_p
2167 && !masked_p
2168 && (((alignment_support_scheme
2169 = vect_supportable_dr_alignment (vinfo,
2170 first_dr_info, false)))
2171 == dr_aligned
2172 || alignment_support_scheme == dr_unaligned_supported)
2173 && known_eq (nunits, (group_size - gap) * 2)(!maybe_ne (nunits, (group_size - gap) * 2))
2174 && known_eq (nunits, group_size)(!maybe_ne (nunits, group_size))
2175 && (vector_vector_composition_type (vectype, 2, &half_vtype)
2176 != NULL_TREE(tree) nullptr))
2177 overrun_p = false;
2178
2179 if (overrun_p && !can_overrun_p)
2180 {
2181 if (dump_enabled_p ())
2182 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2183 "Peeling for outer loop is not supported\n");
2184 return false;
2185 }
2186 int cmp = compare_step_with_zero (vinfo, stmt_info);
2187 if (cmp < 0)
2188 {
2189 if (single_element_p)
2190 /* ??? The VMAT_CONTIGUOUS_REVERSE code generation is
2191 only correct for single element "interleaving" SLP. */
2192 *memory_access_type = get_negative_load_store_type
2193 (vinfo, stmt_info, vectype, vls_type, 1);
2194 else
2195 {
2196 /* Try to use consecutive accesses of DR_GROUP_SIZE elements,
2197 separated by the stride, until we have a complete vector.
2198 Fall back to scalar accesses if that isn't possible. */
2199 if (multiple_p (nunits, group_size))
2200 *memory_access_type = VMAT_STRIDED_SLP;
2201 else
2202 *memory_access_type = VMAT_ELEMENTWISE;
2203 }
2204 }
2205 else
2206 {
2207 gcc_assert (!loop_vinfo || cmp > 0)((void)(!(!loop_vinfo || cmp > 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2207, __FUNCTION__), 0 : 0))
;
2208 *memory_access_type = VMAT_CONTIGUOUS;
2209 }
2210 }
2211 }
2212 else
2213 {
2214 /* We can always handle this case using elementwise accesses,
2215 but see if something more efficient is available. */
2216 *memory_access_type = VMAT_ELEMENTWISE;
2217
2218 /* If there is a gap at the end of the group then these optimizations
2219 would access excess elements in the last iteration. */
2220 bool would_overrun_p = (gap != 0);
2221 /* An overrun is fine if the trailing elements are smaller than the
2222 alignment boundary B. Every vector access will be a multiple of B
2223 and so we are guaranteed to access a non-gap element in the
2224 same B-sized block. */
2225 if (would_overrun_p
2226 && !masked_p
2227 && gap < (vect_known_alignment_in_bytes (first_dr_info)
2228 / vect_get_scalar_dr_size (first_dr_info)))
2229 would_overrun_p = false;
2230
2231 if (!STMT_VINFO_STRIDED_P (first_stmt_info)(first_stmt_info)->strided_p
2232 && (can_overrun_p || !would_overrun_p)
2233 && compare_step_with_zero (vinfo, stmt_info) > 0)
2234 {
2235 /* First cope with the degenerate case of a single-element
2236 vector. */
2237 if (known_eq (TYPE_VECTOR_SUBPARTS (vectype), 1U)(!maybe_ne (TYPE_VECTOR_SUBPARTS (vectype), 1U)))
2238 ;
2239
2240 /* Otherwise try using LOAD/STORE_LANES. */
2241 else if (vls_type == VLS_LOAD
2242 ? vect_load_lanes_supported (vectype, group_size, masked_p)
2243 : vect_store_lanes_supported (vectype, group_size,
2244 masked_p))
2245 {
2246 *memory_access_type = VMAT_LOAD_STORE_LANES;
2247 overrun_p = would_overrun_p;
2248 }
2249
2250 /* If that fails, try using permuting loads. */
2251 else if (vls_type == VLS_LOAD
2252 ? vect_grouped_load_supported (vectype, single_element_p,
2253 group_size)
2254 : vect_grouped_store_supported (vectype, group_size))
2255 {
2256 *memory_access_type = VMAT_CONTIGUOUS_PERMUTE;
2257 overrun_p = would_overrun_p;
2258 }
2259 }
2260
2261 /* As a last resort, trying using a gather load or scatter store.
2262
2263 ??? Although the code can handle all group sizes correctly,
2264 it probably isn't a win to use separate strided accesses based
2265 on nearby locations. Or, even if it's a win over scalar code,
2266 it might not be a win over vectorizing at a lower VF, if that
2267 allows us to use contiguous accesses. */
2268 if (*memory_access_type == VMAT_ELEMENTWISE
2269 && single_element_p
2270 && loop_vinfo
2271 && vect_use_strided_gather_scatters_p (stmt_info, loop_vinfo,
2272 masked_p, gs_info))
2273 *memory_access_type = VMAT_GATHER_SCATTER;
2274 }
2275
2276 if (*memory_access_type == VMAT_GATHER_SCATTER
2277 || *memory_access_type == VMAT_ELEMENTWISE)
2278 *alignment_support_scheme = dr_unaligned_supported;
2279 else
2280 *alignment_support_scheme
2281 = vect_supportable_dr_alignment (vinfo, first_dr_info, false);
2282
2283 if (vls_type != VLS_LOAD && first_stmt_info == stmt_info)
2284 {
2285 /* STMT is the leader of the group. Check the operands of all the
2286 stmts of the group. */
2287 stmt_vec_info next_stmt_info = DR_GROUP_NEXT_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2287, __FUNCTION__), 0 : 0)), (stmt_info)->next_element)
;
2288 while (next_stmt_info)
2289 {
2290 tree op = vect_get_store_rhs (next_stmt_info);
2291 enum vect_def_type dt;
2292 if (!vect_is_simple_use (op, vinfo, &dt))
2293 {
2294 if (dump_enabled_p ())
2295 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2296 "use not simple.\n");
2297 return false;
2298 }
2299 next_stmt_info = DR_GROUP_NEXT_ELEMENT (next_stmt_info)(((void)(!((next_stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2299, __FUNCTION__), 0 : 0)), (next_stmt_info)->next_element
)
;
2300 }
2301 }
2302
2303 if (overrun_p)
2304 {
2305 gcc_assert (can_overrun_p)((void)(!(can_overrun_p) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2305, __FUNCTION__), 0 : 0))
;
2306 if (dump_enabled_p ())
2307 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2308 "Data access with gaps requires scalar "
2309 "epilogue loop\n");
2310 LOOP_VINFO_PEELING_FOR_GAPS (loop_vinfo)(loop_vinfo)->peeling_for_gaps = true;
2311 }
2312
2313 return true;
2314}
2315
2316/* Analyze load or store statement STMT_INFO of type VLS_TYPE. Return true
2317 if there is a memory access type that the vectorized form can use,
2318 storing it in *MEMORY_ACCESS_TYPE if so. If we decide to use gathers
2319 or scatters, fill in GS_INFO accordingly. In addition
2320 *ALIGNMENT_SUPPORT_SCHEME is filled out and false is returned if
2321 the target does not support the alignment scheme.
2322
2323 SLP says whether we're performing SLP rather than loop vectorization.
2324 MASKED_P is true if the statement is conditional on a vectorized mask.
2325 VECTYPE is the vector type that the vectorized statements will use.
2326 NCOPIES is the number of vector statements that will be needed. */
2327
2328static bool
2329get_load_store_type (vec_info *vinfo, stmt_vec_info stmt_info,
2330 tree vectype, slp_tree slp_node,
2331 bool masked_p, vec_load_store_type vls_type,
2332 unsigned int ncopies,
2333 vect_memory_access_type *memory_access_type,
2334 dr_alignment_support *alignment_support_scheme,
2335 gather_scatter_info *gs_info)
2336{
2337 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2338 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2339 if (STMT_VINFO_GATHER_SCATTER_P (stmt_info)(stmt_info)->gather_scatter_p)
2340 {
2341 *memory_access_type = VMAT_GATHER_SCATTER;
2342 if (!vect_check_gather_scatter (stmt_info, loop_vinfo, gs_info))
2343 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2343, __FUNCTION__))
;
2344 else if (!vect_is_simple_use (gs_info->offset, vinfo,
2345 &gs_info->offset_dt,
2346 &gs_info->offset_vectype))
2347 {
2348 if (dump_enabled_p ())
2349 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2350 "%s index use not simple.\n",
2351 vls_type == VLS_LOAD ? "gather" : "scatter");
2352 return false;
2353 }
2354 /* Gather-scatter accesses perform only component accesses, alignment
2355 is irrelevant for them. */
2356 *alignment_support_scheme = dr_unaligned_supported;
2357 }
2358 else if (STMT_VINFO_GROUPED_ACCESS (stmt_info)((stmt_info)->dr_aux.dr && (((void)(!((stmt_info)->
dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2358, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
))
)
2359 {
2360 if (!get_group_load_store_type (vinfo, stmt_info, vectype, slp_node,
2361 masked_p,
2362 vls_type, memory_access_type,
2363 alignment_support_scheme, gs_info))
2364 return false;
2365 }
2366 else if (STMT_VINFO_STRIDED_P (stmt_info)(stmt_info)->strided_p)
2367 {
2368 gcc_assert (!slp_node)((void)(!(!slp_node) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2368, __FUNCTION__), 0 : 0))
;
2369 if (loop_vinfo
2370 && vect_use_strided_gather_scatters_p (stmt_info, loop_vinfo,
2371 masked_p, gs_info))
2372 *memory_access_type = VMAT_GATHER_SCATTER;
2373 else
2374 *memory_access_type = VMAT_ELEMENTWISE;
2375 /* Alignment is irrelevant here. */
2376 *alignment_support_scheme = dr_unaligned_supported;
2377 }
2378 else
2379 {
2380 int cmp = compare_step_with_zero (vinfo, stmt_info);
2381 if (cmp == 0)
2382 {
2383 gcc_assert (vls_type == VLS_LOAD)((void)(!(vls_type == VLS_LOAD) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2383, __FUNCTION__), 0 : 0))
;
2384 *memory_access_type = VMAT_INVARIANT;
2385 /* Invariant accesses perform only component accesses, alignment
2386 is irrelevant for them. */
2387 *alignment_support_scheme = dr_unaligned_supported;
2388 }
2389 else
2390 {
2391 if (cmp < 0)
2392 *memory_access_type = get_negative_load_store_type
2393 (vinfo, stmt_info, vectype, vls_type, ncopies);
2394 else
2395 *memory_access_type = VMAT_CONTIGUOUS;
2396 *alignment_support_scheme
2397 = vect_supportable_dr_alignment (vinfo,
2398 STMT_VINFO_DR_INFO (stmt_info)(((void)(!((stmt_info)->dr_aux.stmt == (stmt_info)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2398, __FUNCTION__), 0 : 0)), &(stmt_info)->dr_aux)
,
2399 false);
2400 }
2401 }
2402
2403 if ((*memory_access_type == VMAT_ELEMENTWISE
2404 || *memory_access_type == VMAT_STRIDED_SLP)
2405 && !nunits.is_constant ())
2406 {
2407 if (dump_enabled_p ())
2408 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2409 "Not using elementwise accesses due to variable "
2410 "vectorization factor.\n");
2411 return false;
2412 }
2413
2414 if (*alignment_support_scheme == dr_unaligned_unsupported)
2415 {
2416 if (dump_enabled_p ())
2417 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2418 "unsupported unaligned access\n");
2419 return false;
2420 }
2421
2422 /* FIXME: At the moment the cost model seems to underestimate the
2423 cost of using elementwise accesses. This check preserves the
2424 traditional behavior until that can be fixed. */
2425 stmt_vec_info first_stmt_info = DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2425, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
;
2426 if (!first_stmt_info)
2427 first_stmt_info = stmt_info;
2428 if (*memory_access_type == VMAT_ELEMENTWISE
2429 && !STMT_VINFO_STRIDED_P (first_stmt_info)(first_stmt_info)->strided_p
2430 && !(stmt_info == DR_GROUP_FIRST_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2430, __FUNCTION__), 0 : 0)), (stmt_info)->first_element
)
2431 && !DR_GROUP_NEXT_ELEMENT (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2431, __FUNCTION__), 0 : 0)), (stmt_info)->next_element)
2432 && !pow2p_hwi (DR_GROUP_SIZE (stmt_info)(((void)(!((stmt_info)->dr_aux.dr) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2432, __FUNCTION__), 0 : 0)), (stmt_info)->size)
)))
2433 {
2434 if (dump_enabled_p ())
2435 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2436 "not falling back to elementwise accesses\n");
2437 return false;
2438 }
2439 return true;
2440}
2441
2442/* Return true if boolean argument MASK is suitable for vectorizing
2443 conditional operation STMT_INFO. When returning true, store the type
2444 of the definition in *MASK_DT_OUT and the type of the vectorized mask
2445 in *MASK_VECTYPE_OUT. */
2446
2447static bool
2448vect_check_scalar_mask (vec_info *vinfo, stmt_vec_info stmt_info, tree mask,
2449 vect_def_type *mask_dt_out,
2450 tree *mask_vectype_out)
2451{
2452 if (!VECT_SCALAR_BOOLEAN_TYPE_P (TREE_TYPE (mask))(((enum tree_code) (((contains_struct_check ((mask), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2452, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((((enum tree_code) (((contains_struct_check ((mask), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2452, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
|| ((enum tree_code) (((contains_struct_check ((mask), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2452, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
) && ((tree_class_check ((((contains_struct_check ((mask
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2452, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2452, __FUNCTION__))->type_common.precision) == 1 &&
((tree_class_check ((((contains_struct_check ((mask), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2452, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2452, __FUNCTION__))->base.u.bits.unsigned_flag)))
)
2453 {
2454 if (dump_enabled_p ())
2455 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2456 "mask argument is not a boolean.\n");
2457 return false;
2458 }
2459
2460 if (TREE_CODE (mask)((enum tree_code) (mask)->base.code) != SSA_NAME)
2461 {
2462 if (dump_enabled_p ())
2463 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2464 "mask argument is not an SSA name.\n");
2465 return false;
2466 }
2467
2468 enum vect_def_type mask_dt;
2469 tree mask_vectype;
2470 if (!vect_is_simple_use (mask, vinfo, &mask_dt, &mask_vectype))
2471 {
2472 if (dump_enabled_p ())
2473 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2474 "mask use not simple.\n");
2475 return false;
2476 }
2477
2478 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
2479 if (!mask_vectype)
2480 mask_vectype = get_mask_type_for_scalar_type (vinfo, TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2480, __FUNCTION__))->typed.type)
);
2481
2482 if (!mask_vectype || !VECTOR_BOOLEAN_TYPE_P (mask_vectype)(((enum tree_code) (mask_vectype)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((mask_vectype
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2482, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
2483 {
2484 if (dump_enabled_p ())
2485 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2486 "could not find an appropriate vector mask type.\n");
2487 return false;
2488 }
2489
2490 if (maybe_ne (TYPE_VECTOR_SUBPARTS (mask_vectype),
2491 TYPE_VECTOR_SUBPARTS (vectype)))
2492 {
2493 if (dump_enabled_p ())
2494 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2495 "vector mask type %T"
2496 " does not match vector data type %T.\n",
2497 mask_vectype, vectype);
2498
2499 return false;
2500 }
2501
2502 *mask_dt_out = mask_dt;
2503 *mask_vectype_out = mask_vectype;
2504 return true;
2505}
2506
2507/* Return true if stored value RHS is suitable for vectorizing store
2508 statement STMT_INFO. When returning true, store the type of the
2509 definition in *RHS_DT_OUT, the type of the vectorized store value in
2510 *RHS_VECTYPE_OUT and the type of the store in *VLS_TYPE_OUT. */
2511
2512static bool
2513vect_check_store_rhs (vec_info *vinfo, stmt_vec_info stmt_info,
2514 slp_tree slp_node, tree rhs,
2515 vect_def_type *rhs_dt_out, tree *rhs_vectype_out,
2516 vec_load_store_type *vls_type_out)
2517{
2518 /* In the case this is a store from a constant make sure
2519 native_encode_expr can handle it. */
2520 if (CONSTANT_CLASS_P (rhs)(tree_code_type[(int) (((enum tree_code) (rhs)->base.code)
)] == tcc_constant)
&& native_encode_expr (rhs, NULLnullptr, 64) == 0)
2521 {
2522 if (dump_enabled_p ())
2523 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2524 "cannot encode constant as a byte sequence.\n");
2525 return false;
2526 }
2527
2528 enum vect_def_type rhs_dt;
2529 tree rhs_vectype;
2530 slp_tree slp_op;
2531 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 0,
2532 &rhs, &slp_op, &rhs_dt, &rhs_vectype))
2533 {
2534 if (dump_enabled_p ())
2535 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2536 "use not simple.\n");
2537 return false;
2538 }
2539
2540 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
2541 if (rhs_vectype && !useless_type_conversion_p (vectype, rhs_vectype))
2542 {
2543 if (dump_enabled_p ())
2544 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2545 "incompatible vector types.\n");
2546 return false;
2547 }
2548
2549 *rhs_dt_out = rhs_dt;
2550 *rhs_vectype_out = rhs_vectype;
2551 if (rhs_dt == vect_constant_def || rhs_dt == vect_external_def)
2552 *vls_type_out = VLS_STORE_INVARIANT;
2553 else
2554 *vls_type_out = VLS_STORE;
2555 return true;
2556}
2557
2558/* Build an all-ones vector mask of type MASKTYPE while vectorizing STMT_INFO.
2559 Note that we support masks with floating-point type, in which case the
2560 floats are interpreted as a bitmask. */
2561
2562static tree
2563vect_build_all_ones_mask (vec_info *vinfo,
2564 stmt_vec_info stmt_info, tree masktype)
2565{
2566 if (TREE_CODE (masktype)((enum tree_code) (masktype)->base.code) == INTEGER_TYPE)
2567 return build_int_cst (masktype, -1);
2568 else if (TREE_CODE (TREE_TYPE (masktype))((enum tree_code) (((contains_struct_check ((masktype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2568, __FUNCTION__))->typed.type))->base.code)
== INTEGER_TYPE)
2569 {
2570 tree mask = build_int_cst (TREE_TYPE (masktype)((contains_struct_check ((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2570, __FUNCTION__))->typed.type)
, -1);
2571 mask = build_vector_from_val (masktype, mask);
2572 return vect_init_vector (vinfo, stmt_info, mask, masktype, NULLnullptr);
2573 }
2574 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (masktype))(((enum tree_code) (((contains_struct_check ((masktype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2574, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)
)
2575 {
2576 REAL_VALUE_TYPEstruct real_value r;
2577 long tmp[6];
2578 for (int j = 0; j < 6; ++j)
2579 tmp[j] = -1;
2580 real_from_target (&r, tmp, TYPE_MODE (TREE_TYPE (masktype))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2580, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2580, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2580, __FUNCTION__))->typed.type)) : (((contains_struct_check
((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2580, __FUNCTION__))->typed.type))->type_common.mode)
);
2581 tree mask = build_real (TREE_TYPE (masktype)((contains_struct_check ((masktype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2581, __FUNCTION__))->typed.type)
, r);
2582 mask = build_vector_from_val (masktype, mask);
2583 return vect_init_vector (vinfo, stmt_info, mask, masktype, NULLnullptr);
2584 }
2585 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2585, __FUNCTION__))
;
2586}
2587
2588/* Build an all-zero merge value of type VECTYPE while vectorizing
2589 STMT_INFO as a gather load. */
2590
2591static tree
2592vect_build_zero_merge_argument (vec_info *vinfo,
2593 stmt_vec_info stmt_info, tree vectype)
2594{
2595 tree merge;
2596 if (TREE_CODE (TREE_TYPE (vectype))((enum tree_code) (((contains_struct_check ((vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2596, __FUNCTION__))->typed.type))->base.code)
== INTEGER_TYPE)
2597 merge = build_int_cst (TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2597, __FUNCTION__))->typed.type)
, 0);
2598 else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (vectype))(((enum tree_code) (((contains_struct_check ((vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2598, __FUNCTION__))->typed.type))->base.code) == REAL_TYPE
)
)
2599 {
2600 REAL_VALUE_TYPEstruct real_value r;
2601 long tmp[6];
2602 for (int j = 0; j < 6; ++j)
2603 tmp[j] = 0;
2604 real_from_target (&r, tmp, TYPE_MODE (TREE_TYPE (vectype))((((enum tree_code) ((tree_class_check ((((contains_struct_check
((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2604, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2604, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2604, __FUNCTION__))->typed.type)) : (((contains_struct_check
((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2604, __FUNCTION__))->typed.type))->type_common.mode)
);
2605 merge = build_real (TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2605, __FUNCTION__))->typed.type)
, r);
2606 }
2607 else
2608 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2608, __FUNCTION__))
;
2609 merge = build_vector_from_val (vectype, merge);
2610 return vect_init_vector (vinfo, stmt_info, merge, vectype, NULLnullptr);
2611}
2612
2613/* Build a gather load call while vectorizing STMT_INFO. Insert new
2614 instructions before GSI and add them to VEC_STMT. GS_INFO describes
2615 the gather load operation. If the load is conditional, MASK is the
2616 unvectorized condition and MASK_DT is its definition type, otherwise
2617 MASK is null. */
2618
2619static void
2620vect_build_gather_load_calls (vec_info *vinfo, stmt_vec_info stmt_info,
2621 gimple_stmt_iterator *gsi,
2622 gimple **vec_stmt,
2623 gather_scatter_info *gs_info,
2624 tree mask)
2625{
2626 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2627 class loop *loop = LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop;
2628 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
2629 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2630 int ncopies = vect_get_num_copies (loop_vinfo, vectype);
2631 edge pe = loop_preheader_edge (loop);
2632 enum { NARROW, NONE, WIDEN } modifier;
2633 poly_uint64 gather_off_nunits
2634 = TYPE_VECTOR_SUBPARTS (gs_info->offset_vectype);
2635
2636 tree arglist = TYPE_ARG_TYPES (TREE_TYPE (gs_info->decl))((tree_check2 ((((contains_struct_check ((gs_info->decl), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2636, __FUNCTION__))->typed.type)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2636, __FUNCTION__, (FUNCTION_TYPE), (METHOD_TYPE)))->type_non_common
.values)
;
2637 tree rettype = TREE_TYPE (TREE_TYPE (gs_info->decl))((contains_struct_check ((((contains_struct_check ((gs_info->
decl), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2637, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2637, __FUNCTION__))->typed.type)
;
2638 tree srctype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2638, __FUNCTION__, (TREE_LIST)))->list.value)
; arglist = TREE_CHAIN (arglist)((contains_struct_check ((arglist), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2638, __FUNCTION__))->common.chain)
;
2639 tree ptrtype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2639, __FUNCTION__, (TREE_LIST)))->list.value)
; arglist = TREE_CHAIN (arglist)((contains_struct_check ((arglist), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2639, __FUNCTION__))->common.chain)
;
2640 tree idxtype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2640, __FUNCTION__, (TREE_LIST)))->list.value)
; arglist = TREE_CHAIN (arglist)((contains_struct_check ((arglist), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2640, __FUNCTION__))->common.chain)
;
2641 tree masktype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2641, __FUNCTION__, (TREE_LIST)))->list.value)
; arglist = TREE_CHAIN (arglist)((contains_struct_check ((arglist), (TS_COMMON), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2641, __FUNCTION__))->common.chain)
;
2642 tree scaletype = TREE_VALUE (arglist)((tree_check ((arglist), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2642, __FUNCTION__, (TREE_LIST)))->list.value)
;
2643 tree real_masktype = masktype;
2644 gcc_checking_assert (types_compatible_p (srctype, rettype)((void)(!(types_compatible_p (srctype, rettype) && (!
mask || ((enum tree_code) (masktype)->base.code) == INTEGER_TYPE
|| types_compatible_p (srctype, masktype))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2647, __FUNCTION__), 0 : 0))
2645 && (!mask((void)(!(types_compatible_p (srctype, rettype) && (!
mask || ((enum tree_code) (masktype)->base.code) == INTEGER_TYPE
|| types_compatible_p (srctype, masktype))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2647, __FUNCTION__), 0 : 0))
2646 || TREE_CODE (masktype) == INTEGER_TYPE((void)(!(types_compatible_p (srctype, rettype) && (!
mask || ((enum tree_code) (masktype)->base.code) == INTEGER_TYPE
|| types_compatible_p (srctype, masktype))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2647, __FUNCTION__), 0 : 0))
2647 || types_compatible_p (srctype, masktype)))((void)(!(types_compatible_p (srctype, rettype) && (!
mask || ((enum tree_code) (masktype)->base.code) == INTEGER_TYPE
|| types_compatible_p (srctype, masktype))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2647, __FUNCTION__), 0 : 0))
;
2648 if (mask && TREE_CODE (masktype)((enum tree_code) (masktype)->base.code) == INTEGER_TYPE)
2649 masktype = truth_type_for (srctype);
2650
2651 tree mask_halftype = masktype;
2652 tree perm_mask = NULL_TREE(tree) nullptr;
2653 tree mask_perm_mask = NULL_TREE(tree) nullptr;
2654 if (known_eq (nunits, gather_off_nunits)(!maybe_ne (nunits, gather_off_nunits)))
2655 modifier = NONE;
2656 else if (known_eq (nunits * 2, gather_off_nunits)(!maybe_ne (nunits * 2, gather_off_nunits)))
2657 {
2658 modifier = WIDEN;
2659
2660 /* Currently widening gathers and scatters are only supported for
2661 fixed-length vectors. */
2662 int count = gather_off_nunits.to_constant ();
2663 vec_perm_builder sel (count, count, 1);
2664 for (int i = 0; i < count; ++i)
2665 sel.quick_push (i | (count / 2));
2666
2667 vec_perm_indices indices (sel, 1, count);
2668 perm_mask = vect_gen_perm_mask_checked (gs_info->offset_vectype,
2669 indices);
2670 }
2671 else if (known_eq (nunits, gather_off_nunits * 2)(!maybe_ne (nunits, gather_off_nunits * 2)))
2672 {
2673 modifier = NARROW;
2674
2675 /* Currently narrowing gathers and scatters are only supported for
2676 fixed-length vectors. */
2677 int count = nunits.to_constant ();
2678 vec_perm_builder sel (count, count, 1);
2679 sel.quick_grow (count);
2680 for (int i = 0; i < count; ++i)
2681 sel[i] = i < count / 2 ? i : i + count / 2;
2682 vec_perm_indices indices (sel, 2, count);
2683 perm_mask = vect_gen_perm_mask_checked (vectype, indices);
2684
2685 ncopies *= 2;
2686
2687 if (mask && masktype == real_masktype)
2688 {
2689 for (int i = 0; i < count; ++i)
2690 sel[i] = i | (count / 2);
2691 indices.new_vector (sel, 2, count);
2692 mask_perm_mask = vect_gen_perm_mask_checked (masktype, indices);
2693 }
2694 else if (mask)
2695 mask_halftype = truth_type_for (gs_info->offset_vectype);
2696 }
2697 else
2698 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2698, __FUNCTION__))
;
2699
2700 tree scalar_dest = gimple_get_lhs (stmt_info->stmt);
2701 tree vec_dest = vect_create_destination_var (scalar_dest, vectype);
2702
2703 tree ptr = fold_convert (ptrtype, gs_info->base)fold_convert_loc (((location_t) 0), ptrtype, gs_info->base
)
;
2704 if (!is_gimple_min_invariant (ptr))
2705 {
2706 gimple_seq seq;
2707 ptr = force_gimple_operand (ptr, &seq, true, NULL_TREE(tree) nullptr);
2708 basic_block new_bb = gsi_insert_seq_on_edge_immediate (pe, seq);
2709 gcc_assert (!new_bb)((void)(!(!new_bb) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2709, __FUNCTION__), 0 : 0))
;
2710 }
2711
2712 tree scale = build_int_cst (scaletype, gs_info->scale);
2713
2714 tree vec_oprnd0 = NULL_TREE(tree) nullptr;
2715 tree vec_mask = NULL_TREE(tree) nullptr;
2716 tree src_op = NULL_TREE(tree) nullptr;
2717 tree mask_op = NULL_TREE(tree) nullptr;
2718 tree prev_res = NULL_TREE(tree) nullptr;
2719
2720 if (!mask)
2721 {
2722 src_op = vect_build_zero_merge_argument (vinfo, stmt_info, rettype);
2723 mask_op = vect_build_all_ones_mask (vinfo, stmt_info, masktype);
2724 }
2725
2726 auto_vec<tree> vec_oprnds0;
2727 auto_vec<tree> vec_masks;
2728 vect_get_vec_defs_for_operand (vinfo, stmt_info,
2729 modifier == WIDEN ? ncopies / 2 : ncopies,
2730 gs_info->offset, &vec_oprnds0);
2731 if (mask)
2732 vect_get_vec_defs_for_operand (vinfo, stmt_info,
2733 modifier == NARROW ? ncopies / 2 : ncopies,
2734 mask, &vec_masks);
2735 for (int j = 0; j < ncopies; ++j)
2736 {
2737 tree op, var;
2738 if (modifier == WIDEN && (j & 1))
2739 op = permute_vec_elements (vinfo, vec_oprnd0, vec_oprnd0,
2740 perm_mask, stmt_info, gsi);
2741 else
2742 op = vec_oprnd0 = vec_oprnds0[modifier == WIDEN ? j / 2 : j];
2743
2744 if (!useless_type_conversion_p (idxtype, TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2744, __FUNCTION__))->typed.type)
))
2745 {
2746 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (op)),((void)(!((!maybe_ne (TYPE_VECTOR_SUBPARTS (((contains_struct_check
((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2746, __FUNCTION__))->typed.type)), TYPE_VECTOR_SUBPARTS
(idxtype)))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2747, __FUNCTION__), 0 : 0))
2747 TYPE_VECTOR_SUBPARTS (idxtype)))((void)(!((!maybe_ne (TYPE_VECTOR_SUBPARTS (((contains_struct_check
((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2746, __FUNCTION__))->typed.type)), TYPE_VECTOR_SUBPARTS
(idxtype)))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2747, __FUNCTION__), 0 : 0))
;
2748 var = vect_get_new_ssa_name (idxtype, vect_simple_var);
2749 op = build1 (VIEW_CONVERT_EXPR, idxtype, op);
2750 gassign *new_stmt = gimple_build_assign (var, VIEW_CONVERT_EXPR, op);
2751 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2752 op = var;
2753 }
2754
2755 if (mask)
2756 {
2757 if (mask_perm_mask && (j & 1))
2758 mask_op = permute_vec_elements (vinfo, mask_op, mask_op,
2759 mask_perm_mask, stmt_info, gsi);
2760 else
2761 {
2762 if (modifier == NARROW)
2763 {
2764 if ((j & 1) == 0)
2765 vec_mask = vec_masks[j / 2];
2766 }
2767 else
2768 vec_mask = vec_masks[j];
2769
2770 mask_op = vec_mask;
2771 if (!useless_type_conversion_p (masktype, TREE_TYPE (vec_mask)((contains_struct_check ((vec_mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2771, __FUNCTION__))->typed.type)
))
2772 {
2773 poly_uint64 sub1 = TYPE_VECTOR_SUBPARTS (TREE_TYPE (mask_op)((contains_struct_check ((mask_op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2773, __FUNCTION__))->typed.type)
);
2774 poly_uint64 sub2 = TYPE_VECTOR_SUBPARTS (masktype);
2775 gcc_assert (known_eq (sub1, sub2))((void)(!((!maybe_ne (sub1, sub2))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2775, __FUNCTION__), 0 : 0))
;
2776 var = vect_get_new_ssa_name (masktype, vect_simple_var);
2777 mask_op = build1 (VIEW_CONVERT_EXPR, masktype, mask_op);
2778 gassign *new_stmt
2779 = gimple_build_assign (var, VIEW_CONVERT_EXPR, mask_op);
2780 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2781 mask_op = var;
2782 }
2783 }
2784 if (modifier == NARROW && masktype != real_masktype)
2785 {
2786 var = vect_get_new_ssa_name (mask_halftype, vect_simple_var);
2787 gassign *new_stmt
2788 = gimple_build_assign (var, (j & 1) ? VEC_UNPACK_HI_EXPR
2789 : VEC_UNPACK_LO_EXPR,
2790 mask_op);
2791 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2792 mask_op = var;
2793 }
2794 src_op = mask_op;
2795 }
2796
2797 tree mask_arg = mask_op;
2798 if (masktype != real_masktype)
2799 {
2800 tree utype, optype = TREE_TYPE (mask_op)((contains_struct_check ((mask_op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2800, __FUNCTION__))->typed.type)
;
2801 if (TYPE_MODE (real_masktype)((((enum tree_code) ((tree_class_check ((real_masktype), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2801, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(real_masktype) : (real_masktype)->type_common.mode)
== TYPE_MODE (optype)((((enum tree_code) ((tree_class_check ((optype), (tcc_type),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2801, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(optype) : (optype)->type_common.mode)
)
2802 utype = real_masktype;
2803 else
2804 utype = lang_hooks.types.type_for_mode (TYPE_MODE (optype)((((enum tree_code) ((tree_class_check ((optype), (tcc_type),
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2804, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(optype) : (optype)->type_common.mode)
, 1);
2805 var = vect_get_new_ssa_name (utype, vect_scalar_var);
2806 mask_arg = build1 (VIEW_CONVERT_EXPR, utype, mask_op);
2807 gassign *new_stmt
2808 = gimple_build_assign (var, VIEW_CONVERT_EXPR, mask_arg);
2809 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2810 mask_arg = var;
2811 if (!useless_type_conversion_p (real_masktype, utype))
2812 {
2813 gcc_assert (TYPE_PRECISION (utype)((void)(!(((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2813, __FUNCTION__))->type_common.precision) <= ((tree_class_check
((real_masktype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2814, __FUNCTION__))->type_common.precision)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2814, __FUNCTION__), 0 : 0))
2814 <= TYPE_PRECISION (real_masktype))((void)(!(((tree_class_check ((utype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2813, __FUNCTION__))->type_common.precision) <= ((tree_class_check
((real_masktype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2814, __FUNCTION__))->type_common.precision)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2814, __FUNCTION__), 0 : 0))
;
2815 var = vect_get_new_ssa_name (real_masktype, vect_scalar_var);
2816 new_stmt = gimple_build_assign (var, NOP_EXPR, mask_arg);
2817 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2818 mask_arg = var;
2819 }
2820 src_op = build_zero_cst (srctype);
2821 }
2822 gimple *new_stmt = gimple_build_call (gs_info->decl, 5, src_op, ptr, op,
2823 mask_arg, scale);
2824
2825 if (!useless_type_conversion_p (vectype, rettype))
2826 {
2827 gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (vectype),((void)(!((!maybe_ne (TYPE_VECTOR_SUBPARTS (vectype), TYPE_VECTOR_SUBPARTS
(rettype)))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2828, __FUNCTION__), 0 : 0))
2828 TYPE_VECTOR_SUBPARTS (rettype)))((void)(!((!maybe_ne (TYPE_VECTOR_SUBPARTS (vectype), TYPE_VECTOR_SUBPARTS
(rettype)))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2828, __FUNCTION__), 0 : 0))
;
2829 op = vect_get_new_ssa_name (rettype, vect_simple_var);
2830 gimple_call_set_lhs (new_stmt, op);
2831 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2832 var = make_ssa_name (vec_dest);
2833 op = build1 (VIEW_CONVERT_EXPR, vectype, op);
2834 new_stmt = gimple_build_assign (var, VIEW_CONVERT_EXPR, op);
2835 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2836 }
2837 else
2838 {
2839 var = make_ssa_name (vec_dest, new_stmt);
2840 gimple_call_set_lhs (new_stmt, var);
2841 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
2842 }
2843
2844 if (modifier == NARROW)
2845 {
2846 if ((j & 1) == 0)
2847 {
2848 prev_res = var;
2849 continue;
2850 }
2851 var = permute_vec_elements (vinfo, prev_res, var, perm_mask,
2852 stmt_info, gsi);
2853 new_stmt = SSA_NAME_DEF_STMT (var)(tree_check ((var), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2853, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
2854 }
2855
2856 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
2857 }
2858 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
2859}
2860
2861/* Prepare the base and offset in GS_INFO for vectorization.
2862 Set *DATAREF_PTR to the loop-invariant base address and *VEC_OFFSET
2863 to the vectorized offset argument for the first copy of STMT_INFO.
2864 STMT_INFO is the statement described by GS_INFO and LOOP is the
2865 containing loop. */
2866
2867static void
2868vect_get_gather_scatter_ops (vec_info *vinfo,
2869 class loop *loop, stmt_vec_info stmt_info,
2870 gather_scatter_info *gs_info,
2871 tree *dataref_ptr, vec<tree> *vec_offset,
2872 unsigned ncopies)
2873{
2874 gimple_seq stmts = NULLnullptr;
2875 *dataref_ptr = force_gimple_operand (gs_info->base, &stmts, true, NULL_TREE(tree) nullptr);
2876 if (stmts != NULLnullptr)
2877 {
2878 basic_block new_bb;
2879 edge pe = loop_preheader_edge (loop);
2880 new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
2881 gcc_assert (!new_bb)((void)(!(!new_bb) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2881, __FUNCTION__), 0 : 0))
;
2882 }
2883 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies, gs_info->offset,
2884 vec_offset, gs_info->offset_vectype);
2885}
2886
2887/* Prepare to implement a grouped or strided load or store using
2888 the gather load or scatter store operation described by GS_INFO.
2889 STMT_INFO is the load or store statement.
2890
2891 Set *DATAREF_BUMP to the amount that should be added to the base
2892 address after each copy of the vectorized statement. Set *VEC_OFFSET
2893 to an invariant offset vector in which element I has the value
2894 I * DR_STEP / SCALE. */
2895
2896static void
2897vect_get_strided_load_store_ops (stmt_vec_info stmt_info,
2898 loop_vec_info loop_vinfo,
2899 gather_scatter_info *gs_info,
2900 tree *dataref_bump, tree *vec_offset)
2901{
2902 struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info)((stmt_info)->dr_aux.dr + 0);
2903 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
2904
2905 tree bump = size_binop (MULT_EXPR,size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], unshare_expr
((dr)->innermost.step)), size_int_kind (TYPE_VECTOR_SUBPARTS
(vectype), stk_sizetype))
2906 fold_convert (sizetype, unshare_expr (DR_STEP (dr))),size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], unshare_expr
((dr)->innermost.step)), size_int_kind (TYPE_VECTOR_SUBPARTS
(vectype), stk_sizetype))
2907 size_int (TYPE_VECTOR_SUBPARTS (vectype)))size_binop_loc (((location_t) 0), MULT_EXPR, fold_convert_loc
(((location_t) 0), sizetype_tab[(int) stk_sizetype], unshare_expr
((dr)->innermost.step)), size_int_kind (TYPE_VECTOR_SUBPARTS
(vectype), stk_sizetype))
;
2908 *dataref_bump = cse_and_gimplify_to_preheader (loop_vinfo, bump);
2909
2910 /* The offset given in GS_INFO can have pointer type, so use the element
2911 type of the vector instead. */
2912 tree offset_type = TREE_TYPE (gs_info->offset_vectype)((contains_struct_check ((gs_info->offset_vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2912, __FUNCTION__))->typed.type)
;
2913
2914 /* Calculate X = DR_STEP / SCALE and convert it to the appropriate type. */
2915 tree step = size_binop (EXACT_DIV_EXPR, unshare_expr (DR_STEP (dr)),size_binop_loc (((location_t) 0), EXACT_DIV_EXPR, unshare_expr
((dr)->innermost.step), size_int_kind (gs_info->scale,
stk_ssizetype))
2916 ssize_int (gs_info->scale))size_binop_loc (((location_t) 0), EXACT_DIV_EXPR, unshare_expr
((dr)->innermost.step), size_int_kind (gs_info->scale,
stk_ssizetype))
;
2917 step = fold_convert (offset_type, step)fold_convert_loc (((location_t) 0), offset_type, step);
2918
2919 /* Create {0, X, X*2, X*3, ...}. */
2920 tree offset = fold_build2 (VEC_SERIES_EXPR, gs_info->offset_vectype,fold_build2_loc (((location_t) 0), VEC_SERIES_EXPR, gs_info->
offset_vectype, build_zero_cst (offset_type), step )
2921 build_zero_cst (offset_type), step)fold_build2_loc (((location_t) 0), VEC_SERIES_EXPR, gs_info->
offset_vectype, build_zero_cst (offset_type), step )
;
2922 *vec_offset = cse_and_gimplify_to_preheader (loop_vinfo, offset);
2923}
2924
2925/* Return the amount that should be added to a vector pointer to move
2926 to the next or previous copy of AGGR_TYPE. DR_INFO is the data reference
2927 being vectorized and MEMORY_ACCESS_TYPE describes the type of
2928 vectorization. */
2929
2930static tree
2931vect_get_data_ptr_increment (vec_info *vinfo,
2932 dr_vec_info *dr_info, tree aggr_type,
2933 vect_memory_access_type memory_access_type)
2934{
2935 if (memory_access_type == VMAT_INVARIANT)
2936 return size_zero_nodeglobal_trees[TI_SIZE_ZERO];
2937
2938 tree iv_step = TYPE_SIZE_UNIT (aggr_type)((tree_class_check ((aggr_type), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2938, __FUNCTION__))->type_common.size_unit)
;
2939 tree step = vect_dr_behavior (vinfo, dr_info)->step;
2940 if (tree_int_cst_sgn (step) == -1)
2941 iv_step = fold_build1 (NEGATE_EXPR, TREE_TYPE (iv_step), iv_step)fold_build1_loc (((location_t) 0), NEGATE_EXPR, ((contains_struct_check
((iv_step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2941, __FUNCTION__))->typed.type), iv_step )
;
2942 return iv_step;
2943}
2944
2945/* Check and perform vectorization of BUILT_IN_BSWAP{16,32,64,128}. */
2946
2947static bool
2948vectorizable_bswap (vec_info *vinfo,
2949 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
2950 gimple **vec_stmt, slp_tree slp_node,
2951 slp_tree *slp_op,
2952 tree vectype_in, stmt_vector_for_cost *cost_vec)
2953{
2954 tree op, vectype;
2955 gcall *stmt = as_a <gcall *> (stmt_info->stmt);
2956 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
2957 unsigned ncopies;
2958
2959 op = gimple_call_arg (stmt, 0);
2960 vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
2961 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
2962
2963 /* Multiple types in SLP are handled by creating the appropriate number of
2964 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
2965 case of SLP. */
2966 if (slp_node)
2967 ncopies = 1;
2968 else
2969 ncopies = vect_get_num_copies (loop_vinfo, vectype);
2970
2971 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2971, __FUNCTION__), 0 : 0))
;
2972
2973 tree char_vectype = get_same_sized_vectype (char_type_nodeinteger_types[itk_char], vectype_in);
2974 if (! char_vectype)
2975 return false;
2976
2977 poly_uint64 num_bytes = TYPE_VECTOR_SUBPARTS (char_vectype);
2978 unsigned word_bytes;
2979 if (!constant_multiple_p (num_bytes, nunits, &word_bytes))
2980 return false;
2981
2982 /* The encoding uses one stepped pattern for each byte in the word. */
2983 vec_perm_builder elts (num_bytes, word_bytes, 3);
2984 for (unsigned i = 0; i < 3; ++i)
2985 for (unsigned j = 0; j < word_bytes; ++j)
2986 elts.quick_push ((i + 1) * word_bytes - j - 1);
2987
2988 vec_perm_indices indices (elts, 1, num_bytes);
2989 if (!can_vec_perm_const_p (TYPE_MODE (char_vectype)((((enum tree_code) ((tree_class_check ((char_vectype), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 2989, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(char_vectype) : (char_vectype)->type_common.mode)
, indices))
2990 return false;
2991
2992 if (! vec_stmt)
2993 {
2994 if (slp_node
2995 && !vect_maybe_update_slp_op_vectype (slp_op[0], vectype_in))
2996 {
2997 if (dump_enabled_p ())
2998 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
2999 "incompatible vector types for invariants\n");
3000 return false;
3001 }
3002
3003 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = call_vec_info_type;
3004 DUMP_VECT_SCOPE ("vectorizable_bswap")auto_dump_scope scope ("vectorizable_bswap", vect_location);
3005 if (! slp_node)
3006 {
3007 record_stmt_cost (cost_vec,
3008 1, vector_stmt, stmt_info, 0, vect_prologue);
3009 record_stmt_cost (cost_vec,
3010 ncopies, vec_perm, stmt_info, 0, vect_body);
3011 }
3012 return true;
3013 }
3014
3015 tree bswap_vconst = vec_perm_indices_to_tree (char_vectype, indices);
3016
3017 /* Transform. */
3018 vec<tree> vec_oprnds = vNULL;
3019 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
3020 op, &vec_oprnds);
3021 /* Arguments are ready. create the new vector stmt. */
3022 unsigned i;
3023 tree vop;
3024 FOR_EACH_VEC_ELT (vec_oprnds, i, vop)for (i = 0; (vec_oprnds).iterate ((i), &(vop)); ++(i))
3025 {
3026 gimple *new_stmt;
3027 tree tem = make_ssa_name (char_vectype);
3028 new_stmt = gimple_build_assign (tem, build1 (VIEW_CONVERT_EXPR,
3029 char_vectype, vop));
3030 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3031 tree tem2 = make_ssa_name (char_vectype);
3032 new_stmt = gimple_build_assign (tem2, VEC_PERM_EXPR,
3033 tem, tem, bswap_vconst);
3034 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3035 tem = make_ssa_name (vectype);
3036 new_stmt = gimple_build_assign (tem, build1 (VIEW_CONVERT_EXPR,
3037 vectype, tem2));
3038 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3039 if (slp_node)
3040 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
3041 else
3042 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
3043 }
3044
3045 if (!slp_node)
3046 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
3047
3048 vec_oprnds.release ();
3049 return true;
3050}
3051
3052/* Return true if vector types VECTYPE_IN and VECTYPE_OUT have
3053 integer elements and if we can narrow VECTYPE_IN to VECTYPE_OUT
3054 in a single step. On success, store the binary pack code in
3055 *CONVERT_CODE. */
3056
3057static bool
3058simple_integer_narrowing (tree vectype_out, tree vectype_in,
3059 tree_code *convert_code)
3060{
3061 if (!INTEGRAL_TYPE_P (TREE_TYPE (vectype_out))(((enum tree_code) (((contains_struct_check ((vectype_out), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3061, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((vectype_out)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3061, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((vectype_out)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3061, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
3062 || !INTEGRAL_TYPE_P (TREE_TYPE (vectype_in))(((enum tree_code) (((contains_struct_check ((vectype_in), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3062, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((vectype_in),
(TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3062, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((vectype_in),
(TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3062, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
)
3063 return false;
3064
3065 tree_code code;
3066 int multi_step_cvt = 0;
3067 auto_vec <tree, 8> interm_types;
3068 if (!supportable_narrowing_operation (NOP_EXPR, vectype_out, vectype_in,
3069 &code, &multi_step_cvt, &interm_types)
3070 || multi_step_cvt)
3071 return false;
3072
3073 *convert_code = code;
3074 return true;
3075}
3076
3077/* Function vectorizable_call.
3078
3079 Check if STMT_INFO performs a function call that can be vectorized.
3080 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
3081 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
3082 Return true if STMT_INFO is vectorizable in this way. */
3083
3084static bool
3085vectorizable_call (vec_info *vinfo,
3086 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
3087 gimple **vec_stmt, slp_tree slp_node,
3088 stmt_vector_for_cost *cost_vec)
3089{
3090 gcall *stmt;
3091 tree vec_dest;
3092 tree scalar_dest;
3093 tree op;
3094 tree vec_oprnd0 = NULL_TREE(tree) nullptr, vec_oprnd1 = NULL_TREE(tree) nullptr;
3095 tree vectype_out, vectype_in;
3096 poly_uint64 nunits_in;
3097 poly_uint64 nunits_out;
3098 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
3099 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
3100 tree fndecl, new_temp, rhs_type;
3101 enum vect_def_type dt[4]
3102 = { vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type,
3103 vect_unknown_def_type };
3104 tree vectypes[ARRAY_SIZE (dt)(sizeof (dt) / sizeof ((dt)[0]))] = {};
3105 slp_tree slp_op[ARRAY_SIZE (dt)(sizeof (dt) / sizeof ((dt)[0]))] = {};
3106 int ndts = ARRAY_SIZE (dt)(sizeof (dt) / sizeof ((dt)[0]));
3107 int ncopies, j;
3108 auto_vec<tree, 8> vargs;
3109 auto_vec<tree, 8> orig_vargs;
3110 enum { NARROW, NONE, WIDEN } modifier;
3111 size_t i, nargs;
3112 tree lhs;
3113
3114 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
3115 return false;
3116
3117 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
3118 && ! vec_stmt)
3119 return false;
3120
3121 /* Is STMT_INFO a vectorizable call? */
3122 stmt = dyn_cast <gcall *> (stmt_info->stmt);
3123 if (!stmt)
3124 return false;
3125
3126 if (gimple_call_internal_p (stmt)
3127 && (internal_load_fn_p (gimple_call_internal_fn (stmt))
3128 || internal_store_fn_p (gimple_call_internal_fn (stmt))))
3129 /* Handled by vectorizable_load and vectorizable_store. */
3130 return false;
3131
3132 if (gimple_call_lhs (stmt) == NULL_TREE(tree) nullptr
3133 || TREE_CODE (gimple_call_lhs (stmt))((enum tree_code) (gimple_call_lhs (stmt))->base.code) != SSA_NAME)
3134 return false;
3135
3136 gcc_checking_assert (!stmt_can_throw_internal (cfun, stmt))((void)(!(!stmt_can_throw_internal ((cfun + 0), stmt)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3136, __FUNCTION__), 0 : 0))
;
3137
3138 vectype_out = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
3139
3140 /* Process function arguments. */
3141 rhs_type = NULL_TREE(tree) nullptr;
3142 vectype_in = NULL_TREE(tree) nullptr;
3143 nargs = gimple_call_num_args (stmt);
3144
3145 /* Bail out if the function has more than four arguments, we do not have
3146 interesting builtin functions to vectorize with more than two arguments
3147 except for fma. No arguments is also not good. */
3148 if (nargs == 0 || nargs > 4)
3149 return false;
3150
3151 /* Ignore the arguments of IFN_GOMP_SIMD_LANE, they are magic. */
3152 combined_fn cfn = gimple_call_combined_fn (stmt);
3153 if (cfn == CFN_GOMP_SIMD_LANE)
3154 {
3155 nargs = 0;
3156 rhs_type = unsigned_type_nodeinteger_types[itk_unsigned_int];
3157 }
3158
3159 int mask_opno = -1;
3160 if (internal_fn_p (cfn))
3161 mask_opno = internal_fn_mask_index (as_internal_fn (cfn));
3162
3163 for (i = 0; i < nargs; i++)
3164 {
3165 if ((int) i == mask_opno)
3166 {
3167 op = gimple_call_arg (stmt, i);
3168 if (!vect_check_scalar_mask (vinfo,
3169 stmt_info, op, &dt[i], &vectypes[i]))
3170 return false;
3171 continue;
3172 }
3173
3174 if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
3175 i, &op, &slp_op[i], &dt[i], &vectypes[i]))
3176 {
3177 if (dump_enabled_p ())
3178 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3179 "use not simple.\n");
3180 return false;
3181 }
3182
3183 /* We can only handle calls with arguments of the same type. */
3184 if (rhs_type
3185 && !types_compatible_p (rhs_type, TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3185, __FUNCTION__))->typed.type)
))
3186 {
3187 if (dump_enabled_p ())
3188 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3189 "argument types differ.\n");
3190 return false;
3191 }
3192 if (!rhs_type)
3193 rhs_type = TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3193, __FUNCTION__))->typed.type)
;
3194
3195 if (!vectype_in)
3196 vectype_in = vectypes[i];
3197 else if (vectypes[i]
3198 && !types_compatible_p (vectypes[i], vectype_in))
3199 {
3200 if (dump_enabled_p ())
3201 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3202 "argument vector types differ.\n");
3203 return false;
3204 }
3205 }
3206 /* If all arguments are external or constant defs, infer the vector type
3207 from the scalar type. */
3208 if (!vectype_in)
3209 vectype_in = get_vectype_for_scalar_type (vinfo, rhs_type, slp_node);
3210 if (vec_stmt)
3211 gcc_assert (vectype_in)((void)(!(vectype_in) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3211, __FUNCTION__), 0 : 0))
;
3212 if (!vectype_in)
3213 {
3214 if (dump_enabled_p ())
3215 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3216 "no vectype for scalar type %T\n", rhs_type);
3217
3218 return false;
3219 }
3220 /* FORNOW: we don't yet support mixtures of vector sizes for calls,
3221 just mixtures of nunits. E.g. DI->SI versions of __builtin_ctz*
3222 are traditionally vectorized as two VnDI->VnDI IFN_CTZs followed
3223 by a pack of the two vectors into an SI vector. We would need
3224 separate code to handle direct VnDI->VnSI IFN_CTZs. */
3225 if (TYPE_SIZE (vectype_in)((tree_class_check ((vectype_in), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3225, __FUNCTION__))->type_common.size)
!= TYPE_SIZE (vectype_out)((tree_class_check ((vectype_out), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3225, __FUNCTION__))->type_common.size)
)
3226 {
3227 if (dump_enabled_p ())
3228 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3229 "mismatched vector sizes %T and %T\n",
3230 vectype_in, vectype_out);
3231 return false;
3232 }
3233
3234 if (VECTOR_BOOLEAN_TYPE_P (vectype_out)(((enum tree_code) (vectype_out)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_out
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3234, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
3235 != VECTOR_BOOLEAN_TYPE_P (vectype_in)(((enum tree_code) (vectype_in)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_in
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3235, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
3236 {
3237 if (dump_enabled_p ())
3238 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3239 "mixed mask and nonmask vector types\n");
3240 return false;
3241 }
3242
3243 /* FORNOW */
3244 nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
3245 nunits_out = TYPE_VECTOR_SUBPARTS (vectype_out);
3246 if (known_eq (nunits_in * 2, nunits_out)(!maybe_ne (nunits_in * 2, nunits_out)))
3247 modifier = NARROW;
3248 else if (known_eq (nunits_out, nunits_in)(!maybe_ne (nunits_out, nunits_in)))
3249 modifier = NONE;
3250 else if (known_eq (nunits_out * 2, nunits_in)(!maybe_ne (nunits_out * 2, nunits_in)))
3251 modifier = WIDEN;
3252 else
3253 return false;
3254
3255 /* We only handle functions that do not read or clobber memory. */
3256 if (gimple_vuse (stmt))
3257 {
3258 if (dump_enabled_p ())
3259 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3260 "function reads from or writes to memory.\n");
3261 return false;
3262 }
3263
3264 /* For now, we only vectorize functions if a target specific builtin
3265 is available. TODO -- in some cases, it might be profitable to
3266 insert the calls for pieces of the vector, in order to be able
3267 to vectorize other operations in the loop. */
3268 fndecl = NULL_TREE(tree) nullptr;
3269 internal_fn ifn = IFN_LAST;
3270 tree callee = gimple_call_fndecl (stmt);
3271
3272 /* First try using an internal function. */
3273 tree_code convert_code = ERROR_MARK;
3274 if (cfn != CFN_LAST
3275 && (modifier == NONE
3276 || (modifier == NARROW
3277 && simple_integer_narrowing (vectype_out, vectype_in,
3278 &convert_code))))
3279 ifn = vectorizable_internal_function (cfn, callee, vectype_out,
3280 vectype_in);
3281
3282 /* If that fails, try asking for a target-specific built-in function. */
3283 if (ifn == IFN_LAST)
3284 {
3285 if (cfn != CFN_LAST)
3286 fndecl = targetm.vectorize.builtin_vectorized_function
3287 (cfn, vectype_out, vectype_in);
3288 else if (callee && fndecl_built_in_p (callee, BUILT_IN_MD))
3289 fndecl = targetm.vectorize.builtin_md_vectorized_function
3290 (callee, vectype_out, vectype_in);
3291 }
3292
3293 if (ifn == IFN_LAST && !fndecl)
3294 {
3295 if (cfn == CFN_GOMP_SIMD_LANE
3296 && !slp_node
3297 && loop_vinfo
3298 && LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop->simduid
3299 && TREE_CODE (gimple_call_arg (stmt, 0))((enum tree_code) (gimple_call_arg (stmt, 0))->base.code) == SSA_NAME
3300 && LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop->simduid
3301 == SSA_NAME_VAR (gimple_call_arg (stmt, 0))((tree_check ((gimple_call_arg (stmt, 0)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3301, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((gimple_call_arg (stmt, 0))->
ssa_name.var)->base.code) == IDENTIFIER_NODE ? (tree) nullptr
: (gimple_call_arg (stmt, 0))->ssa_name.var)
)
3302 {
3303 /* We can handle IFN_GOMP_SIMD_LANE by returning a
3304 { 0, 1, 2, ... vf - 1 } vector. */
3305 gcc_assert (nargs == 0)((void)(!(nargs == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3305, __FUNCTION__), 0 : 0))
;
3306 }
3307 else if (modifier == NONE
3308 && (gimple_call_builtin_p (stmt, BUILT_IN_BSWAP16)
3309 || gimple_call_builtin_p (stmt, BUILT_IN_BSWAP32)
3310 || gimple_call_builtin_p (stmt, BUILT_IN_BSWAP64)
3311 || gimple_call_builtin_p (stmt, BUILT_IN_BSWAP128)))
3312 return vectorizable_bswap (vinfo, stmt_info, gsi, vec_stmt, slp_node,
3313 slp_op, vectype_in, cost_vec);
3314 else
3315 {
3316 if (dump_enabled_p ())
3317 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3318 "function is not vectorizable.\n");
3319 return false;
3320 }
3321 }
3322
3323 if (slp_node)
3324 ncopies = 1;
3325 else if (modifier == NARROW && ifn == IFN_LAST)
3326 ncopies = vect_get_num_copies (loop_vinfo, vectype_out);
3327 else
3328 ncopies = vect_get_num_copies (loop_vinfo, vectype_in);
3329
3330 /* Sanity check: make sure that at least one copy of the vectorized stmt
3331 needs to be generated. */
3332 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3332, __FUNCTION__), 0 : 0))
;
3333
3334 vec_loop_masks *masks = (loop_vinfo ? &LOOP_VINFO_MASKS (loop_vinfo)(loop_vinfo)->masks : NULLnullptr);
3335 if (!vec_stmt) /* transformation not required. */
3336 {
3337 if (slp_node)
3338 for (i = 0; i < nargs; ++i)
3339 if (!vect_maybe_update_slp_op_vectype (slp_op[i], vectype_in))
3340 {
3341 if (dump_enabled_p ())
3342 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3343 "incompatible vector types for invariants\n");
3344 return false;
3345 }
3346 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = call_vec_info_type;
3347 DUMP_VECT_SCOPE ("vectorizable_call")auto_dump_scope scope ("vectorizable_call", vect_location);
3348 vect_model_simple_cost (vinfo, stmt_info,
3349 ncopies, dt, ndts, slp_node, cost_vec);
3350 if (ifn != IFN_LAST && modifier == NARROW && !slp_node)
3351 record_stmt_cost (cost_vec, ncopies / 2,
3352 vec_promote_demote, stmt_info, 0, vect_body);
3353
3354 if (loop_vinfo && mask_opno >= 0)
3355 {
3356 unsigned int nvectors = (slp_node
3357 ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size
3358 : ncopies);
3359 tree scalar_mask = gimple_call_arg (stmt_info->stmt, mask_opno);
3360 vect_record_loop_mask (loop_vinfo, masks, nvectors,
3361 vectype_out, scalar_mask);
3362 }
3363 return true;
3364 }
3365
3366 /* Transform. */
3367
3368 if (dump_enabled_p ())
3369 dump_printf_loc (MSG_NOTE, vect_location, "transform call.\n");
3370
3371 /* Handle def. */
3372 scalar_dest = gimple_call_lhs (stmt);
3373 vec_dest = vect_create_destination_var (scalar_dest, vectype_out);
3374
3375 bool masked_loop_p = loop_vinfo && LOOP_VINFO_FULLY_MASKED_P (loop_vinfo)((loop_vinfo)->using_partial_vectors_p && !(loop_vinfo
)->masks.is_empty ())
;
3376
3377 if (modifier == NONE || ifn != IFN_LAST)
3378 {
3379 tree prev_res = NULL_TREE(tree) nullptr;
3380 vargs.safe_grow (nargs, true);
3381 orig_vargs.safe_grow (nargs, true);
3382 auto_vec<vec<tree> > vec_defs (nargs);
3383 for (j = 0; j < ncopies; ++j)
3384 {
3385 /* Build argument list for the vectorized call. */
3386 if (slp_node)
3387 {
3388 vec<tree> vec_oprnds0;
3389
3390 vect_get_slp_defs (vinfo, slp_node, &vec_defs);
3391 vec_oprnds0 = vec_defs[0];
3392
3393 /* Arguments are ready. Create the new vector stmt. */
3394 FOR_EACH_VEC_ELT (vec_oprnds0, i, vec_oprnd0)for (i = 0; (vec_oprnds0).iterate ((i), &(vec_oprnd0)); ++
(i))
3395 {
3396 size_t k;
3397 for (k = 0; k < nargs; k++)
3398 {
3399 vec<tree> vec_oprndsk = vec_defs[k];
3400 vargs[k] = vec_oprndsk[i];
3401 }
3402 gimple *new_stmt;
3403 if (modifier == NARROW)
3404 {
3405 /* We don't define any narrowing conditional functions
3406 at present. */
3407 gcc_assert (mask_opno < 0)((void)(!(mask_opno < 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3407, __FUNCTION__), 0 : 0))
;
3408 tree half_res = make_ssa_name (vectype_in);
3409 gcall *call
3410 = gimple_build_call_internal_vec (ifn, vargs);
3411 gimple_call_set_lhs (call, half_res);
3412 gimple_call_set_nothrow (call, true);
3413 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3414 if ((i & 1) == 0)
3415 {
3416 prev_res = half_res;
3417 continue;
3418 }
3419 new_temp = make_ssa_name (vec_dest);
3420 new_stmt = gimple_build_assign (new_temp, convert_code,
3421 prev_res, half_res);
3422 vect_finish_stmt_generation (vinfo, stmt_info,
3423 new_stmt, gsi);
3424 }
3425 else
3426 {
3427 if (mask_opno >= 0 && masked_loop_p)
3428 {
3429 unsigned int vec_num = vec_oprnds0.length ();
3430 /* Always true for SLP. */
3431 gcc_assert (ncopies == 1)((void)(!(ncopies == 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3431, __FUNCTION__), 0 : 0))
;
3432 tree mask = vect_get_loop_mask (gsi, masks, vec_num,
3433 vectype_out, i);
3434 vargs[mask_opno] = prepare_load_store_mask
3435 (TREE_TYPE (mask)((contains_struct_check ((mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3435, __FUNCTION__))->typed.type)
, mask, vargs[mask_opno], gsi);
3436 }
3437
3438 gcall *call;
3439 if (ifn != IFN_LAST)
3440 call = gimple_build_call_internal_vec (ifn, vargs);
3441 else
3442 call = gimple_build_call_vec (fndecl, vargs);
3443 new_temp = make_ssa_name (vec_dest, call);
3444 gimple_call_set_lhs (call, new_temp);
3445 gimple_call_set_nothrow (call, true);
3446 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3447 new_stmt = call;
3448 }
3449 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
3450 }
3451 continue;
3452 }
3453
3454 for (i = 0; i < nargs; i++)
3455 {
3456 op = gimple_call_arg (stmt, i);
3457 if (j == 0)
3458 {
3459 vec_defs.quick_push (vNULL);
3460 vect_get_vec_defs_for_operand (vinfo, stmt_info, ncopies,
3461 op, &vec_defs[i],
3462 vectypes[i]);
3463 }
3464 orig_vargs[i] = vargs[i] = vec_defs[i][j];
3465 }
3466
3467 if (mask_opno >= 0 && masked_loop_p)
3468 {
3469 tree mask = vect_get_loop_mask (gsi, masks, ncopies,
3470 vectype_out, j);
3471 vargs[mask_opno]
3472 = prepare_load_store_mask (TREE_TYPE (mask)((contains_struct_check ((mask), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3472, __FUNCTION__))->typed.type)
, mask,
3473 vargs[mask_opno], gsi);
3474 }
3475
3476 gimple *new_stmt;
3477 if (cfn == CFN_GOMP_SIMD_LANE)
3478 {
3479 tree cst = build_index_vector (vectype_out, j * nunits_out, 1);
3480 tree new_var
3481 = vect_get_new_ssa_name (vectype_out, vect_simple_var, "cst_");
3482 gimple *init_stmt = gimple_build_assign (new_var, cst);
3483 vect_init_vector_1 (vinfo, stmt_info, init_stmt, NULLnullptr);
3484 new_temp = make_ssa_name (vec_dest);
3485 new_stmt = gimple_build_assign (new_temp, new_var);
3486 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3487 }
3488 else if (modifier == NARROW)
3489 {
3490 /* We don't define any narrowing conditional functions at
3491 present. */
3492 gcc_assert (mask_opno < 0)((void)(!(mask_opno < 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3492, __FUNCTION__), 0 : 0))
;
3493 tree half_res = make_ssa_name (vectype_in);
3494 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3495 gimple_call_set_lhs (call, half_res);
3496 gimple_call_set_nothrow (call, true);
3497 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3498 if ((j & 1) == 0)
3499 {
3500 prev_res = half_res;
3501 continue;
3502 }
3503 new_temp = make_ssa_name (vec_dest);
3504 new_stmt = gimple_build_assign (new_temp, convert_code,
3505 prev_res, half_res);
3506 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3507 }
3508 else
3509 {
3510 gcall *call;
3511 if (ifn != IFN_LAST)
3512 call = gimple_build_call_internal_vec (ifn, vargs);
3513 else
3514 call = gimple_build_call_vec (fndecl, vargs);
3515 new_temp = make_ssa_name (vec_dest, call);
3516 gimple_call_set_lhs (call, new_temp);
3517 gimple_call_set_nothrow (call, true);
3518 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3519 new_stmt = call;
3520 }
3521
3522 if (j == (modifier == NARROW ? 1 : 0))
3523 *vec_stmt = new_stmt;
3524 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
3525 }
3526 for (i = 0; i < nargs; i++)
3527 {
3528 vec<tree> vec_oprndsi = vec_defs[i];
3529 vec_oprndsi.release ();
3530 }
3531 }
3532 else if (modifier == NARROW)
3533 {
3534 auto_vec<vec<tree> > vec_defs (nargs);
3535 /* We don't define any narrowing conditional functions at present. */
3536 gcc_assert (mask_opno < 0)((void)(!(mask_opno < 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3536, __FUNCTION__), 0 : 0))
;
3537 for (j = 0; j < ncopies; ++j)
3538 {
3539 /* Build argument list for the vectorized call. */
3540 if (j == 0)
3541 vargs.create (nargs * 2);
3542 else
3543 vargs.truncate (0);
3544
3545 if (slp_node)
3546 {
3547 vec<tree> vec_oprnds0;
3548
3549 vect_get_slp_defs (vinfo, slp_node, &vec_defs);
3550 vec_oprnds0 = vec_defs[0];
3551
3552 /* Arguments are ready. Create the new vector stmt. */
3553 for (i = 0; vec_oprnds0.iterate (i, &vec_oprnd0); i += 2)
3554 {
3555 size_t k;
3556 vargs.truncate (0);
3557 for (k = 0; k < nargs; k++)
3558 {
3559 vec<tree> vec_oprndsk = vec_defs[k];
3560 vargs.quick_push (vec_oprndsk[i]);
3561 vargs.quick_push (vec_oprndsk[i + 1]);
3562 }
3563 gcall *call;
3564 if (ifn != IFN_LAST)
3565 call = gimple_build_call_internal_vec (ifn, vargs);
3566 else
3567 call = gimple_build_call_vec (fndecl, vargs);
3568 new_temp = make_ssa_name (vec_dest, call);
3569 gimple_call_set_lhs (call, new_temp);
3570 gimple_call_set_nothrow (call, true);
3571 vect_finish_stmt_generation (vinfo, stmt_info, call, gsi);
3572 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (call);
3573 }
3574 continue;
3575 }
3576
3577 for (i = 0; i < nargs; i++)
3578 {
3579 op = gimple_call_arg (stmt, i);
3580 if (j == 0)
3581 {
3582 vec_defs.quick_push (vNULL);
3583 vect_get_vec_defs_for_operand (vinfo, stmt_info, 2 * ncopies,
3584 op, &vec_defs[i], vectypes[i]);
3585 }
3586 vec_oprnd0 = vec_defs[i][2*j];
3587 vec_oprnd1 = vec_defs[i][2*j+1];
3588
3589 vargs.quick_push (vec_oprnd0);
3590 vargs.quick_push (vec_oprnd1);
3591 }
3592
3593 gcall *new_stmt = gimple_build_call_vec (fndecl, vargs);
3594 new_temp = make_ssa_name (vec_dest, new_stmt);
3595 gimple_call_set_lhs (new_stmt, new_temp);
3596 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
3597
3598 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
3599 }
3600
3601 if (!slp_node)
3602 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
3603
3604 for (i = 0; i < nargs; i++)
3605 {
3606 vec<tree> vec_oprndsi = vec_defs[i];
3607 vec_oprndsi.release ();
3608 }
3609 }
3610 else
3611 /* No current target implements this case. */
3612 return false;
3613
3614 vargs.release ();
3615
3616 /* The call in STMT might prevent it from being removed in dce.
3617 We however cannot remove it here, due to the way the ssa name
3618 it defines is mapped to the new definition. So just replace
3619 rhs of the statement with something harmless. */
3620
3621 if (slp_node)
3622 return true;
3623
3624 stmt_info = vect_orig_stmt (stmt_info);
3625 lhs = gimple_get_lhs (stmt_info->stmt);
3626
3627 gassign *new_stmt
3628 = gimple_build_assign (lhs, build_zero_cst (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3628, __FUNCTION__))->typed.type)
));
3629 vinfo->replace_stmt (gsi, stmt_info, new_stmt);
3630
3631 return true;
3632}
3633
3634
/* Per-argument analysis record used by vectorizable_simd_clone_call:
   one entry is pushed per call argument (see the arginfo vector there),
   and vect_simd_lane_linear may refine it for lane-linear addresses.  */
3635struct simd_call_arg_info
3636{
3637 tree vectype; /* Vector type from vect_is_simple_use; NULL_TREE for
			 constant/external defs (asserted by the caller).  */
3638 tree op; /* Base value for linear arguments (iv.base, the saved
		     SIMD_CLONE_INFO base, or the folded base computed by
		     vect_simd_lane_linear).  */
3639 HOST_WIDE_INTlong linear_step; /* Constant step of a linear argument;
					  0 when the argument is not linear.  */
3640 enum vect_def_type dt; /* Definition type reported by
				  vect_is_simple_use.  */
3641 unsigned int align; /* Pointer alignment in bytes; only set for
			       constant/external pointer arguments.  */
3642 bool simd_lane_linear; /* True when the argument is linear within a
				  simd lane but not within the whole loop
				  (set by vect_simd_lane_linear).  */
3643};
3644
3645/* Helper function of vectorizable_simd_clone_call. If OP, an SSA_NAME,
3646 is linear within simd lane (but not within whole loop), note it in
3647 *ARGINFO. */
3648
3649static void
3650vect_simd_lane_linear (tree op, class loop *loop,
3651 struct simd_call_arg_info *arginfo)
3652{
 /* OP must be defined by "base p+ offset" with an invariant base;
    otherwise there is nothing to recognize.  */
3653 gimple *def_stmt = SSA_NAME_DEF_STMT (op)(tree_check ((op), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3653, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
3654
3655 if (!is_gimple_assign (def_stmt)
3656 || gimple_assign_rhs_code (def_stmt) != POINTER_PLUS_EXPR
3657 || !is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
3658 return;
3659
3660 tree base = gimple_assign_rhs1 (def_stmt);
3661 HOST_WIDE_INTlong linear_step = 0;
3662 tree v = gimple_assign_rhs2 (def_stmt);
 /* Walk the SSA use-def chain of the variable offset V.  Along the way we
    accept: constant additions (folded into BASE, only while no step has
    been found yet), exactly one multiplication by a non-zero constant that
    fits a signed HWI (the step), and integer conversions that do not
    narrow.  Anything else bails out without touching *ARGINFO.  */
3663 while (TREE_CODE (v)((enum tree_code) (v)->base.code) == SSA_NAME)
3664 {
3665 tree t;
3666 def_stmt = SSA_NAME_DEF_STMT (v)(tree_check ((v), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3666, __FUNCTION__, (SSA_NAME)))->ssa_name.def_stmt
;
3667 if (is_gimple_assign (def_stmt))
3668 switch (gimple_assign_rhs_code (def_stmt))
3669 {
3670 case PLUS_EXPR:
 /* Constant displacement: fold it into BASE.  Only valid before the
    multiplication has been seen (linear_step still 0).  */
3671 t = gimple_assign_rhs2 (def_stmt);
3672 if (linear_step || TREE_CODE (t)((enum tree_code) (t)->base.code) != INTEGER_CST)
3673 return;
3674 base = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t)fold_build2_loc (((location_t) 0), POINTER_PLUS_EXPR, ((contains_struct_check
((base), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3674, __FUNCTION__))->typed.type), base, t )
;
3675 v = gimple_assign_rhs1 (def_stmt);
3676 continue;
3677 case MULT_EXPR:
 /* The (single) step multiplication.  */
3678 t = gimple_assign_rhs2 (def_stmt);
3679 if (linear_step || !tree_fits_shwi_p (t) || integer_zerop (t))
3680 return;
3681 linear_step = tree_to_shwi (t);
3682 v = gimple_assign_rhs1 (def_stmt);
3683 continue;
 /* Integer conversions are fine as long as they do not narrow
    (precision of the result >= precision of the operand).  An implicit
    step of 1 is assumed if none was recorded yet.  */
3684 CASE_CONVERTcase NOP_EXPR: case CONVERT_EXPR:
3685 t = gimple_assign_rhs1 (def_stmt);
3686 if (TREE_CODE (TREE_TYPE (t))((enum tree_code) (((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3686, __FUNCTION__))->typed.type))->base.code)
 != INTEGER_TYPE
3687 || (TYPE_PRECISION (TREE_TYPE (v))((tree_class_check ((((contains_struct_check ((v), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3687, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3687, __FUNCTION__))->type_common.precision)
3688 < TYPE_PRECISION (TREE_TYPE (t))((tree_class_check ((((contains_struct_check ((t), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3688, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3688, __FUNCTION__))->type_common.precision)
))
3689 return;
3690 if (!linear_step)
3691 linear_step = 1;
3692 v = t;
3693 continue;
3694 default:
3695 return;
3696 }
 /* Success case: the chain bottoms out in this loop's GOMP_SIMD_LANE
    lane index (its SSA_NAME_VAR matches loop->simduid), so OP is linear
    within the simd lane; record base, step and the flag in *ARGINFO.  */
3697 else if (gimple_call_internal_p (def_stmt, IFN_GOMP_SIMD_LANE)
3698 && loop->simduid
3699 && TREE_CODE (gimple_call_arg (def_stmt, 0))((enum tree_code) (gimple_call_arg (def_stmt, 0))->base.code
)
 == SSA_NAME
3700 && (SSA_NAME_VAR (gimple_call_arg (def_stmt, 0))((tree_check ((gimple_call_arg (def_stmt, 0)), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3700, __FUNCTION__, (SSA_NAME)))->ssa_name.var == (tree)
nullptr || ((enum tree_code) ((gimple_call_arg (def_stmt, 0)
)->ssa_name.var)->base.code) == IDENTIFIER_NODE ? (tree
) nullptr : (gimple_call_arg (def_stmt, 0))->ssa_name.var)
3701 == loop->simduid))
3702 {
3703 if (!linear_step)
3704 linear_step = 1;
3705 arginfo->linear_step = linear_step;
3706 arginfo->op = base;
3707 arginfo->simd_lane_linear = true;
3708 return;
3709 }
3710 }
3711}
3712
3713/* Return the number of elements in vector type VECTYPE, which is associated
3714 with a SIMD clone. At present these vectors always have a constant
3715 length. */
3716
3717static unsigned HOST_WIDE_INTlong
3718simd_clone_subparts (tree vectype)
3719{
 /* SIMD-clone vector types currently always have a constant number of
    lanes (the caller bails out on variable-width vectors), so
    to_constant () cannot fail here.  */
3720 return TYPE_VECTOR_SUBPARTS (vectype).to_constant ();
3721}
3722
3723/* Function vectorizable_simd_clone_call.
3724
3725 Check if STMT_INFO performs a function call that can be vectorized
3726 by calling a simd clone of the function.
3727 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
3728 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
3729 Return true if STMT_INFO is vectorizable in this way. */
3730
3731static bool
3732vectorizable_simd_clone_call (vec_info *vinfo, stmt_vec_info stmt_info,
3733 gimple_stmt_iterator *gsi,
3734 gimple **vec_stmt, slp_tree slp_node,
3735 stmt_vector_for_cost *)
3736{
3737 tree vec_dest;
3738 tree scalar_dest;
3739 tree op, type;
3740 tree vec_oprnd0 = NULL_TREE(tree) nullptr;
3741 tree vectype;
3742 poly_uint64 nunits;
3743 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
3744 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
3745 class loop *loop = loop_vinfo ? LOOP_VINFO_LOOP (loop_vinfo)(loop_vinfo)->loop : NULLnullptr;
3746 tree fndecl, new_temp;
3747 int ncopies, j;
3748 auto_vec<simd_call_arg_info> arginfo;
3749 vec<tree> vargs = vNULL;
3750 size_t i, nargs;
3751 tree lhs, rtype, ratype;
3752 vec<constructor_elt, va_gc> *ret_ctor_elts = NULLnullptr;
3753
3754 /* Is STMT a vectorizable call? */
3755 gcall *stmt = dyn_cast <gcall *> (stmt_info->stmt);
3756 if (!stmt)
3757 return false;
3758
3759 fndecl = gimple_call_fndecl (stmt);
3760 if (fndecl == NULL_TREE(tree) nullptr)
3761 return false;
3762
3763 struct cgraph_node *node = cgraph_node::get (fndecl);
3764 if (node == NULLnullptr || node->simd_clones == NULLnullptr)
3765 return false;
3766
3767 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
3768 return false;
3769
3770 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
3771 && ! vec_stmt)
3772 return false;
3773
3774 if (gimple_call_lhs (stmt)
3775 && TREE_CODE (gimple_call_lhs (stmt))((enum tree_code) (gimple_call_lhs (stmt))->base.code) != SSA_NAME)
3776 return false;
3777
3778 gcc_checking_assert (!stmt_can_throw_internal (cfun, stmt))((void)(!(!stmt_can_throw_internal ((cfun + 0), stmt)) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3778, __FUNCTION__), 0 : 0))
;
3779
3780 vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
3781
3782 if (loop_vinfo && nested_in_vect_loop_p (loop, stmt_info))
3783 return false;
3784
3785 /* FORNOW */
3786 if (slp_node)
3787 return false;
3788
3789 /* Process function arguments. */
3790 nargs = gimple_call_num_args (stmt);
3791
3792 /* Bail out if the function has zero arguments. */
3793 if (nargs == 0)
3794 return false;
3795
3796 arginfo.reserve (nargs, true);
3797
3798 for (i = 0; i < nargs; i++)
3799 {
3800 simd_call_arg_info thisarginfo;
3801 affine_iv iv;
3802
3803 thisarginfo.linear_step = 0;
3804 thisarginfo.align = 0;
3805 thisarginfo.op = NULL_TREE(tree) nullptr;
3806 thisarginfo.simd_lane_linear = false;
3807
3808 op = gimple_call_arg (stmt, i);
3809 if (!vect_is_simple_use (op, vinfo, &thisarginfo.dt,
3810 &thisarginfo.vectype)
3811 || thisarginfo.dt == vect_uninitialized_def)
3812 {
3813 if (dump_enabled_p ())
3814 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3815 "use not simple.\n");
3816 return false;
3817 }
3818
3819 if (thisarginfo.dt == vect_constant_def
3820 || thisarginfo.dt == vect_external_def)
3821 gcc_assert (thisarginfo.vectype == NULL_TREE)((void)(!(thisarginfo.vectype == (tree) nullptr) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3821, __FUNCTION__), 0 : 0))
;
3822 else
3823 {
3824 gcc_assert (thisarginfo.vectype != NULL_TREE)((void)(!(thisarginfo.vectype != (tree) nullptr) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3824, __FUNCTION__), 0 : 0))
;
3825 if (VECTOR_BOOLEAN_TYPE_P (thisarginfo.vectype)(((enum tree_code) (thisarginfo.vectype)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((thisarginfo
.vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3825, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
3826 {
3827 if (dump_enabled_p ())
3828 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3829 "vector mask arguments are not supported\n");
3830 return false;
3831 }
3832 }
3833
3834 /* For linear arguments, the analyze phase should have saved
3835 the base and step in STMT_VINFO_SIMD_CLONE_INFO. */
3836 if (i * 3 + 4 <= STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.length ()
3837 && STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 2])
3838 {
3839 gcc_assert (vec_stmt)((void)(!(vec_stmt) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3839, __FUNCTION__), 0 : 0))
;
3840 thisarginfo.linear_step
3841 = tree_to_shwi (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 2]);
3842 thisarginfo.op
3843 = STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 1];
3844 thisarginfo.simd_lane_linear
3845 = (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 3]
3846 == boolean_true_nodeglobal_trees[TI_BOOLEAN_TRUE]);
3847 /* If loop has been peeled for alignment, we need to adjust it. */
3848 tree n1 = LOOP_VINFO_NITERS_UNCHANGED (loop_vinfo)(loop_vinfo)->num_iters_unchanged;
3849 tree n2 = LOOP_VINFO_NITERS (loop_vinfo)(loop_vinfo)->num_iters;
3850 if (n1 != n2 && !thisarginfo.simd_lane_linear)
3851 {
3852 tree bias = fold_build2 (MINUS_EXPR, TREE_TYPE (n1), n1, n2)fold_build2_loc (((location_t) 0), MINUS_EXPR, ((contains_struct_check
((n1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3852, __FUNCTION__))->typed.type), n1, n2 )
;
3853 tree step = STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[i * 3 + 2];
3854 tree opt = TREE_TYPE (thisarginfo.op)((contains_struct_check ((thisarginfo.op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3854, __FUNCTION__))->typed.type)
;
3855 bias = fold_convert (TREE_TYPE (step), bias)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3855, __FUNCTION__))->typed.type), bias)
;
3856 bias = fold_build2 (MULT_EXPR, TREE_TYPE (step), bias, step)fold_build2_loc (((location_t) 0), MULT_EXPR, ((contains_struct_check
((step), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3856, __FUNCTION__))->typed.type), bias, step )
;
3857 thisarginfo.op
3858 = fold_build2 (POINTER_TYPE_P (opt)fold_build2_loc (((location_t) 0), (((enum tree_code) (opt)->
base.code) == POINTER_TYPE || ((enum tree_code) (opt)->base
.code) == REFERENCE_TYPE) ? POINTER_PLUS_EXPR : PLUS_EXPR, opt
, thisarginfo.op, bias )
3859 ? POINTER_PLUS_EXPR : PLUS_EXPR, opt,fold_build2_loc (((location_t) 0), (((enum tree_code) (opt)->
base.code) == POINTER_TYPE || ((enum tree_code) (opt)->base
.code) == REFERENCE_TYPE) ? POINTER_PLUS_EXPR : PLUS_EXPR, opt
, thisarginfo.op, bias )
3860 thisarginfo.op, bias)fold_build2_loc (((location_t) 0), (((enum tree_code) (opt)->
base.code) == POINTER_TYPE || ((enum tree_code) (opt)->base
.code) == REFERENCE_TYPE) ? POINTER_PLUS_EXPR : PLUS_EXPR, opt
, thisarginfo.op, bias )
;
3861 }
3862 }
3863 else if (!vec_stmt
3864 && thisarginfo.dt != vect_constant_def
3865 && thisarginfo.dt != vect_external_def
3866 && loop_vinfo
3867 && TREE_CODE (op)((enum tree_code) (op)->base.code) == SSA_NAME
3868 && simple_iv (loop, loop_containing_stmt (stmt), op,
3869 &iv, false)
3870 && tree_fits_shwi_p (iv.step))
3871 {
3872 thisarginfo.linear_step = tree_to_shwi (iv.step);
3873 thisarginfo.op = iv.base;
3874 }
3875 else if ((thisarginfo.dt == vect_constant_def
3876 || thisarginfo.dt == vect_external_def)
3877 && POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3877, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3877, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
)
3878 thisarginfo.align = get_pointer_alignment (op) / BITS_PER_UNIT(8);
3879 /* Addresses of array elements indexed by GOMP_SIMD_LANE are
3880 linear too. */
3881 if (POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3881, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3881, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
3882 && !thisarginfo.linear_step
3883 && !vec_stmt
3884 && thisarginfo.dt != vect_constant_def
3885 && thisarginfo.dt != vect_external_def
3886 && loop_vinfo
3887 && !slp_node
3888 && TREE_CODE (op)((enum tree_code) (op)->base.code) == SSA_NAME)
3889 vect_simd_lane_linear (op, loop, &thisarginfo);
3890
3891 arginfo.quick_push (thisarginfo);
3892 }
3893
3894 poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo)(loop_vinfo)->vectorization_factor;
3895 if (!vf.is_constant ())
3896 {
3897 if (dump_enabled_p ())
3898 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
3899 "not considering SIMD clones; not yet supported"
3900 " for variable-width vectors.\n");
3901 return false;
3902 }
3903
3904 unsigned int badness = 0;
3905 struct cgraph_node *bestn = NULLnullptr;
3906 if (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.exists ())
3907 bestn = cgraph_node::get (STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info[0]);
3908 else
3909 for (struct cgraph_node *n = node->simd_clones; n != NULLnullptr;
3910 n = n->simdclone->next_clone)
3911 {
3912 unsigned int this_badness = 0;
3913 unsigned int num_calls;
3914 if (!constant_multiple_p (vf, n->simdclone->simdlen, &num_calls)
3915 || n->simdclone->nargs != nargs)
3916 continue;
3917 if (num_calls != 1)
3918 this_badness += exact_log2 (num_calls) * 1024;
3919 if (n->simdclone->inbranch)
3920 this_badness += 2048;
3921 int target_badness = targetm.simd_clone.usable (n);
3922 if (target_badness < 0)
3923 continue;
3924 this_badness += target_badness * 512;
3925 /* FORNOW: Have to add code to add the mask argument. */
3926 if (n->simdclone->inbranch)
3927 continue;
3928 for (i = 0; i < nargs; i++)
3929 {
3930 switch (n->simdclone->args[i].arg_type)
3931 {
3932 case SIMD_CLONE_ARG_TYPE_VECTOR:
3933 if (!useless_type_conversion_p
3934 (n->simdclone->args[i].orig_type,
3935 TREE_TYPE (gimple_call_arg (stmt, i))((contains_struct_check ((gimple_call_arg (stmt, i)), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3935, __FUNCTION__))->typed.type)
))
3936 i = -1;
3937 else if (arginfo[i].dt == vect_constant_def
3938 || arginfo[i].dt == vect_external_def
3939 || arginfo[i].linear_step)
3940 this_badness += 64;
3941 break;
3942 case SIMD_CLONE_ARG_TYPE_UNIFORM:
3943 if (arginfo[i].dt != vect_constant_def
3944 && arginfo[i].dt != vect_external_def)
3945 i = -1;
3946 break;
3947 case SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP:
3948 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP:
3949 if (arginfo[i].dt == vect_constant_def
3950 || arginfo[i].dt == vect_external_def
3951 || (arginfo[i].linear_step
3952 != n->simdclone->args[i].linear_step))
3953 i = -1;
3954 break;
3955 case SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP:
3956 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP:
3957 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP:
3958 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP:
3959 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP:
3960 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP:
3961 /* FORNOW */
3962 i = -1;
3963 break;
3964 case SIMD_CLONE_ARG_TYPE_MASK:
3965 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3965, __FUNCTION__))
;
3966 }
3967 if (i == (size_t) -1)
3968 break;
3969 if (n->simdclone->args[i].alignment > arginfo[i].align)
3970 {
3971 i = -1;
3972 break;
3973 }
3974 if (arginfo[i].align)
3975 this_badness += (exact_log2 (arginfo[i].align)
3976 - exact_log2 (n->simdclone->args[i].alignment));
3977 }
3978 if (i == (size_t) -1)
3979 continue;
3980 if (bestn == NULLnullptr || this_badness < badness)
3981 {
3982 bestn = n;
3983 badness = this_badness;
3984 }
3985 }
3986
3987 if (bestn == NULLnullptr)
3988 return false;
3989
3990 for (i = 0; i < nargs; i++)
3991 if ((arginfo[i].dt == vect_constant_def
3992 || arginfo[i].dt == vect_external_def)
3993 && bestn->simdclone->args[i].arg_type == SIMD_CLONE_ARG_TYPE_VECTOR)
3994 {
3995 tree arg_type = TREE_TYPE (gimple_call_arg (stmt, i))((contains_struct_check ((gimple_call_arg (stmt, i)), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 3995, __FUNCTION__))->typed.type)
;
3996 arginfo[i].vectype = get_vectype_for_scalar_type (vinfo, arg_type,
3997 slp_node);
3998 if (arginfo[i].vectype == NULLnullptr
3999 || !constant_multiple_p (bestn->simdclone->simdlen,
4000 simd_clone_subparts (arginfo[i].vectype)))
4001 return false;
4002 }
4003
4004 fndecl = bestn->decl;
4005 nunits = bestn->simdclone->simdlen;
4006 ncopies = vector_unroll_factor (vf, nunits)(exact_div (vf, nunits).to_constant ());
4007
4008 /* If the function isn't const, only allow it in simd loops where user
4009 has asserted that at least nunits consecutive iterations can be
4010 performed using SIMD instructions. */
4011 if ((loop == NULLnullptr || maybe_lt ((unsigned) loop->safelen, nunits))
4012 && gimple_vuse (stmt))
4013 return false;
4014
4015 /* Sanity check: make sure that at least one copy of the vectorized stmt
4016 needs to be generated. */
4017 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4017, __FUNCTION__), 0 : 0))
;
4018
4019 if (!vec_stmt) /* transformation not required. */
4020 {
4021 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_push (bestn->decl);
4022 for (i = 0; i < nargs; i++)
4023 if ((bestn->simdclone->args[i].arg_type
4024 == SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP)
4025 || (bestn->simdclone->args[i].arg_type
4026 == SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP))
4027 {
4028 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_grow_cleared (i * 3
4029 + 1,
4030 true);
4031 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_push (arginfo[i].op);
4032 tree lst = POINTER_TYPE_P (TREE_TYPE (arginfo[i].op))(((enum tree_code) (((contains_struct_check ((arginfo[i].op),
(TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4032, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((arginfo[i].op
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4032, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4033 ? size_type_nodeglobal_trees[TI_SIZE_TYPE] : TREE_TYPE (arginfo[i].op)((contains_struct_check ((arginfo[i].op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4033, __FUNCTION__))->typed.type)
;
4034 tree ls = build_int_cst (lst, arginfo[i].linear_step);
4035 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_push (ls);
4036 tree sll = arginfo[i].simd_lane_linear
4037 ? boolean_true_nodeglobal_trees[TI_BOOLEAN_TRUE] : boolean_false_nodeglobal_trees[TI_BOOLEAN_FALSE];
4038 STMT_VINFO_SIMD_CLONE_INFO (stmt_info)(stmt_info)->simd_clone_info.safe_push (sll);
4039 }
4040 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = call_simd_clone_vec_info_type;
4041 DUMP_VECT_SCOPE ("vectorizable_simd_clone_call")auto_dump_scope scope ("vectorizable_simd_clone_call", vect_location
)
;
4042/* vect_model_simple_cost (vinfo, stmt_info, ncopies,
4043 dt, slp_node, cost_vec); */
4044 return true;
4045 }
4046
4047 /* Transform. */
4048
4049 if (dump_enabled_p ())
4050 dump_printf_loc (MSG_NOTE, vect_location, "transform call.\n");
4051
4052 /* Handle def. */
4053 scalar_dest = gimple_call_lhs (stmt);
4054 vec_dest = NULL_TREE(tree) nullptr;
4055 rtype = NULL_TREE(tree) nullptr;
4056 ratype = NULL_TREE(tree) nullptr;
4057 if (scalar_dest)
4058 {
4059 vec_dest = vect_create_destination_var (scalar_dest, vectype);
4060 rtype = TREE_TYPE (TREE_TYPE (fndecl))((contains_struct_check ((((contains_struct_check ((fndecl), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4060, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4060, __FUNCTION__))->typed.type)
;
4061 if (TREE_CODE (rtype)((enum tree_code) (rtype)->base.code) == ARRAY_TYPE)
4062 {
4063 ratype = rtype;
4064 rtype = TREE_TYPE (ratype)((contains_struct_check ((ratype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4064, __FUNCTION__))->typed.type)
;
4065 }
4066 }
4067
4068 auto_vec<vec<tree> > vec_oprnds;
4069 auto_vec<unsigned> vec_oprnds_i;
4070 vec_oprnds.safe_grow_cleared (nargs, true);
4071 vec_oprnds_i.safe_grow_cleared (nargs, true);
4072 for (j = 0; j < ncopies; ++j)
4073 {
4074 /* Build argument list for the vectorized call. */
4075 if (j == 0)
4076 vargs.create (nargs);
4077 else
4078 vargs.truncate (0);
4079
4080 for (i = 0; i < nargs; i++)
4081 {
4082 unsigned int k, l, m, o;
4083 tree atype;
4084 op = gimple_call_arg (stmt, i);
4085 switch (bestn->simdclone->args[i].arg_type)
4086 {
4087 case SIMD_CLONE_ARG_TYPE_VECTOR:
4088 atype = bestn->simdclone->args[i].vector_type;
4089 o = vector_unroll_factor (nunits,(exact_div (nunits, simd_clone_subparts (atype)).to_constant (
))
4090 simd_clone_subparts (atype))(exact_div (nunits, simd_clone_subparts (atype)).to_constant (
))
;
4091 for (m = j * o; m < (j + 1) * o; m++)
4092 {
4093 if (simd_clone_subparts (atype)
4094 < simd_clone_subparts (arginfo[i].vectype))
4095 {
4096 poly_uint64 prec = GET_MODE_BITSIZE (TYPE_MODE (atype)((((enum tree_code) ((tree_class_check ((atype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4096, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(atype) : (atype)->type_common.mode)
);
4097 k = (simd_clone_subparts (arginfo[i].vectype)
4098 / simd_clone_subparts (atype));
4099 gcc_assert ((k & (k - 1)) == 0)((void)(!((k & (k - 1)) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4099, __FUNCTION__), 0 : 0))
;
4100 if (m == 0)
4101 {
4102 vect_get_vec_defs_for_operand (vinfo, stmt_info,
4103 ncopies * o / k, op,
4104 &vec_oprnds[i]);
4105 vec_oprnds_i[i] = 0;
4106 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4107 }
4108 else
4109 {
4110 vec_oprnd0 = arginfo[i].op;
4111 if ((m & (k - 1)) == 0)
4112 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4113 }
4114 arginfo[i].op = vec_oprnd0;
4115 vec_oprnd0
4116 = build3 (BIT_FIELD_REF, atype, vec_oprnd0,
4117 bitsize_int (prec)size_int_kind (prec, stk_bitsizetype),
4118 bitsize_int ((m & (k - 1)) * prec)size_int_kind ((m & (k - 1)) * prec, stk_bitsizetype));
4119 gassign *new_stmt
4120 = gimple_build_assign (make_ssa_name (atype),
4121 vec_oprnd0);
4122 vect_finish_stmt_generation (vinfo, stmt_info,
4123 new_stmt, gsi);
4124 vargs.safe_push (gimple_assign_lhs (new_stmt));
4125 }
4126 else
4127 {
4128 k = (simd_clone_subparts (atype)
4129 / simd_clone_subparts (arginfo[i].vectype));
4130 gcc_assert ((k & (k - 1)) == 0)((void)(!((k & (k - 1)) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4130, __FUNCTION__), 0 : 0))
;
4131 vec<constructor_elt, va_gc> *ctor_elts;
4132 if (k != 1)
4133 vec_alloc (ctor_elts, k);
4134 else
4135 ctor_elts = NULLnullptr;
4136 for (l = 0; l < k; l++)
4137 {
4138 if (m == 0 && l == 0)
4139 {
4140 vect_get_vec_defs_for_operand (vinfo, stmt_info,
4141 k * o * ncopies,
4142 op,
4143 &vec_oprnds[i]);
4144 vec_oprnds_i[i] = 0;
4145 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4146 }
4147 else
4148 vec_oprnd0 = vec_oprnds[i][vec_oprnds_i[i]++];
4149 arginfo[i].op = vec_oprnd0;
4150 if (k == 1)
4151 break;
4152 CONSTRUCTOR_APPEND_ELT (ctor_elts, NULL_TREE,do { constructor_elt _ce___ = {(tree) nullptr, vec_oprnd0}; vec_safe_push
((ctor_elts), _ce___); } while (0)
4153 vec_oprnd0)do { constructor_elt _ce___ = {(tree) nullptr, vec_oprnd0}; vec_safe_push
((ctor_elts), _ce___); } while (0)
;
4154 }
4155 if (k == 1)
4156 if (!useless_type_conversion_p (TREE_TYPE (vec_oprnd0)((contains_struct_check ((vec_oprnd0), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4156, __FUNCTION__))->typed.type)
,
4157 atype))
4158 {
4159 vec_oprnd0
4160 = build1 (VIEW_CONVERT_EXPR, atype, vec_oprnd0);
4161 gassign *new_stmt
4162 = gimple_build_assign (make_ssa_name (atype),
4163 vec_oprnd0);
4164 vect_finish_stmt_generation (vinfo, stmt_info,
4165 new_stmt, gsi);
4166 vargs.safe_push (gimple_assign_lhs (new_stmt));
4167 }
4168 else
4169 vargs.safe_push (vec_oprnd0);
4170 else
4171 {
4172 vec_oprnd0 = build_constructor (atype, ctor_elts);
4173 gassign *new_stmt
4174 = gimple_build_assign (make_ssa_name (atype),
4175 vec_oprnd0);
4176 vect_finish_stmt_generation (vinfo, stmt_info,
4177 new_stmt, gsi);
4178 vargs.safe_push (gimple_assign_lhs (new_stmt));
4179 }
4180 }
4181 }
4182 break;
4183 case SIMD_CLONE_ARG_TYPE_UNIFORM:
4184 vargs.safe_push (op);
4185 break;
4186 case SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP:
4187 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP:
4188 if (j == 0)
4189 {
4190 gimple_seq stmts;
4191 arginfo[i].op
4192 = force_gimple_operand (unshare_expr (arginfo[i].op),
4193 &stmts, true, NULL_TREE(tree) nullptr);
4194 if (stmts != NULLnullptr)
4195 {
4196 basic_block new_bb;
4197 edge pe = loop_preheader_edge (loop);
4198 new_bb = gsi_insert_seq_on_edge_immediate (pe, stmts);
4199 gcc_assert (!new_bb)((void)(!(!new_bb) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4199, __FUNCTION__), 0 : 0))
;
4200 }
4201 if (arginfo[i].simd_lane_linear)
4202 {
4203 vargs.safe_push (arginfo[i].op);
4204 break;
4205 }
4206 tree phi_res = copy_ssa_name (op);
4207 gphi *new_phi = create_phi_node (phi_res, loop->header);
4208 add_phi_arg (new_phi, arginfo[i].op,
4209 loop_preheader_edge (loop), UNKNOWN_LOCATION((location_t) 0));
4210 enum tree_code code
4211 = POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4211, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4211, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4212 ? POINTER_PLUS_EXPR : PLUS_EXPR;
4213 tree type = POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4213, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4213, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4214 ? sizetypesizetype_tab[(int) stk_sizetype] : TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4214, __FUNCTION__))->typed.type)
;
4215 poly_widest_int cst
4216 = wi::mul (bestn->simdclone->args[i].linear_step,
4217 ncopies * nunits);
4218 tree tcst = wide_int_to_tree (type, cst);
4219 tree phi_arg = copy_ssa_name (op);
4220 gassign *new_stmt
4221 = gimple_build_assign (phi_arg, code, phi_res, tcst);
4222 gimple_stmt_iterator si = gsi_after_labels (loop->header);
4223 gsi_insert_after (&si, new_stmt, GSI_NEW_STMT);
4224 add_phi_arg (new_phi, phi_arg, loop_latch_edge (loop),
4225 UNKNOWN_LOCATION((location_t) 0));
4226 arginfo[i].op = phi_res;
4227 vargs.safe_push (phi_res);
4228 }
4229 else
4230 {
4231 enum tree_code code
4232 = POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4232, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4232, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4233 ? POINTER_PLUS_EXPR : PLUS_EXPR;
4234 tree type = POINTER_TYPE_P (TREE_TYPE (op))(((enum tree_code) (((contains_struct_check ((op), (TS_TYPED)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4234, __FUNCTION__))->typed.type))->base.code) == POINTER_TYPE
|| ((enum tree_code) (((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4234, __FUNCTION__))->typed.type))->base.code) == REFERENCE_TYPE
)
4235 ? sizetypesizetype_tab[(int) stk_sizetype] : TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4235, __FUNCTION__))->typed.type)
;
4236 poly_widest_int cst
4237 = wi::mul (bestn->simdclone->args[i].linear_step,
4238 j * nunits);
4239 tree tcst = wide_int_to_tree (type, cst);
4240 new_temp = make_ssa_name (TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4240, __FUNCTION__))->typed.type)
);
4241 gassign *new_stmt
4242 = gimple_build_assign (new_temp, code,
4243 arginfo[i].op, tcst);
4244 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4245 vargs.safe_push (new_temp);
4246 }
4247 break;
4248 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP:
4249 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP:
4250 case SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP:
4251 case SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP:
4252 case SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP:
4253 case SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP:
4254 default:
4255 gcc_unreachable ()(fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4255, __FUNCTION__))
;
4256 }
4257 }
4258
4259 gcall *new_call = gimple_build_call_vec (fndecl, vargs);
4260 if (vec_dest)
4261 {
4262 gcc_assert (ratype((void)(!(ratype || (!maybe_ne (simd_clone_subparts (rtype), nunits
))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4263, __FUNCTION__), 0 : 0))
4263 || known_eq (simd_clone_subparts (rtype), nunits))((void)(!(ratype || (!maybe_ne (simd_clone_subparts (rtype), nunits
))) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4263, __FUNCTION__), 0 : 0))
;
4264 if (ratype)
4265 new_temp = create_tmp_var (ratype);
4266 else if (useless_type_conversion_p (vectype, rtype))
4267 new_temp = make_ssa_name (vec_dest, new_call);
4268 else
4269 new_temp = make_ssa_name (rtype, new_call);
4270 gimple_call_set_lhs (new_call, new_temp);
4271 }
4272 vect_finish_stmt_generation (vinfo, stmt_info, new_call, gsi);
4273 gimple *new_stmt = new_call;
4274
4275 if (vec_dest)
4276 {
4277 if (!multiple_p (simd_clone_subparts (vectype), nunits))
4278 {
4279 unsigned int k, l;
4280 poly_uint64 prec = GET_MODE_BITSIZE (TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4280, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
);
4281 poly_uint64 bytes = GET_MODE_SIZE (TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4281, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
);
4282 k = vector_unroll_factor (nunits,(exact_div (nunits, simd_clone_subparts (vectype)).to_constant
())
4283 simd_clone_subparts (vectype))(exact_div (nunits, simd_clone_subparts (vectype)).to_constant
())
;
4284 gcc_assert ((k & (k - 1)) == 0)((void)(!((k & (k - 1)) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4284, __FUNCTION__), 0 : 0))
;
4285 for (l = 0; l < k; l++)
4286 {
4287 tree t;
4288 if (ratype)
4289 {
4290 t = build_fold_addr_expr (new_temp)build_fold_addr_expr_loc (((location_t) 0), (new_temp));
4291 t = build2 (MEM_REF, vectype, t,
4292 build_int_cst (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4292, __FUNCTION__))->typed.type)
, l * bytes));
4293 }
4294 else
4295 t = build3 (BIT_FIELD_REF, vectype, new_temp,
4296 bitsize_int (prec)size_int_kind (prec, stk_bitsizetype), bitsize_int (l * prec)size_int_kind (l * prec, stk_bitsizetype));
4297 new_stmt = gimple_build_assign (make_ssa_name (vectype), t);
4298 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4299
4300 if (j == 0 && l == 0)
4301 *vec_stmt = new_stmt;
4302 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
4303 }
4304
4305 if (ratype)
4306 vect_clobber_variable (vinfo, stmt_info, gsi, new_temp);
4307 continue;
4308 }
4309 else if (!multiple_p (nunits, simd_clone_subparts (vectype)))
4310 {
4311 unsigned int k = (simd_clone_subparts (vectype)
4312 / simd_clone_subparts (rtype));
4313 gcc_assert ((k & (k - 1)) == 0)((void)(!((k & (k - 1)) == 0) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4313, __FUNCTION__), 0 : 0))
;
4314 if ((j & (k - 1)) == 0)
4315 vec_alloc (ret_ctor_elts, k);
4316 if (ratype)
4317 {
4318 unsigned int m, o;
4319 o = vector_unroll_factor (nunits,(exact_div (nunits, simd_clone_subparts (rtype)).to_constant (
))
4320 simd_clone_subparts (rtype))(exact_div (nunits, simd_clone_subparts (rtype)).to_constant (
))
;
4321 for (m = 0; m < o; m++)
4322 {
4323 tree tem = build4 (ARRAY_REF, rtype, new_temp,
4324 size_int (m)size_int_kind (m, stk_sizetype), NULL_TREE(tree) nullptr, NULL_TREE(tree) nullptr);
4325 new_stmt = gimple_build_assign (make_ssa_name (rtype),
4326 tem);
4327 vect_finish_stmt_generation (vinfo, stmt_info,
4328 new_stmt, gsi);
4329 CONSTRUCTOR_APPEND_ELT (ret_ctor_elts, NULL_TREE,do { constructor_elt _ce___ = {(tree) nullptr, gimple_assign_lhs
(new_stmt)}; vec_safe_push ((ret_ctor_elts), _ce___); } while
(0)
4330 gimple_assign_lhs (new_stmt))do { constructor_elt _ce___ = {(tree) nullptr, gimple_assign_lhs
(new_stmt)}; vec_safe_push ((ret_ctor_elts), _ce___); } while
(0)
;
4331 }
4332 vect_clobber_variable (vinfo, stmt_info, gsi, new_temp);
4333 }
4334 else
4335 CONSTRUCTOR_APPEND_ELT (ret_ctor_elts, NULL_TREE, new_temp)do { constructor_elt _ce___ = {(tree) nullptr, new_temp}; vec_safe_push
((ret_ctor_elts), _ce___); } while (0)
;
4336 if ((j & (k - 1)) != k - 1)
4337 continue;
4338 vec_oprnd0 = build_constructor (vectype, ret_ctor_elts);
4339 new_stmt
4340 = gimple_build_assign (make_ssa_name (vec_dest), vec_oprnd0);
4341 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4342
4343 if ((unsigned) j == k - 1)
4344 *vec_stmt = new_stmt;
4345 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
4346 continue;
4347 }
4348 else if (ratype)
4349 {
4350 tree t = build_fold_addr_expr (new_temp)build_fold_addr_expr_loc (((location_t) 0), (new_temp));
4351 t = build2 (MEM_REF, vectype, t,
4352 build_int_cst (TREE_TYPE (t)((contains_struct_check ((t), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4352, __FUNCTION__))->typed.type)
, 0));
4353 new_stmt = gimple_build_assign (make_ssa_name (vec_dest), t);
4354 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4355 vect_clobber_variable (vinfo, stmt_info, gsi, new_temp);
4356 }
4357 else if (!useless_type_conversion_p (vectype, rtype))
4358 {
4359 vec_oprnd0 = build1 (VIEW_CONVERT_EXPR, vectype, new_temp);
4360 new_stmt
4361 = gimple_build_assign (make_ssa_name (vec_dest), vec_oprnd0);
4362 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4363 }
4364 }
4365
4366 if (j == 0)
4367 *vec_stmt = new_stmt;
4368 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
4369 }
4370
4371 for (i = 0; i < nargs; ++i)
4372 {
4373 vec<tree> oprndsi = vec_oprnds[i];
4374 oprndsi.release ();
4375 }
4376 vargs.release ();
4377
4378 /* The call in STMT might prevent it from being removed in dce.
4379 We however cannot remove it here, due to the way the ssa name
4380 it defines is mapped to the new definition. So just replace
4381 rhs of the statement with something harmless. */
4382
4383 if (slp_node)
4384 return true;
4385
4386 gimple *new_stmt;
4387 if (scalar_dest)
4388 {
4389 type = TREE_TYPE (scalar_dest)((contains_struct_check ((scalar_dest), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4389, __FUNCTION__))->typed.type)
;
4390 lhs = gimple_call_lhs (vect_orig_stmt (stmt_info)->stmt);
4391 new_stmt = gimple_build_assign (lhs, build_zero_cst (type));
4392 }
4393 else
4394 new_stmt = gimple_build_nop ();
4395 vinfo->replace_stmt (gsi, vect_orig_stmt (stmt_info), new_stmt);
4396 unlink_stmt_vdef (stmt);
4397
4398 return true;
4399}
4400
4401
/* Function vect_gen_widened_results_half

   Create a vector stmt whose code, number of operands, and result
   variable are CODE, OP_TYPE, and VEC_DEST respectively, and whose
   operands are VEC_OPRND0 and VEC_OPRND1.  The new vector stmt is
   inserted at GSI.  (Note: this function now always builds a gimple
   assignment; the old path where CODE was a CALL_EXPR invoking a
   target-builtin DECL no longer exists here.)
   STMT_INFO is the original scalar stmt that we are vectorizing.  */
4410
4411static gimple *
4412vect_gen_widened_results_half (vec_info *vinfo, enum tree_code code,
4413 tree vec_oprnd0, tree vec_oprnd1, int op_type,
4414 tree vec_dest, gimple_stmt_iterator *gsi,
4415 stmt_vec_info stmt_info)
4416{
4417 gimple *new_stmt;
4418 tree new_temp;
4419
4420 /* Generate half of the widened result: */
4421 gcc_assert (op_type == TREE_CODE_LENGTH (code))((void)(!(op_type == tree_code_length[(int) (code)]) ? fancy_abort
("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 4421, __FUNCTION__), 0 : 0))
;
4422 if (op_type != binary_op)
4423 vec_oprnd1 = NULLnullptr;
4424 new_stmt = gimple_build_assign (vec_dest, code, vec_oprnd0, vec_oprnd1);
4425 new_temp = make_ssa_name (vec_dest, new_stmt);
4426 gimple_assign_set_lhs (new_stmt, new_temp);
4427 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4428
4429 return new_stmt;
4430}
4431
4432
4433/* Create vectorized demotion statements for vector operands from VEC_OPRNDS.
4434 For multi-step conversions store the resulting vectors and call the function
4435 recursively. */
4436
4437static void
4438vect_create_vectorized_demotion_stmts (vec_info *vinfo, vec<tree> *vec_oprnds,
4439 int multi_step_cvt,
4440 stmt_vec_info stmt_info,
4441 vec<tree> vec_dsts,
4442 gimple_stmt_iterator *gsi,
4443 slp_tree slp_node, enum tree_code code)
4444{
4445 unsigned int i;
4446 tree vop0, vop1, new_tmp, vec_dest;
4447
4448 vec_dest = vec_dsts.pop ();
4449
4450 for (i = 0; i < vec_oprnds->length (); i += 2)
4451 {
4452 /* Create demotion operation. */
4453 vop0 = (*vec_oprnds)[i];
4454 vop1 = (*vec_oprnds)[i + 1];
4455 gassign *new_stmt = gimple_build_assign (vec_dest, code, vop0, vop1);
4456 new_tmp = make_ssa_name (vec_dest, new_stmt);
4457 gimple_assign_set_lhs (new_stmt, new_tmp);
4458 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
4459
4460 if (multi_step_cvt)
4461 /* Store the resulting vector for next recursive call. */
4462 (*vec_oprnds)[i/2] = new_tmp;
4463 else
4464 {
4465 /* This is the last step of the conversion sequence. Store the
4466 vectors in SLP_NODE or in vector info of the scalar statement
4467 (or in STMT_VINFO_RELATED_STMT chain). */
4468 if (slp_node)
4469 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
4470 else
4471 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
4472 }
4473 }
4474
4475 /* For multi-step demotion operations we first generate demotion operations
4476 from the source type to the intermediate types, and then combine the
4477 results (stored in VEC_OPRNDS) in demotion operation to the destination
4478 type. */
4479 if (multi_step_cvt)
4480 {
4481 /* At each level of recursion we have half of the operands we had at the
4482 previous level. */
4483 vec_oprnds->truncate ((i+1)/2);
4484 vect_create_vectorized_demotion_stmts (vinfo, vec_oprnds,
4485 multi_step_cvt - 1,
4486 stmt_info, vec_dsts, gsi,
4487 slp_node, VEC_PACK_TRUNC_EXPR);
4488 }
4489
4490 vec_dsts.quick_push (vec_dest);
4491}
4492
4493
4494/* Create vectorized promotion statements for vector operands from VEC_OPRNDS0
4495 and VEC_OPRNDS1, for a binary operation associated with scalar statement
4496 STMT_INFO. For multi-step conversions store the resulting vectors and
4497 call the function recursively. */
4498
4499static void
4500vect_create_vectorized_promotion_stmts (vec_info *vinfo,
4501 vec<tree> *vec_oprnds0,
4502 vec<tree> *vec_oprnds1,
4503 stmt_vec_info stmt_info, tree vec_dest,
4504 gimple_stmt_iterator *gsi,
4505 enum tree_code code1,
4506 enum tree_code code2, int op_type)
4507{
4508 int i;
4509 tree vop0, vop1, new_tmp1, new_tmp2;
4510 gimple *new_stmt1, *new_stmt2;
4511 vec<tree> vec_tmp = vNULL;
4512
4513 vec_tmp.create (vec_oprnds0->length () * 2);
4514 FOR_EACH_VEC_ELT (*vec_oprnds0, i, vop0)for (i = 0; (*vec_oprnds0).iterate ((i), &(vop0)); ++(i))
4515 {
4516 if (op_type == binary_op)
4517 vop1 = (*vec_oprnds1)[i];
4518 else
4519 vop1 = NULL_TREE(tree) nullptr;
4520
4521 /* Generate the two halves of promotion operation. */
4522 new_stmt1 = vect_gen_widened_results_half (vinfo, code1, vop0, vop1,
4523 op_type, vec_dest, gsi,
4524 stmt_info);
4525 new_stmt2 = vect_gen_widened_results_half (vinfo, code2, vop0, vop1,
4526 op_type, vec_dest, gsi,
4527 stmt_info);
4528 if (is_gimple_call (new_stmt1))
4529 {
4530 new_tmp1 = gimple_call_lhs (new_stmt1);
4531 new_tmp2 = gimple_call_lhs (new_stmt2);
4532 }
4533 else
4534 {
4535 new_tmp1 = gimple_assign_lhs (new_stmt1);
4536 new_tmp2 = gimple_assign_lhs (new_stmt2);
4537 }
4538
4539 /* Store the results for the next step. */
4540 vec_tmp.quick_push (new_tmp1);
4541 vec_tmp.quick_push (new_tmp2);
4542 }
4543
4544 vec_oprnds0->release ();
4545 *vec_oprnds0 = vec_tmp;
4546}
4547
4548
4549/* Check if STMT_INFO performs a conversion operation that can be vectorized.
4550 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
4551 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
4552 Return true if STMT_INFO is vectorizable in this way. */
4553
static bool
vectorizable_conversion (vec_info *vinfo,
			 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
			 gimple **vec_stmt, slp_tree slp_node,
			 stmt_vector_for_cost *cost_vec)
{
  tree vec_dest;
  tree scalar_dest;
  tree op0, op1 = NULL_TREE;
  loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
  /* CODE is the scalar conversion; CODE1/CODE2 are the vector codes
     for the (possibly two-halved) main conversion step.  */
  enum tree_code code, code1 = ERROR_MARK, code2 = ERROR_MARK;
  /* CODECVT1/CODECVT2 are the vector codes for an extra conversion
     step through CVT_TYPE, when the target needs one.  */
  enum tree_code codecvt1 = ERROR_MARK, codecvt2 = ERROR_MARK;
  tree new_temp;
  enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
  int ndts = 2;
  poly_uint64 nunits_in;
  poly_uint64 nunits_out;
  tree vectype_out, vectype_in;
  int ncopies, i;
  tree lhs_type, rhs_type;
  /* NARROW: more output than input elements (demotion); WIDEN: fewer
     (promotion); NONE: element counts match.  */
  enum { NARROW, NONE, WIDEN } modifier;
  vec<tree> vec_oprnds0 = vNULL;
  vec<tree> vec_oprnds1 = vNULL;
  tree vop0;
  bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
  int multi_step_cvt = 0;
  vec<tree> interm_types = vNULL;
  tree intermediate_type, cvt_type = NULL_TREE;
  int op_type;
  unsigned short fltsz;

  /* Is STMT a vectorizable conversion?   */

  if (!STMT_VINFO_RELEVANT_P (stmt_info) && !bb_vinfo)
    return false;

  if (STMT_VINFO_DEF_TYPE (stmt_info) != vect_internal_def
      && ! vec_stmt)
    return false;

  gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
  if (!stmt)
    return false;

  if (TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
    return false;

  code = gimple_assign_rhs_code (stmt);
  if (!CONVERT_EXPR_CODE_P (code)
      && code != FIX_TRUNC_EXPR
      && code != FLOAT_EXPR
      && code != WIDEN_PLUS_EXPR
      && code != WIDEN_MINUS_EXPR
      && code != WIDEN_MULT_EXPR
      && code != WIDEN_LSHIFT_EXPR)
    return false;

  /* unary_op for plain conversions, binary_op for the WIDEN_* codes.  */
  op_type = TREE_CODE_LENGTH (code);

  /* Check types of lhs and rhs.  */
  scalar_dest = gimple_assign_lhs (stmt);
  lhs_type = TREE_TYPE (scalar_dest);
  vectype_out = STMT_VINFO_VECTYPE (stmt_info);

  /* Check the operands of the operation.  */
  slp_tree slp_op0, slp_op1 = NULL;
  if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
			   0, &op0, &slp_op0, &dt[0], &vectype_in))
    {
      if (dump_enabled_p ())
	dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
			 "use not simple.\n");
      return false;
    }

  rhs_type = TREE_TYPE (op0);
  /* Except for int<->float conversions, both sides must be integral or
     both must be scalar floats.  */
  if ((code != FIX_TRUNC_EXPR && code != FLOAT_EXPR)
      && !((INTEGRAL_TYPE_P (lhs_type)
	    && INTEGRAL_TYPE_P (rhs_type))
	   || (SCALAR_FLOAT_TYPE_P (lhs_type)
	       && SCALAR_FLOAT_TYPE_P (rhs_type))))
    return false;

  /* Bit-precision (sub-mode) integer types are only OK when the result
     is a boolean vector.  */
  if (!VECTOR_BOOLEAN_TYPE_P (vectype_out)
      && ((INTEGRAL_TYPE_P (lhs_type)
	   && !type_has_mode_precision_p (lhs_type))
	  || (INTEGRAL_TYPE_P (rhs_type)
	      && !type_has_mode_precision_p (rhs_type))))
    {
      if (dump_enabled_p ())
	dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
			 "type conversion to/from bit-precision unsupported."
			 "\n");
      return false;
    }

  if (op_type == binary_op)
    {
      gcc_assert (code == WIDEN_MULT_EXPR || code == WIDEN_LSHIFT_EXPR
		  || code == WIDEN_PLUS_EXPR || code == WIDEN_MINUS_EXPR);

      op1 = gimple_assign_rhs2 (stmt);
      tree vectype1_in;
      if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 1,
			       &op1, &slp_op1, &dt[1], &vectype1_in))
	{
	  if (dump_enabled_p ())
	    dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
			     "use not simple.\n");
	  return false;
	}
      /* For WIDEN_MULT_EXPR, if OP0 is a constant, use the type of
	 OP1.  */
      if (!vectype_in)
	vectype_in = vectype1_in;
    }

  /* If op0 is an external or constant def, infer the vector type
     from the scalar type.  */
  if (!vectype_in)
    vectype_in = get_vectype_for_scalar_type (vinfo, rhs_type, slp_node);
  if (vec_stmt)
    gcc_assert (vectype_in);
  if (!vectype_in)
    {
      if (dump_enabled_p ())
	dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
			 "no vectype for scalar type %T\n", rhs_type);

      return false;
    }

  if (VECTOR_BOOLEAN_TYPE_P (vectype_out)
      && !VECTOR_BOOLEAN_TYPE_P (vectype_in))
    {
      if (dump_enabled_p ())
	dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
			 "can't convert between boolean and non "
			 "boolean vectors %T\n", rhs_type);

      return false;
    }

  /* Classify by relative element counts of the input/output vectors.  */
  nunits_in = TYPE_VECTOR_SUBPARTS (vectype_in);
  nunits_out = TYPE_VECTOR_SUBPARTS (vectype_out);
  if (known_eq (nunits_out, nunits_in))
    modifier = NONE;
  else if (multiple_p (nunits_out, nunits_in))
    modifier = NARROW;
  else
    {
      gcc_checking_assert (multiple_p (nunits_in, nunits_out));
      modifier = WIDEN;
    }

  /* Multiple types in SLP are handled by creating the appropriate number of
     vectorized stmts for each SLP node.  Hence, NCOPIES is always 1 in
     case of SLP.  */
  if (slp_node)
    ncopies = 1;
  else if (modifier == NARROW)
    ncopies = vect_get_num_copies (loop_vinfo, vectype_out);
  else
    ncopies = vect_get_num_copies (loop_vinfo, vectype_in);

  /* Sanity check: make sure that at least one copy of the vectorized stmt
     needs to be generated.  */
  gcc_assert (ncopies >= 1);

  bool found_mode = false;
  scalar_mode lhs_mode = SCALAR_TYPE_MODE (lhs_type);
  scalar_mode rhs_mode = SCALAR_TYPE_MODE (rhs_type);
  opt_scalar_mode rhs_mode_iter;

  /* Supportable by target?  */
  switch (modifier)
    {
    case NONE:
      if (code != FIX_TRUNC_EXPR
	  && code != FLOAT_EXPR
	  && !CONVERT_EXPR_CODE_P (code))
	return false;
      if (supportable_convert_operation (code, vectype_out, vectype_in, &code1))
	break;
      /* FALLTHRU */
    unsupported:
      if (dump_enabled_p ())
	dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
			 "conversion not supported by target.\n");
      return false;

    case WIDEN:
      if (supportable_widening_operation (vinfo, code, stmt_info, vectype_out,
					  vectype_in, &code1, &code2,
					  &multi_step_cvt, &interm_types))
	{
	  /* Binary widening operation can only be supported directly by the
	     architecture.  */
	  gcc_assert (!(multi_step_cvt && op_type == binary_op));
	  break;
	}

      /* No direct support.  The only fallback handled here is int->float
	 (FLOAT_EXPR) where the float is wider than the integer: widen the
	 integer to a same-size intermediate first, then convert.  */
      if (code != FLOAT_EXPR
	  || GET_MODE_SIZE (lhs_mode) <= GET_MODE_SIZE (rhs_mode))
	goto unsupported;

      fltsz = GET_MODE_SIZE (lhs_mode);
      /* Try successively wider integer intermediate modes, up to the
	 size of the float result.  */
      FOR_EACH_2XWIDER_MODE (rhs_mode_iter, rhs_mode)
	{
	  rhs_mode = rhs_mode_iter.require ();
	  if (GET_MODE_SIZE (rhs_mode) > fltsz)
	    break;

	  cvt_type
	    = build_nonstandard_integer_type (GET_MODE_BITSIZE (rhs_mode), 0);
	  cvt_type = get_same_sized_vectype (cvt_type, vectype_in);
	  if (cvt_type == NULL_TREE)
	    goto unsupported;

	  /* Final step: CVT_TYPE -> VECTYPE_OUT.  Same-size intermediate
	     needs only a plain convert; a narrower one needs a widening
	     operation (CODECVT1/CODECVT2).  */
	  if (GET_MODE_SIZE (rhs_mode) == fltsz)
	    {
	      if (!supportable_convert_operation (code, vectype_out,
						  cvt_type, &codecvt1))
		goto unsupported;
	    }
	  else if (!supportable_widening_operation (vinfo, code, stmt_info,
						    vectype_out, cvt_type,
						    &codecvt1, &codecvt2,
						    &multi_step_cvt,
						    &interm_types))
	    continue;
	  else
	    gcc_assert (multi_step_cvt == 0);

	  /* First step: widen VECTYPE_IN integers to CVT_TYPE.  */
	  if (supportable_widening_operation (vinfo, NOP_EXPR, stmt_info,
					      cvt_type,
					      vectype_in, &code1, &code2,
					      &multi_step_cvt, &interm_types))
	    {
	      found_mode = true;
	      break;
	    }
	}

      if (!found_mode)
	goto unsupported;

      if (GET_MODE_SIZE (rhs_mode) == fltsz)
	codecvt2 = ERROR_MARK;
      else
	{
	  /* Fold the extra convert into the multi-step chain.  */
	  multi_step_cvt++;
	  interm_types.safe_push (cvt_type);
	  cvt_type = NULL_TREE;
	}
      break;

    case NARROW:
      gcc_assert (op_type == unary_op);
      if (supportable_narrowing_operation (code, vectype_out, vectype_in,
					   &code1, &multi_step_cvt,
					   &interm_types))
	break;

      /* No direct support.  The only fallback handled here is float->int
	 (FIX_TRUNC_EXPR) where the int is narrower than the float: convert
	 to a same-size integer intermediate first, then narrow.  */
      if (code != FIX_TRUNC_EXPR
	  || GET_MODE_SIZE (lhs_mode) >= GET_MODE_SIZE (rhs_mode))
	goto unsupported;

      cvt_type
	= build_nonstandard_integer_type (GET_MODE_BITSIZE (rhs_mode), 0);
      cvt_type = get_same_sized_vectype (cvt_type, vectype_in);
      if (cvt_type == NULL_TREE)
	goto unsupported;
      if (!supportable_convert_operation (code, cvt_type, vectype_in,
					  &codecvt1))
	goto unsupported;
      if (supportable_narrowing_operation (NOP_EXPR, vectype_out, cvt_type,
					   &code1, &multi_step_cvt,
					   &interm_types))
	break;
      goto unsupported;

    default:
      gcc_unreachable ();
    }

  if (!vec_stmt)		/* transformation not required.  */
    {
      if (slp_node
	  && (!vect_maybe_update_slp_op_vectype (slp_op0, vectype_in)
	      || !vect_maybe_update_slp_op_vectype (slp_op1, vectype_in)))
	{
	  if (dump_enabled_p ())
	    dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
			     "incompatible vector types for invariants\n");
	  return false;
	}
      DUMP_VECT_SCOPE ("vectorizable_conversion");
      if (modifier == NONE)
	{
	  STMT_VINFO_TYPE (stmt_info) = type_conversion_vec_info_type;
	  vect_model_simple_cost (vinfo, stmt_info, ncopies, dt, ndts, slp_node,
				  cost_vec);
	}
      else if (modifier == NARROW)
	{
	  STMT_VINFO_TYPE (stmt_info) = type_demotion_vec_info_type;
	  /* The final packing step produces one vector result per copy.  */
	  unsigned int nvectors
	    = (slp_node ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node) : ncopies);
	  vect_model_promotion_demotion_cost (stmt_info, dt, nvectors,
					      multi_step_cvt, cost_vec);
	}
      else
	{
	  STMT_VINFO_TYPE (stmt_info) = type_promotion_vec_info_type;
	  /* The initial unpacking step produces two vector results
	     per copy.  MULTI_STEP_CVT is 0 for a single conversion,
	     so >> MULTI_STEP_CVT divides by 2^(number of steps - 1).  */
	  unsigned int nvectors
	    = (slp_node
	       ? SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node) >> multi_step_cvt
	       : ncopies * 2);
	  vect_model_promotion_demotion_cost (stmt_info, dt, nvectors,
					      multi_step_cvt, cost_vec);
	}
      interm_types.release ();
      return true;
    }

  /* Transform.  */
  if (dump_enabled_p ())
    dump_printf_loc (MSG_NOTE, vect_location,
		     "transform conversion. ncopies = %d.\n", ncopies);

  if (op_type == binary_op)
    {
      if (CONSTANT_CLASS_P (op0))
	op0 = fold_convert (TREE_TYPE (op1), op0);
      else if (CONSTANT_CLASS_P (op1))
	op1 = fold_convert (TREE_TYPE (op0), op1);
    }

  /* In case of multi-step conversion, we first generate conversion operations
     to the intermediate types, and then from that types to the final one.
     We create vector destinations for the intermediate type (TYPES) received
     from supportable_*_operation, and store them in the correct order
     for future use in vect_create_vectorized_*_stmts ().  */
  auto_vec<tree> vec_dsts (multi_step_cvt + 1);
  vec_dest = vect_create_destination_var (scalar_dest,
					  (cvt_type && modifier == WIDEN)
					  ? cvt_type : vectype_out);
  vec_dsts.quick_push (vec_dest);

  if (multi_step_cvt)
    {
      for (i = interm_types.length () - 1;
	   interm_types.iterate (i, &intermediate_type); i--)
	{
	  vec_dest = vect_create_destination_var (scalar_dest,
						  intermediate_type);
	  vec_dsts.quick_push (vec_dest);
	}
    }

  /* Destination for the extra conversion step, if one was chosen above.  */
  if (cvt_type)
    vec_dest = vect_create_destination_var (scalar_dest,
					    modifier == WIDEN
					    ? vectype_out : cvt_type);

  /* NINPUTS scales the number of input vector defs fetched per copy:
     a multi-step narrowing consumes 2^steps inputs per output.  */
  int ninputs = 1;
  if (!slp_node)
    {
      if (modifier == WIDEN)
	;
      else if (modifier == NARROW)
	{
	  if (multi_step_cvt)
	    ninputs = vect_pow2 (multi_step_cvt);
	  ninputs *= 2;
	}
    }

  switch (modifier)
    {
    case NONE:
      vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
			 op0, &vec_oprnds0);
      FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
	{
	  /* Arguments are ready, create the new vector stmt.  */
	  gcc_assert (TREE_CODE_LENGTH (code1) == unary_op);
	  gassign *new_stmt = gimple_build_assign (vec_dest, code1, vop0);
	  new_temp = make_ssa_name (vec_dest, new_stmt);
	  gimple_assign_set_lhs (new_stmt, new_temp);
	  vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);

	  if (slp_node)
	    SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
	  else
	    STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
	}
      break;

    case WIDEN:
      /* In case the vectorization factor (VF) is bigger than the number
	 of elements that we can fit in a vectype (nunits), we have to
	 generate more than one vector stmt - i.e - we need to "unroll"
	 the vector stmt by a factor VF/nunits.  */
      vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies * ninputs,
			 op0, &vec_oprnds0,
			 code == WIDEN_LSHIFT_EXPR ? NULL_TREE : op1,
			 &vec_oprnds1);
      if (code == WIDEN_LSHIFT_EXPR)
	{
	  /* The shift amount is a scalar invariant: replicate it per
	     input vector instead of fetching vector defs for it.  */
	  int oprnds_size = vec_oprnds0.length ();
	  vec_oprnds1.create (oprnds_size);
	  for (i = 0; i < oprnds_size; ++i)
	    vec_oprnds1.quick_push (op1);
	}
      /* Arguments are ready.  Create the new vector stmts.  */
      for (i = multi_step_cvt; i >= 0; i--)
	{
	  tree this_dest = vec_dsts[i];
	  enum tree_code c1 = code1, c2 = code2;
	  if (i == 0 && codecvt2 != ERROR_MARK)
	    {
	      c1 = codecvt1;
	      c2 = codecvt2;
	    }
	  vect_create_vectorized_promotion_stmts (vinfo, &vec_oprnds0,
						  &vec_oprnds1, stmt_info,
						  this_dest, gsi,
						  c1, c2, op_type);
	}

      FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
	{
	  gimple *new_stmt;
	  if (cvt_type)
	    {
	      /* Apply the remaining same-size conversion step.  */
	      gcc_assert (TREE_CODE_LENGTH (codecvt1) == unary_op);
	      new_temp = make_ssa_name (vec_dest);
	      new_stmt = gimple_build_assign (new_temp, codecvt1, vop0);
	      vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
	    }
	  else
	    new_stmt = SSA_NAME_DEF_STMT (vop0);

	  if (slp_node)
	    SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
	  else
	    STMT_VINFO_VEC_STMTS (stmt_info).safe_push (new_stmt);
	}
      break;

    case NARROW:
      /* In case the vectorization factor (VF) is bigger than the number
	 of elements that we can fit in a vectype (nunits), we have to
	 generate more than one vector stmt - i.e - we need to "unroll"
	 the vector stmt by a factor VF/nunits.  */
      vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies * ninputs,
			 op0, &vec_oprnds0);
      /* Arguments are ready.  Create the new vector stmts.  */
      if (cvt_type)
	FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
	  {
	    /* First apply the same-size conversion step in place.  */
	    gcc_assert (TREE_CODE_LENGTH (codecvt1) == unary_op);
	    new_temp = make_ssa_name (vec_dest);
	    gassign *new_stmt
	      = gimple_build_assign (new_temp, codecvt1, vop0);
	    vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
	    vec_oprnds0[i] = new_temp;
	  }

      vect_create_vectorized_demotion_stmts (vinfo, &vec_oprnds0,
					     multi_step_cvt,
					     stmt_info, vec_dsts, gsi,
					     slp_node, code1);
      break;
    }
  if (!slp_node)
    *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)[0];

  vec_oprnds0.release ();
  vec_oprnds1.release ();
  interm_types.release ();

  return true;
}
5044
5045/* Return true if we can assume from the scalar form of STMT_INFO that
5046 neither the scalar nor the vector forms will generate code. STMT_INFO
5047 is known not to involve a data reference. */
5048
5049bool
5050vect_nop_conversion_p (stmt_vec_info stmt_info)
5051{
5052 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5053 if (!stmt)
5054 return false;
5055
5056 tree lhs = gimple_assign_lhs (stmt);
5057 tree_code code = gimple_assign_rhs_code (stmt);
5058 tree rhs = gimple_assign_rhs1 (stmt);
5059
5060 if (code == SSA_NAME || code == VIEW_CONVERT_EXPR)
5061 return true;
5062
5063 if (CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR))
5064 return tree_nop_conversion_p (TREE_TYPE (lhs)((contains_struct_check ((lhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5064, __FUNCTION__))->typed.type)
, TREE_TYPE (rhs)((contains_struct_check ((rhs), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5064, __FUNCTION__))->typed.type)
);
5065
5066 return false;
5067}
5068
5069/* Function vectorizable_assignment.
5070
5071 Check if STMT_INFO performs an assignment (copy) that can be vectorized.
5072 If VEC_STMT is also passed, vectorize the STMT_INFO: create a vectorized
5073 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
5074 Return true if STMT_INFO is vectorizable in this way. */
5075
5076static bool
5077vectorizable_assignment (vec_info *vinfo,
5078 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
5079 gimple **vec_stmt, slp_tree slp_node,
5080 stmt_vector_for_cost *cost_vec)
5081{
5082 tree vec_dest;
5083 tree scalar_dest;
5084 tree op;
5085 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
5086 tree new_temp;
5087 enum vect_def_type dt[1] = {vect_unknown_def_type};
5088 int ndts = 1;
5089 int ncopies;
5090 int i;
5091 vec<tree> vec_oprnds = vNULL;
5092 tree vop;
5093 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
5094 enum tree_code code;
5095 tree vectype_in;
5096
5097 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
5098 return false;
5099
5100 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
5101 && ! vec_stmt)
5102 return false;
5103
5104 /* Is vectorizable assignment? */
5105 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5106 if (!stmt)
5107 return false;
5108
5109 scalar_dest = gimple_assign_lhs (stmt);
5110 if (TREE_CODE (scalar_dest)((enum tree_code) (scalar_dest)->base.code) != SSA_NAME)
5111 return false;
5112
5113 if (STMT_VINFO_DATA_REF (stmt_info)((stmt_info)->dr_aux.dr + 0))
5114 return false;
5115
5116 code = gimple_assign_rhs_code (stmt);
5117 if (!(gimple_assign_single_p (stmt)
5118 || code == PAREN_EXPR
5119 || CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR)))
5120 return false;
5121
5122 tree vectype = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
5123 poly_uint64 nunits = TYPE_VECTOR_SUBPARTS (vectype);
5124
5125 /* Multiple types in SLP are handled by creating the appropriate number of
5126 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
5127 case of SLP. */
5128 if (slp_node)
5129 ncopies = 1;
5130 else
5131 ncopies = vect_get_num_copies (loop_vinfo, vectype);
5132
5133 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5133, __FUNCTION__), 0 : 0))
;
5134
5135 slp_tree slp_op;
5136 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 0, &op, &slp_op,
5137 &dt[0], &vectype_in))
5138 {
5139 if (dump_enabled_p ())
5140 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5141 "use not simple.\n");
5142 return false;
5143 }
5144 if (!vectype_in)
5145 vectype_in = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5145, __FUNCTION__))->typed.type)
, slp_node);
5146
5147 /* We can handle NOP_EXPR conversions that do not change the number
5148 of elements or the vector size. */
5149 if ((CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR)
5150 || code == VIEW_CONVERT_EXPR)
5151 && (!vectype_in
5152 || maybe_ne (TYPE_VECTOR_SUBPARTS (vectype_in), nunits)
5153 || maybe_ne (GET_MODE_SIZE (TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5153, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
),
5154 GET_MODE_SIZE (TYPE_MODE (vectype_in)((((enum tree_code) ((tree_class_check ((vectype_in), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5154, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype_in) : (vectype_in)->type_common.mode)
))))
5155 return false;
5156
5157 if (VECTOR_BOOLEAN_TYPE_P (vectype)(((enum tree_code) (vectype)->base.code) == VECTOR_TYPE &&
((enum tree_code) (((contains_struct_check ((vectype), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5157, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
5158 && !VECTOR_BOOLEAN_TYPE_P (vectype_in)(((enum tree_code) (vectype_in)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_in
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5158, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
)
5159 {
5160 if (dump_enabled_p ())
5161 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5162 "can't convert between boolean and non "
5163 "boolean vectors %T\n", TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5163, __FUNCTION__))->typed.type)
);
5164
5165 return false;
5166 }
5167
5168 /* We do not handle bit-precision changes. */
5169 if ((CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR)
5170 || code == VIEW_CONVERT_EXPR)
5171 && INTEGRAL_TYPE_P (TREE_TYPE (scalar_dest))(((enum tree_code) (((contains_struct_check ((scalar_dest), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5171, __FUNCTION__))->typed.type))->base.code) == ENUMERAL_TYPE
|| ((enum tree_code) (((contains_struct_check ((scalar_dest)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5171, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
|| ((enum tree_code) (((contains_struct_check ((scalar_dest)
, (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5171, __FUNCTION__))->typed.type))->base.code) == INTEGER_TYPE
)
5172 && (!type_has_mode_precision_p (TREE_TYPE (scalar_dest)((contains_struct_check ((scalar_dest), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5172, __FUNCTION__))->typed.type)
)
5173 || !type_has_mode_precision_p (TREE_TYPE (op)((contains_struct_check ((op), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5173, __FUNCTION__))->typed.type)
))
5174 /* But a conversion that does not change the bit-pattern is ok. */
5175 && !((TYPE_PRECISION (TREE_TYPE (scalar_dest))((tree_class_check ((((contains_struct_check ((scalar_dest), (
TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5175, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5175, __FUNCTION__))->type_common.precision)
5176 > TYPE_PRECISION (TREE_TYPE (op))((tree_class_check ((((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5176, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5176, __FUNCTION__))->type_common.precision)
)
5177 && TYPE_UNSIGNED (TREE_TYPE (op))((tree_class_check ((((contains_struct_check ((op), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5177, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5177, __FUNCTION__))->base.u.bits.unsigned_flag)
))
5178 {
5179 if (dump_enabled_p ())
5180 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5181 "type conversion to/from bit-precision "
5182 "unsupported.\n");
5183 return false;
5184 }
5185
5186 if (!vec_stmt) /* transformation not required. */
5187 {
5188 if (slp_node
5189 && !vect_maybe_update_slp_op_vectype (slp_op, vectype_in))
5190 {
5191 if (dump_enabled_p ())
5192 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5193 "incompatible vector types for invariants\n");
5194 return false;
5195 }
5196 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = assignment_vec_info_type;
5197 DUMP_VECT_SCOPE ("vectorizable_assignment")auto_dump_scope scope ("vectorizable_assignment", vect_location
)
;
5198 if (!vect_nop_conversion_p (stmt_info))
5199 vect_model_simple_cost (vinfo, stmt_info, ncopies, dt, ndts, slp_node,
5200 cost_vec);
5201 return true;
5202 }
5203
5204 /* Transform. */
5205 if (dump_enabled_p ())
5206 dump_printf_loc (MSG_NOTE, vect_location, "transform assignment.\n");
5207
5208 /* Handle def. */
5209 vec_dest = vect_create_destination_var (scalar_dest, vectype);
5210
5211 /* Handle use. */
5212 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies, op, &vec_oprnds);
5213
5214 /* Arguments are ready. create the new vector stmt. */
5215 FOR_EACH_VEC_ELT (vec_oprnds, i, vop)for (i = 0; (vec_oprnds).iterate ((i), &(vop)); ++(i))
5216 {
5217 if (CONVERT_EXPR_CODE_P (code)((code) == NOP_EXPR || (code) == CONVERT_EXPR)
5218 || code == VIEW_CONVERT_EXPR)
5219 vop = build1 (VIEW_CONVERT_EXPR, vectype, vop);
5220 gassign *new_stmt = gimple_build_assign (vec_dest, vop);
5221 new_temp = make_ssa_name (vec_dest, new_stmt);
5222 gimple_assign_set_lhs (new_stmt, new_temp);
5223 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5224 if (slp_node)
5225 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
5226 else
5227 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
5228 }
5229 if (!slp_node)
5230 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
5231
5232 vec_oprnds.release ();
5233 return true;
5234}
5235
5236
5237/* Return TRUE if CODE (a shift operation) is supported for SCALAR_TYPE
5238 either as shift by a scalar or by a vector. */
5239
5240bool
5241vect_supportable_shift (vec_info *vinfo, enum tree_code code, tree scalar_type)
5242{
5243
5244 machine_mode vec_mode;
5245 optab optab;
5246 int icode;
5247 tree vectype;
5248
5249 vectype = get_vectype_for_scalar_type (vinfo, scalar_type);
5250 if (!vectype)
5251 return false;
5252
5253 optab = optab_for_tree_code (code, vectype, optab_scalar);
5254 if (!optab
5255 || optab_handler (optab, TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5255, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
) == CODE_FOR_nothing)
5256 {
5257 optab = optab_for_tree_code (code, vectype, optab_vector);
5258 if (!optab
5259 || (optab_handler (optab, TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5259, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
)
5260 == CODE_FOR_nothing))
5261 return false;
5262 }
5263
5264 vec_mode = TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5264, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
;
5265 icode = (int) optab_handler (optab, vec_mode);
5266 if (icode == CODE_FOR_nothing)
5267 return false;
5268
5269 return true;
5270}
5271
5272
5273/* Function vectorizable_shift.
5274
5275 Check if STMT_INFO performs a shift operation that can be vectorized.
5276 If VEC_STMT is also passed, vectorize the STMT_INFO: create a vectorized
5277 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
5278 Return true if STMT_INFO is vectorizable in this way. */
5279
5280static bool
5281vectorizable_shift (vec_info *vinfo,
5282 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
5283 gimple **vec_stmt, slp_tree slp_node,
5284 stmt_vector_for_cost *cost_vec)
5285{
5286 tree vec_dest;
5287 tree scalar_dest;
5288 tree op0, op1 = NULLnullptr;
5289 tree vec_oprnd1 = NULL_TREE(tree) nullptr;
5290 tree vectype;
5291 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
5292 enum tree_code code;
5293 machine_mode vec_mode;
5294 tree new_temp;
5295 optab optab;
5296 int icode;
5297 machine_mode optab_op2_mode;
5298 enum vect_def_type dt[2] = {vect_unknown_def_type, vect_unknown_def_type};
5299 int ndts = 2;
5300 poly_uint64 nunits_in;
5301 poly_uint64 nunits_out;
5302 tree vectype_out;
5303 tree op1_vectype;
5304 int ncopies;
5305 int i;
5306 vec<tree> vec_oprnds0 = vNULL;
5307 vec<tree> vec_oprnds1 = vNULL;
5308 tree vop0, vop1;
5309 unsigned int k;
5310 bool scalar_shift_arg = true;
5311 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
5312 bool incompatible_op1_vectype_p = false;
5313
5314 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
5315 return false;
5316
5317 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
5318 && STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_nested_cycle
5319 && ! vec_stmt)
5320 return false;
5321
5322 /* Is STMT a vectorizable binary/unary operation? */
5323 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5324 if (!stmt)
5325 return false;
5326
5327 if (TREE_CODE (gimple_assign_lhs (stmt))((enum tree_code) (gimple_assign_lhs (stmt))->base.code) != SSA_NAME)
5328 return false;
5329
5330 code = gimple_assign_rhs_code (stmt);
5331
5332 if (!(code == LSHIFT_EXPR || code == RSHIFT_EXPR || code == LROTATE_EXPR
5333 || code == RROTATE_EXPR))
5334 return false;
5335
5336 scalar_dest = gimple_assign_lhs (stmt);
5337 vectype_out = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
5338 if (!type_has_mode_precision_p (TREE_TYPE (scalar_dest)((contains_struct_check ((scalar_dest), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5338, __FUNCTION__))->typed.type)
))
5339 {
5340 if (dump_enabled_p ())
5341 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5342 "bit-precision shifts not supported.\n");
5343 return false;
5344 }
5345
5346 slp_tree slp_op0;
5347 if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
5348 0, &op0, &slp_op0, &dt[0], &vectype))
5349 {
5350 if (dump_enabled_p ())
5351 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5352 "use not simple.\n");
5353 return false;
5354 }
5355 /* If op0 is an external or constant def, infer the vector type
5356 from the scalar type. */
5357 if (!vectype)
5358 vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op0)((contains_struct_check ((op0), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5358, __FUNCTION__))->typed.type)
, slp_node);
5359 if (vec_stmt)
5360 gcc_assert (vectype)((void)(!(vectype) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5360, __FUNCTION__), 0 : 0))
;
5361 if (!vectype)
5362 {
5363 if (dump_enabled_p ())
5364 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5365 "no vectype for scalar type\n");
5366 return false;
5367 }
5368
5369 nunits_out = TYPE_VECTOR_SUBPARTS (vectype_out);
5370 nunits_in = TYPE_VECTOR_SUBPARTS (vectype);
5371 if (maybe_ne (nunits_out, nunits_in))
5372 return false;
5373
5374 stmt_vec_info op1_def_stmt_info;
5375 slp_tree slp_op1;
5376 if (!vect_is_simple_use (vinfo, stmt_info, slp_node, 1, &op1, &slp_op1,
5377 &dt[1], &op1_vectype, &op1_def_stmt_info))
5378 {
5379 if (dump_enabled_p ())
5380 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5381 "use not simple.\n");
5382 return false;
5383 }
5384
5385 /* Multiple types in SLP are handled by creating the appropriate number of
5386 vectorized stmts for each SLP node. Hence, NCOPIES is always 1 in
5387 case of SLP. */
5388 if (slp_node)
5389 ncopies = 1;
5390 else
5391 ncopies = vect_get_num_copies (loop_vinfo, vectype);
5392
5393 gcc_assert (ncopies >= 1)((void)(!(ncopies >= 1) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5393, __FUNCTION__), 0 : 0))
;
5394
5395 /* Determine whether the shift amount is a vector, or scalar. If the
5396 shift/rotate amount is a vector, use the vector/vector shift optabs. */
5397
5398 if ((dt[1] == vect_internal_def
5399 || dt[1] == vect_induction_def
5400 || dt[1] == vect_nested_cycle)
5401 && !slp_node)
5402 scalar_shift_arg = false;
5403 else if (dt[1] == vect_constant_def
5404 || dt[1] == vect_external_def
5405 || dt[1] == vect_internal_def)
5406 {
5407 /* In SLP, need to check whether the shift count is the same,
5408 in loops if it is a constant or invariant, it is always
5409 a scalar shift. */
5410 if (slp_node)
5411 {
5412 vec<stmt_vec_info> stmts = SLP_TREE_SCALAR_STMTS (slp_node)(slp_node)->stmts;
5413 stmt_vec_info slpstmt_info;
5414
5415 FOR_EACH_VEC_ELT (stmts, k, slpstmt_info)for (k = 0; (stmts).iterate ((k), &(slpstmt_info)); ++(k)
)
5416 {
5417 gassign *slpstmt = as_a <gassign *> (slpstmt_info->stmt);
5418 if (!operand_equal_p (gimple_assign_rhs2 (slpstmt), op1, 0))
5419 scalar_shift_arg = false;
5420 }
5421
5422 /* For internal SLP defs we have to make sure we see scalar stmts
5423 for all vector elements.
5424 ??? For different vectors we could resort to a different
5425 scalar shift operand but code-generation below simply always
5426 takes the first. */
5427 if (dt[1] == vect_internal_def
5428 && maybe_ne (nunits_out * SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node)(slp_node)->vec_stmts_size,
5429 stmts.length ()))
5430 scalar_shift_arg = false;
5431 }
5432
5433 /* If the shift amount is computed by a pattern stmt we cannot
5434 use the scalar amount directly thus give up and use a vector
5435 shift. */
5436 if (op1_def_stmt_info && is_pattern_stmt_p (op1_def_stmt_info))
5437 scalar_shift_arg = false;
5438 }
5439 else
5440 {
5441 if (dump_enabled_p ())
5442 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5443 "operand mode requires invariant argument.\n");
5444 return false;
5445 }
5446
5447 /* Vector shifted by vector. */
5448 bool was_scalar_shift_arg = scalar_shift_arg;
5449 if (!scalar_shift_arg)
5450 {
5451 optab = optab_for_tree_code (code, vectype, optab_vector);
5452 if (dump_enabled_p ())
5453 dump_printf_loc (MSG_NOTE, vect_location,
5454 "vector/vector shift/rotate found.\n");
5455
5456 if (!op1_vectype)
5457 op1_vectype = get_vectype_for_scalar_type (vinfo, TREE_TYPE (op1)((contains_struct_check ((op1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5457, __FUNCTION__))->typed.type)
,
5458 slp_op1);
5459 incompatible_op1_vectype_p
5460 = (op1_vectype == NULL_TREE(tree) nullptr
5461 || maybe_ne (TYPE_VECTOR_SUBPARTS (op1_vectype),
5462 TYPE_VECTOR_SUBPARTS (vectype))
5463 || TYPE_MODE (op1_vectype)((((enum tree_code) ((tree_class_check ((op1_vectype), (tcc_type
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5463, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(op1_vectype) : (op1_vectype)->type_common.mode)
!= TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5463, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
);
5464 if (incompatible_op1_vectype_p
5465 && (!slp_node
5466 || SLP_TREE_DEF_TYPE (slp_op1)(slp_op1)->def_type != vect_constant_def
5467 || slp_op1->refcnt != 1))
5468 {
5469 if (dump_enabled_p ())
5470 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5471 "unusable type for last operand in"
5472 " vector/vector shift/rotate.\n");
5473 return false;
5474 }
5475 }
5476 /* See if the machine has a vector shifted by scalar insn and if not
5477 then see if it has a vector shifted by vector insn. */
5478 else
5479 {
5480 optab = optab_for_tree_code (code, vectype, optab_scalar);
5481 if (optab
5482 && optab_handler (optab, TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5482, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
) != CODE_FOR_nothing)
5483 {
5484 if (dump_enabled_p ())
5485 dump_printf_loc (MSG_NOTE, vect_location,
5486 "vector/scalar shift/rotate found.\n");
5487 }
5488 else
5489 {
5490 optab = optab_for_tree_code (code, vectype, optab_vector);
5491 if (optab
5492 && (optab_handler (optab, TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5492, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
)
5493 != CODE_FOR_nothing))
5494 {
5495 scalar_shift_arg = false;
5496
5497 if (dump_enabled_p ())
5498 dump_printf_loc (MSG_NOTE, vect_location,
5499 "vector/vector shift/rotate found.\n");
5500
5501 if (!op1_vectype)
5502 op1_vectype = get_vectype_for_scalar_type (vinfo,
5503 TREE_TYPE (op1)((contains_struct_check ((op1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5503, __FUNCTION__))->typed.type)
,
5504 slp_op1);
5505
5506 /* Unlike the other binary operators, shifts/rotates have
5507 the rhs being int, instead of the same type as the lhs,
5508 so make sure the scalar is the right type if we are
5509 dealing with vectors of long long/long/short/char. */
5510 incompatible_op1_vectype_p
5511 = (!op1_vectype
5512 || !tree_nop_conversion_p (TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5512, __FUNCTION__))->typed.type)
,
5513 TREE_TYPE (op1)((contains_struct_check ((op1), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5513, __FUNCTION__))->typed.type)
));
5514 if (incompatible_op1_vectype_p
5515 && dt[1] == vect_internal_def)
5516 {
5517 if (dump_enabled_p ())
5518 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5519 "unusable type for last operand in"
5520 " vector/vector shift/rotate.\n");
5521 return false;
5522 }
5523 }
5524 }
5525 }
5526
5527 /* Supportable by target? */
5528 if (!optab)
5529 {
5530 if (dump_enabled_p ())
5531 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5532 "no optab.\n");
5533 return false;
5534 }
5535 vec_mode = TYPE_MODE (vectype)((((enum tree_code) ((tree_class_check ((vectype), (tcc_type)
, "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5535, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)
;
5536 icode = (int) optab_handler (optab, vec_mode);
5537 if (icode == CODE_FOR_nothing)
5538 {
5539 if (dump_enabled_p ())
5540 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5541 "op not supported by target.\n");
5542 /* Check only during analysis. */
5543 if (maybe_ne (GET_MODE_SIZE (vec_mode), UNITS_PER_WORD(((global_options.x_ix86_isa_flags & (1UL << 1)) !=
0) ? 8 : 4)
)
5544 || (!vec_stmt
5545 && !vect_worthwhile_without_simd_p (vinfo, code)))
5546 return false;
5547 if (dump_enabled_p ())
5548 dump_printf_loc (MSG_NOTE, vect_location,
5549 "proceeding using word mode.\n");
5550 }
5551
5552 /* Worthwhile without SIMD support? Check only during analysis. */
5553 if (!vec_stmt
5554 && !VECTOR_MODE_P (TYPE_MODE (vectype))(((enum mode_class) mode_class[((((enum tree_code) ((tree_class_check
((vectype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5554, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)]) == MODE_VECTOR_BOOL
|| ((enum mode_class) mode_class[((((enum tree_code) ((tree_class_check
((vectype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5554, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)]) == MODE_VECTOR_INT
|| ((enum mode_class) mode_class[((((enum tree_code) ((tree_class_check
((vectype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5554, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)]) == MODE_VECTOR_FLOAT
|| ((enum mode_class) mode_class[((((enum tree_code) ((tree_class_check
((vectype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5554, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)]) == MODE_VECTOR_FRACT
|| ((enum mode_class) mode_class[((((enum tree_code) ((tree_class_check
((vectype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5554, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)]) == MODE_VECTOR_UFRACT
|| ((enum mode_class) mode_class[((((enum tree_code) ((tree_class_check
((vectype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5554, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)]) == MODE_VECTOR_ACCUM
|| ((enum mode_class) mode_class[((((enum tree_code) ((tree_class_check
((vectype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5554, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)]) == MODE_VECTOR_UACCUM
)
5555 && !vect_worthwhile_without_simd_p (vinfo, code))
5556 {
5557 if (dump_enabled_p ())
5558 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5559 "not worthwhile without SIMD support.\n");
5560 return false;
5561 }
5562
5563 if (!vec_stmt) /* transformation not required. */
5564 {
5565 if (slp_node
5566 && (!vect_maybe_update_slp_op_vectype (slp_op0, vectype)
5567 || ((!scalar_shift_arg || dt[1] == vect_internal_def)
5568 && (!incompatible_op1_vectype_p
5569 || dt[1] == vect_constant_def)
5570 && !vect_maybe_update_slp_op_vectype
5571 (slp_op1,
5572 incompatible_op1_vectype_p ? vectype : op1_vectype))))
5573 {
5574 if (dump_enabled_p ())
5575 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5576 "incompatible vector types for invariants\n");
5577 return false;
5578 }
5579 /* Now adjust the constant shift amount in place. */
5580 if (slp_node
5581 && incompatible_op1_vectype_p
5582 && dt[1] == vect_constant_def)
5583 {
5584 for (unsigned i = 0;
5585 i < SLP_TREE_SCALAR_OPS (slp_op1)(slp_op1)->ops.length (); ++i)
5586 {
5587 SLP_TREE_SCALAR_OPS (slp_op1)(slp_op1)->ops[i]
5588 = fold_convert (TREE_TYPE (vectype),fold_convert_loc (((location_t) 0), ((contains_struct_check (
(vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5588, __FUNCTION__))->typed.type), (slp_op1)->ops[i])
5589 SLP_TREE_SCALAR_OPS (slp_op1)[i])fold_convert_loc (((location_t) 0), ((contains_struct_check (
(vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5588, __FUNCTION__))->typed.type), (slp_op1)->ops[i])
;
5590 gcc_assert ((TREE_CODE (SLP_TREE_SCALAR_OPS (slp_op1)[i])((void)(!((((enum tree_code) ((slp_op1)->ops[i])->base.
code) == INTEGER_CST)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5591, __FUNCTION__), 0 : 0))
5591 == INTEGER_CST))((void)(!((((enum tree_code) ((slp_op1)->ops[i])->base.
code) == INTEGER_CST)) ? fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5591, __FUNCTION__), 0 : 0))
;
5592 }
5593 }
5594 STMT_VINFO_TYPE (stmt_info)(stmt_info)->type = shift_vec_info_type;
5595 DUMP_VECT_SCOPE ("vectorizable_shift")auto_dump_scope scope ("vectorizable_shift", vect_location);
5596 vect_model_simple_cost (vinfo, stmt_info, ncopies, dt,
5597 scalar_shift_arg ? 1 : ndts, slp_node, cost_vec);
5598 return true;
5599 }
5600
5601 /* Transform. */
5602
5603 if (dump_enabled_p ())
5604 dump_printf_loc (MSG_NOTE, vect_location,
5605 "transform binary/unary operation.\n");
5606
5607 if (incompatible_op1_vectype_p && !slp_node)
5608 {
5609 gcc_assert (!scalar_shift_arg && was_scalar_shift_arg)((void)(!(!scalar_shift_arg && was_scalar_shift_arg) ?
fancy_abort ("/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5609, __FUNCTION__), 0 : 0))
;
5610 op1 = fold_convert (TREE_TYPE (vectype), op1)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5610, __FUNCTION__))->typed.type), op1)
;
5611 if (dt[1] != vect_constant_def)
5612 op1 = vect_init_vector (vinfo, stmt_info, op1,
5613 TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5613, __FUNCTION__))->typed.type)
, NULLnullptr);
5614 }
5615
5616 /* Handle def. */
5617 vec_dest = vect_create_destination_var (scalar_dest, vectype);
5618
5619 if (scalar_shift_arg && dt[1] != vect_internal_def)
5620 {
5621 /* Vector shl and shr insn patterns can be defined with scalar
5622 operand 2 (shift operand). In this case, use constant or loop
5623 invariant op1 directly, without extending it to vector mode
5624 first. */
5625 optab_op2_mode = insn_data[icode].operand[2].mode;
5626 if (!VECTOR_MODE_P (optab_op2_mode)(((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_BOOL
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_INT
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_FLOAT
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_FRACT
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_UFRACT
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_ACCUM
|| ((enum mode_class) mode_class[optab_op2_mode]) == MODE_VECTOR_UACCUM
)
)
5627 {
5628 if (dump_enabled_p ())
5629 dump_printf_loc (MSG_NOTE, vect_location,
5630 "operand 1 using scalar mode.\n");
5631 vec_oprnd1 = op1;
5632 vec_oprnds1.create (slp_node ? slp_node->vec_stmts_size : ncopies);
5633 vec_oprnds1.quick_push (vec_oprnd1);
5634 /* Store vec_oprnd1 for every vector stmt to be created.
5635 We check during the analysis that all the shift arguments
5636 are the same.
5637 TODO: Allow different constants for different vector
5638 stmts generated for an SLP instance. */
5639 for (k = 0;
5640 k < (slp_node ? slp_node->vec_stmts_size - 1 : ncopies - 1); k++)
5641 vec_oprnds1.quick_push (vec_oprnd1);
5642 }
5643 }
5644 else if (!scalar_shift_arg && slp_node && incompatible_op1_vectype_p)
5645 {
5646 if (was_scalar_shift_arg)
5647 {
5648 /* If the argument was the same in all lanes create
5649 the correctly typed vector shift amount directly. */
5650 op1 = fold_convert (TREE_TYPE (vectype), op1)fold_convert_loc (((location_t) 0), ((contains_struct_check (
(vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5650, __FUNCTION__))->typed.type), op1)
;
5651 op1 = vect_init_vector (vinfo, stmt_info, op1, TREE_TYPE (vectype)((contains_struct_check ((vectype), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5651, __FUNCTION__))->typed.type)
,
5652 !loop_vinfo ? gsi : NULLnullptr);
5653 vec_oprnd1 = vect_init_vector (vinfo, stmt_info, op1, vectype,
5654 !loop_vinfo ? gsi : NULLnullptr);
5655 vec_oprnds1.create (slp_node->vec_stmts_size);
5656 for (k = 0; k < slp_node->vec_stmts_size; k++)
5657 vec_oprnds1.quick_push (vec_oprnd1);
5658 }
5659 else if (dt[1] == vect_constant_def)
5660 /* The constant shift amount has been adjusted in place. */
5661 ;
5662 else
5663 gcc_assert (TYPE_MODE (op1_vectype) == TYPE_MODE (vectype))((void)(!(((((enum tree_code) ((tree_class_check ((op1_vectype
), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5663, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(op1_vectype) : (op1_vectype)->type_common.mode) == ((((enum
tree_code) ((tree_class_check ((vectype), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5663, __FUNCTION__)))->base.code) == VECTOR_TYPE) ? vector_type_mode
(vectype) : (vectype)->type_common.mode)) ? fancy_abort (
"/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5663, __FUNCTION__), 0 : 0))
;
5664 }
5665
5666 /* vec_oprnd1 is available if operand 1 should be of a scalar-type
5667 (a special case for certain kind of vector shifts); otherwise,
5668 operand 1 should be of a vector type (the usual case). */
5669 vect_get_vec_defs (vinfo, stmt_info, slp_node, ncopies,
5670 op0, &vec_oprnds0,
5671 vec_oprnd1 ? NULL_TREE(tree) nullptr : op1, &vec_oprnds1);
5672
5673 /* Arguments are ready. Create the new vector stmt. */
5674 FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)for (i = 0; (vec_oprnds0).iterate ((i), &(vop0)); ++(i))
5675 {
5676 /* For internal defs where we need to use a scalar shift arg
5677 extract the first lane. */
5678 if (scalar_shift_arg && dt[1] == vect_internal_def)
5679 {
5680 vop1 = vec_oprnds1[0];
5681 new_temp = make_ssa_name (TREE_TYPE (TREE_TYPE (vop1))((contains_struct_check ((((contains_struct_check ((vop1), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5681, __FUNCTION__))->typed.type)), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5681, __FUNCTION__))->typed.type)
);
5682 gassign *new_stmt
5683 = gimple_build_assign (new_temp,
5684 build3 (BIT_FIELD_REF, TREE_TYPE (new_temp)((contains_struct_check ((new_temp), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5684, __FUNCTION__))->typed.type)
,
5685 vop1,
5686 TYPE_SIZE (TREE_TYPE (new_temp))((tree_class_check ((((contains_struct_check ((new_temp), (TS_TYPED
), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5686, __FUNCTION__))->typed.type)), (tcc_type), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5686, __FUNCTION__))->type_common.size)
,
5687 bitsize_zero_nodeglobal_trees[TI_BITSIZE_ZERO]));
5688 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5689 vop1 = new_temp;
5690 }
5691 else
5692 vop1 = vec_oprnds1[i];
5693 gassign *new_stmt = gimple_build_assign (vec_dest, code, vop0, vop1);
5694 new_temp = make_ssa_name (vec_dest, new_stmt);
5695 gimple_assign_set_lhs (new_stmt, new_temp);
5696 vect_finish_stmt_generation (vinfo, stmt_info, new_stmt, gsi);
5697 if (slp_node)
5698 SLP_TREE_VEC_STMTS (slp_node)(slp_node)->vec_stmts.quick_push (new_stmt);
5699 else
5700 STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts.safe_push (new_stmt);
5701 }
5702
5703 if (!slp_node)
5704 *vec_stmt = STMT_VINFO_VEC_STMTS (stmt_info)(stmt_info)->vec_stmts[0];
5705
5706 vec_oprnds0.release ();
5707 vec_oprnds1.release ();
5708
5709 return true;
5710}
5711
5712
5713/* Function vectorizable_operation.
5714
5715 Check if STMT_INFO performs a binary, unary or ternary operation that can
5716 be vectorized.
5717 If VEC_STMT is also passed, vectorize STMT_INFO: create a vectorized
5718 stmt to replace it, put it in VEC_STMT, and insert it at GSI.
5719 Return true if STMT_INFO is vectorizable in this way. */
5720
5721static bool
5722vectorizable_operation (vec_info *vinfo,
5723 stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
5724 gimple **vec_stmt, slp_tree slp_node,
5725 stmt_vector_for_cost *cost_vec)
5726{
5727 tree vec_dest;
5728 tree scalar_dest;
5729 tree op0, op1 = NULL_TREE(tree) nullptr, op2 = NULL_TREE(tree) nullptr;
5730 tree vectype;
5731 loop_vec_info loop_vinfo = dyn_cast <loop_vec_info> (vinfo);
5732 enum tree_code code, orig_code;
5733 machine_mode vec_mode;
5734 tree new_temp;
5735 int op_type;
5736 optab optab;
5737 bool target_support_p;
5738 enum vect_def_type dt[3]
5739 = {vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type};
5740 int ndts = 3;
5741 poly_uint64 nunits_in;
5742 poly_uint64 nunits_out;
5743 tree vectype_out;
5744 int ncopies, vec_num;
5745 int i;
5746 vec<tree> vec_oprnds0 = vNULL;
5747 vec<tree> vec_oprnds1 = vNULL;
5748 vec<tree> vec_oprnds2 = vNULL;
5749 tree vop0, vop1, vop2;
5750 bb_vec_info bb_vinfo = dyn_cast <bb_vec_info> (vinfo);
5751
5752 if (!STMT_VINFO_RELEVANT_P (stmt_info)((stmt_info)->relevant != vect_unused_in_scope) && !bb_vinfo)
5753 return false;
5754
5755 if (STMT_VINFO_DEF_TYPE (stmt_info)(stmt_info)->def_type != vect_internal_def
5756 && ! vec_stmt)
5757 return false;
5758
5759 /* Is STMT a vectorizable binary/unary operation? */
5760 gassign *stmt = dyn_cast <gassign *> (stmt_info->stmt);
5761 if (!stmt)
5762 return false;
5763
5764 /* Loads and stores are handled in vectorizable_{load,store}. */
5765 if (STMT_VINFO_DATA_REF (stmt_info)((stmt_info)->dr_aux.dr + 0))
5766 return false;
5767
5768 orig_code = code = gimple_assign_rhs_code (stmt);
5769
5770 /* Shifts are handled in vectorizable_shift. */
5771 if (code == LSHIFT_EXPR
5772 || code == RSHIFT_EXPR
5773 || code == LROTATE_EXPR
5774 || code == RROTATE_EXPR)
5775 return false;
5776
5777 /* Comparisons are handled in vectorizable_comparison. */
5778 if (TREE_CODE_CLASS (code)tree_code_type[(int) (code)] == tcc_comparison)
5779 return false;
5780
5781 /* Conditions are handled in vectorizable_condition. */
5782 if (code == COND_EXPR)
5783 return false;
5784
5785 /* For pointer addition and subtraction, we should use the normal
5786 plus and minus for the vector operation. */
5787 if (code == POINTER_PLUS_EXPR)
5788 code = PLUS_EXPR;
5789 if (code == POINTER_DIFF_EXPR)
5790 code = MINUS_EXPR;
5791
5792 /* Support only unary or binary operations. */
5793 op_type = TREE_CODE_LENGTH (code)tree_code_length[(int) (code)];
5794 if (op_type != unary_op && op_type != binary_op && op_type != ternary_op)
5795 {
5796 if (dump_enabled_p ())
5797 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5798 "num. args = %d (not unary/binary/ternary op).\n",
5799 op_type);
5800 return false;
5801 }
5802
5803 scalar_dest = gimple_assign_lhs (stmt);
5804 vectype_out = STMT_VINFO_VECTYPE (stmt_info)(stmt_info)->vectype;
5805
5806 /* Most operations cannot handle bit-precision types without extra
5807 truncations. */
5808 bool mask_op_p = VECTOR_BOOLEAN_TYPE_P (vectype_out)(((enum tree_code) (vectype_out)->base.code) == VECTOR_TYPE
&& ((enum tree_code) (((contains_struct_check ((vectype_out
), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5808, __FUNCTION__))->typed.type))->base.code) == BOOLEAN_TYPE
)
;
5809 if (!mask_op_p
5810 && !type_has_mode_precision_p (TREE_TYPE (scalar_dest)((contains_struct_check ((scalar_dest), (TS_TYPED), "/home/marxin/BIG/buildbot/buildworker/marxinbox-gcc-clang-static-analyzer/build/gcc/tree-vect-stmts.c"
, 5810, __FUNCTION__))->typed.type)
)
5811 /* Exception are bitwise binary operations. */
5812 && code != BIT_IOR_EXPR
5813 && code != BIT_XOR_EXPR
5814 && code != BIT_AND_EXPR)
5815 {
5816 if (dump_enabled_p ())
5817 dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
5818 "bit-precision arithmetic not supported.\n");
5819 return false;
5820 }
5821
5822 slp_tree slp_op0;
5823 if (!vect_is_simple_use (vinfo, stmt_info, slp_node,
5824 0, &op0, &slp_op0, &dt[0], &vectype))
5825 {
5826 if (dump