LCOV - code coverage report
Current view: top level - gcc - explow.c (source / functions) Hit Total Coverage
Test: gcc.info Lines: 605 824 73.4 %
Date: 2020-03-28 11:57:23 Functions: 40 44 90.9 %
Legend: Lines: hit not hit | Branches: + taken - not taken # not executed Branches: 0 0 -

           Branch data     Line data    Source code
       1                 :            : /* Subroutines for manipulating rtx's in semantically interesting ways.
       2                 :            :    Copyright (C) 1987-2020 Free Software Foundation, Inc.
       3                 :            : 
       4                 :            : This file is part of GCC.
       5                 :            : 
       6                 :            : GCC is free software; you can redistribute it and/or modify it under
       7                 :            : the terms of the GNU General Public License as published by the Free
       8                 :            : Software Foundation; either version 3, or (at your option) any later
       9                 :            : version.
      10                 :            : 
      11                 :            : GCC is distributed in the hope that it will be useful, but WITHOUT ANY
      12                 :            : WARRANTY; without even the implied warranty of MERCHANTABILITY or
      13                 :            : FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
      14                 :            : for more details.
      15                 :            : 
      16                 :            : You should have received a copy of the GNU General Public License
      17                 :            : along with GCC; see the file COPYING3.  If not see
      18                 :            : <http://www.gnu.org/licenses/>.  */
      19                 :            : 
      20                 :            : 
      21                 :            : #include "config.h"
      22                 :            : #include "system.h"
      23                 :            : #include "coretypes.h"
      24                 :            : #include "target.h"
      25                 :            : #include "function.h"
      26                 :            : #include "rtl.h"
      27                 :            : #include "tree.h"
      28                 :            : #include "memmodel.h"
      29                 :            : #include "tm_p.h"
      30                 :            : #include "expmed.h"
      31                 :            : #include "profile-count.h"
      32                 :            : #include "optabs.h"
      33                 :            : #include "emit-rtl.h"
      34                 :            : #include "recog.h"
      35                 :            : #include "diagnostic-core.h"
      36                 :            : #include "stor-layout.h"
      37                 :            : #include "except.h"
      38                 :            : #include "dojump.h"
      39                 :            : #include "explow.h"
      40                 :            : #include "expr.h"
      41                 :            : #include "stringpool.h"
      42                 :            : #include "common/common-target.h"
      43                 :            : #include "output.h"
      44                 :            : 
      45                 :            : static rtx break_out_memory_refs (rtx);
      46                 :            : static void anti_adjust_stack_and_probe_stack_clash (rtx);
      47                 :            : 
      48                 :            : 
       49                 :            : /* Truncate and perhaps sign-extend C as appropriate for MODE.  */
       50                 :            : 
       51                 :            : HOST_WIDE_INT
       52                 : 3279710000 : trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
       53                 :            : {
       54                 :            :   /* Not scalar_int_mode because we also allow pointer bound modes.  */
       55                 : 3279710000 :   scalar_mode smode = as_a <scalar_mode> (mode);
       56                 : 3279710000 :   int width = GET_MODE_PRECISION (smode);
       57                 :            : 
       58                 :            :   /* You want to truncate to a _what_?  */
       59                 : 3279710000 :   gcc_assert (SCALAR_INT_MODE_P (mode));
       60                 :            : 
       61                 :            :   /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
       62                 : 3279710000 :   if (smode == BImode)
       63                 :          0 :     return c & 1 ? STORE_FLAG_VALUE : 0;
       64                 :            : 
       65                 :            :   /* Sign-extend for the requested mode.  */
       66                 :            : 
       67                 : 3279710000 :   if (width < HOST_BITS_PER_WIDE_INT)
       68                 :            :     {
                          :            :       /* Classic branch-free sign extension: mask C down to WIDTH
                          :            :          bits, then XOR and subtract the sign bit so a value with
                          :            :          bit WIDTH-1 set comes out negative.  */
       69                 : 2840750000 :       HOST_WIDE_INT sign = 1;
       70                 : 2840750000 :       sign <<= width - 1;
       71                 : 2840750000 :       c &= (sign << 1) - 1;
       72                 : 2840750000 :       c ^= sign;
       73                 : 2840750000 :       c -= sign;
       74                 :            :     }
       75                 :            : 
       76                 :            :   return c;
       77                 :            : }
      78                 :            : 
       79                 :            : /* Likewise for polynomial values, using the sign-extended representation
       80                 :            :    for each individual coefficient.  */
       81                 :            : 
       82                 :            : poly_int64
       83                 :  558207000 : trunc_int_for_mode (poly_int64 x, machine_mode mode)
       84                 :            : {
                          :            :   /* Truncate each coefficient independently through the scalar
                          :            :      overload above; X is passed by value, so the caller's copy is
                          :            :      unchanged.  */
       85                 : 1116410000 :   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
       86                 :  558207000 :     x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
       87                 :  558207000 :   return x;
       88                 :            : }
      89                 :            : 
       90                 :            : /* Return an rtx for the sum of X and the integer C, given that X has
       91                 :            :    mode MODE.  INPLACE is true if X can be modified inplace or false
       92                 :            :    if it must be treated as immutable.  */
       93                 :            : 
       94                 :            : rtx
       95                 :  214481000 : plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
       96                 :            : {
       97                 :  214481000 :   RTX_CODE code;
       98                 :  214481000 :   rtx y;
       99                 :  214481000 :   rtx tem;
      100                 :  214481000 :   int all_constant = 0;
      101                 :            : 
      102                 :  214481000 :   gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
      103                 :            : 
                          :            :   /* Adding zero is a no-op; return X unchanged.  */
      104                 :  214481000 :   if (known_eq (c, 0))
      105                 :            :     return x;
      106                 :            : 
                          :            :   /* The CONST case below strips its wrapper and jumps back here to
                          :            :      reprocess the inner expression with ALL_CONSTANT set.  */
      107                 :  187004000 :  restart:
      108                 :            : 
      109                 :  187451000 :   code = GET_CODE (x);
      110                 :  187451000 :   y = x;
      111                 :            : 
      112                 :  187451000 :   switch (code)
      113                 :            :     {
      114                 :   41015800 :     CASE_CONST_SCALAR_INT:
      115                 :   41015800 :       return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);
      116                 :     177002 :     case MEM:
      117                 :            :       /* If this is a reference to the constant pool, try replacing it with
      118                 :            :          a reference to a new constant.  If the resulting address isn't
      119                 :            :          valid, don't return it because we have no way to validize it.  */
      120                 :     177002 :       if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
      121                 :     177002 :           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
      122                 :            :         {
      123                 :          0 :           rtx cst = get_pool_constant (XEXP (x, 0));
      124                 :            : 
      125                 :          0 :           if (GET_CODE (cst) == CONST_VECTOR
      126                 :          0 :               && GET_MODE_INNER (GET_MODE (cst)) == mode)
      127                 :            :             {
      128                 :          0 :               cst = gen_lowpart (mode, cst);
      129                 :          0 :               gcc_assert (cst);
      130                 :            :             }
      131                 :          0 :           else if (GET_MODE (cst) == VOIDmode
      132                 :          0 :                    && get_pool_mode (XEXP (x, 0)) != mode)
      133                 :            :             break;
      134                 :          0 :           if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
      135                 :            :             {
      136                 :          0 :               tem = plus_constant (mode, cst, c);
      137                 :          0 :               tem = force_const_mem (GET_MODE (x), tem);
      138                 :            :               /* Targets may disallow some constants in the constant pool, thus
      139                 :            :                  force_const_mem may return NULL_RTX.  */
      140                 :          0 :               if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
      141                 :            :                 return tem;
      142                 :            :             }
      143                 :            :         }
      144                 :            :       break;
      145                 :            : 
      146                 :     447006 :     case CONST:
      147                 :            :       /* If adding to something entirely constant, set a flag
      148                 :            :          so that we can add a CONST around the result.  */
      149                 :     447006 :       if (inplace && shared_const_p (x))
      150                 :            :         inplace = false;
      151                 :     447006 :       x = XEXP (x, 0);
      152                 :     447006 :       all_constant = 1;
      153                 :     447006 :       goto restart;
      154                 :            : 
      155                 :    3681090 :     case SYMBOL_REF:
      156                 :    3681090 :     case LABEL_REF:
      157                 :    3681090 :       all_constant = 1;
      158                 :    3681090 :       break;
      159                 :            : 
      160                 :   15450700 :     case PLUS:
      161                 :            :       /* The interesting case is adding the integer to a sum.  Look
      162                 :            :          for constant term in the sum and combine with C.  For an
      163                 :            :          integer constant term or a constant term that is not an
      164                 :            :          explicit integer, we combine or group them together anyway.
      165                 :            : 
      166                 :            :          We may not immediately return from the recursive call here, lest
      167                 :            :          all_constant gets lost.  */
      168                 :            : 
      169                 :   15450700 :       if (CONSTANT_P (XEXP (x, 1)))
      170                 :            :         {
      171                 :   14441800 :           rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
      172                 :   14441800 :           if (term == const0_rtx)
      173                 :      23096 :             x = XEXP (x, 0);
      174                 :   14418700 :           else if (inplace)
      175                 :          0 :             XEXP (x, 1) = term;
      176                 :            :           else
      177                 :   14418700 :             x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
                          :            :           /* C has been folded into TERM; don't add it again below.  */
      178                 :   14441800 :           c = 0;
      179                 :            :         }
      180                 :    1008880 :       else if (rtx *const_loc = find_constant_term_loc (&y))
      181                 :            :         {
      182                 :          0 :           if (!inplace)
      183                 :            :             {
      184                 :            :               /* We need to be careful since X may be shared and we can't
      185                 :            :                  modify it in place.  */
      186                 :          0 :               x = copy_rtx (x);
      187                 :          0 :               const_loc = find_constant_term_loc (&x);
      188                 :            :             }
      189                 :          0 :           *const_loc = plus_constant (mode, *const_loc, c, true);
      190                 :          0 :           c = 0;
      191                 :            :         }
      192                 :            :       break;
      193                 :            : 
      194                 :            :     default:
      195                 :            :       if (CONST_POLY_INT_P (x))
      196                 :            :         return immed_wide_int_const (const_poly_int_value (x) + c, mode);
      197                 :            :       break;
      198                 :            :     }
      199                 :            : 
                          :            :   /* If C was not absorbed above, materialize the addition.  */
      200                 :  145988000 :   if (maybe_ne (c, 0))
      201                 :  131546000 :     x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
      202                 :            : 
      203                 :  145988000 :   if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
      204                 :            :     return x;
      205                 :  145987000 :   else if (all_constant)
      206                 :    4127230 :     return gen_rtx_CONST (mode, x);
      207                 :            :   else
      208                 :            :     return x;
      209                 :            : }
     210                 :            : 
      211                 :            : /* If X is a sum, return a new sum like X but lacking any constant terms.
      212                 :            :    Add all the removed constant terms into *CONSTPTR.
      213                 :            :    X itself is not altered.  The result != X if and only if
      214                 :            :    it is not isomorphic to X.  */
      215                 :            : 
      216                 :            : rtx
      217                 :     232139 : eliminate_constant_term (rtx x, rtx *constptr)
      218                 :            : {
      219                 :     232139 :   rtx x0, x1;
      220                 :     232139 :   rtx tem;
      221                 :            : 
                          :            :   /* Only PLUS expressions can carry removable constant terms.  */
      222                 :     232139 :   if (GET_CODE (x) != PLUS)
      223                 :            :     return x;
      224                 :            : 
      225                 :            :   /* First handle constants appearing at this level explicitly.  */
      226                 :     103725 :   if (CONST_INT_P (XEXP (x, 1))
      227                 :      37695 :       && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
      228                 :            :                                            XEXP (x, 1))) != 0
      229                 :     141420 :       && CONST_INT_P (tem))
      230                 :            :     {
      231                 :      37695 :       *constptr = tem;
                          :            :       /* Recurse on the non-constant operand to strip deeper terms.  */
      232                 :      37695 :       return eliminate_constant_term (XEXP (x, 0), constptr);
      233                 :            :     }
      234                 :            : 
      235                 :      66030 :   tem = const0_rtx;
      236                 :      66030 :   x0 = eliminate_constant_term (XEXP (x, 0), &tem);
      237                 :      66030 :   x1 = eliminate_constant_term (XEXP (x, 1), &tem);
      238                 :      66030 :   if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      239                 :          0 :       && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
      240                 :            :                                            *constptr, tem)) != 0
      241                 :      66030 :       && CONST_INT_P (tem))
      242                 :            :     {
      243                 :          0 :       *constptr = tem;
      244                 :          0 :       return gen_rtx_PLUS (GET_MODE (x), x0, x1);
      245                 :            :     }
      246                 :            : 
      247                 :            :   return x;
      248                 :            : }
     249                 :            : 
     250                 :            : 
      251                 :            : /* Return a copy of X in which all memory references
      252                 :            :    and all constants that involve symbol refs
      253                 :            :    have been replaced with new temporary registers.
      254                 :            :    Also emit code to load the memory locations and constants
      255                 :            :    into those registers.
      256                 :            : 
      257                 :            :    If X contains no such constants or memory references,
      258                 :            :    X itself (not a copy) is returned.
      259                 :            : 
      260                 :            :    If a constant is found in the address that is not a legitimate constant
      261                 :            :    in an insn, it is left alone in the hope that it might be valid in the
      262                 :            :    address.
      263                 :            : 
      264                 :            :    X may contain no arithmetic except addition, subtraction and multiplication.
      265                 :            :    Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
      266                 :            : 
      267                 :            : static rtx
      268                 :   30147600 : break_out_memory_refs (rtx x)
      269                 :            : {
      270                 :   30147600 :   if (MEM_P (x)
      271                 :   30147600 :       || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
      272                 :    9586610 :           && GET_MODE (x) != VOIDmode))
      273                 :     240673 :     x = force_reg (GET_MODE (x), x);
      274                 :   29906900 :   else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
      275                 :   20069100 :            || GET_CODE (x) == MULT)
      276                 :            :     {
                          :            :       /* Recurse into both operands; rebuild X only if either changed,
                          :            :          so an untouched tree is returned unshared (see comment above).  */
      277                 :   10112600 :       rtx op0 = break_out_memory_refs (XEXP (x, 0));
      278                 :   10112600 :       rtx op1 = break_out_memory_refs (XEXP (x, 1));
      279                 :            : 
      280                 :   10112600 :       if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
      281                 :     108880 :         x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
      282                 :            :     }
      283                 :            : 
      284                 :   30147600 :   return x;
      285                 :            : }
     286                 :            : 
      287                 :            : /* Given X, a memory address in address space AS' pointer mode, convert it to
      288                 :            :    an address in the address space's address mode, or vice versa (TO_MODE says
      289                 :            :    which way).  We take advantage of the fact that pointers are not allowed to
      290                 :            :    overflow by commuting arithmetic operations over conversions so that address
      291                 :            :    arithmetic insns can be used. IN_CONST is true if this conversion is inside
      292                 :            :    a CONST. NO_EMIT is true if no insns should be emitted, and instead
      293                 :            :    it should return NULL if it can't be simplified without emitting insns.  */
      294                 :            : 
      295                 :            : rtx
      296                 :   20825600 : convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
      297                 :            :                                      rtx x, addr_space_t as ATTRIBUTE_UNUSED,
      298                 :            :                                      bool in_const ATTRIBUTE_UNUSED,
      299                 :            :                                      bool no_emit ATTRIBUTE_UNUSED)
      300                 :            : {
                          :            :   /* When the target does not define POINTERS_EXTEND_UNSIGNED, pointer
                          :            :      mode and address mode are identical, so only the no-op case is
                          :            :      legal and the conversion logic below is compiled out.  */
      301                 :            : #ifndef POINTERS_EXTEND_UNSIGNED
      302                 :            :   gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
      303                 :            :   return x;
      304                 :            : #else /* defined(POINTERS_EXTEND_UNSIGNED) */
      305                 :   20825600 :   scalar_int_mode pointer_mode, address_mode, from_mode;
      306                 :   20825600 :   rtx temp;
      307                 :   20825600 :   enum rtx_code code;
      308                 :            : 
      309                 :            :   /* If X already has the right mode, just return it.  */
      310                 :   20825600 :   if (GET_MODE (x) == to_mode)
      311                 :            :     return x;
      312                 :            : 
      313                 :       4245 :   pointer_mode = targetm.addr_space.pointer_mode (as);
      314                 :       4245 :   address_mode = targetm.addr_space.address_mode (as);
                          :            :   /* We convert between the two modes, so FROM_MODE is whichever of
                          :            :      the pair TO_MODE is not.  */
      315                 :       4245 :   from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
      316                 :            : 
      317                 :            :   /* Here we handle some special cases.  If none of them apply, fall through
      318                 :            :      to the default case.  */
      319                 :       4245 :   switch (GET_CODE (x))
      320                 :            :     {
      321                 :       4065 :     CASE_CONST_SCALAR_INT:
      322                 :      12195 :       if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
      323                 :            :         code = TRUNCATE;
      324                 :       4065 :       else if (POINTERS_EXTEND_UNSIGNED < 0)
      325                 :            :         break;
      326                 :       4065 :       else if (POINTERS_EXTEND_UNSIGNED > 0)
      327                 :       4065 :         code = ZERO_EXTEND;
      328                 :            :       else
      329                 :            :         code = SIGN_EXTEND;
      330                 :       4065 :       temp = simplify_unary_operation (code, to_mode, x, from_mode);
      331                 :       4065 :       if (temp)
      332                 :            :         return temp;
      333                 :            :       break;
      334                 :            : 
      335                 :         13 :     case SUBREG:
      336                 :         14 :       if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
      337                 :         13 :           && GET_MODE (SUBREG_REG (x)) == to_mode)
      338                 :            :         return SUBREG_REG (x);
      339                 :            :       break;
      340                 :            : 
      341                 :          0 :     case LABEL_REF:
      342                 :          0 :       temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
      343                 :          0 :       LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      344                 :          0 :       return temp;
      345                 :            : 
      346                 :        128 :     case SYMBOL_REF:
      347                 :        128 :       temp = shallow_copy_rtx (x);
      348                 :        128 :       PUT_MODE (temp, to_mode);
      349                 :            :       return temp;
      350                 :            : 
      351                 :          1 :     case CONST:
      352                 :          1 :       temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
      353                 :            :                                                   true, no_emit);
      354                 :          1 :       return temp ? gen_rtx_CONST (to_mode, temp) : temp;
      355                 :            : 
      356                 :         20 :     case PLUS:
      357                 :         20 :     case MULT:
      358                 :            :       /* For addition we can safely permute the conversion and addition
      359                 :            :          operation if one operand is a constant and converting the constant
      360                 :            :          does not change it or if one operand is a constant and we are
      361                 :            :          using a ptr_extend instruction  (POINTERS_EXTEND_UNSIGNED < 0).
      362                 :            :          We can always safely permute them if we are making the address
      363                 :            :          narrower. Inside a CONST RTL, this is safe for both pointers
      364                 :            :          zero or sign extended as pointers cannot wrap. */
      365                 :         40 :       if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
      366                 :         20 :           || (GET_CODE (x) == PLUS
      367                 :         18 :               && CONST_INT_P (XEXP (x, 1))
      368                 :          5 :               && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
      369                 :            :                   || XEXP (x, 1) == convert_memory_address_addr_space_1
      370                 :          4 :                                      (to_mode, XEXP (x, 1), as, in_const,
      371                 :            :                                       no_emit)
      372                 :          4 :                   || POINTERS_EXTEND_UNSIGNED < 0)))
      373                 :            :         {
      374                 :          5 :           temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
      375                 :            :                                                       as, in_const, no_emit);
      376                 :          5 :           return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
      377                 :            :                                          temp, XEXP (x, 1))
      378                 :            :                        : temp);
      379                 :            :         }
      380                 :            :       break;
      381                 :            : 
      382                 :            :     default:
      383                 :            :       break;
      384                 :            :     }
      385                 :            : 
                          :            :   /* No special case applied; honor NO_EMIT before falling back to a
                          :            :      conversion that may emit insns.  */
      386                 :         34 :   if (no_emit)
      387                 :            :     return NULL_RTX;
      388                 :            : 
      389                 :         34 :   return convert_modes (to_mode, from_mode,
      390                 :         34 :                         x, POINTERS_EXTEND_UNSIGNED);
      391                 :            : #endif /* defined(POINTERS_EXTEND_UNSIGNED) */
      392                 :            : }
     393                 :            : 
      394                 :            : /* Given X, a memory address in address space AS' pointer mode, convert it to
      395                 :            :    an address in the address space's address mode, or vice versa (TO_MODE says
      396                 :            :    which way).  We take advantage of the fact that pointers are not allowed to
      397                 :            :    overflow by commuting arithmetic operations over conversions so that address
      398                 :            :    arithmetic insns can be used.  */
      399                 :            : 
      400                 :            : rtx
      401                 :   20825600 : convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
      402                 :            :                                    addr_space_t as)
      403                 :            : {
                          :            :   /* Convenience wrapper: not inside a CONST (in_const = false) and
                          :            :      insn emission is allowed (no_emit = false).  */
      404                 :   20825600 :   return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
      405                 :            : }
     406                 :            : 
     407                 :            : 
     408                 :            : /* Return something equivalent to X but valid as a memory address for something
     409                 :            :    of mode MODE in the named address space AS.  When X is not itself valid,
     410                 :            :    this works by copying X or subexpressions of it into registers.  */
     411                 :            : 
rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  /* The mode the target uses for addresses in address space AS.  */
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
	goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
	{
	  x = oldx;
	  goto done;
	}

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      {
	rtx orig_x = x;
	x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
	/* Only accept the hook's result if it actually changed X and
	   produced a valid address; otherwise fall through to the
	   generic fix-ups below.  */
	if (orig_x != x && memory_address_addr_space_p (mode, x, as))
	  goto done;
      }

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_addr_space_p (mode, y, as))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      /* Copy the constant-free part to a register and re-add
		 the constant; use that if it is a valid address.  */
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_addr_space_p (mode, y, as))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
     517                 :            : 
     518                 :            : /* Convert a mem ref into one with a valid memory address.
     519                 :            :    Pass through anything else unchanged.  */
     520                 :            : 
     521                 :            : rtx
     522                 :    4043050 : validize_mem (rtx ref)
     523                 :            : {
     524                 :    4043050 :   if (!MEM_P (ref))
     525                 :            :     return ref;
     526                 :    2538210 :   ref = use_anchored_address (ref);
     527                 :    5076420 :   if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
     528                 :    2538210 :                                    MEM_ADDR_SPACE (ref)))
     529                 :            :     return ref;
     530                 :            : 
     531                 :            :   /* Don't alter REF itself, since that is probably a stack slot.  */
     532                 :       9759 :   return replace_equiv_address (ref, XEXP (ref, 0));
     533                 :            : }
     534                 :            : 
     535                 :            : /* If X is a memory reference to a member of an object block, try rewriting
     536                 :            :    it to use an anchor instead.  Return the new memory reference on success
     537                 :            :    and the old one on failure.  */
     538                 :            : 
rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  /* Anchors only apply when -fsection-anchors is in effect.  */
  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      /* Address has the form (const (plus SYMBOL CONST_INT)):
	 peel off the constant displacement.  */
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
     591                 :            : 
     592                 :            : /* Copy the value or contents of X to a new temp reg and return that reg.  */
     593                 :            : 
     594                 :            : rtx
     595                 :     369428 : copy_to_reg (rtx x)
     596                 :            : {
     597                 :     369428 :   rtx temp = gen_reg_rtx (GET_MODE (x));
     598                 :            : 
     599                 :            :   /* If not an operand, must be an address with PLUS and MULT so
     600                 :            :      do the computation.  */
     601                 :     369428 :   if (! general_operand (x, VOIDmode))
     602                 :        273 :     x = force_operand (x, temp);
     603                 :            : 
     604                 :     369428 :   if (x != temp)
     605                 :     369427 :     emit_move_insn (temp, x);
     606                 :            : 
     607                 :     369428 :   return temp;
     608                 :            : }
     609                 :            : 
     610                 :            : /* Like copy_to_reg but always give the new register mode Pmode
     611                 :            :    in case X is a constant.  */
     612                 :            : 
rtx
copy_addr_to_reg (rtx x)
{
  /* Pmode is always a valid mode for an address, even when X is a
     constant with VOIDmode.  */
  return copy_to_mode_reg (Pmode, x);
}
     618                 :            : 
     619                 :            : /* Like copy_to_reg but always give the new register mode MODE
     620                 :            :    in case X is a constant.  */
     621                 :            : 
     622                 :            : rtx
     623                 :    1747880 : copy_to_mode_reg (machine_mode mode, rtx x)
     624                 :            : {
     625                 :    1747880 :   rtx temp = gen_reg_rtx (mode);
     626                 :            : 
     627                 :            :   /* If not an operand, must be an address with PLUS and MULT so
     628                 :            :      do the computation.  */
     629                 :    1747880 :   if (! general_operand (x, VOIDmode))
     630                 :     282366 :     x = force_operand (x, temp);
     631                 :            : 
     632                 :    1747880 :   gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
     633                 :    1747880 :   if (x != temp)
     634                 :    1515700 :     emit_move_insn (temp, x);
     635                 :    1747880 :   return temp;
     636                 :            : }
     637                 :            : 
     638                 :            : /* Load X into a register if it is not already one.
     639                 :            :    Use mode MODE for the register.
     640                 :            :    X should be valid for mode MODE, but it may be a constant which
     641                 :            :    is valid for all integer modes; that's why caller must specify MODE.
     642                 :            : 
     643                 :            :    The caller must not alter the value in the register we return,
     644                 :            :    since we mark it as a "constant" register.  */
     645                 :            : 
rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  /* Already a register: nothing to do.  */
  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      /* X can be moved directly: emit a single move into a fresh pseudo.  */
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      /* X needs computing (e.g. an address expression).  */
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
	/* Symbols are at least byte-aligned; the decl may say more.  */
	align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
	/* (const (plus SYMBOL CONST_INT)): the alignment is limited by
	   both the symbol's alignment and the offset's low zero bits.  */
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

	if (INTVAL (c) == 0)
	  align = sa;
	else
	  {
	    ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
	    align = MIN (sa, ca);
	  }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
     722                 :            : 
     723                 :            : /* If X is a memory ref, copy its contents to a new temp reg and return
     724                 :            :    that reg.  Otherwise, return X.  */
     725                 :            : 
     726                 :            : rtx
     727                 :     565118 : force_not_mem (rtx x)
     728                 :            : {
     729                 :     565118 :   rtx temp;
     730                 :            : 
     731                 :     565118 :   if (!MEM_P (x) || GET_MODE (x) == BLKmode)
     732                 :            :     return x;
     733                 :            : 
     734                 :       5957 :   temp = gen_reg_rtx (GET_MODE (x));
     735                 :            : 
     736                 :       5957 :   if (MEM_POINTER (x))
     737                 :        930 :     REG_POINTER (temp) = 1;
     738                 :            : 
     739                 :       5957 :   emit_move_insn (temp, x);
     740                 :       5957 :   return temp;
     741                 :            : }
     742                 :            : 
     743                 :            : /* Copy X to TARGET (if it's nonzero and a reg)
     744                 :            :    or to a new temp reg and return that reg.
     745                 :            :    MODE is the mode to use for X in case it is a constant.  */
     746                 :            : 
     747                 :            : rtx
     748                 :     163409 : copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
     749                 :            : {
     750                 :     163409 :   rtx temp;
     751                 :            : 
     752                 :     163409 :   if (target && REG_P (target))
     753                 :            :     temp = target;
     754                 :            :   else
     755                 :     163405 :     temp = gen_reg_rtx (mode);
     756                 :            : 
     757                 :     163409 :   emit_move_insn (temp, x);
     758                 :     163409 :   return temp;
     759                 :            : }
     760                 :            : 
     761                 :            : /* Return the mode to use to pass or return a scalar of TYPE and MODE.
     762                 :            :    PUNSIGNEDP points to the signedness of the type and may be adjusted
     763                 :            :    to show what signedness to use on extension operations.
     764                 :            : 
     765                 :            :    FOR_RETURN is nonzero if the caller is promoting the return value
     766                 :            :    of FNDECL, else it is for promoting args.  */
     767                 :            : 
     768                 :            : machine_mode
     769                 :   22271200 : promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
     770                 :            :                        const_tree funtype, int for_return)
     771                 :            : {
     772                 :            :   /* Called without a type node for a libcall.  */
     773                 :   22271200 :   if (type == NULL_TREE)
     774                 :            :     {
     775                 :     152727 :       if (INTEGRAL_MODE_P (mode))
     776                 :      12926 :         return targetm.calls.promote_function_mode (NULL_TREE, mode,
     777                 :            :                                                     punsignedp, funtype,
     778                 :      12926 :                                                     for_return);
     779                 :            :       else
     780                 :            :         return mode;
     781                 :            :     }
     782                 :            : 
     783                 :   22118500 :   switch (TREE_CODE (type))
     784                 :            :     {
     785                 :   20037100 :     case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
     786                 :   20037100 :     case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
     787                 :   20037100 :     case POINTER_TYPE:   case REFERENCE_TYPE:
     788                 :   20037100 :       return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
     789                 :   20037100 :                                                   for_return);
     790                 :            : 
     791                 :            :     default:
     792                 :            :       return mode;
     793                 :            :     }
     794                 :            : }
     795                 :            : /* Return the mode to use to store a scalar of TYPE and MODE.
     796                 :            :    PUNSIGNEDP points to the signedness of the type and may be adjusted
     797                 :            :    to show what signedness to use on extension operations.  */
     798                 :            : 
     799                 :            : machine_mode
     800                 :   46305900 : promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
     801                 :            :               int *punsignedp ATTRIBUTE_UNUSED)
     802                 :            : {
     803                 :            : #ifdef PROMOTE_MODE
     804                 :   46305900 :   enum tree_code code;
     805                 :   46305900 :   int unsignedp;
     806                 :   46305900 :   scalar_mode smode;
     807                 :            : #endif
     808                 :            : 
     809                 :            :   /* For libcalls this is invoked without TYPE from the backends
     810                 :            :      TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     811                 :            :      case.  */
     812                 :   46305900 :   if (type == NULL_TREE)
     813                 :            :     return mode;
     814                 :            : 
     815                 :            :   /* FIXME: this is the same logic that was there until GCC 4.4, but we
     816                 :            :      probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     817                 :            :      is not defined.  The affected targets are M32C, S390, SPARC.  */
     818                 :            : #ifdef PROMOTE_MODE
     819                 :   46305900 :   code = TREE_CODE (type);
     820                 :   46305900 :   unsignedp = *punsignedp;
     821                 :            : 
     822                 :   46305900 :   switch (code)
     823                 :            :     {
     824                 :   33473400 :     case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
     825                 :   33473400 :     case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
     826                 :            :       /* Values of these types always have scalar mode.  */
     827                 :   33473400 :       smode = as_a <scalar_mode> (mode);
     828                 :   33473400 :       PROMOTE_MODE (smode, unsignedp, type);
     829                 :            :       *punsignedp = unsignedp;
     830                 :   46305900 :       return smode;
     831                 :            : 
     832                 :            : #ifdef POINTERS_EXTEND_UNSIGNED
     833                 :   10578600 :     case REFERENCE_TYPE:
     834                 :   10578600 :     case POINTER_TYPE:
     835                 :   10578600 :       *punsignedp = POINTERS_EXTEND_UNSIGNED;
     836                 :   21157200 :       return targetm.addr_space.address_mode
     837                 :   10578600 :                (TYPE_ADDR_SPACE (TREE_TYPE (type)));
     838                 :            : #endif
     839                 :            : 
     840                 :            :     default:
     841                 :            :       return mode;
     842                 :            :     }
     843                 :            : #else
     844                 :            :   return mode;
     845                 :            : #endif
     846                 :            : }
     847                 :            : 
     848                 :            : 
     849                 :            : /* Use one of promote_mode or promote_function_mode to find the promoted
     850                 :            :    mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
     851                 :            :    of DECL after promotion.  */
     852                 :            : 
     853                 :            : machine_mode
     854                 :    5805030 : promote_decl_mode (const_tree decl, int *punsignedp)
     855                 :            : {
     856                 :    5805030 :   tree type = TREE_TYPE (decl);
     857                 :    5805030 :   int unsignedp = TYPE_UNSIGNED (type);
     858                 :    5805030 :   machine_mode mode = DECL_MODE (decl);
     859                 :    5805030 :   machine_mode pmode;
     860                 :            : 
     861                 :    5805030 :   if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
     862                 :    4416020 :     pmode = promote_function_mode (type, mode, &unsignedp,
     863                 :    2208010 :                                    TREE_TYPE (current_function_decl), 1);
     864                 :    3597020 :   else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
     865                 :    6304950 :     pmode = promote_function_mode (type, mode, &unsignedp,
     866                 :    3152470 :                                    TREE_TYPE (current_function_decl), 2);
     867                 :            :   else
     868                 :     444551 :     pmode = promote_mode (type, mode, &unsignedp);
     869                 :            : 
     870                 :    5805030 :   if (punsignedp)
     871                 :     560883 :     *punsignedp = unsignedp;
     872                 :    5805030 :   return pmode;
     873                 :            : }
     874                 :            : 
     875                 :            : /* Return the promoted mode for name.  If it is a named SSA_NAME, it
     876                 :            :    is the same as promote_decl_mode.  Otherwise, it is the promoted
     877                 :            :    mode of a temp decl of same type as the SSA_NAME, if we had created
     878                 :            :    one.  */
     879                 :            : 
     880                 :            : machine_mode
     881                 :   48485800 : promote_ssa_mode (const_tree name, int *punsignedp)
     882                 :            : {
     883                 :   48485800 :   gcc_assert (TREE_CODE (name) == SSA_NAME);
     884                 :            : 
     885                 :            :   /* Partitions holding parms and results must be promoted as expected
     886                 :            :      by function.c.  */
     887                 :   48485800 :   if (SSA_NAME_VAR (name)
     888                 :   13054200 :       && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
     889                 :    9938440 :           || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
     890                 :            :     {
     891                 :    5346190 :       machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
     892                 :    5346190 :       if (mode != BLKmode)
     893                 :            :         return mode;
     894                 :            :     }
     895                 :            : 
     896                 :   43139800 :   tree type = TREE_TYPE (name);
     897                 :   43139800 :   int unsignedp = TYPE_UNSIGNED (type);
     898                 :   43139800 :   machine_mode pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);
     899                 :   43139800 :   if (punsignedp)
     900                 :    1130220 :     *punsignedp = unsignedp;
     901                 :            : 
     902                 :            :   return pmode;
     903                 :            : }
     904                 :            : 
     905                 :            : 
     906                 :            : 
     907                 :            : /* Controls the behavior of {anti_,}adjust_stack.  */
     908                 :            : static bool suppress_reg_args_size;
     909                 :            : 
     910                 :            : /* A helper for adjust_stack and anti_adjust_stack.  */
     911                 :            : 
     912                 :            : static void
     913                 :    1350420 : adjust_stack_1 (rtx adjust, bool anti_p)
     914                 :            : {
     915                 :    1350420 :   rtx temp;
     916                 :    1350420 :   rtx_insn *insn;
     917                 :            : 
     918                 :            :   /* Hereafter anti_p means subtract_p.  */
     919                 :    1350420 :   if (!STACK_GROWS_DOWNWARD)
     920                 :            :     anti_p = !anti_p;
     921                 :            : 
     922                 :    3041400 :   temp = expand_binop (Pmode,
     923                 :            :                        anti_p ? sub_optab : add_optab,
     924                 :            :                        stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
     925                 :            :                        OPTAB_LIB_WIDEN);
     926                 :            : 
     927                 :    1350420 :   if (temp != stack_pointer_rtx)
     928                 :          0 :     insn = emit_move_insn (stack_pointer_rtx, temp);
     929                 :            :   else
     930                 :            :     {
     931                 :    1350420 :       insn = get_last_insn ();
     932                 :    1350420 :       temp = single_set (insn);
     933                 :    1350420 :       gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
     934                 :            :     }
     935                 :            : 
     936                 :    1350420 :   if (!suppress_reg_args_size)
     937                 :    1338470 :     add_args_size_note (insn, stack_pointer_delta);
     938                 :    1350420 : }
     939                 :            : 
     940                 :            : /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
     941                 :            :    This pops when ADJUST is positive.  ADJUST need not be constant.  */
     942                 :            : 
     943                 :            : void
     944                 :     695188 : adjust_stack (rtx adjust)
     945                 :            : {
     946                 :     695188 :   if (adjust == const0_rtx)
     947                 :     695188 :     return;
     948                 :            : 
     949                 :            :   /* We expect all variable sized adjustments to be multiple of
     950                 :            :      PREFERRED_STACK_BOUNDARY.  */
     951                 :     695188 :   poly_int64 const_adjust;
     952                 :     695188 :   if (poly_int_rtx_p (adjust, &const_adjust))
     953                 :     695188 :     stack_pointer_delta -= const_adjust;
     954                 :            : 
     955                 :     695188 :   adjust_stack_1 (adjust, false);
     956                 :            : }
     957                 :            : 
     958                 :            : /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
     959                 :            :    This pushes when ADJUST is positive.  ADJUST need not be constant.  */
     960                 :            : 
     961                 :            : void
     962                 :    2334210 : anti_adjust_stack (rtx adjust)
     963                 :            : {
     964                 :    2334210 :   if (adjust == const0_rtx)
     965                 :    2334210 :     return;
     966                 :            : 
     967                 :            :   /* We expect all variable sized adjustments to be multiple of
     968                 :            :      PREFERRED_STACK_BOUNDARY.  */
     969                 :     655237 :   poly_int64 const_adjust;
     970                 :     655237 :   if (poly_int_rtx_p (adjust, &const_adjust))
     971                 :     643302 :     stack_pointer_delta += const_adjust;
     972                 :            : 
     973                 :     655237 :   adjust_stack_1 (adjust, true);
     974                 :            : }
     975                 :            : 
     976                 :            : /* Round the size of a block to be pushed up to the boundary required
     977                 :            :    by this machine.  SIZE is the desired size, which need not be constant.  */
     978                 :            : 
     979                 :            : static rtx
     980                 :      11935 : round_push (rtx size)
     981                 :            : {
     982                 :      11935 :   rtx align_rtx, alignm1_rtx;
     983                 :            : 
     984                 :      11935 :   if (!SUPPORTS_STACK_ALIGNMENT
     985                 :      11935 :       || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
     986                 :            :     {
     987                 :          0 :       int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
     988                 :            : 
     989                 :          0 :       if (align == 1)
     990                 :            :         return size;
     991                 :            : 
     992                 :          0 :       if (CONST_INT_P (size))
     993                 :            :         {
     994                 :          0 :           HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
     995                 :            : 
     996                 :          0 :           if (INTVAL (size) != new_size)
     997                 :          0 :             size = GEN_INT (new_size);
     998                 :          0 :           return size;
     999                 :            :         }
    1000                 :            : 
    1001                 :          0 :       align_rtx = GEN_INT (align);
    1002                 :          0 :       alignm1_rtx = GEN_INT (align - 1);
    1003                 :            :     }
    1004                 :            :   else
    1005                 :            :     {
    1006                 :            :       /* If crtl->preferred_stack_boundary might still grow, use
    1007                 :            :          virtual_preferred_stack_boundary_rtx instead.  This will be
    1008                 :            :          substituted by the right value in vregs pass and optimized
    1009                 :            :          during combine.  */
    1010                 :      11935 :       align_rtx = virtual_preferred_stack_boundary_rtx;
    1011                 :      12110 :       alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
    1012                 :            :                                    NULL_RTX);
    1013                 :            :     }
    1014                 :            : 
    1015                 :            :   /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
    1016                 :            :      but we know it can't.  So add ourselves and then do
    1017                 :            :      TRUNC_DIV_EXPR.  */
    1018                 :      12110 :   size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
    1019                 :            :                        NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1020                 :      12110 :   size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
    1021                 :            :                         NULL_RTX, 1);
    1022                 :      12110 :   size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
    1023                 :            : 
    1024                 :      11935 :   return size;
    1025                 :            : }
    1026                 :            : 
    1027                 :            : /* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
    1028                 :            :    to a previously-created save area.  If no save area has been allocated,
    1029                 :            :    this function will allocate one.  If a save area is specified, it
    1030                 :            :    must be of the proper mode.  */
    1031                 :            : 
    1032                 :            : void
    1033                 :       2908 : emit_stack_save (enum save_level save_level, rtx *psave)
    1034                 :            : {
    1035                 :       2908 :   rtx sa = *psave;
    1036                 :            :   /* The default is that we use a move insn and save in a Pmode object.  */
    1037                 :       2908 :   rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
    1038                 :       2924 :   machine_mode mode = STACK_SAVEAREA_MODE (save_level);
    1039                 :            : 
    1040                 :            :   /* See if this machine has anything special to do for this kind of save.  */
    1041                 :       2908 :   switch (save_level)
    1042                 :            :     {
    1043                 :       1525 :     case SAVE_BLOCK:
    1044                 :       1525 :       if (targetm.have_save_stack_block ())
    1045                 :          0 :         fcn = targetm.gen_save_stack_block;
    1046                 :            :       break;
    1047                 :          0 :     case SAVE_FUNCTION:
    1048                 :          0 :       if (targetm.have_save_stack_function ())
    1049                 :          0 :         fcn = targetm.gen_save_stack_function;
    1050                 :            :       break;
    1051                 :       1383 :     case SAVE_NONLOCAL:
    1052                 :       1383 :       if (targetm.have_save_stack_nonlocal ())
    1053                 :       1383 :         fcn = targetm.gen_save_stack_nonlocal;
    1054                 :            :       break;
    1055                 :            :     default:
    1056                 :            :       break;
    1057                 :            :     }
    1058                 :            : 
    1059                 :            :   /* If there is no save area and we have to allocate one, do so.  Otherwise
    1060                 :            :      verify the save area is the proper mode.  */
    1061                 :            : 
    1062                 :       2908 :   if (sa == 0)
    1063                 :            :     {
    1064                 :       1731 :       if (mode != VOIDmode)
    1065                 :            :         {
    1066                 :       1731 :           if (save_level == SAVE_NONLOCAL)
    1067                 :        412 :             *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
    1068                 :            :           else
    1069                 :       1525 :             *psave = sa = gen_reg_rtx (mode);
    1070                 :            :         }
    1071                 :            :     }
    1072                 :            : 
    1073                 :       2908 :   do_pending_stack_adjust ();
    1074                 :       2908 :   if (sa != 0)
    1075                 :       2908 :     sa = validize_mem (sa);
    1076                 :       2908 :   emit_insn (fcn (sa, stack_pointer_rtx));
    1077                 :       2908 : }
    1078                 :            : 
    1079                 :            : /* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
    1080                 :            :    area made by emit_stack_save.  If it is zero, we have nothing to do.  */
    1081                 :            : 
    1082                 :            : void
    1083                 :       2454 : emit_stack_restore (enum save_level save_level, rtx sa)
    1084                 :            : {
    1085                 :            :   /* The default is that we use a move insn.  */
    1086                 :       2454 :   rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
    1087                 :            : 
    1088                 :            :   /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
    1089                 :            :      STACK_POINTER and HARD_FRAME_POINTER.
    1090                 :            :      If stack_realign_fp, the x86 backend emits a prologue that aligns only
    1091                 :            :      STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
    1092                 :            :      aligned variables, which is reflected in ix86_can_eliminate.
    1093                 :            :      We normally still have the realigned STACK_POINTER that we can use.
    1094                 :            :      But if there is a stack restore still present at reload, it can trigger 
    1095                 :            :      mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
    1096                 :            :      FRAME_POINTER into a hard reg.
    1097                 :            :      To prevent this situation, we force need_drap if we emit a stack
    1098                 :            :      restore.  */
    1099                 :       2454 :   if (SUPPORTS_STACK_ALIGNMENT)
    1100                 :       2454 :     crtl->need_drap = true;
    1101                 :            : 
    1102                 :            :   /* See if this machine has anything special to do for this kind of save.  */
    1103                 :       2454 :   switch (save_level)
    1104                 :            :     {
    1105                 :       1384 :     case SAVE_BLOCK:
    1106                 :       1384 :       if (targetm.have_restore_stack_block ())
    1107                 :          0 :         fcn = targetm.gen_restore_stack_block;
    1108                 :            :       break;
    1109                 :          0 :     case SAVE_FUNCTION:
    1110                 :          0 :       if (targetm.have_restore_stack_function ())
    1111                 :          0 :         fcn = targetm.gen_restore_stack_function;
    1112                 :            :       break;
    1113                 :       1070 :     case SAVE_NONLOCAL:
    1114                 :       1070 :       if (targetm.have_restore_stack_nonlocal ())
    1115                 :       1070 :         fcn = targetm.gen_restore_stack_nonlocal;
    1116                 :            :       break;
    1117                 :            :     default:
    1118                 :            :       break;
    1119                 :            :     }
    1120                 :            : 
    1121                 :       2454 :   if (sa != 0)
    1122                 :            :     {
    1123                 :       2454 :       sa = validize_mem (sa);
    1124                 :            :       /* These clobbers prevent the scheduler from moving
    1125                 :            :          references to variable arrays below the code
    1126                 :            :          that deletes (pops) the arrays.  */
    1127                 :       2454 :       emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
    1128                 :       2454 :       emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    1129                 :            :     }
    1130                 :            : 
    1131                 :       2454 :   discard_pending_stack_adjust ();
    1132                 :            : 
    1133                 :       2454 :   emit_insn (fcn (stack_pointer_rtx, sa));
    1134                 :       2454 : }
    1135                 :            : 
    1136                 :            : /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
    1137                 :            :    function.  This should be called whenever we allocate or deallocate
    1138                 :            :    dynamic stack space.  */
    1139                 :            : 
    1140                 :            : void
    1141                 :        391 : update_nonlocal_goto_save_area (void)
    1142                 :            : {
    1143                 :        391 :   tree t_save;
    1144                 :        391 :   rtx r_save;
    1145                 :            : 
    1146                 :            :   /* The nonlocal_goto_save_area object is an array of N pointers.  The
    1147                 :            :      first one is used for the frame pointer save; the rest are sized by
    1148                 :            :      STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
    1149                 :            :      of the stack save area slots.  */
    1150                 :       1173 :   t_save = build4 (ARRAY_REF,
    1151                 :        391 :                    TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
    1152                 :            :                    cfun->nonlocal_goto_save_area,
    1153                 :            :                    integer_one_node, NULL_TREE, NULL_TREE);
    1154                 :        391 :   r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
    1155                 :            : 
    1156                 :        391 :   emit_stack_save (SAVE_NONLOCAL, &r_save);
    1157                 :        391 : }
    1158                 :            : 
    1159                 :            : /* Record a new stack level for the current function.  This should be called
    1160                 :            :    whenever we allocate or deallocate dynamic stack space.  */
    1161                 :            : 
    1162                 :            : void
    1163                 :      13175 : record_new_stack_level (void)
    1164                 :            : {
    1165                 :            :   /* Record the new stack level for nonlocal gotos.  */
    1166                 :      13175 :   if (cfun->nonlocal_goto_save_area)
    1167                 :          0 :     update_nonlocal_goto_save_area ();
    1168                 :            :  
    1169                 :            :   /* Record the new stack level for SJLJ exceptions.  */
    1170                 :      13175 :   if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    1171                 :          0 :     update_sjlj_context ();
    1172                 :      13175 : }
    1173                 :            : 
    1174                 :            : /* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET.  */
    1175                 :            : 
    1176                 :            : rtx
    1177                 :      11935 : align_dynamic_address (rtx target, unsigned required_align)
    1178                 :            : {
    1179                 :            :   /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
    1180                 :            :      but we know it can't.  So add ourselves and then do
    1181                 :            :      TRUNC_DIV_EXPR.  */
    1182                 :      12110 :   target = expand_binop (Pmode, add_optab, target,
    1183                 :      11935 :                          gen_int_mode (required_align / BITS_PER_UNIT - 1,
    1184                 :      12110 :                                        Pmode),
    1185                 :            :                          NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1186                 :      12110 :   target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
    1187                 :      11935 :                           gen_int_mode (required_align / BITS_PER_UNIT,
    1188                 :      12110 :                                         Pmode),
    1189                 :            :                           NULL_RTX, 1);
    1190                 :      12110 :   target = expand_mult (Pmode, target,
    1191                 :      11935 :                         gen_int_mode (required_align / BITS_PER_UNIT,
    1192                 :      12110 :                                       Pmode),
    1193                 :            :                         NULL_RTX, 1);
    1194                 :            : 
    1195                 :      11935 :   return target;
    1196                 :            : }
    1197                 :            : 
    1198                 :            : /* Return an rtx through *PSIZE, representing the size of an area of memory to
    1199                 :            :    be dynamically pushed on the stack.
    1200                 :            : 
    1201                 :            :    *PSIZE is an rtx representing the size of the area.
    1202                 :            : 
    1203                 :            :    SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
    1204                 :            :    parameter may be zero.  If so, a proper value will be extracted
    1205                 :            :    from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
    1206                 :            : 
    1207                 :            :    REQUIRED_ALIGN is the alignment (in bits) required for the region
    1208                 :            :    of memory.
    1209                 :            : 
    1210                 :            :    If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
    1211                 :            :    the additional size returned.  */
    1212                 :            : void
    1213                 :      11935 : get_dynamic_stack_size (rtx *psize, unsigned size_align,
    1214                 :            :                         unsigned required_align,
    1215                 :            :                         HOST_WIDE_INT *pstack_usage_size)
    1216                 :            : {
    1217                 :      11935 :   rtx size = *psize;
    1218                 :            : 
    1219                 :            :   /* Ensure the size is in the proper mode.  */
    1220                 :      12088 :   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    1221                 :          0 :     size = convert_to_mode (Pmode, size, 1);
    1222                 :            : 
    1223                 :      11935 :   if (CONST_INT_P (size))
    1224                 :            :     {
    1225                 :       2430 :       unsigned HOST_WIDE_INT lsb;
    1226                 :            : 
    1227                 :       2430 :       lsb = INTVAL (size);
    1228                 :       2430 :       lsb &= -lsb;
    1229                 :            : 
    1230                 :            :       /* Watch out for overflow truncating to "unsigned".  */
    1231                 :       2430 :       if (lsb > UINT_MAX / BITS_PER_UNIT)
    1232                 :            :         size_align = 1u << (HOST_BITS_PER_INT - 1);
    1233                 :            :       else
    1234                 :       2430 :         size_align = (unsigned)lsb * BITS_PER_UNIT;
    1235                 :            :     }
    1236                 :       9505 :   else if (size_align < BITS_PER_UNIT)
    1237                 :            :     size_align = BITS_PER_UNIT;
    1238                 :            : 
    1239                 :            :   /* We can't attempt to minimize alignment necessary, because we don't
    1240                 :            :      know the final value of preferred_stack_boundary yet while executing
    1241                 :            :      this code.  */
    1242                 :      11935 :   if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    1243                 :       1843 :     crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
    1244                 :            : 
    1245                 :            :   /* We will need to ensure that the address we return is aligned to
    1246                 :            :      REQUIRED_ALIGN.  At this point in the compilation, we don't always
    1247                 :            :      know the final value of the STACK_DYNAMIC_OFFSET used in function.c
    1248                 :            :      (it might depend on the size of the outgoing parameter lists, for
    1249                 :            :      example), so we must preventively align the value.  We leave space
    1250                 :            :      in SIZE for the hole that might result from the alignment operation.  */
    1251                 :            : 
    1252                 :      11935 :   unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
    1253                 :      11935 :   if (known_align == 0)
    1254                 :          0 :     known_align = BITS_PER_UNIT;
    1255                 :      11935 :   if (required_align > known_align)
    1256                 :            :     {
    1257                 :       2460 :       unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
    1258                 :       2605 :       size = plus_constant (Pmode, size, extra);
    1259                 :       2460 :       size = force_operand (size, NULL_RTX);
    1260                 :       2460 :       if (size_align > known_align)
    1261                 :            :         size_align = known_align;
    1262                 :            : 
    1263                 :       2460 :       if (flag_stack_usage_info && pstack_usage_size)
    1264                 :          0 :         *pstack_usage_size += extra;
    1265                 :            :     }
    1266                 :            : 
    1267                 :            :   /* Round the size to a multiple of the required stack alignment.
    1268                 :            :      Since the stack is presumed to be rounded before this allocation,
    1269                 :            :      this will maintain the required alignment.
    1270                 :            : 
    1271                 :            :      If the stack grows downward, we could save an insn by subtracting
    1272                 :            :      SIZE from the stack pointer and then aligning the stack pointer.
    1273                 :            :      The problem with this is that the stack pointer may be unaligned
    1274                 :            :      between the execution of the subtraction and alignment insns and
    1275                 :            :      some machines do not allow this.  Even on those that do, some
    1276                 :            :      signal handlers malfunction if a signal should occur between those
    1277                 :            :      insns.  Since this is an extremely rare event, we have no reliable
    1278                 :            :      way of knowing which systems have this problem.  So we avoid even
    1279                 :            :      momentarily mis-aligning the stack.  */
    1280                 :      11935 :   if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    1281                 :            :     {
    1282                 :      11935 :       size = round_push (size);
    1283                 :            : 
    1284                 :      11935 :       if (flag_stack_usage_info && pstack_usage_size)
    1285                 :            :         {
    1286                 :          1 :           int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
    1287                 :          1 :           *pstack_usage_size =
    1288                 :          1 :             (*pstack_usage_size + align - 1) / align * align;
    1289                 :            :         }
    1290                 :            :     }
    1291                 :            : 
    1292                 :      11935 :   *psize = size;
    1293                 :      11935 : }
    1294                 :            : 
    1295                 :            : /* Return the number of bytes to "protect" on the stack for -fstack-check.
    1296                 :            : 
    1297                 :            :    "protect" in the context of -fstack-check means how many bytes we
    1298                 :            :    should always ensure are available on the stack.  More importantly
    1299                 :            :    this is how many bytes are skipped when probing the stack.
    1300                 :            : 
    1301                 :            :    On some targets we want to reuse the -fstack-check prologue support
    1302                 :            :    to give a degree of protection against stack clashing style attacks.
    1303                 :            : 
    1304                 :            :    In that scenario we do not want to skip bytes before probing as that
    1305                 :            :    would render the stack clash protections useless.
    1306                 :            : 
    1307                 :            :    So we never use STACK_CHECK_PROTECT directly.  Instead we indirect though
    1308                 :            :    this helper which allows us to provide different values for
    1309                 :            :    -fstack-check and -fstack-clash-protection.  */
    1310                 :            : HOST_WIDE_INT
    1311                 :          0 : get_stack_check_protect (void)
    1312                 :            : {
    1313                 :          0 :   if (flag_stack_clash_protection)
    1314                 :            :     return 0;
    1315                 :          0 :  return STACK_CHECK_PROTECT;
    1316                 :            : }
    1317                 :            : 
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
   no such upper bound is known.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
			      unsigned required_align,
			      HOST_WIDE_INT max_size,
			      bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
	stack_usage_size = INTVAL (size);
      else if (REG_P (size))
	{
	  /* Look into the last emitted insn and see if we can deduce
	     something for the register.  */
	  rtx_insn *insn;
	  rtx set, note;
	  insn = get_last_insn ();
	  if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
	    {
	      if (CONST_INT_P (SET_SRC (set)))
		stack_usage_size = INTVAL (SET_SRC (set));
	      /* A REG_EQUAL/REG_EQUIV note may still pin down a constant
		 value even when the SET_SRC itself is not constant.  */
	      else if ((note = find_reg_equal_equiv_note (insn))
		       && CONST_INT_P (XEXP (note, 0)))
		stack_usage_size = INTVAL (XEXP (note, 0));
	    }
	}

      /* If the size is not constant, try the maximum size.  */
      if (stack_usage_size < 0)
	stack_usage_size = max_size;

      /* If the size is still not constant, we can't say anything.  */
      if (stack_usage_size < 0)
	{
	  current_function_has_unbounded_dynamic_stack_size = 1;
	  stack_usage_size = 0;
	}
    }

  /* Adjust SIZE (and STACK_USAGE_SIZE) for the required alignment.  */
  get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size);

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
	 of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
	current_function_has_unbounded_dynamic_stack_size = 1;
    }

  do_pending_stack_adjust ();

  final_label = NULL;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx_code_label *available_label;
      rtx ask, space, func;

      available_label = NULL;

      if (targetm.have_split_stack_space_check ())
	{
	  available_label = gen_label_rtx ();

	  /* This instruction will branch to AVAILABLE_LABEL if there
	     are SIZE bytes available on the stack.  */
	  emit_insn (targetm.gen_split_stack_space_check
		     (size, available_label));
	}

      /* The __morestack_allocate_stack_space function will allocate
	 memory using malloc.  If the alignment of the memory returned
	 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
	 make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
	ask = size;
      else
	ask = expand_binop (Pmode, add_optab, size,
			    gen_int_mode (required_align / BITS_PER_UNIT - 1,
					  Pmode),
			    NULL_RTX, 1, OPTAB_LIB_WIDEN);

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
				       ask, Pmode);

      /* With no space-check insn there is no fall-through path; the
	 library call is the only way to get the memory.  */
      if (available_label == NULL_RTX)
	return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

 /* We ought to be called always on the toplevel and stack ought to be aligned
    properly.  */
  gcc_assert (multiple_p (stack_pointer_delta,
			  PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
		       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (get_stack_check_protect (), size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
  if (targetm.have_allocate_stack ())
    {
      class expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (targetm.code_for_allocate_stack, 2, ops);
    }
  else
    {
      poly_int64 saved_stack_pointer_delta;

      /* When the stack grows upward, the allocated block starts at the
	 current dynamic stack address, so capture it before adjusting.  */
      if (!STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
	{
	  rtx available;
	  rtx_code_label *space_available = gen_label_rtx ();
	  if (STACK_GROWS_DOWNWARD)
	    available = expand_binop (Pmode, sub_optab,
				      stack_pointer_rtx, stack_limit_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);
	  else
	    available = expand_binop (Pmode, sub_optab,
				      stack_limit_rtx, stack_pointer_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);

	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
	  if (targetm.have_trap ())
	    emit_insn (targetm.gen_trap ());
	  else
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
	anti_adjust_stack_and_probe (size, false);
      else if (flag_stack_clash_protection)
	anti_adjust_stack_and_probe_stack_clash (size);
      else
	anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
	 The constant size alloca should preserve
	 crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

      /* When the stack grows downward, the new block starts at the
	 post-adjustment dynamic stack address.  */
      if (STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling: merge the fall-through (stack)
     allocation with the __morestack (heap) allocation at FINAL_LABEL.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  target = align_dynamic_address (target, required_align);

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level.  */
  record_new_stack_level ();

  return target;
}
    1573                 :            : 
    1574                 :            : /* Return an rtx representing the address of an area of memory already
    1575                 :            :    statically pushed onto the stack in the virtual stack vars area.  (It is
    1576                 :            :    assumed that the area is allocated in the function prologue.)
    1577                 :            : 
    1578                 :            :    Any required stack pointer alignment is preserved.
    1579                 :            : 
    1580                 :            :    OFFSET is the offset of the area into the virtual stack vars area.
    1581                 :            : 
    1582                 :            :    REQUIRED_ALIGN is the alignment (in bits) required for the region
    1583                 :            :    of memory.  */
    1584                 :            : 
    1585                 :            : rtx
    1586                 :          0 : get_dynamic_stack_base (poly_int64 offset, unsigned required_align)
    1587                 :            : {
    1588                 :          0 :   rtx target;
    1589                 :            : 
    1590                 :          0 :   if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    1591                 :          0 :     crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
    1592                 :            : 
    1593                 :          0 :   target = gen_reg_rtx (Pmode);
    1594                 :          0 :   emit_move_insn (target, virtual_stack_vars_rtx);
    1595                 :          0 :   target = expand_binop (Pmode, add_optab, target,
    1596                 :          0 :                          gen_int_mode (offset, Pmode),
    1597                 :            :                          NULL_RTX, 1, OPTAB_LIB_WIDEN);
    1598                 :          0 :   target = align_dynamic_address (target, required_align);
    1599                 :            : 
    1600                 :            :   /* Now that we've committed to a return value, mark its alignment.  */
    1601                 :          0 :   mark_reg_pointer (target, required_align);
    1602                 :            : 
    1603                 :          0 :   return target;
    1604                 :            : }
    1605                 :            : 
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

/* SYMBOL_REF for the front end's stack-checking routine, or NULL_RTX if
   none has been registered.  GTY-marked so the GC keeps it alive.  */
static GTY(()) rtx stack_check_libfunc;
    1611                 :            : 
    1612                 :            : void
    1613                 :          0 : set_stack_check_libfunc (const char *libfunc_name)
    1614                 :            : {
    1615                 :          0 :   gcc_assert (stack_check_libfunc == NULL_RTX);
    1616                 :          0 :   stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
    1617                 :          0 :   tree decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
    1618                 :            :                           get_identifier (libfunc_name), void_type_node);
    1619                 :          0 :   DECL_EXTERNAL (decl) = 1;
    1620                 :          0 :   SET_SYMBOL_REF_DECL (stack_check_libfunc, decl);
    1621                 :          0 : }
    1622                 :            : 
    1623                 :            : /* Emit one stack probe at ADDRESS, an address within the stack.  */
    1624                 :            : 
    1625                 :            : void
    1626                 :         76 : emit_stack_probe (rtx address)
    1627                 :            : {
    1628                 :         76 :   if (targetm.have_probe_stack_address ())
    1629                 :            :     {
    1630                 :          0 :       class expand_operand ops[1];
    1631                 :          0 :       insn_code icode = targetm.code_for_probe_stack_address;
    1632                 :          0 :       create_address_operand (ops, address);
    1633                 :          0 :       maybe_legitimize_operands (icode, 0, 1, ops);
    1634                 :          0 :       expand_insn (icode, 1, ops);
    1635                 :            :     }
    1636                 :            :   else
    1637                 :            :     {
    1638                 :         76 :       rtx memref = gen_rtx_MEM (word_mode, address);
    1639                 :            : 
    1640                 :         76 :       MEM_VOLATILE_P (memref) = 1;
    1641                 :         76 :       memref = validize_mem (memref);
    1642                 :            : 
    1643                 :            :       /* See if we have an insn to probe the stack.  */
    1644                 :         76 :       if (targetm.have_probe_stack ())
    1645                 :         76 :         emit_insn (targetm.gen_probe_stack (memref));
    1646                 :            :       else
    1647                 :          0 :         emit_move_insn (memref, const0_rtx);
    1648                 :            :     }
    1649                 :         76 : }
    1650                 :            : 
/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

/* Distance in bytes between consecutive stack probes.  */
#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

/* Direction-neutral helpers: growing the stack is a subtraction (with
   negated offsets) when the stack grows downward, an addition otherwise.  */
#if STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
    1667                 :            : 
    1668                 :            : void
    1669                 :          0 : probe_stack_range (HOST_WIDE_INT first, rtx size)
    1670                 :            : {
    1671                 :            :   /* First ensure SIZE is Pmode.  */
    1672                 :          0 :   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    1673                 :          0 :     size = convert_to_mode (Pmode, size, 1);
    1674                 :            : 
    1675                 :            :   /* Next see if we have a function to check the stack.  */
    1676                 :          0 :   if (stack_check_libfunc)
    1677                 :            :     {
    1678                 :          0 :       rtx addr = memory_address (Pmode,
    1679                 :            :                                  gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1680                 :            :                                                  stack_pointer_rtx,
    1681                 :            :                                                  plus_constant (Pmode,
    1682                 :            :                                                                 size, first)));
    1683                 :          0 :       emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
    1684                 :          0 :                          addr, Pmode);
    1685                 :            :     }
    1686                 :            : 
    1687                 :            :   /* Next see if we have an insn to check the stack.  */
    1688                 :          0 :   else if (targetm.have_check_stack ())
    1689                 :            :     {
    1690                 :          0 :       class expand_operand ops[1];
    1691                 :          0 :       rtx addr = memory_address (Pmode,
    1692                 :            :                                  gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1693                 :            :                                                  stack_pointer_rtx,
    1694                 :            :                                                  plus_constant (Pmode,
    1695                 :            :                                                                 size, first)));
    1696                 :          0 :       bool success;
    1697                 :          0 :       create_input_operand (&ops[0], addr, Pmode);
    1698                 :          0 :       success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
    1699                 :          0 :       gcc_assert (success);
    1700                 :            :     }
    1701                 :            : 
    1702                 :            :   /* Otherwise we have to generate explicit probes.  If we have a constant
    1703                 :            :      small number of them to generate, that's the easy case.  */
    1704                 :          0 :   else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    1705                 :            :     {
    1706                 :          0 :       HOST_WIDE_INT isize = INTVAL (size), i;
    1707                 :            :       rtx addr;
    1708                 :            : 
    1709                 :            :       /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
    1710                 :            :          it exceeds SIZE.  If only one probe is needed, this will not
    1711                 :            :          generate any code.  Then probe at FIRST + SIZE.  */
    1712                 :          0 :       for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
    1713                 :            :         {
    1714                 :          0 :           addr = memory_address (Pmode,
    1715                 :            :                                  plus_constant (Pmode, stack_pointer_rtx,
    1716                 :            :                                                 STACK_GROW_OFF (first + i)));
    1717                 :          0 :           emit_stack_probe (addr);
    1718                 :            :         }
    1719                 :            : 
    1720                 :          0 :       addr = memory_address (Pmode,
    1721                 :            :                              plus_constant (Pmode, stack_pointer_rtx,
    1722                 :            :                                             STACK_GROW_OFF (first + isize)));
    1723                 :          0 :       emit_stack_probe (addr);
    1724                 :            :     }
    1725                 :            : 
    1726                 :            :   /* In the variable case, do the same as above, but in a loop.  Note that we
    1727                 :            :      must be extra careful with variables wrapping around because we might be
    1728                 :            :      at the very top (or the very bottom) of the address space and we have to
    1729                 :            :      be able to handle this case properly; in particular, we use an equality
    1730                 :            :      test for the loop condition.  */
    1731                 :            :   else
    1732                 :            :     {
    1733                 :          0 :       rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
    1734                 :          0 :       rtx_code_label *loop_lab = gen_label_rtx ();
    1735                 :          0 :       rtx_code_label *end_lab = gen_label_rtx ();
    1736                 :            : 
    1737                 :            :       /* Step 1: round SIZE to the previous multiple of the interval.  */
    1738                 :            : 
    1739                 :            :       /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
    1740                 :          0 :       rounded_size
    1741                 :          0 :         = simplify_gen_binary (AND, Pmode, size,
    1742                 :          0 :                                gen_int_mode (-PROBE_INTERVAL, Pmode));
    1743                 :          0 :       rounded_size_op = force_operand (rounded_size, NULL_RTX);
    1744                 :            : 
    1745                 :            : 
    1746                 :            :       /* Step 2: compute initial and final value of the loop counter.  */
    1747                 :            : 
    1748                 :            :       /* TEST_ADDR = SP + FIRST.  */
    1749                 :          0 :       test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1750                 :            :                                                  stack_pointer_rtx,
    1751                 :            :                                                  gen_int_mode (first, Pmode)),
    1752                 :            :                                  NULL_RTX);
    1753                 :            : 
    1754                 :            :       /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
    1755                 :          0 :       last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1756                 :            :                                                  test_addr,
    1757                 :            :                                                  rounded_size_op), NULL_RTX);
    1758                 :            : 
    1759                 :            : 
    1760                 :            :       /* Step 3: the loop
    1761                 :            : 
    1762                 :            :          while (TEST_ADDR != LAST_ADDR)
    1763                 :            :            {
    1764                 :            :              TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
    1765                 :            :              probe at TEST_ADDR
    1766                 :            :            }
    1767                 :            : 
    1768                 :            :          probes at FIRST + N * PROBE_INTERVAL for values of N from 1
    1769                 :            :          until it is equal to ROUNDED_SIZE.  */
    1770                 :            : 
    1771                 :          0 :       emit_label (loop_lab);
    1772                 :            : 
    1773                 :            :       /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
    1774                 :          0 :       emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
    1775                 :            :                                end_lab);
    1776                 :            : 
    1777                 :            :       /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
    1778                 :          0 :       temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
    1779                 :          0 :                            gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
    1780                 :            :                            1, OPTAB_WIDEN);
    1781                 :            : 
    1782                 :          0 :       gcc_assert (temp == test_addr);
    1783                 :            : 
    1784                 :            :       /* Probe at TEST_ADDR.  */
    1785                 :          0 :       emit_stack_probe (test_addr);
    1786                 :            : 
    1787                 :          0 :       emit_jump (loop_lab);
    1788                 :            : 
    1789                 :          0 :       emit_label (end_lab);
    1790                 :            : 
    1791                 :            : 
    1792                 :            :       /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
    1793                 :            :          that SIZE is equal to ROUNDED_SIZE.  */
    1794                 :            : 
    1795                 :            :       /* TEMP = SIZE - ROUNDED_SIZE.  */
    1796                 :          0 :       temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
    1797                 :          0 :       if (temp != const0_rtx)
    1798                 :            :         {
    1799                 :          0 :           rtx addr;
    1800                 :            : 
    1801                 :          0 :           if (CONST_INT_P (temp))
    1802                 :            :             {
    1803                 :            :               /* Use [base + disp} addressing mode if supported.  */
    1804                 :          0 :               HOST_WIDE_INT offset = INTVAL (temp);
    1805                 :          0 :               addr = memory_address (Pmode,
    1806                 :            :                                      plus_constant (Pmode, last_addr,
    1807                 :            :                                                     STACK_GROW_OFF (offset)));
    1808                 :            :             }
    1809                 :            :           else
    1810                 :            :             {
    1811                 :            :               /* Manual CSE if the difference is not known at compile-time.  */
    1812                 :          0 :               temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
    1813                 :          0 :               addr = memory_address (Pmode,
    1814                 :            :                                      gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    1815                 :            :                                                      last_addr, temp));
    1816                 :            :             }
    1817                 :            : 
    1818                 :          0 :           emit_stack_probe (addr);
    1819                 :            :         }
    1820                 :            :     }
    1821                 :            : 
    1822                 :            :   /* Make sure nothing is scheduled before we are done.  */
    1823                 :          0 :   emit_insn (gen_blockage ());
    1824                 :          0 : }
    1825                 :            : 
/* Compute parameters for stack clash probing a dynamic stack
   allocation of SIZE bytes.

   On return:
     *ROUNDED_SIZE is SIZE rounded down to a multiple of the probing
       interval (an rtx; it may fold to a CONST_INT when SIZE is constant);
     *LAST_ADDR is the stack pointer value after the allocate/probe loop
       has allocated *ROUNDED_SIZE bytes (SP combined with *ROUNDED_SIZE
       via STACK_GROW_OP, so it works for both stack directions);
     *RESIDUAL is SIZE - *ROUNDED_SIZE, the bytes the loop does not cover;
     *PROBE_INTERVAL is the probing interval in bytes.

   Additionally we conditionally dump the type of probing that will
   be needed given the values computed.  */

void
compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
                                          rtx *residual,
                                          HOST_WIDE_INT *probe_interval,
                                          rtx size)
{
  /* Round SIZE down to a multiple of
     STACK_CLASH_PROTECTION_PROBE_INTERVAL (the param is the log2, so
     the interval is a power of two and the AND mask below is exact).  */
  *probe_interval
    = 1 << param_stack_clash_protection_probe_interval;
  *rounded_size = simplify_gen_binary (AND, Pmode, size,
                                        GEN_INT (-*probe_interval));

  /* Compute the value of the stack pointer for the last iteration.
     It's just SP + ROUNDED_SIZE.  */
  rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
  *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                              stack_pointer_rtx,
                                              rounded_size_op),
                              NULL_RTX);

  /* Compute any residuals not allocated by the loop above.  Residuals
     are just SIZE - ROUNDED_SIZE.  */
  *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);

  /* Dump key information to make writing tests easy.  */
  if (dump_file)
    {
      if (*rounded_size == CONST0_RTX (Pmode))
        fprintf (dump_file,
                 "Stack clash skipped dynamic allocation and probing loop.\n");
      else if (CONST_INT_P (*rounded_size)
               && INTVAL (*rounded_size) <= 4 * *probe_interval)
        fprintf (dump_file,
                 "Stack clash dynamic allocation and probing inline.\n");
      else if (CONST_INT_P (*rounded_size))
        fprintf (dump_file,
                 "Stack clash dynamic allocation and probing in "
                 "rotated loop.\n");
      else
        fprintf (dump_file,
                 "Stack clash dynamic allocation and probing in loop.\n");

      if (*residual != CONST0_RTX (Pmode))
        fprintf (dump_file,
                 "Stack clash dynamic allocation and probing residuals.\n");
      else
        fprintf (dump_file,
                 "Stack clash skipped dynamic allocation and "
                 "probing residuals.\n");
    }
}
    1885                 :            : 
    1886                 :            : /* Emit the start of an allocate/probe loop for stack
    1887                 :            :    clash protection.
    1888                 :            : 
    1889                 :            :    LOOP_LAB and END_LAB are returned for use when we emit the
    1890                 :            :    end of the loop.
    1891                 :            : 
    1892                 :            :    LAST addr is the value for SP which stops the loop.  */
    1893                 :            : void
    1894                 :         16 : emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
    1895                 :            :                                               rtx *end_lab,
    1896                 :            :                                               rtx last_addr,
    1897                 :            :                                               bool rotated)
    1898                 :            : {
    1899                 :            :   /* Essentially we want to emit any setup code, the top of loop
    1900                 :            :      label and the comparison at the top of the loop.  */
    1901                 :         16 :   *loop_lab = gen_label_rtx ();
    1902                 :         16 :   *end_lab = gen_label_rtx ();
    1903                 :            : 
    1904                 :         16 :   emit_label (*loop_lab);
    1905                 :         16 :   if (!rotated)
    1906                 :         16 :     emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
    1907                 :         16 :                              Pmode, 1, *end_lab);
    1908                 :         16 : }
    1909                 :            : 
    1910                 :            : /* Emit the end of a stack clash probing loop.
    1911                 :            : 
    1912                 :            :    This consists of just the jump back to LOOP_LAB and
    1913                 :            :    emitting END_LOOP after the loop.  */
    1914                 :            : 
    1915                 :            : void
    1916                 :         16 : emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
    1917                 :            :                                             rtx last_addr, bool rotated)
    1918                 :            : {
    1919                 :         16 :   if (rotated)
    1920                 :          0 :     emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
    1921                 :          0 :                              Pmode, 1, loop_lab);
    1922                 :            :   else
    1923                 :         16 :     emit_jump (loop_lab);
    1924                 :            : 
    1925                 :         16 :   emit_label (end_loop);
    1926                 :            : 
    1927                 :         16 : }
    1928                 :            : 
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.

   This is subtly different than anti_adjust_stack_and_probe to try and
   prevent stack-clash attacks

     1. It must assume no knowledge of the probing state, any allocation
        must probe.

        Consider the case of a 1 byte alloca in a loop.  If the sum of the
        allocations is large, then this could be used to jump the guard if
        probes were not emitted.

     2. It never skips probes, whereas anti_adjust_stack_and_probe will
        skip probes on the first couple PROBE_INTERVALs on the assumption
        they're done elsewhere.

     3. It only allocates and probes SIZE bytes, it does not need to
        allocate/probe beyond that because this probing style does not
        guarantee signal handling capability if the guard is hit.  */

static void
anti_adjust_stack_and_probe_stack_clash (rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can get here with a constant size on some targets.  */
  rtx rounded_size, last_addr, residual;
  HOST_WIDE_INT probe_interval, probe_range;
  bool target_probe_range_p = false;
  compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
                                            &residual, &probe_interval, size);

  /* Get the back-end specific probe ranges.  */
  probe_range = targetm.stack_clash_protection_alloca_probe_range ();
  target_probe_range_p = probe_range != 0;
  gcc_assert (probe_range >= 0);

  /* If no back-end specific range defined, default to the top of the newly
     allocated range.  */
  if (probe_range == 0)
    probe_range = probe_interval - GET_MODE_SIZE (word_mode);

  if (rounded_size != CONST0_RTX (Pmode))
    {
      if (CONST_INT_P (rounded_size)
          && INTVAL (rounded_size) <= 4 * probe_interval)
        {
          /* Small constant allocation: emit at most four inline
             allocate/probe pairs, one per PROBE_INTERVAL, with no loop.  */
          for (HOST_WIDE_INT i = 0;
               i < INTVAL (rounded_size);
               i += probe_interval)
            {
              anti_adjust_stack (GEN_INT (probe_interval));
              /* The prologue does not probe residuals.  Thus the offset
                 here to probe just beyond what the prologue had already
                 allocated.  */
              emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
                                               probe_range));

              /* Keep the scheduler from moving insns across the
                 allocate/probe pair.  */
              emit_insn (gen_blockage ());
            }
        }
      else
        {
          rtx loop_lab, end_loop;
          /* When ROUNDED_SIZE is a (large) compile-time constant the loop
             runs at least once, so the bottom-tested (rotated) form is
             used; otherwise test at the top in case the count is zero.  */
          bool rotate_loop = CONST_INT_P (rounded_size);
          emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
                                                        last_addr, rotate_loop);

          anti_adjust_stack (GEN_INT (probe_interval));

          /* The prologue does not probe residuals.  Thus the offset here
             to probe just beyond what the prologue had already
             allocated.  */
          emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
                                           probe_range));

          emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
                                                      last_addr, rotate_loop);
          /* Keep the scheduler from moving insns across the loop.  */
          emit_insn (gen_blockage ());
        }
    }

  if (residual != CONST0_RTX (Pmode))
    {
      rtx label = NULL_RTX;
      /* RESIDUAL could be zero at runtime and in that case *sp could
         hold live data.  Furthermore, we do not want to probe into the
         red zone.

         If TARGET_PROBE_RANGE_P then the target has promised it's safe to
         probe at offset 0.  In which case we no longer have to check for
         RESIDUAL == 0.  However we still need to probe at the right offset
         when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.

         If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at *sp
         on RESIDUAL != 0 at runtime if RESIDUAL is not a compile time constant.
         */
      anti_adjust_stack (residual);

      if (!CONST_INT_P (residual))
        {
          /* Runtime-variable residual: emit a conditional branch around
             the probe below.  With a target probe range we skip it when
             RESIDUAL < PROBE_RANGE (offset 0 was already probed just
             below); otherwise we skip it when RESIDUAL == 0.  */
          label = gen_label_rtx ();
          rtx_code op = target_probe_range_p ? LT : EQ;
          rtx probe_cmp_value = target_probe_range_p
            ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
            : CONST0_RTX (GET_MODE (residual));

          if (target_probe_range_p)
            emit_stack_probe (stack_pointer_rtx);

          emit_cmp_and_jump_insns (residual, probe_cmp_value,
                                   op, NULL_RTX, Pmode, 1, label);
        }

      /* X is the offset from SP at which to emit the residual probe.  */
      rtx x = NULL_RTX;

      /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
         by the ABI defined safe value.  */
      if (!CONST_INT_P (residual) && target_probe_range_p)
        x = GEN_INT (probe_range);
      /* If RESIDUAL is a constant but smaller than the ABI defined safe value,
         we still want to probe up, but the safest amount is a word.  */
      else if (target_probe_range_p)
        {
          if (INTVAL (residual) <= probe_range)
            x = GEN_INT (GET_MODE_SIZE (word_mode));
          else
            x = GEN_INT (probe_range);
        }
      else
      /* If nothing else, probe at the top of the new allocation.  */
        x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));

      emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));

      /* Keep the scheduler from moving insns across the probe.  */
      emit_insn (gen_blockage ());
      if (!CONST_INT_P (residual))
          emit_label (label);
    }
}
    2073                 :            : 
    2074                 :            : 
    2075                 :            : /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
    2076                 :            :    while probing it.  This pushes when SIZE is positive.  SIZE need not
    2077                 :            :    be constant.  If ADJUST_BACK is true, adjust back the stack pointer
    2078                 :            :    by plus SIZE at the end.  */
    2079                 :            : 
    2080                 :            : void
    2081                 :          2 : anti_adjust_stack_and_probe (rtx size, bool adjust_back)
    2082                 :            : {
    2083                 :            :   /* We skip the probe for the first interval + a small dope of 4 words and
    2084                 :            :      probe that many bytes past the specified size to maintain a protection
    2085                 :            :      area at the botton of the stack.  */
    2086                 :          2 :   const int dope = 4 * UNITS_PER_WORD;
    2087                 :            : 
    2088                 :            :   /* First ensure SIZE is Pmode.  */
    2089                 :          2 :   if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    2090                 :          0 :     size = convert_to_mode (Pmode, size, 1);
    2091                 :            : 
    2092                 :            :   /* If we have a constant small number of probes to generate, that's the
    2093                 :            :      easy case.  */
    2094                 :          2 :   if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    2095                 :            :     {
    2096                 :          2 :       HOST_WIDE_INT isize = INTVAL (size), i;
    2097                 :            :       bool first_probe = true;
    2098                 :            : 
    2099                 :            :       /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
    2100                 :            :          values of N from 1 until it exceeds SIZE.  If only one probe is
    2101                 :            :          needed, this will not generate any code.  Then adjust and probe
    2102                 :            :          to PROBE_INTERVAL + SIZE.  */
    2103                 :          2 :       for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
    2104                 :            :         {
    2105                 :          0 :           if (first_probe)
    2106                 :            :             {
    2107                 :          0 :               anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
    2108                 :          0 :               first_probe = false;
    2109                 :            :             }
    2110                 :            :           else
    2111                 :          0 :             anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
    2112                 :          0 :           emit_stack_probe (stack_pointer_rtx);
    2113                 :            :         }
    2114                 :            : 
    2115                 :          2 :       if (first_probe)
    2116                 :          2 :         anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
    2117                 :            :       else
    2118                 :          0 :         anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
    2119                 :          2 :       emit_stack_probe (stack_pointer_rtx);
    2120                 :            :     }
    2121                 :            : 
    2122                 :            :   /* In the variable case, do the same as above, but in a loop.  Note that we
    2123                 :            :      must be extra careful with variables wrapping around because we might be
    2124                 :            :      at the very top (or the very bottom) of the address space and we have to
    2125                 :            :      be able to handle this case properly; in particular, we use an equality
    2126                 :            :      test for the loop condition.  */
    2127                 :            :   else
    2128                 :            :     {
    2129                 :          0 :       rtx rounded_size, rounded_size_op, last_addr, temp;
    2130                 :          0 :       rtx_code_label *loop_lab = gen_label_rtx ();
    2131                 :          0 :       rtx_code_label *end_lab = gen_label_rtx ();
    2132                 :            : 
    2133                 :            : 
    2134                 :            :       /* Step 1: round SIZE to the previous multiple of the interval.  */
    2135                 :            : 
    2136                 :            :       /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
    2137                 :          0 :       rounded_size
    2138                 :          0 :         = simplify_gen_binary (AND, Pmode, size,
    2139                 :          0 :                                gen_int_mode (-PROBE_INTERVAL, Pmode));
    2140                 :          0 :       rounded_size_op = force_operand (rounded_size, NULL_RTX);
    2141                 :            : 
    2142                 :            : 
    2143                 :            :       /* Step 2: compute initial and final value of the loop counter.  */
    2144                 :            : 
    2145                 :            :       /* SP = SP_0 + PROBE_INTERVAL.  */
    2146                 :          0 :       anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
    2147                 :            : 
    2148                 :            :       /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
    2149                 :          0 :       last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
    2150                 :            :                                                  stack_pointer_rtx,
    2151                 :            :                                                  rounded_size_op), NULL_RTX);
    2152                 :            : 
    2153                 :            : 
    2154                 :            :       /* Step 3: the loop
    2155                 :            : 
    2156                 :            :          while (SP != LAST_ADDR)
    2157                 :            :            {
    2158                 :            :              SP = SP + PROBE_INTERVAL
    2159                 :            :              probe at SP
    2160                 :            :            }
    2161                 :            : 
    2162                 :            :          adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
    2163                 :            :          values of N from 1 until it is equal to ROUNDED_SIZE.  */
    2164                 :            : 
    2165                 :          0 :       emit_label (loop_lab);
    2166                 :            : 
    2167                 :            :       /* Jump to END_LAB if SP == LAST_ADDR.  */
    2168                 :          0 :       emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
    2169                 :          0 :                                Pmode, 1, end_lab);
    2170                 :            : 
    2171                 :            :       /* SP = SP + PROBE_INTERVAL and probe at SP.  */
    2172                 :          0 :       anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
    2173                 :          0 :       emit_stack_probe (stack_pointer_rtx);
    2174                 :            : 
    2175                 :          0 :       emit_jump (loop_lab);
    2176                 :            : 
    2177                 :          0 :       emit_label (end_lab);
    2178                 :            : 
    2179                 :            : 
    2180                 :            :       /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
    2181                 :            :          assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */
    2182                 :            : 
    2183                 :            :       /* TEMP = SIZE - ROUNDED_SIZE.  */
    2184                 :          0 :       temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
    2185                 :          0 :       if (temp != const0_rtx)
    2186                 :            :         {
    2187                 :            :           /* Manual CSE if the difference is not known at compile-time.  */
    2188                 :          0 :           if (GET_CODE (temp) != CONST_INT)
    2189                 :          0 :             temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
    2190                 :          0 :           anti_adjust_stack (temp);
    2191                 :          0 :           emit_stack_probe (stack_pointer_rtx);
    2192                 :            :         }
    2193                 :            :     }
    2194                 :            : 
    2195                 :            :   /* Adjust back and account for the additional first interval.  */
    2196                 :          2 :   if (adjust_back)
    2197                 :          2 :     adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
    2198                 :            :   else
    2199                 :          0 :     adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
    2200                 :          2 : }
    2201                 :            : 
    2202                 :            : /* Return an rtx representing the register or memory location
    2203                 :            :    in which a scalar value of data type VALTYPE
    2204                 :            :    was returned by a function call to function FUNC.
    2205                 :            :    FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
    2206                 :            :    function is known, otherwise 0.
    2207                 :            :    OUTGOING is 1 if on a machine with register windows this function
    2208                 :            :    should return the register in which the function will put its result
    2209                 :            :    and 0 otherwise.  */
    2210                 :            : 
    2211                 :            : rtx
    2212                 :   43305600 : hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
    2213                 :            :                      int outgoing ATTRIBUTE_UNUSED)
    2214                 :            : {
    2215                 :   43305600 :   rtx val;
    2216                 :            : 
    2217                 :   85161900 :   val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
    2218                 :            : 
    2219                 :   43305600 :   if (REG_P (val)
    2220                 :   43189000 :       && GET_MODE (val) == BLKmode)
    2221                 :            :     {
    2222                 :       3642 :       unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
    2223                 :       3642 :       opt_scalar_int_mode tmpmode;
    2224                 :            : 
    2225                 :            :       /* int_size_in_bytes can return -1.  We don't need a check here
    2226                 :            :          since the value of bytes will then be large enough that no
    2227                 :            :          mode will match anyway.  */
    2228                 :            : 
    2229                 :      12001 :       FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
    2230                 :            :         {
    2231                 :            :           /* Have we found a large enough mode?  */
    2232                 :      24002 :           if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
    2233                 :            :             break;
    2234                 :            :         }
    2235                 :            : 
    2236                 :       7284 :       PUT_MODE (val, tmpmode.require ());
    2237                 :            :     }
    2238                 :   43305600 :   return val;
    2239                 :            : }
    2240                 :            : 
    2241                 :            : /* Return an rtx representing the register or memory location
    2242                 :            :    in which a scalar value of mode MODE was returned by a library call.  */
    2243                 :            : 
    2244                 :            : rtx
    2245                 :      76707 : hard_libcall_value (machine_mode mode, rtx fun)
    2246                 :            : {
    2247                 :      76707 :   return targetm.calls.libcall_value (mode, fun);
    2248                 :            : }
    2249                 :            : 
    2250                 :            : /* Look up the tree code for a given rtx code
    2251                 :            :    to provide the arithmetic operation for real_arithmetic.
    2252                 :            :    The function returns an int because the caller may not know
    2253                 :            :    what `enum tree_code' means.  */
    2254                 :            : 
    2255                 :            : int
    2256                 :       4371 : rtx_to_tree_code (enum rtx_code code)
    2257                 :            : {
    2258                 :       4371 :   enum tree_code tcode;
    2259                 :            : 
    2260                 :       4371 :   switch (code)
    2261                 :            :     {
    2262                 :            :     case PLUS:
    2263                 :            :       tcode = PLUS_EXPR;
    2264                 :            :       break;
    2265                 :            :     case MINUS:
    2266                 :            :       tcode = MINUS_EXPR;
    2267                 :            :       break;
    2268                 :            :     case MULT:
    2269                 :            :       tcode = MULT_EXPR;
    2270                 :            :       break;
    2271                 :            :     case DIV:
    2272                 :            :       tcode = RDIV_EXPR;
    2273                 :            :       break;
    2274                 :            :     case SMIN:
    2275                 :            :       tcode = MIN_EXPR;
    2276                 :            :       break;
    2277                 :            :     case SMAX:
    2278                 :            :       tcode = MAX_EXPR;
    2279                 :            :       break;
    2280                 :            :     default:
    2281                 :            :       tcode = LAST_AND_UNUSED_TREE_CODE;
    2282                 :            :       break;
    2283                 :            :     }
    2284                 :       4371 :   return ((int) tcode);
    2285                 :            : }
    2286                 :            : 
    2287                 :            : #include "gt-explow.h"

Generated by: LCOV version 1.0

LCOV profile is generated on an x86_64 machine using the following configure options: configure --disable-bootstrap --enable-coverage=opt --enable-languages=c,c++,fortran,go,jit,lto --enable-host-shared. The GCC test suite is run with the built compiler.