/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "langhooks.h"
#include "common/common-target.h"
static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
      HOST_WIDE_INT sign = 1;
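/* Editor's sketch (hypothetical, not part of GCC): a self-contained
   version of the mask-then-sign-extend step above, using the classic
   xor-and-subtract trick on a two's complement host.  Assumes
   0 < width < 64; the names are illustrative only.  */

static long long
example_trunc_int (long long c, int width)
{
  unsigned long long mask = (1ULL << width) - 1;   /* Low WIDTH bits.  */
  unsigned long long sign = 1ULL << (width - 1);   /* The mode's sign bit.  */
  unsigned long long u = (unsigned long long) c & mask;

  /* If bit WIDTH-1 is set, (u ^ sign) - sign subtracts 2^WIDTH and
     yields the correct negative value; otherwise it is a no-op.  */
  return (long long) ((u ^ sign) - sign);
}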
/* Return an rtx for the sum of X and the integer C, given that X has

plus_constant (enum machine_mode mode, rtx x, HOST_WIDE_INT c)

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

      if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
        {
          double_int di_x = double_int::from_shwi (INTVAL (x));
          double_int di_c = double_int::from_shwi (c);

          double_int v = di_x.add_with_sign (di_c, false, &overflow);

          return immed_double_int_const (v, mode);
        }

      return gen_int_mode (UINTVAL (x) + c, mode);

      {
        double_int di_x = double_int::from_pair (CONST_DOUBLE_HIGH (x),
                                                 CONST_DOUBLE_LOW (x));
        double_int di_c = double_int::from_shwi (c);

        double_int v = di_x.add_with_sign (di_c, false, &overflow);

          /* Sorry, we have no way to represent overflows this wide.
             To fix, add constant support wider than CONST_DOUBLE.  */
          gcc_assert (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_DOUBLE_INT);

        return immed_double_int_const (v, mode);
      }
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
          tem = force_const_mem (GET_MODE (x), tem);
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
        }

      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */

      /* The interesting case is adding the integer to a sum.  Look
         for constant term in the sum and combine with C.  For an
         integer constant term or a constant term that is not an
         explicit integer, we combine or group them together anyway.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0),
                            plus_constant (mode, XEXP (x, 1), c));
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (mode, *const_loc, c);
        }

    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
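/* Editor's sketch (hypothetical, not GCC code): the overflow flag
   produced by double_int::add_with_sign in plus_constant can be
   computed for a single host word like this.  A signed addition
   overflows exactly when both operands have the same sign and the
   wrapped sum's sign differs; the sum itself is formed in unsigned
   arithmetic so it stays well defined.  */

static int
example_add_with_sign (long long a, long long b, long long *sum)
{
  unsigned long long u = (unsigned long long) a + (unsigned long long) b;

  *sum = (long long) u;
  return (a < 0) == (b < 0) && (*sum < 0) != (a < 0);
}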
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

eliminate_constant_term (rtx x, rtx *constptr)

  if (GET_CODE (x) != PLUS)

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
      && CONST_INT_P (tem))
      return eliminate_constant_term (XEXP (x, 0), constptr);

  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
      && CONST_INT_P (tem))
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
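/* Editor's sketch of the recursion above over a toy expression type
   (hypothetical, not GCC's rtx).  Integer leaves are folded into
   *CONSTPTR and the non-constant remainder is returned, sharing the
   input node when nothing changed.  Unlike the real code, which only
   folds when simplify_binary_operation succeeds, this toy folds
   unconditionally.  */

struct toy_expr
{
  int is_plus, is_const;
  struct toy_expr *op0, *op1;
  long long val;
};

static struct toy_expr *
toy_plus (struct toy_expr *a, struct toy_expr *b)
{
  struct toy_expr *e = XCNEW (struct toy_expr);  /* libiberty allocator.  */
  e->is_plus = 1, e->op0 = a, e->op1 = b;
  return e;
}

/* Return the non-constant part of X, or NULL if X was entirely
   constant; accumulate the constant part into *CONSTPTR.  */

static struct toy_expr *
toy_eliminate (struct toy_expr *x, long long *constptr)
{
  struct toy_expr *a, *b;

  if (x->is_const)
    {
      *constptr += x->val;
      return NULL;
    }
  if (!x->is_plus)
    return x;

  a = toy_eliminate (x->op0, constptr);
  b = toy_eliminate (x->op1, constptr);
  if (a == NULL)
    return b;
  if (b == NULL)
    return a;
  if (a == x->op0 && b == x->op1)
    return x;                   /* Unchanged: share X, as above.  */
  return toy_plus (a, b);
}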
/* Returns a tree for the size of EXP in bytes.  */

tree_expr_size (const_tree exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
    return size_in_bytes (TREE_TYPE (exp));

/* Return an rtx for the size in bytes of the value of EXP.  */

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
      size = tree_expr_size (exp);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

int_expr_size (tree exp)

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
      size = tree_expr_size (exp);

  if (size == 0 || !tree_fits_shwi_p (size))

  return tree_to_shwi (size);
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

break_out_memory_refs (rtx x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside

convert_memory_address_addr_space_1 (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                     rtx x, addr_space_t as ATTRIBUTE_UNUSED,

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
      else if (POINTERS_EXTEND_UNSIGNED < 0)
      else if (POINTERS_EXTEND_UNSIGNED > 0)
      temp = simplify_unary_operation (code, to_mode, x, from_mode);

      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);

      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);

      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);

      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space_1
                            (to_mode, XEXP (x, 0), as, true));

      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  Inside a CONST RTL, this is safe for both pointers
         zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
                  || XEXP (x, 1) == convert_memory_address_addr_space_1
                                      (to_mode, XEXP (x, 1), as, in_const)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space_1
                               (to_mode, XEXP (x, 0), as, in_const),

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
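/* Editor's note (sketch, not GCC code): why permuting a narrowing
   conversion with addition, as in the PLUS case above, is always
   safe.  Reduction modulo 2^N is a ring homomorphism, so the low N
   bits of a sum depend only on the low N bits of its operands.  The
   assert below holds for all inputs, assuming a 32-bit unsigned int
   host type.  */

static void
example_narrowing_commutes (unsigned long long a, unsigned long long b)
{
  unsigned int narrow_sum = (unsigned int) (a + b);
  unsigned int sum_of_narrow
    = (unsigned int) ((unsigned int) a + (unsigned int) b);

  gcc_assert (narrow_sum == sum_of_narrow);
}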
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

convert_memory_address_addr_space (enum machine_mode to_mode, rtx x, addr_space_t as)
  return convert_memory_address_addr_space_1 (to_mode, x, as, false);

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

    /* By passing constant addresses through registers
       we get a chance to cse them.  */
    if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
      x = force_reg (address_mode, x);

    /* We get better cse by rejecting indirect addressing at this stage.
       Let the combiner create indirect addresses where appropriate.
       For now, generate the code so that the subexpressions useful to share
       are visible.  But not if cse won't be done!  */

    if (! cse_not_expected && !REG_P (x))
      x = break_out_memory_refs (x);

    /* At this point, any valid address is accepted.  */
    if (memory_address_addr_space_p (mode, x, as))

    /* If it was valid before but breaking out memory refs invalidated it,
       use it the old way.  */
    if (memory_address_addr_space_p (mode, oldx, as))

    /* Perform machine-dependent transformations on X
       in certain cases.  This is not necessary since the code
       below can handle all possible cases, but machine-dependent
       transformations can make better code.  */

      x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
      if (orig_x != x && memory_address_addr_space_p (mode, x, as))

    /* PLUS and MULT can appear in special ways
       as the result of attempts to make an address usable for indexing.
       Usually they are dealt with by calling force_operand, below.
       But a sum containing constant terms is special
       if removing them makes the sum a valid address:
       then we generate that address in a register
       and index off of it.  We do this because it often makes
       shorter code, and because the addresses thus generated
       in registers often become common subexpressions.  */
    if (GET_CODE (x) == PLUS)
        rtx constant_term = const0_rtx;
        rtx y = eliminate_constant_term (x, &constant_term);
        if (constant_term == const0_rtx
            || ! memory_address_addr_space_p (mode, y, as))
          x = force_operand (x, NULL_RTX);

            y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
            if (! memory_address_addr_space_p (mode, y, as))
              x = force_operand (x, NULL_RTX);

    else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
      x = force_operand (x, NULL_RTX);

    /* If we have a register that's an invalid address,
       it must be a hard reg of the wrong class.  Copy it to a pseudo.  */

    /* Last resort: copy the value to a register, since
       the register is a valid address.  */
      x = force_reg (address_mode, x);

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);
/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

validize_mem (rtx ref)
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

use_anchored_address (rtx x)
  HOST_WIDE_INT offset;
  enum machine_mode mode;

  if (!flag_section_anchors)

  /* Split the address into a base and offset.  */
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
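/* Editor's sketch (plain integers, hypothetical names): the offset
   bookkeeping in use_anchored_address.  If SYM sits at offset SYM_OFF
   within its block and the chosen anchor at ANCHOR_OFF, then
   SYM + OFFSET is rewritten as ANCHOR + delta, where:  */

static long long
example_anchor_delta (long long sym_off, long long anchor_off,
                      long long offset)
{
  /* offset += SYM_OFF, then offset -= ANCHOR_OFF, as above.  */
  return sym_off + offset - anchor_off;
}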
/* Copy the value or contents of X to a new temp reg and return that reg.  */

  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

    emit_move_insn (temp, x);

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

copy_addr_to_reg (rtx x)
  return copy_to_mode_reg (Pmode, x);

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

copy_to_mode_reg (enum machine_mode mode, rtx x)
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
    emit_move_insn (temp, x);

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

force_reg (enum machine_mode mode, rtx x)

  if (general_operand (x, mode))
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);

      temp = force_operand (x, NULL_RTX);
        insn = get_last_insn ();
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
      if (GET_CODE (x) == SYMBOL_REF)
          align = BITS_PER_UNIT;
          if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
            align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      else if (GET_CODE (x) == LABEL_REF)
        align = BITS_PER_UNIT;
      else if (GET_CODE (x) == CONST
               && GET_CODE (XEXP (x, 0)) == PLUS
               && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
               && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
          rtx s = XEXP (XEXP (x, 0), 0);
          rtx c = XEXP (XEXP (x, 0), 1);

          if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
            sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

              ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
              align = MIN (sa, ca);

      if (align || (MEM_P (x) && MEM_POINTER (x)))
        mark_reg_pointer (temp, align);
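/* Editor's sketch (not GCC code) of the count-trailing-zeros step
   behind ctz_hwi above: the number of trailing zero bits in a nonzero
   offset C bounds the power of two dividing it, and hence the
   alignment that a SYMBOL_REF + C address can be credited with.
   Assumes c != 0.  */

static int
example_ctz (unsigned long long c)
{
  int n = 0;

  while ((c & 1) == 0)
    {
      c >>= 1;
      n++;
    }
  return n;
}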
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

force_not_mem (rtx x)

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)

  temp = gen_reg_rtx (GET_MODE (x));
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)

  if (target && REG_P (target))
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,

  switch (TREE_CODE (type))
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
  if (type == NULL_TREE)

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */

  code = TREE_CODE (type);
  unsignedp = *punsignedp;

    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;

#ifdef POINTERS_EXTEND_UNSIGNED
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));

/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

promote_decl_mode (const_tree decl, int *punsignedp)
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
    pmode = promote_mode (type, mode, &unsignedp);

    *punsignedp = unsignedp;
/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

adjust_stack_1 (rtx adjust, bool anti_p)

#ifndef STACK_GROWS_DOWNWARD
  /* Hereafter anti_p means subtract_p.  */

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

adjust_stack (rtx adjust)
  if (adjust == const0_rtx)

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

anti_adjust_stack (rtx adjust)
  if (adjust == const0_rtx)

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */
round_push (rtx size)
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (CONST_INT_P (size))
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);

      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
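/* Editor's sketch (not GCC code) of both rounding strategies above:
   the constant case computes (size + align - 1) / align * align
   directly, and the variable case expands the same add/divide/multiply
   sequence as RTL.  Assumes align > 0 and no overflow in the add.  */

static long long
example_round_push (long long size, long long align)
{
  return (size + align - 1) / align * align;
}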
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

emit_stack_save (enum save_level save_level, rtx *psave)

  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */

#ifdef HAVE_save_stack_block
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
#ifdef HAVE_save_stack_function
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
#ifdef HAVE_save_stack_nonlocal
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

      if (mode != VOIDmode)
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
            *psave = sa = gen_reg_rtx (mode);

  do_pending_stack_adjust ();
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

emit_stack_restore (enum save_level save_level, rtx sa)
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */

#ifdef HAVE_restore_stack_block
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
#ifdef HAVE_restore_stack_function
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
#ifdef HAVE_restore_stack_nonlocal
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;

      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));

  discard_pending_stack_adjust ();
  emit_insn (fcn (stack_pointer_rtx, sa));
/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

update_nonlocal_goto_save_area (void)

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  unsigned extra_align = 0;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx insn, set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
        size_align = (unsigned)lsb * BITS_PER_UNIT;
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;
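/* Editor's sketch (not GCC code) of the SIZE_ALIGN deduction above:
   the largest power of two dividing a constant SIZE is its lowest set
   bit, converted here from bytes to bits with the same overflow guard.
   Assumes 8-bit units and a 32-bit unsigned int.  */

static unsigned int
example_size_align (unsigned long long size)
{
  unsigned long long lsb = size & -size;  /* Lowest set bit of SIZE.  */

  if (lsb > UINT_MAX / 8)                 /* Guard the truncation.  */
    return 1u << 31;
  return (unsigned int) lsb * 8;          /* Bytes -> bits.  */
}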
  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);

      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
        extra_align = BITS_PER_UNIT;

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  extra_align = BITS_PER_UNIT;

      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (Pmode, size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage_info)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
      size = round_push (size);

      if (flag_stack_usage_info)
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
      rtx available_label, ask, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = expand_binop (Pmode, add_optab, size,
                            gen_int_mode (required_align / BITS_PER_UNIT - 1,
                            NULL_RTX, 1, OPTAB_LIB_WIDEN);

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,

      if (available_label == NULL_RTX)

      final_target = gen_reg_rtx (Pmode);
      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);

  do_pending_stack_adjust ();
  /* We ought always to be called at the top level, and the stack ought
     to be aligned
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (CODE_FOR_allocate_stack, 2, ops);

      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
            emit_insn (gen_trap ());
            error ("stack limits not supported on this target");
          emit_label (space_available);

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
        anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;

      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
      target = expand_binop (Pmode, add_optab, target,
                             gen_int_mode (required_align / BITS_PER_UNIT - 1,
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              gen_int_mode (required_align / BITS_PER_UNIT,
      target = expand_mult (Pmode, target,
                            gen_int_mode (required_align / BITS_PER_UNIT,

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();
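/* Editor's sketch (not GCC code) of the final alignment step above.
   Because SIZE was padded earlier to leave room for the hole, rounding
   the returned address up to the required alignment stays inside the
   allocation.  The add/divide/multiply form mirrors the expanded RTL
   and does not require the alignment to be a power of two.  */

static unsigned long long
example_align_returned_address (unsigned long long addr,
                                unsigned long long align_bytes)
{
  return (addr + align_bytes - 1) / align_bytes * align_bytes;
}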
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

set_stack_check_libfunc (const char *libfunc_name)
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);

/* Emit one stack probe at ADDRESS, an address within the stack.  */

emit_stack_probe (rtx address)
#ifdef HAVE_probe_stack_address
  if (HAVE_probe_stack_address)
    emit_insn (gen_probe_stack_address (address));

      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;

      /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
      if (HAVE_probe_stack)
        emit_insn (gen_probe_stack (memref));
        emit_move_insn (memref, const0_rtx);

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)

probe_stack_range (HOST_WIDE_INT first, rtx size)
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 plus_constant (Pmode,
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
                                 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 plus_constant (Pmode,
      create_input_operand (&ops[0], addr, Pmode);
      success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
      gcc_assert (success);

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
      HOST_WIDE_INT isize = INTVAL (size), i;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
         it exceeds SIZE.  If only one probe is needed, this will not
         generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
          addr = memory_address (Pmode,
                                 plus_constant (Pmode, stack_pointer_rtx,
                                                STACK_GROW_OFF (first + i)));
          emit_stack_probe (addr);

      addr = memory_address (Pmode,
                             plus_constant (Pmode, stack_pointer_rtx,
                                            STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);

      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 gen_int_mode (first, Pmode)),

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 rounded_size_op), NULL_RTX);

         while (TEST_ADDR != LAST_ADDR)
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);

      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
          if (CONST_INT_P (temp))
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (Pmode, last_addr,
                                                    STACK_GROW_OFF (offset)));

              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,

          emit_stack_probe (addr);

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
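/* Editor's sketch (hypothetical, not GCC code) of the variable-size
   probe loop above, for a downward-growing stack.  The probe callback,
   the 4096-byte interval, and the pointer arithmetic are illustrative
   assumptions.  Note the equality loop test: the counter moves by
   exactly one interval per iteration, so an inequality test could
   misbehave near the ends of the address space.  */

static void
example_probe_range (char *sp, unsigned long first, unsigned long size,
                     void (*probe) (char *))
{
  unsigned long interval = 4096;              /* Assumed PROBE_INTERVAL.  */
  unsigned long rounded = size & -interval;   /* Round down to a multiple.  */
  char *test = sp - first;
  char *last = sp - first - rounded;

  while (test != last)
    {
      test -= interval;                       /* TEST += interval, in the
                                                 direction of stack growth.  */
      probe (test);
    }
  if (rounded != size)
    probe (sp - first - size);                /* Final probe at FIRST + SIZE.  */
}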
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

anti_adjust_stack_and_probe (rtx size, bool adjust_back)
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);

        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);

      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 rounded_size_op), NULL_RTX);

         while (SP != LAST_ADDR)
             SP = SP + PROBE_INTERVAL

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);

      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);

  /* Adjust back and account for the additional first interval.  */
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result

hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

      && GET_MODE (val) == BLKmode)
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

hard_libcall_value (enum machine_mode mode, rtx fun)
  return targetm.calls.libcall_value (mode, fun);

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

rtx_to_tree_code (enum rtx_code code)
  enum tree_code tcode;

      tcode = LAST_AND_UNUSED_TREE_CODE;

  return ((int) tcode);

#include "gt-explow.h"