1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2013 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
32 #include "insn-attr.h"
37 #include "diagnostic-core.h"
41 #include "target-def.h"
/* String-equality shorthand used throughout this file.  */
46 #define streq(a,b) (strcmp (a, b) == 0)
/* Forward declaration; the definition appears later in this file.  */
49 static void v850_print_operand_address (FILE *, rtx);
51 /* Names of the various data areas used on the v850. */
52 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
53 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
55 /* Track the current data area set by the data area pragma (which
56 can be nested). Tested by check_default_data_area. */
57 data_area_stack_element * data_area_stack = NULL;
59 /* True if we don't need to check any more if the current
60 function is an interrupt handler. */
61 static int v850_interrupt_cache_p = FALSE;
/* Operands of a pending comparison, consumed when the compare/branch
   is actually expanded (see v850_gen_float_compare below).  */
63 rtx v850_compare_op0, v850_compare_op1;
65 /* Whether current function is an interrupt handler. */
66 static int v850_interrupt_p = FALSE;
/* Cached section handles for the v850 small-data areas
   (read-only sdata/zdata, tiny data, zero data, zero bss).  */
68 static GTY(()) section * rosdata_section;
69 static GTY(()) section * rozdata_section;
70 static GTY(()) section * tdata_section;
71 static GTY(()) section * zdata_section;
72 static GTY(()) section * zbss_section;
74 /* We use this to wrap all emitted insns in the prologue. */
/* NOTE(review): the wrapper's signature is not visible in this extract.
   The visible body marks every non-CLOBBER rtx as frame-related so
   dwarf2 CFI is emitted for it.  */
78 if (GET_CODE (x) != CLOBBER)
79 RTX_FRAME_RELATED_P (x) = 1;
83 /* Mark all the subexpressions of the PARALLEL rtx PAR as
84 frame-related. Return PAR.
86 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
87 PARALLEL rtx other than the first if they do not have the
88 FRAME_RELATED flag set on them. */
91 v850_all_frame_related (rtx par)
93 int len = XVECLEN (par, 0);
/* Only a PARALLEL has the vector we are about to walk.  */
96 gcc_assert (GET_CODE (par) == PARALLEL);
97 for (i = 0; i < len; i++)
/* F () sets RTX_FRAME_RELATED_P on each non-CLOBBER element.  */
98 F (XVECEXP (par, 0, i));
103 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
104 Specify whether to pass the argument by reference. */
107 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
108 enum machine_mode mode, const_tree type,
109 bool named ATTRIBUTE_UNUSED)
111 unsigned HOST_WIDE_INT size;
/* Prefer the tree type's size when TYPE is known; otherwise fall
   back to the machine mode's size (libcalls have no TYPE).  */
117 size = int_size_in_bytes (type);
119 size = GET_MODE_SIZE (mode);
124 /* Return an RTX to represent where an argument with mode MODE
125 and type TYPE will be passed to a function. If the result
126 is NULL_RTX, the argument will be pushed. */
129 v850_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
130 const_tree type, bool named)
132 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
133 rtx result = NULL_RTX;
/* Size comes from the type when available, else from the mode.  */
140 size = int_size_in_bytes (type);
142 size = GET_MODE_SIZE (mode);
/* Round the size up to a whole number of words.  */
144 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
148 /* Once we have stopped using argument registers, do not start up again. */
149 cum->nbytes = 4 * UNITS_PER_WORD;
154 align = UNITS_PER_WORD;
155 else if (size <= UNITS_PER_WORD && type)
156 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
/* Round the running argument-byte count up to the alignment.  */
160 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
/* Only the first four words (r6-r9) are passed in registers.  */
162 if (cum->nbytes > 4 * UNITS_PER_WORD)
165 if (type == NULL_TREE
166 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Select the argument register r6..r9 by word position.  */
169 switch (cum->nbytes / UNITS_PER_WORD)
172 result = gen_rtx_REG (mode, 6);
175 result = gen_rtx_REG (mode, 7);
178 result = gen_rtx_REG (mode, 8);
181 result = gen_rtx_REG (mode, 9);
190 /* Return the number of bytes which must be put into registers
191 for values which are part in registers and part in memory. */
193 v850_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
194 tree type, bool named)
196 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Size from the type when known, otherwise from the mode.  */
203 size = int_size_in_bytes (type);
205 size = GET_MODE_SIZE (mode);
211 align = UNITS_PER_WORD;
213 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
/* Align the running byte count before deciding the split.  */
217 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
/* All argument registers already used: nothing goes in registers.  */
219 if (cum->nbytes > 4 * UNITS_PER_WORD)
/* Fits entirely in the remaining argument registers.  */
222 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
225 if (type == NULL_TREE
226 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Otherwise the value is split: the register part is whatever is
   left of the four argument words.  */
229 return 4 * UNITS_PER_WORD - cum->nbytes;
232 /* Update the data in CUM to advance over an argument
233 of mode MODE and data type TYPE.
234 (TYPE is null for libcalls where that information may not be available.) */
237 v850_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
238 const_tree type, bool named ATTRIBUTE_UNUSED)
240 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Advance by the word-rounded size of the argument.  BLKmode
   arguments take their size from the type.  */
243 cum->nbytes += (((mode != BLKmode
244 ? GET_MODE_SIZE (mode)
245 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1)
/* NOTE(review): this second accumulation (alternate ABI path?) charges
   only a pointer's worth for large (> 8 byte) aggregates — presumably
   they are passed by reference; intervening lines are missing from
   this extract, verify against the full source.  */
248 cum->nbytes += (((type && int_size_in_bytes (type) > 8
249 ? GET_MODE_SIZE (Pmode)
251 ? GET_MODE_SIZE (mode)
252 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
256 /* Return the high and low words of a CONST_DOUBLE */
259 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
261 if (GET_CODE (x) == CONST_DOUBLE)
266 switch (GET_MODE (x))
/* DFmode: convert to the target's two-word double image.  */
269 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
270 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
271 *p_high = t[1]; /* since v850 is little endian */
272 *p_low = t[0]; /* high is second word */
/* SFmode: single word only, stored in *p_high.  */
276 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
277 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
/* Integer CONST_DOUBLE: take the two halves directly.  */
283 *p_high = CONST_DOUBLE_HIGH (x);
284 *p_low = CONST_DOUBLE_LOW (x);
/* Anything else is a bug in the caller.  */
292 fatal_insn ("const_double_split got a bad insn:", x);
296 /* Return the cost of the rtx R with code CODE. */
/* Cost of the integer constant VALUE; ZERO_COST is the cost returned
   for the cheapest class.  The return statements for each constraint
   class are elided in this extract; the checks go from the tightest
   immediate range (I) to the widest (K).  */
299 const_costs_int (HOST_WIDE_INT value, int zero_cost)
301 if (CONST_OK_FOR_I (value))
303 else if (CONST_OK_FOR_J (value))
305 else if (CONST_OK_FOR_K (value))
/* Cost of the constant rtx R (code C): integers are costed directly,
   CONST_DOUBLEs are split and each word costed separately.  */
312 const_costs (rtx r, enum rtx_code c)
314 HOST_WIDE_INT high, low;
319 return const_costs_int (INTVAL (r), 0);
322 const_double_split (r, &high, &low);
/* A single-precision float occupies one word (in HIGH).  */
323 if (GET_MODE (r) == SFmode)
324 return const_costs_int (high, 1);
/* Otherwise cost both words.  */
326 return const_costs_int (high, 1) + const_costs_int (low, 1);
/* Implement TARGET_RTX_COSTS.  Writes the estimated cost of X into
   *TOTAL.  Several case labels and returns are elided in this
   extract.  */
342 v850_rtx_costs (rtx x,
344 int outer_code ATTRIBUTE_UNUSED,
345 int opno ATTRIBUTE_UNUSED,
346 int * total, bool speed)
348 enum rtx_code code = (enum rtx_code) codearg;
/* Constants: scale the per-word constant cost into insn counts.  */
357 *total = COSTS_N_INSNS (const_costs (x, code));
364 if (TARGET_V850E && !speed)
/* NOTE(review): these mode checks guard a shift/mul-style case whose
   label is not visible here.  */
372 && ( GET_MODE (x) == SImode
373 || GET_MODE (x) == HImode
374 || GET_MODE (x) == QImode))
376 if (GET_CODE (XEXP (x, 1)) == REG)
378 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
380 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
382 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
391 if (outer_code == COMPARE)
400 /* Print operand X using operand code CODE to assembly language output file
404 v850_print_operand (FILE * file, rtx x, int code)
406 HOST_WIDE_INT high, low;
411 /* We use 'c' operands with symbols for .vtinherit. */
412 if (GET_CODE (x) == SYMBOL_REF)
414 output_addr_const(file, x);
/* Condition-code operands: 'B'/'C' print the reversed condition,
   the plain form prints the condition as-is.  */
421 switch ((code == 'B' || code == 'C')
422 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
425 if (code == 'c' || code == 'C')
426 fprintf (file, "nz");
428 fprintf (file, "ne");
431 if (code == 'c' || code == 'C')
437 fprintf (file, "ge");
440 fprintf (file, "gt");
443 fprintf (file, "le");
446 fprintf (file, "lt");
449 fprintf (file, "nl");
455 fprintf (file, "nh");
464 case 'F': /* High word of CONST_DOUBLE. */
465 switch (GET_CODE (x))
468 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
472 const_double_split (x, &high, &low);
473 fprintf (file, "%ld", (long) high);
480 case 'G': /* Low word of CONST_DOUBLE. */
481 switch (GET_CODE (x))
484 fprintf (file, "%ld", (long) INTVAL (x));
488 const_double_split (x, &high, &low);
489 fprintf (file, "%ld", (long) low);
497 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
500 fprintf (file, "%d", exact_log2 (INTVAL (x)));
/* NOTE(review): case label elided — this arm prints the data-area
   offset relocation name (zdaoff/sdaoff/tdaoff) for a special
   symbol.  */
503 gcc_assert (special_symbolref_operand (x, VOIDmode));
505 if (GET_CODE (x) == CONST)
506 x = XEXP (XEXP (x, 0), 0);
508 gcc_assert (GET_CODE (x) == SYMBOL_REF);
510 if (SYMBOL_REF_ZDA_P (x))
511 fprintf (file, "zdaoff");
512 else if (SYMBOL_REF_SDA_P (x))
513 fprintf (file, "sdaoff");
514 else if (SYMBOL_REF_TDA_P (x))
515 fprintf (file, "tdaoff");
/* This arm prints the symbol itself.  */
520 gcc_assert (special_symbolref_operand (x, VOIDmode));
521 output_addr_const (file, x);
/* This arm prints the base register for the symbol's data area
   (r0 for zda, gp for sda, ep for tda).  */
524 gcc_assert (special_symbolref_operand (x, VOIDmode));
526 if (GET_CODE (x) == CONST)
527 x = XEXP (XEXP (x, 0), 0);
529 gcc_assert (GET_CODE (x) == SYMBOL_REF);
531 if (SYMBOL_REF_ZDA_P (x))
532 fprintf (file, "r0");
533 else if (SYMBOL_REF_SDA_P (x))
534 fprintf (file, "gp");
535 else if (SYMBOL_REF_TDA_P (x))
536 fprintf (file, "ep");
540 case 'R': /* 2nd word of a double. */
541 switch (GET_CODE (x))
544 fprintf (file, reg_names[REGNO (x) + 1]);
/* For memory, print the address displaced by 4 bytes.  */
547 x = XEXP (adjust_address (x, SImode, 4), 0);
548 v850_print_operand_address (file, x);
549 if (GET_CODE (x) == CONST_INT)
550 fprintf (file, "[r0]");
555 unsigned HOST_WIDE_INT v = INTVAL (x);
557 /* Trickery to avoid problems with shifting
558 32-bits at a time on a 32-bit host. */
561 fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
566 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
576 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
577 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
584 /* Like an 'S' operand above, but for unsigned loads only. */
585 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
590 case 'W': /* Print the instruction suffix. */
591 switch (GET_MODE (x))
596 case QImode: fputs (".b", file); break;
597 case HImode: fputs (".h", file); break;
598 case SImode: fputs (".w", file); break;
599 case SFmode: fputs (".w", file); break;
602 case '.': /* Register r0. */
603 fputs (reg_names[0], file);
605 case 'z': /* Reg or zero. */
607 fputs (reg_names[REGNO (x)], file);
608 else if ((GET_MODE(x) == SImode
609 || GET_MODE(x) == DFmode
610 || GET_MODE(x) == SFmode)
611 && x == CONST0_RTX(GET_MODE(x)))
612 fputs (reg_names[0], file);
615 gcc_assert (x == const0_rtx);
616 fputs (reg_names[0], file);
/* Default/no-code operand: dispatch on the rtx itself.  */
620 switch (GET_CODE (x))
623 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
624 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
627 output_address (XEXP (x, 0));
631 fputs (reg_names[REGNO (x)], file);
634 fputs (reg_names[subreg_regno (x)], file);
637 fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
645 v850_print_operand_address (file, x);
656 /* Output assembly language output for the address ADDR to FILE. */
659 v850_print_operand_address (FILE * file, rtx addr)
661 switch (GET_CODE (addr))
/* Plain register: print as a zero-displacement "0[reg]".  */
664 fprintf (file, "0[");
665 v850_print_operand (file, addr, 0);
/* LO_SUM: "lo(sym)[reg]".  */
669 if (GET_CODE (XEXP (addr, 0)) == REG)
672 fprintf (file, "lo(");
673 v850_print_operand (file, XEXP (addr, 1), 0);
674 fprintf (file, ")[");
675 v850_print_operand (file, XEXP (addr, 0), 0);
/* PLUS: "offset[reg]" when operand 0 is the base register,
   otherwise the operands are printed in source order.  */
680 if (GET_CODE (XEXP (addr, 0)) == REG
681 || GET_CODE (XEXP (addr, 0)) == SUBREG)
684 v850_print_operand (file, XEXP (addr, 1), 0);
686 v850_print_operand (file, XEXP (addr, 0), 0);
691 v850_print_operand (file, XEXP (addr, 0), 0);
693 v850_print_operand (file, XEXP (addr, 1), 0);
/* SYMBOL_REF in a small-data area: "<areaoff>(sym)[basereg]".  */
698 const char *off_name = NULL;
699 const char *reg_name = NULL;
701 if (SYMBOL_REF_ZDA_P (addr))
706 else if (SYMBOL_REF_SDA_P (addr))
711 else if (SYMBOL_REF_TDA_P (addr))
718 fprintf (file, "%s(", off_name);
719 output_addr_const (file, addr);
721 fprintf (file, ")[%s]", reg_name);
/* CONST wrapping a special symbol ref: same treatment, with the
   inner SYMBOL_REF deciding the data area.  */
725 if (special_symbolref_operand (addr, VOIDmode))
727 rtx x = XEXP (XEXP (addr, 0), 0);
728 const char *off_name;
729 const char *reg_name;
731 if (SYMBOL_REF_ZDA_P (x))
736 else if (SYMBOL_REF_SDA_P (x))
741 else if (SYMBOL_REF_TDA_P (x))
749 fprintf (file, "%s(", off_name);
750 output_addr_const (file, addr);
751 fprintf (file, ")[%s]", reg_name);
754 output_addr_const (file, addr);
757 output_addr_const (file, addr);
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P (body elided in this
   extract).  */
763 v850_print_operand_punct_valid_p (unsigned char code)
768 /* When assemble_integer is used to emit the offsets for a switch
769 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
770 output_addr_const will normally barf at this, but it is OK to omit
771 the truncate and just emit the difference of the two labels. The
772 .hword directive will automatically handle the truncation for us.
774 Returns true if rtx was handled, false otherwise. */
777 v850_output_addr_const_extra (FILE * file, rtx x)
779 if (GET_CODE (x) != TRUNCATE)
784 /* We must also handle the case where the switch table was passed a
785 constant value and so has been collapsed. In this case the first
786 label will have been deleted. In such a case it is OK to emit
787 nothing, since the table will not be used.
788 (cf gcc.c-torture/compile/990801-1.c). */
789 if (GET_CODE (x) == MINUS
790 && GET_CODE (XEXP (x, 0)) == LABEL_REF
791 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
792 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
/* Emit the label difference without the TRUNCATE wrapper.  */
795 output_addr_const (file, x);
799 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
/* Returns an assembler template string; operands[0] is the
   destination, operands[1] the source.  */
803 output_move_single (rtx * operands)
805 rtx dst = operands[0];
806 rtx src = operands[1];
/* Integer constants: pick the shortest encoding that fits.  */
813 else if (GET_CODE (src) == CONST_INT)
815 HOST_WIDE_INT value = INTVAL (src);
817 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
820 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
821 return "movea %1,%.,%0";
823 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
824 return "movhi hi0(%1),%.,%0";
826 /* A random constant. */
827 else if (TARGET_V850E || TARGET_V850E2_ALL)
830 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
/* SFmode float constants: same ladder, using the single word
   produced by const_double_split (in HIGH).  */
833 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
835 HOST_WIDE_INT high, low;
837 const_double_split (src, &high, &low);
839 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
842 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
843 return "movea %F1,%.,%0";
845 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
846 return "movhi hi0(%F1),%.,%0";
848 /* A random constant. */
849 else if (TARGET_V850E || TARGET_V850E2_ALL)
853 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
/* Loads: %S/%W pick the short-form prefix and size suffix.  */
856 else if (GET_CODE (src) == MEM)
857 return "%S1ld%W1 %1,%0";
859 else if (special_symbolref_operand (src, VOIDmode))
860 return "movea %O1(%P1),%Q1,%0";
861 else if (GET_CODE (src) == LABEL_REF
863 || GET_CODE (src) == SYMBOL_REF
864 || GET_CODE (src) == CONST)
866 if (TARGET_V850E || TARGET_V850E2_ALL)
867 return "mov hilo(%1),%0";
869 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
872 else if (GET_CODE (src) == HIGH)
873 return "movhi hi(%1),%.,%0";
875 else if (GET_CODE (src) == LO_SUM)
877 operands[2] = XEXP (src, 0);
878 operands[3] = XEXP (src, 1);
879 return "movea lo(%3),%2,%0";
/* Stores; zero sources go through r0 (%.).  */
883 else if (GET_CODE (dst) == MEM)
886 return "%S0st%W0 %1,%0";
888 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
889 return "%S0st%W0 %.,%0";
891 else if (GET_CODE (src) == CONST_DOUBLE
892 && CONST0_RTX (GET_MODE (dst)) == src)
893 return "%S0st%W0 %.,%0";
/* Anything else indicates an internal error.  */
896 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
/* Choose the condition-code mode for comparison COND on OP0/OP1.
   Floating-point comparisons each get a dedicated CC_FPU_* mode
   (case labels elided in this extract).  */
901 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
903 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
908 return CC_FPU_LEmode;
910 return CC_FPU_GEmode;
912 return CC_FPU_LTmode;
914 return CC_FPU_GTmode;
916 return CC_FPU_EQmode;
918 return CC_FPU_NEmode;
/* Emit the FPU compare insn for COND on OP0/OP1 and return the CC
   mode for the result (via v850_select_cc_mode).  */
927 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
929 if (GET_MODE (op0) == DFmode)
934 emit_insn (gen_cmpdf_le_insn (op0, op1));
937 emit_insn (gen_cmpdf_ge_insn (op0, op1));
940 emit_insn (gen_cmpdf_lt_insn (op0, op1));
943 emit_insn (gen_cmpdf_gt_insn (op0, op1));
946 /* Note: There is no NE comparison operator. So we
947 perform an EQ comparison and invert the branch.
948 See v850_float_nz_comparison for how this is done. */
950 emit_insn (gen_cmpdf_eq_insn (op0, op1));
/* NOTE(review): this tests the global v850_compare_op0 rather than
   the OP0 parameter used everywhere else in this function — verify
   this asymmetry is intentional before changing it.  */
956 else if (GET_MODE (v850_compare_op0) == SFmode)
961 emit_insn (gen_cmpsf_le_insn(op0, op1));
964 emit_insn (gen_cmpsf_ge_insn(op0, op1));
967 emit_insn (gen_cmpsf_lt_insn(op0, op1));
970 emit_insn (gen_cmpsf_gt_insn(op0, op1));
973 /* Note: There is no NE comparison operator. So we
974 perform an EQ comparison and invert the branch.
975 See v850_float_nz_comparison for how this is done. */
977 emit_insn (gen_cmpsf_eq_insn(op0, op1));
986 return v850_select_cc_mode (cond, op0, op1);
/* Emit a compare of OP0/OP1 and return the comparison rtx to use in
   a branch.  Integer compares set the plain CC register; float
   compares go through the FPU flag register (FCC) and are copied to
   a CC register in the selected CC_FPU_* mode.  */
990 v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
992 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
994 emit_insn (gen_cmpsi_insn (op0, op1));
995 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
1000 mode = v850_gen_float_compare (cond, mode, op0, op1);
1001 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1002 emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1004 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1008 /* Return maximum offset supported for a short EP memory reference of mode
1009 MODE and signedness UNSIGNEDP. */
1012 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
/* The mode switch's case labels are elided in this extract; the
   arms correspond to byte, halfword and word accesses, with smaller
   ranges when the short-load (SLD) encoding is restricted.  */
1019 if (TARGET_SMALL_SLD)
1020 max_offset = (1 << 4);
1021 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1023 max_offset = (1 << 4);
1025 max_offset = (1 << 7);
1029 if (TARGET_SMALL_SLD)
1030 max_offset = (1 << 5);
1031 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1033 max_offset = (1 << 5);
1035 max_offset = (1 << 8);
1040 max_offset = (1 << 8);
1050 /* Return true if OP is a valid short EP memory reference */
1053 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1059 /* If we are not using the EP register on a per-function basis
1060 then do not allow this optimization at all. This is to
1061 prevent the use of the SLD/SST instructions which cannot be
1062 guaranteed to work properly due to a hardware bug. */
1066 if (GET_CODE (op) != MEM)
/* Offsets must fit the short encoding and be size-aligned.  */
1069 max_offset = ep_memory_offset (mode, unsigned_load);
1071 mask = GET_MODE_SIZE (mode) - 1;
1073 addr = XEXP (op, 0);
1074 if (GET_CODE (addr) == CONST)
1075 addr = XEXP (addr, 0);
1077 switch (GET_CODE (addr))
/* Bare symbol: acceptable only if it lives in the tiny data area.  */
1083 return SYMBOL_REF_TDA_P (addr);
/* Bare register: must be the element pointer itself.  */
1086 return REGNO (addr) == EP_REGNUM;
/* base + const offset: offset in range/aligned, base is EP or a
   tiny-data symbol.  */
1089 op0 = XEXP (addr, 0);
1090 op1 = XEXP (addr, 1);
1091 if (GET_CODE (op1) == CONST_INT
1092 && INTVAL (op1) < max_offset
1093 && INTVAL (op1) >= 0
1094 && (INTVAL (op1) & mask) == 0)
1096 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1099 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1108 /* Substitute memory references involving a pointer, to use the ep pointer,
1109 taking care to save and preserve the ep. */
1112 substitute_ep_register (rtx first_insn,
1119 rtx reg = gen_rtx_REG (Pmode, regno);
/* r1 is used as the save slot for ep; ep is r30.  */
1124 df_set_regs_ever_live (1, true);
1125 *p_r1 = gen_rtx_REG (Pmode, 1);
1126 *p_ep = gen_rtx_REG (Pmode, 30);
/* Diagnostic dump: each converted use saves 2 bytes, minus the
   3-insn save/load/restore overhead.  */
1131 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1132 2 * (uses - 3), uses, reg_names[regno],
1133 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1134 INSN_UID (first_insn), INSN_UID (last_insn));
1136 if (GET_CODE (first_insn) == NOTE)
1137 first_insn = next_nonnote_insn (first_insn);
1139 last_insn = next_nonnote_insn (last_insn);
1140 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1142 if (GET_CODE (insn) == INSN)
1144 rtx pattern = single_set (insn);
1146 /* Replace the memory references. */
1150 /* Memory operands are signed by default. */
1151 int unsignedp = FALSE;
/* Locate the single MEM operand (store, load, or the MEM inside a
   sign/zero extension); mem-to-mem moves are left alone.  */
1153 if (GET_CODE (SET_DEST (pattern)) == MEM
1154 && GET_CODE (SET_SRC (pattern)) == MEM)
1157 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1158 p_mem = &SET_DEST (pattern);
1160 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1161 p_mem = &SET_SRC (pattern);
1163 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1164 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1165 p_mem = &XEXP (SET_SRC (pattern), 0);
1167 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1168 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1170 p_mem = &XEXP (SET_SRC (pattern), 0);
/* Rewrite (reg) or (reg + short-offset) addresses to use ep.  */
1178 rtx addr = XEXP (*p_mem, 0);
1180 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1181 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1183 else if (GET_CODE (addr) == PLUS
1184 && GET_CODE (XEXP (addr, 0)) == REG
1185 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1186 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1187 && ((INTVAL (XEXP (addr, 1)))
1188 < ep_memory_offset (GET_MODE (*p_mem),
1190 && ((INTVAL (XEXP (addr, 1))) >= 0))
1191 *p_mem = change_address (*p_mem, VOIDmode,
1192 gen_rtx_PLUS (Pmode,
1200 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1201 insn = prev_nonnote_insn (first_insn);
1202 if (insn && GET_CODE (insn) == INSN
1203 && GET_CODE (PATTERN (insn)) == SET
1204 && SET_DEST (PATTERN (insn)) == *p_ep
1205 && SET_SRC (PATTERN (insn)) == *p_r1)
/* Emit: save ep to r1, load ep with the pointer, restore ep after.  */
1208 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1210 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1211 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1215 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1216 the -mep mode to copy heavily used pointers to ep to use the implicit
/* NOTE(review): the function header and the per-register bookkeeping
   struct declaration are elided in this extract; `regs[]` tracks, per
   hard register, the use count and the first/last insn of the span
   where it is used as a memory base.  */
1228 regs[FIRST_PSEUDO_REGISTER];
1237 /* If not ep mode, just return now. */
/* Reset the per-register tracking state.  */
1241 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1244 regs[i].first_insn = NULL_RTX;
1245 regs[i].last_insn = NULL_RTX;
1248 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1250 switch (GET_CODE (insn))
1252 /* End of basic block */
/* At a block boundary, substitute ep for the most-used base
   register (if profitable) and reset the tracking state.  */
1259 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1261 if (max_uses < regs[i].uses)
1263 max_uses = regs[i].uses;
1269 substitute_ep_register (regs[max_regno].first_insn,
1270 regs[max_regno].last_insn,
1271 max_uses, max_regno, &r1, &ep);
1275 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1278 regs[i].first_insn = NULL_RTX;
1279 regs[i].last_insn = NULL_RTX;
1287 pattern = single_set (insn);
1289 /* See if there are any memory references we can shorten. */
1292 rtx src = SET_SRC (pattern);
1293 rtx dest = SET_DEST (pattern);
1295 /* Memory operands are signed by default. */
1296 int unsignedp = FALSE;
1298 /* We might have (SUBREG (MEM)) here, so just get rid of the
1299 subregs to make this code simpler. */
1300 if (GET_CODE (dest) == SUBREG
1301 && (GET_CODE (SUBREG_REG (dest)) == MEM
1302 || GET_CODE (SUBREG_REG (dest)) == REG))
1303 alter_subreg (&dest, false);
1304 if (GET_CODE (src) == SUBREG
1305 && (GET_CODE (SUBREG_REG (src)) == MEM
1306 || GET_CODE (SUBREG_REG (src)) == REG))
1307 alter_subreg (&src, false);
/* Find the single MEM operand, as in substitute_ep_register.  */
1309 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1312 else if (GET_CODE (dest) == MEM)
1315 else if (GET_CODE (src) == MEM)
1318 else if (GET_CODE (src) == SIGN_EXTEND
1319 && GET_CODE (XEXP (src, 0)) == MEM)
1320 mem = XEXP (src, 0);
1322 else if (GET_CODE (src) == ZERO_EXTEND
1323 && GET_CODE (XEXP (src, 0)) == MEM)
1325 mem = XEXP (src, 0);
1331 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
/* Count a use of the base register if the address is (reg) or
   (reg + short offset).  */
1334 else if (!use_ep && mem
1335 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1337 rtx addr = XEXP (mem, 0);
1341 if (GET_CODE (addr) == REG)
1344 regno = REGNO (addr);
1347 else if (GET_CODE (addr) == PLUS
1348 && GET_CODE (XEXP (addr, 0)) == REG
1349 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1350 && ((INTVAL (XEXP (addr, 1)))
1351 < ep_memory_offset (GET_MODE (mem), unsignedp))
1352 && ((INTVAL (XEXP (addr, 1))) >= 0))
1355 regno = REGNO (XEXP (addr, 0));
1364 regs[regno].last_insn = insn;
1365 if (!regs[regno].first_insn)
1366 regs[regno].first_insn = insn;
1370 /* Loading up a register in the basic block zaps any savings
1372 if (GET_CODE (dest) == REG)
1374 enum machine_mode mode = GET_MODE (dest);
1378 regno = REGNO (dest);
1379 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1383 /* See if we can use the pointer before this
/* The clobbered range may cover the current best candidate;
   if so, substitute now before losing the tracked span.  */
1388 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1390 if (max_uses < regs[i].uses)
1392 max_uses = regs[i].uses;
1398 && max_regno >= regno
1399 && max_regno < endregno)
1401 substitute_ep_register (regs[max_regno].first_insn,
1402 regs[max_regno].last_insn,
1403 max_uses, max_regno, &r1,
1406 /* Since we made a substitution, zap all remembered
1408 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1411 regs[i].first_insn = NULL_RTX;
1412 regs[i].last_insn = NULL_RTX;
1417 for (i = regno; i < endregno; i++)
1420 regs[i].first_insn = NULL_RTX;
1421 regs[i].last_insn = NULL_RTX;
1429 /* # of registers saved by the interrupt handler. */
1430 #define INTERRUPT_FIXED_NUM 5
1432 /* # of bytes for registers saved by the interrupt handler. */
1433 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1435 /* # of words saved for other registers. */
1436 #define INTERRUPT_ALL_SAVE_NUM \
1437 (30 - INTERRUPT_FIXED_NUM)
1439 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
/* Compute the number of bytes of stack needed for register saves
   in the current function; if P_REG_SAVED is non-NULL, also return
   a bitmask of the saved registers through it.  */
1442 compute_register_save_size (long * p_reg_saved)
1446 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1447 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1450 /* Count space for the register saves. */
1451 if (interrupt_handler)
1453 for (i = 0; i <= 31; i++)
1457 if (df_regs_ever_live_p (i) || call_p)
1460 reg_saved |= 1L << i;
1464 /* We don't save/restore r0 or the stack pointer */
1466 case STACK_POINTER_REGNUM:
1469 /* For registers with fixed use, we save them, set them to the
1470 appropriate value, and then restore them.
1471 These registers are handled specially, so don't list them
1472 on the list of registers to save in the prologue. */
1473 case 1: /* temp used to hold ep */
1475 case 10: /* temp used to call interrupt save/restore */
1476 case 11: /* temp used to call interrupt save/restore (long call) */
1477 case EP_REGNUM: /* ep */
1484 /* Find the first register that needs to be saved. */
1485 for (i = 0; i <= 31; i++)
1486 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1487 || i == LINK_POINTER_REGNUM))
1490 /* If it is possible that an out-of-line helper function might be
1491 used to generate the prologue for the current function, then we
1492 need to cover the possibility that such a helper function will
1493 be used, despite the fact that there might be gaps in the list of
1494 registers that need to be saved. To detect this we note that the
1495 helper functions always push at least register r29 (provided
1496 that the function is not an interrupt handler). */
1498 if (TARGET_PROLOG_FUNCTION
1499 && (i == 2 || ((i >= 20) && (i < 30))))
1504 reg_saved |= 1L << i;
1509 /* Helper functions save all registers between the starting
1510 register and the last register, regardless of whether they
1511 are actually used by the function or not. */
1512 for (; i <= 29; i++)
1515 reg_saved |= 1L << i;
1518 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1521 reg_saved |= 1L << LINK_POINTER_REGNUM;
/* Non-helper path: count each live call-saved register.  */
1526 for (; i <= 31; i++)
1527 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1528 || i == LINK_POINTER_REGNUM))
1531 reg_saved |= 1L << i;
1537 *p_reg_saved = reg_saved;
1542 /* Typical stack layout should looks like this after the function's prologue:
1547 | | arguments saved | Increasing
1548 | | on the stack | addresses
1549 PARENT arg pointer -> | | /
1550 -------------------------- ---- -------------------
1551 | | - space for argument split between regs & stack
1553 CHILD | | \ <-- (return address here)
1558 frame pointer -> | | \ ___
1565 | | arguments | | Decreasing
1566 (hard) frame pointer | | / | | addresses
1567 and stack pointer -> | | / _|_ |
1568 -------------------------- ---- ------------------ V */
/* Total frame size = local variables (SIZE) + register save area +
   outgoing argument area.  *P_REG_SAVED receives the save mask.  */
1571 compute_frame_size (int size, long * p_reg_saved)
1574 + compute_register_save_size (p_reg_saved)
1575 + crtl->outgoing_args_size);
/* Decide whether calling an out-of-line prologue/epilogue helper is
   smaller than saving/restoring NUM_SAVE registers inline for a
   frame of FRAME_SIZE bytes.  All lengths below are byte counts of
   the generated code.  */
1579 use_prolog_function (int num_save, int frame_size)
1581 int alloc_stack = (4 * num_save);
1582 int unalloc_stack = frame_size - alloc_stack;
1583 int save_func_len, restore_func_len;
1584 int save_normal_len, restore_normal_len;
/* Helper call: 2 bytes with CALLT, otherwise a jarl (longer if far).  */
1586 if (! TARGET_DISABLE_CALLT)
1587 save_func_len = restore_func_len = 2;
1589 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
/* Stack not covered by the helper still needs an add insn.  */
1593 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1594 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1597 /* See if we would have used ep to save the stack. */
1598 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1599 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1601 save_normal_len = restore_normal_len = 4 * num_save;
1603 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1604 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1606 /* Don't bother checking if we don't actually save any space.
1607 This happens for instance if one register is saved and additional
1608 stack space is allocated. */
1609 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
/* Adjust the stack pointer by AMOUNT bytes (negative to allocate).
   Amounts outside the 16-bit 'K' range are routed through scratch
   register r12.  */
1613 increment_stack (signed int amount, bool in_prologue)
1620 inc = GEN_INT (amount);
1622 if (! CONST_OK_FOR_K (amount))
1624 rtx reg = gen_rtx_REG (Pmode, 12);
1626 inc = emit_move_insn (reg, inc);
1632 inc = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, inc));
/* Expand the function prologue: save interrupt state if needed,
   identify the registers to save, and either call an out-of-line
   helper (as a single frame-related PARALLEL) or store registers one
   by one, then allocate the remaining frame and set up the frame
   pointer.  */
1638 expand_prologue (void)
1641 unsigned int size = get_frame_size ();
1642 unsigned int actual_fsize;
1643 unsigned int init_stack_alloc = 0;
1646 unsigned int num_save;
1648 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
/* FIX: "&reg_saved" had been mis-encoded as "\xae_saved" (the "&reg"
   run collapsed into a registered-sign character), which cannot
   compile.  Restored the address-of expression.  */
1651 actual_fsize = compute_frame_size (size, &reg_saved);
1653 if (flag_stack_usage_info)
1654 current_function_static_stack_size = actual_fsize;
1656 /* Save/setup global registers for interrupt functions right now. */
1657 if (interrupt_handler)
1659 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1660 emit_insn (gen_callt_save_interrupt ())<
1662 emit_insn (gen_save_interrupt ());
/* The fixed interrupt registers were just saved; exclude them from
   the frame size we still need to allocate.  */
1664 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1666 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1667 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1669 /* Interrupt functions are not passed arguments, so no need to
1670 allocate space for split structure arguments. */
1671 gcc_assert (crtl->args.pretend_args_size == 0);
1674 /* Identify all of the saved registers. */
1676 for (i = 1; i < 32; i++)
1678 if (((1L << i) & reg_saved) != 0)
1679 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1682 if (crtl->args.pretend_args_size)
1686 increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
1690 increment_stack (- crtl->args.pretend_args_size, true);
1693 /* See if we have an insn that allocates stack space and saves the particular
1694 registers we want to. Note that the helpers won't
1695 allocate additional space for registers GCC saves to complete a
1696 "split" structure argument. */
1697 save_all = NULL_RTX;
1698 if (TARGET_PROLOG_FUNCTION
1699 && !crtl->args.pretend_args_size
1702 if (use_prolog_function (num_save, actual_fsize))
1704 int alloc_stack = 4 * num_save;
/* Build a PARALLEL describing the helper's effect: one SET for
   the stack adjustment, one per saved register, plus CLOBBERs
   of the call scratch registers when CALLT is unavailable.  */
1707 save_all = gen_rtx_PARALLEL
1709 rtvec_alloc (num_save + 1
1710 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1712 XVECEXP (save_all, 0, 0)
1713 = gen_rtx_SET (VOIDmode,
1715 gen_rtx_PLUS (Pmode,
1717 GEN_INT(-alloc_stack)));
1718 for (i = 0; i < num_save; i++)
1721 XVECEXP (save_all, 0, i+1)
1722 = gen_rtx_SET (VOIDmode,
1724 gen_rtx_PLUS (Pmode,
1730 if (TARGET_DISABLE_CALLT)
1732 XVECEXP (save_all, 0, num_save + 1)
1733 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1735 if (TARGET_LONG_CALLS)
1736 XVECEXP (save_all, 0, num_save + 2)
1737 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
1740 v850_all_frame_related (save_all);
/* Only use the PARALLEL if it matches an existing pattern.  */
1742 code = recog (save_all, NULL_RTX, NULL);
1745 rtx insn = emit_insn (save_all);
1746 INSN_CODE (insn) = code;
1747 actual_fsize -= alloc_stack;
1751 save_all = NULL_RTX;
1755 /* If no prolog save function is available, store the registers the old
1756 fashioned way (one by one). */
1759 /* Special case interrupt functions that save all registers for a call. */
1760 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1762 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1763 emit_insn (gen_callt_save_all_interrupt ());
1765 emit_insn (gen_save_all_interrupt ());
1770 /* If the stack is too big, allocate it in chunks so we can do the
1771 register saves. We use the register save size so we use the ep
1773 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1774 init_stack_alloc = compute_register_save_size (NULL);
1776 init_stack_alloc = actual_fsize;
1778 /* Save registers at the beginning of the stack frame. */
1779 offset = init_stack_alloc - 4;
1781 if (init_stack_alloc)
1782 increment_stack (- (signed) init_stack_alloc, true);
1784 /* Save the return pointer first. */
1785 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1787 F (emit_move_insn (gen_rtx_MEM (SImode,
1788 plus_constant (Pmode,
1791 save_regs[--num_save]));
1795 for (i = 0; i < num_save; i++)
1797 F (emit_move_insn (gen_rtx_MEM (SImode,
1798 plus_constant (Pmode,
1807 /* Allocate the rest of the stack that was not allocated above (either it is
1808 > 32K or we just called a function to save the registers and needed more
1810 if (actual_fsize > init_stack_alloc)
1811 increment_stack (init_stack_alloc - actual_fsize, true);
1813 /* If we need a frame pointer, set it up now. */
1814 if (frame_pointer_needed)
1815 F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
/* Emit the function epilogue: undo the interrupt-entry fixed save,
   cut back any dynamic stack, restore call-saved registers (via a
   millicode __return_* helper when profitable, otherwise one by one),
   and emit the return (RETI / CALLT return for interrupt handlers).
   NOTE(review): this chunk appears to have lost physical lines in
   extraction (braces, declarations, else-branches); comments below
   annotate only what is visible -- confirm against upstream.  */
1820 expand_epilogue (void)
1823 unsigned int size = get_frame_size ();
/* NOTE(review): "®_saved" below looks like mojibake for "&reg_saved"
   (address of the saved-register mask out-parameter) -- confirm.  */
1825 int actual_fsize = compute_frame_size (size, ®_saved);
1826 rtx restore_regs[32];
1828 unsigned int num_restore;
1830 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1832 /* Eliminate the initial stack stored by interrupt functions. */
1833 if (interrupt_handler)
1835 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1836 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1837 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1840 /* Cut off any dynamic stack created. */
1841 if (frame_pointer_needed)
1842 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1844 /* Identify all of the saved registers. */
1846 for (i = 1; i < 32; i++)
1848 if (((1L << i) & reg_saved) != 0)
1849 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1852 /* See if we have an insn that restores the particular registers we */
1854 restore_all = NULL_RTX;
1856 if (TARGET_PROLOG_FUNCTION
1858 && !crtl->args.pretend_args_size
1859 && !interrupt_handler)
/* Each restored register occupies one word on the stack.  */
1861 int alloc_stack = (4 * num_restore);
1863 /* Don't bother checking if we don't actually save any space. */
1864 if (use_prolog_function (num_restore, actual_fsize))
/* Build a PARALLEL: [return; sp := sp + alloc_stack; reg loads...].  */
1867 restore_all = gen_rtx_PARALLEL (VOIDmode,
1868 rtvec_alloc (num_restore + 2));
1869 XVECEXP (restore_all, 0, 0) = ret_rtx;
1870 XVECEXP (restore_all, 0, 1)
1871 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1872 gen_rtx_PLUS (Pmode,
1874 GEN_INT (alloc_stack)));
1876 offset = alloc_stack - 4;
1877 for (i = 0; i < num_restore; i++)
1879 XVECEXP (restore_all, 0, i+2)
1880 = gen_rtx_SET (VOIDmode,
1883 gen_rtx_PLUS (Pmode,
/* Verify the hand-built PARALLEL actually matches an insn pattern.  */
1889 code = recog (restore_all, NULL_RTX, NULL);
1895 actual_fsize -= alloc_stack;
1896 increment_stack (actual_fsize, false);
1898 insn = emit_jump_insn (restore_all);
1899 INSN_CODE (insn) = code;
1902 restore_all = NULL_RTX;
1906 /* If no epilogue save function is available, restore the registers the
1907 old fashioned way (one by one). */
1910 unsigned int init_stack_free;
1912 /* If the stack is large, we need to cut it down in 2 pieces. */
1913 if (interrupt_handler)
1914 init_stack_free = 0;
1915 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1916 init_stack_free = 4 * num_restore;
1918 init_stack_free = (signed) actual_fsize;
1920 /* Deallocate the rest of the stack if it is > 32K. */
1921 if ((unsigned int) actual_fsize > init_stack_free)
1922 increment_stack (actual_fsize - init_stack_free, false);
1924 /* Special case interrupt functions that save all registers
1926 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1928 if (! TARGET_DISABLE_CALLT)
1929 emit_insn (gen_callt_restore_all_interrupt ());
1931 emit_insn (gen_restore_all_interrupt ());
1935 /* Restore registers from the beginning of the stack frame. */
1936 int offset = init_stack_free - 4;
1938 /* Restore the return pointer first. */
1940 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1942 emit_move_insn (restore_regs[--num_restore],
1943 gen_rtx_MEM (SImode,
1944 plus_constant (Pmode,
1950 for (i = 0; i < num_restore; i++)
1952 emit_move_insn (restore_regs[i],
1953 gen_rtx_MEM (SImode,
1954 plus_constant (Pmode,
/* Keep the restored registers live so the loads are not deleted.  */
1958 emit_use (restore_regs[i]);
1962 /* Cut back the remainder of the stack. */
1963 increment_stack (init_stack_free + crtl->args.pretend_args_size,
1967 /* And return or use reti for interrupt handlers. */
1968 if (interrupt_handler)
1970 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1971 emit_insn (gen_callt_return_interrupt ());
1973 emit_jump_insn (gen_return_interrupt ());
1975 else if (actual_fsize)
1976 emit_jump_insn (gen_return_internal ());
1978 emit_jump_insn (gen_return_simple ());
/* Reset the per-function interrupt-handler cache for the next function.  */
1981 v850_interrupt_cache_p = FALSE;
1982 v850_interrupt_p = FALSE;
1985 /* Update the condition code from the insn. */
/* Update the cc_status condition-code tracking state from INSN,
   based on the insn's CC attribute (see the .md file).
   NOTE(review): the switch's case labels (CC_NONE, CC_SET_ZN, etc.)
   appear to have been lost in extraction -- confirm against upstream.  */
1987 notice_update_cc (rtx body, rtx insn)
1989 switch (get_attr_cc (insn))
1992 /* Insn does not affect CC at all. */
1996 /* Insn does not change CC, but the 0'th operand has been changed. */
1997 if (cc_status.value1 != 0
1998 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
1999 cc_status.value1 = 0;
2003 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2004 V,C is in an unusable state. */
2006 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2007 cc_status.value1 = recog_data.operand[0];
2011 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2012 C is in an unusable state. */
2014 cc_status.flags |= CC_NO_CARRY;
2015 cc_status.value1 = recog_data.operand[0];
2019 /* The insn is a compare instruction. */
2021 cc_status.value1 = SET_SRC (body);
2025 /* Insn doesn't leave CC in a usable state. */
2034 /* Retrieve the data area that has been chosen for the given decl. */
2037 v850_get_data_area (tree decl)
2039 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2040 return DATA_AREA_SDA;
2042 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2043 return DATA_AREA_TDA;
2045 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2046 return DATA_AREA_ZDA;
2048 return DATA_AREA_NORMAL;
2051 /* Store the indicated data area in the decl's attributes. */
2054 v850_set_data_area (tree decl, v850_data_area data_area)
2060 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2061 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2062 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2067 DECL_ATTRIBUTES (decl) = tree_cons
2068 (name, NULL, DECL_ATTRIBUTES (decl));
2071 /* Handle an "interrupt" attribute; arguments as in
2072 struct attribute_spec.handler. */
2074 v850_handle_interrupt_attribute (tree * node,
2076 tree args ATTRIBUTE_UNUSED,
2077 int flags ATTRIBUTE_UNUSED,
2078 bool * no_add_attrs)
2080 if (TREE_CODE (*node) != FUNCTION_DECL)
2082 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2084 *no_add_attrs = true;
2090 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2091 struct attribute_spec.handler. */
2093 v850_handle_data_area_attribute (tree* node,
2095 tree args ATTRIBUTE_UNUSED,
2096 int flags ATTRIBUTE_UNUSED,
2097 bool * no_add_attrs)
2099 v850_data_area data_area;
2100 v850_data_area area;
2103 /* Implement data area attribute. */
2104 if (is_attribute_p ("sda", name))
2105 data_area = DATA_AREA_SDA;
2106 else if (is_attribute_p ("tda", name))
2107 data_area = DATA_AREA_TDA;
2108 else if (is_attribute_p ("zda", name))
2109 data_area = DATA_AREA_ZDA;
2113 switch (TREE_CODE (decl))
2116 if (current_function_decl != NULL_TREE)
2118 error_at (DECL_SOURCE_LOCATION (decl),
2119 "data area attributes cannot be specified for "
2121 *no_add_attrs = true;
2127 area = v850_get_data_area (decl);
2128 if (area != DATA_AREA_NORMAL && data_area != area)
2130 error ("data area of %q+D conflicts with previous declaration",
2132 *no_add_attrs = true;
2144 /* Return nonzero if FUNC is an interrupt function as specified
2145 by the "interrupt" attribute. */
2148 v850_interrupt_function_p (tree func)
2153 if (v850_interrupt_cache_p)
2154 return v850_interrupt_p;
2156 if (TREE_CODE (func) != FUNCTION_DECL)
2159 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2165 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2166 ret = a != NULL_TREE;
2169 /* Its not safe to trust global variables until after function inlining has
2171 if (reload_completed | reload_in_progress)
2172 v850_interrupt_p = ret;
2179 v850_encode_data_area (tree decl, rtx symbol)
2183 /* Map explicit sections into the appropriate attribute */
2184 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2186 if (DECL_SECTION_NAME (decl))
2188 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2190 if (streq (name, ".zdata") || streq (name, ".zbss"))
2191 v850_set_data_area (decl, DATA_AREA_ZDA);
2193 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2194 v850_set_data_area (decl, DATA_AREA_SDA);
2196 else if (streq (name, ".tdata"))
2197 v850_set_data_area (decl, DATA_AREA_TDA);
2200 /* If no attribute, support -m{zda,sda,tda}=n */
2203 int size = int_size_in_bytes (TREE_TYPE (decl));
2207 else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2208 v850_set_data_area (decl, DATA_AREA_TDA);
2210 else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2211 v850_set_data_area (decl, DATA_AREA_SDA);
2213 else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2214 v850_set_data_area (decl, DATA_AREA_ZDA);
2217 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2221 flags = SYMBOL_REF_FLAGS (symbol);
2222 switch (v850_get_data_area (decl))
2224 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2225 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2226 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2227 default: gcc_unreachable ();
2229 SYMBOL_REF_FLAGS (symbol) = flags;
2233 v850_encode_section_info (tree decl, rtx rtl, int first)
2235 default_encode_section_info (decl, rtl, first);
2237 if (TREE_CODE (decl) == VAR_DECL
2238 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2239 v850_encode_data_area (decl, XEXP (rtl, 0));
2242 /* Construct a JR instruction to a routine that will perform the equivalent of
2243 the RTL passed in as an argument. This RTL is a function epilogue that
2244 pops registers off the stack and possibly releases some extra stack space
2245 as well. The code has already verified that the RTL matches these
/* NOTE(review): physical lines (braces, else-branches, some declarations)
   were lost in extraction; annotations below cover only what is visible.
   Returns the assembler text (in a static buffer) for a jump to a
   __return_<first>[_<last>] millicode helper.  */
2249 construct_restore_jr (rtx op)
2251 int count = XVECLEN (op, 0);
2253 unsigned long int mask;
2254 unsigned long int first;
2255 unsigned long int last;
2257 static char buff [100]; /* XXX */
2261 error ("bogus JR construction: %d", count);
2265 /* Work out how many bytes to pop off the stack before retrieving
2267 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2268 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2269 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2271 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2273 /* Each pop will remove 4 bytes from the stack.... */
2274 stack_bytes -= (count - 2) * 4;
2276 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2277 if (stack_bytes != 0)
2279 error ("bad amount of stack space removal: %d", stack_bytes);
2283 /* Now compute the bit mask of registers to push. */
2285 for (i = 2; i < count; i++)
2287 rtx vector_element = XVECEXP (op, 0, i);
2289 gcc_assert (GET_CODE (vector_element) == SET);
2290 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2291 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2294 mask |= 1 << REGNO (SET_DEST (vector_element));
2297 /* Scan for the first register to pop. */
2298 for (first = 0; first < 32; first++)
2300 if (mask & (1 << first))
2304 gcc_assert (first < 32);
2306 /* Discover the last register to pop. */
2307 if (mask & (1 << LINK_POINTER_REGNUM))
2309 last = LINK_POINTER_REGNUM;
2313 gcc_assert (!stack_bytes);
2314 gcc_assert (mask & (1 << 29));
2319 /* Note, it is possible to have gaps in the register mask.
2320 We ignore this here, and generate a JR anyway. We will
2321 be popping more registers than is strictly necessary, but
2322 it does save code space. */
/* Long-call variant: load the helper's address into r6 and jmp.  */
2324 if (TARGET_LONG_CALLS)
2329 sprintf (name, "__return_%s", reg_names [first]);
2331 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2333 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2339 sprintf (buff, "jr __return_%s", reg_names [first]);
2341 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2348 /* Construct a JARL instruction to a routine that will perform the equivalent
2349 of the RTL passed as a parameter. This RTL is a function prologue that
2350 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2351 some stack space as well. The code has already verified that the RTL
2352 matches these requirements. */
/* NOTE(review): this is the prologue mirror of construct_restore_jr;
   physical lines were lost in extraction, so annotations below cover
   only what is visible.  Returns assembler text in a static buffer.  */
2354 construct_save_jarl (rtx op)
2356 int count = XVECLEN (op, 0);
2358 unsigned long int mask;
2359 unsigned long int first;
2360 unsigned long int last;
2362 static char buff [100]; /* XXX */
/* With -mlong-calls the PARALLEL carries an extra clobber, hence 3 vs 2.  */
2364 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2366 error ("bogus JARL construction: %d", count);
2371 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2372 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2373 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2374 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2376 /* Work out how many bytes to push onto the stack after storing the
2378 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2380 /* Each push will put 4 bytes from the stack.... */
2381 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2383 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2384 if (stack_bytes != 0)
2386 error ("bad amount of stack space removal: %d", stack_bytes);
2390 /* Now compute the bit mask of registers to push. */
2392 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2394 rtx vector_element = XVECEXP (op, 0, i);
2396 gcc_assert (GET_CODE (vector_element) == SET);
2397 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2398 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2401 mask |= 1 << REGNO (SET_SRC (vector_element));
2404 /* Scan for the first register to push. */
2405 for (first = 0; first < 32; first++)
2407 if (mask & (1 << first))
2411 gcc_assert (first < 32);
2413 /* Discover the last register to push. */
2414 if (mask & (1 << LINK_POINTER_REGNUM))
2416 last = LINK_POINTER_REGNUM;
2420 gcc_assert (!stack_bytes);
2421 gcc_assert (mask & (1 << 29));
2426 /* Note, it is possible to have gaps in the register mask.
2427 We ignore this here, and generate a JARL anyway. We will
2428 be pushing more registers than is strictly necessary, but
2429 it does save code space. */
/* Long-call variant: compute return address manually and jmp via r11.  */
2431 if (TARGET_LONG_CALLS)
2436 sprintf (name, "__save_%s", reg_names [first]);
2438 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2440 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2446 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2448 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2455 /* A version of asm_output_aligned_bss() that copes with the special
2456 data areas of the v850. */
2458 v850_output_aligned_bss (FILE * file,
2461 unsigned HOST_WIDE_INT size,
2464 switch (v850_get_data_area (decl))
2467 switch_to_section (zbss_section);
2471 switch_to_section (sbss_section);
2475 switch_to_section (tdata_section);
2478 switch_to_section (bss_section);
2482 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2483 #ifdef ASM_DECLARE_OBJECT_NAME
2484 last_assemble_variable_decl = decl;
2485 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2487 /* Standard thing is just output label for the object. */
2488 ASM_OUTPUT_LABEL (file, name);
2489 #endif /* ASM_DECLARE_OBJECT_NAME */
2490 ASM_OUTPUT_SKIP (file, size ? size : 1);
2493 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2495 v850_output_common (FILE * file,
2501 if (decl == NULL_TREE)
2503 fprintf (file, "%s", COMMON_ASM_OP);
2507 switch (v850_get_data_area (decl))
2510 fprintf (file, "%s", ZCOMMON_ASM_OP);
2514 fprintf (file, "%s", SCOMMON_ASM_OP);
2518 fprintf (file, "%s", TCOMMON_ASM_OP);
2522 fprintf (file, "%s", COMMON_ASM_OP);
2527 assemble_name (file, name);
2528 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2531 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2533 v850_output_local (FILE * file,
2539 fprintf (file, "%s", LOCAL_ASM_OP);
2540 assemble_name (file, name);
2541 fprintf (file, "\n");
2543 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2546 /* Add data area to the given declaration if a ghs data area pragma is
2547 currently in effect (#pragma ghs startXXX/endXXX). */
/* NOTE(review): physical lines (braces, static/void, some conditions)
   were lost in extraction; annotations below cover only what is visible.  */
2549 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2552 && data_area_stack->data_area
2553 && current_function_decl == NULL_TREE
2554 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2555 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
/* Apply the innermost active "#pragma ghs" data area to this decl.  */
2556 v850_set_data_area (decl, data_area_stack->data_area);
2558 /* Initialize the default names of the v850 specific sections,
2559 if this has not been done before. */
2561 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2563 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2564 = build_string (sizeof (".sdata")-1, ".sdata");
2566 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2567 = build_string (sizeof (".rosdata")-1, ".rosdata");
2569 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2570 = build_string (sizeof (".tdata")-1, ".tdata");
2572 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2573 = build_string (sizeof (".zdata")-1, ".zdata");
2575 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2576 = build_string (sizeof (".rozdata")-1, ".rozdata");
/* Only top-level, defined, section-less decls get a GHS section choice.  */
2579 if (current_function_decl == NULL_TREE
2580 && (TREE_CODE (decl) == VAR_DECL
2581 || TREE_CODE (decl) == CONST_DECL
2582 || TREE_CODE (decl) == FUNCTION_DECL)
2583 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2584 && !DECL_SECTION_NAME (decl))
2586 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2587 tree chosen_section;
2589 if (TREE_CODE (decl) == FUNCTION_DECL)
2590 kind = GHS_SECTION_KIND_TEXT;
2593 /* First choose a section kind based on the data area of the decl. */
2594 switch (v850_get_data_area (decl))
2600 kind = ((TREE_READONLY (decl))
2601 ? GHS_SECTION_KIND_ROSDATA
2602 : GHS_SECTION_KIND_SDATA);
2606 kind = GHS_SECTION_KIND_TDATA;
2610 kind = ((TREE_READONLY (decl))
2611 ? GHS_SECTION_KIND_ROZDATA
2612 : GHS_SECTION_KIND_ZDATA);
2615 case DATA_AREA_NORMAL: /* default data area */
2616 if (TREE_READONLY (decl))
2617 kind = GHS_SECTION_KIND_RODATA;
2618 else if (DECL_INITIAL (decl))
2619 kind = GHS_SECTION_KIND_DATA;
2621 kind = GHS_SECTION_KIND_BSS;
2625 /* Now, if the section kind has been explicitly renamed,
2626 then attach a section attribute. */
2627 chosen_section = GHS_current_section_names [(int) kind];
2629 /* Otherwise, if this kind of section needs an explicit section
2630 attribute, then also attach one. */
2631 if (chosen_section == NULL)
2632 chosen_section = GHS_default_section_names [(int) kind];
2636 /* Only set the section name if specified by a pragma, because
2637 otherwise it will force those variables to get allocated storage
2638 in this module, rather than by the linker. */
2639 DECL_SECTION_NAME (decl) = chosen_section;
2644 /* Construct a DISPOSE instruction that is the equivalent of
2645 the given RTX. We have already verified that this should
/* NOTE(review): physical lines were lost in extraction (braces,
   else-branches, some declarations); annotations below cover only
   what is visible.  Returns assembler text in a static buffer:
   either a "callt" to a __callt_return_* helper or a "dispose".  */
2649 construct_dispose_instruction (rtx op)
2651 int count = XVECLEN (op, 0);
2653 unsigned long int mask;
2655 static char buff[ 100 ]; /* XXX */
2660 error ("bogus DISPOSE construction: %d", count);
2664 /* Work out how many bytes to pop off the
2665 stack before retrieving registers. */
2666 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2667 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2668 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2670 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2672 /* Each pop will remove 4 bytes from the stack.... */
2673 stack_bytes -= (count - 2) * 4;
2675 /* Make sure that the amount we are popping
2676 will fit into the DISPOSE instruction. */
2677 if (stack_bytes > 128)
2679 error ("too much stack space to dispose of: %d", stack_bytes);
2683 /* Now compute the bit mask of registers to push. */
2686 for (i = 2; i < count; i++)
2688 rtx vector_element = XVECEXP (op, 0, i);
2690 gcc_assert (GET_CODE (vector_element) == SET);
2691 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2692 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
/* r2 is handled by a dedicated callt helper, not by the mask.  */
2695 if (REGNO (SET_DEST (vector_element)) == 2)
2698 mask |= 1 << REGNO (SET_DEST (vector_element));
2701 if (! TARGET_DISABLE_CALLT
2702 && (use_callt || stack_bytes == 0))
2706 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2711 for (i = 20; i < 32; i++)
2712 if (mask & (1 << i))
2716 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2718 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2719 i, (mask & (1 << 31)) ? "31c" : "29");
2724 static char regs [100]; /* XXX */
2727 /* Generate the DISPOSE instruction. Note we could just issue the
2728 bit mask as a number as the assembler can cope with this, but for
2729 the sake of our readers we turn it into a textual description. */
2733 for (i = 20; i < 32; i++)
2735 if (mask & (1 << i))
2740 strcat (regs, ", ");
2745 strcat (regs, reg_names[ first ]);
/* Collapse consecutive registers into a "rN - rM" range.  */
2747 for (i++; i < 32; i++)
2748 if ((mask & (1 << i)) == 0)
2753 strcat (regs, " - ");
2754 strcat (regs, reg_names[ i - 1 ] );
2759 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2765 /* Construct a PREPARE instruction that is the equivalent of
2766 the given RTL. We have already verified that this should
/* NOTE(review): physical lines were lost in extraction; annotations
   below cover only what is visible.  Returns assembler text in a
   static buffer: a "callt" to a __callt_save_* helper or a "prepare".
   NOTE(review): the error string below says "PREPEARE" -- looks like a
   typo for "PREPARE" (fixing it changes a runtime diagnostic string,
   so it is only flagged here).  */
2770 construct_prepare_instruction (rtx op)
2774 unsigned long int mask;
2776 static char buff[ 100 ]; /* XXX */
2779 if (XVECLEN (op, 0) <= 1)
2781 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2785 /* Work out how many bytes to push onto
2786 the stack after storing the registers. */
2787 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2788 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2789 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2791 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2794 /* Make sure that the amount we are popping
2795 will fit into the DISPOSE instruction. */
2796 if (stack_bytes < -128)
2798 error ("too much stack space to prepare: %d", stack_bytes);
2802 /* Now compute the bit mask of registers to push. */
2805 for (i = 1; i < XVECLEN (op, 0); i++)
2807 rtx vector_element = XVECEXP (op, 0, i);
/* CLOBBER elements carry no save information; skip them.  */
2809 if (GET_CODE (vector_element) == CLOBBER)
2812 gcc_assert (GET_CODE (vector_element) == SET);
2813 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2814 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2817 if (REGNO (SET_SRC (vector_element)) == 2)
2820 mask |= 1 << REGNO (SET_SRC (vector_element));
2824 stack_bytes += count * 4;
2826 if ((! TARGET_DISABLE_CALLT)
2827 && (use_callt || stack_bytes == 0))
2831 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2835 for (i = 20; i < 32; i++)
2836 if (mask & (1 << i))
2840 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2842 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2843 i, (mask & (1 << 31)) ? "31c" : "29");
2847 static char regs [100]; /* XXX */
2851 /* Generate the PREPARE instruction. Note we could just issue the
2852 bit mask as a number as the assembler can cope with this, but for
2853 the sake of our readers we turn it into a textual description. */
2857 for (i = 20; i < 32; i++)
2859 if (mask & (1 << i))
2864 strcat (regs, ", ");
2869 strcat (regs, reg_names[ first ]);
/* Collapse consecutive registers into a "rN - rM" range.  */
2871 for (i++; i < 32; i++)
2872 if ((mask & (1 << i)) == 0)
2877 strcat (regs, " - ");
2878 strcat (regs, reg_names[ i - 1 ] );
2883 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2889 /* Return an RTX indicating where the return address to the
2890 calling function can be found. */
2893 v850_return_addr (int count)
2898 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2901 /* Implement TARGET_ASM_INIT_SECTIONS. */
2904 v850_asm_init_sections (void)
2907 = get_unnamed_section (0, output_section_asm_op,
2908 "\t.section .rosdata,\"a\"");
2911 = get_unnamed_section (0, output_section_asm_op,
2912 "\t.section .rozdata,\"a\"");
2915 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2916 "\t.section .tdata,\"aw\"");
2919 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2920 "\t.section .zdata,\"aw\"");
2923 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2924 output_section_asm_op,
2925 "\t.section .zbss,\"aw\"");
2929 v850_select_section (tree exp,
2930 int reloc ATTRIBUTE_UNUSED,
2931 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2933 if (TREE_CODE (exp) == VAR_DECL)
2936 if (!TREE_READONLY (exp)
2937 || TREE_SIDE_EFFECTS (exp)
2938 || !DECL_INITIAL (exp)
2939 || (DECL_INITIAL (exp) != error_mark_node
2940 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2945 switch (v850_get_data_area (exp))
2948 return is_const ? rozdata_section : zdata_section;
2951 return tdata_section;
2954 return is_const ? rosdata_section : sdata_section;
2957 return is_const ? readonly_data_section : data_section;
2960 return readonly_data_section;
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
   On the v850 function values are returned in r10 only.  */
static bool
v850_function_value_regno_p (const unsigned int regno)
{
  return (regno == 10);
}
2971 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2974 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2976 /* Return values > 8 bytes in length in memory. */
2977 return int_size_in_bytes (type) > 8
2978 || TYPE_MODE (type) == BLKmode
2979 /* With the rh850 ABI return all aggregates in memory. */
2980 || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
2984 /* Worker function for TARGET_FUNCTION_VALUE. */
2987 v850_function_value (const_tree valtype,
2988 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2989 bool outgoing ATTRIBUTE_UNUSED)
2991 return gen_rtx_REG (TYPE_MODE (valtype), 10);
2995 /* Worker function for TARGET_CAN_ELIMINATE. */
2998 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3000 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3003 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3005 If TARGET_APP_REGS is not defined then add r2 and r5 to
3006 the pool of fixed registers. See PR 14505. */
3009 v850_conditional_register_usage (void)
3011 if (TARGET_APP_REGS)
3013 fixed_regs[2] = 0; call_used_regs[2] = 0;
3014 fixed_regs[5] = 0; call_used_regs[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emit the fixed
   trampoline code: jarl loads the trampoline's own address into r12,
   then the static chain (offset 12) and target address (offset 16,
   i.e. the two .long slots patched by v850_trampoline_init) are
   loaded and control transfers to the target.  */
static void
v850_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tjarl .+4,r12\n");
  fprintf (f, "\tld.w 12[r12],r20\n");
  fprintf (f, "\tld.w 16[r12],r12\n");
  fprintf (f, "\tjmp [r12]\n");
  fprintf (f, "\tnop\n");
  fprintf (f, "\t.long 0\n");	/* Static chain slot.  */
  fprintf (f, "\t.long 0\n");	/* Target function address slot.  */
}
3032 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3035 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3037 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3039 emit_block_move (m_tramp, assemble_trampoline_template (),
3040 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3042 mem = adjust_address (m_tramp, SImode, 16);
3043 emit_move_insn (mem, chain_value);
3044 mem = adjust_address (m_tramp, SImode, 20);
3045 emit_move_insn (mem, fnaddr);
3049 v850_issue_rate (void)
3051 return (TARGET_V850E2_ALL? 2 : 1);
3054 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3057 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3059 return (GET_CODE (x) == CONST_DOUBLE
3060 || !(GET_CODE (x) == CONST
3061 && GET_CODE (XEXP (x, 0)) == PLUS
3062 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3063 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3064 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
/* Implement TARGET_MEMORY_MOVE_COST: cost of moving MODE between a
   register and memory, direction given by IN.
   NOTE(review): the switch's case labels and their return values for
   small mode sizes were lost in extraction -- only the default branch
   (cost proportional to the number of halfword accesses) is visible.
   Confirm the missing cases against upstream before relying on this.  */
3068 v850_memory_move_cost (enum machine_mode mode,
3069 reg_class_t reg_class ATTRIBUTE_UNUSED,
3072 switch (GET_MODE_SIZE (mode))
3082 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3086 /* V850 specific attributes. */
/* Table of machine attributes recognised by this back end: the two
   interrupt-handler spellings plus the three data-area attributes.
   All require a decl, take no arguments, and do not affect type
   identity.  The all-NULL entry terminates the table.  */
3088 static const struct attribute_spec v850_attribute_table[] =
3090 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3091 affects_type_identity } */
3092 { "interrupt_handler", 0, 0, true, false, false,
3093 v850_handle_interrupt_attribute, false },
3094 { "interrupt", 0, 0, true, false, false,
3095 v850_handle_interrupt_attribute, false },
3096 { "sda", 0, 0, true, false, false,
3097 v850_handle_data_area_attribute, false },
3098 { "tda", 0, 0, true, false, false,
3099 v850_handle_data_area_attribute, false },
3100 { "zda", 0, 0, true, false, false,
3101 v850_handle_data_area_attribute, false },
3102 { NULL, 0, 0, false, false, false, NULL, false }
3107 v850_option_override (void)
3109 if (flag_exceptions || flag_non_call_exceptions)
3110 flag_omit_frame_pointer = 0;
3112 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3113 if (! TARGET_GCC_ABI)
3114 target_flags |= MASK_DISABLE_CALLT;
3117 /* Initialize the GCC target structure. */
3119 #undef TARGET_OPTION_OVERRIDE
3120 #define TARGET_OPTION_OVERRIDE v850_option_override
3122 #undef TARGET_MEMORY_MOVE_COST
3123 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3125 #undef TARGET_ASM_ALIGNED_HI_OP
3126 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3128 #undef TARGET_PRINT_OPERAND
3129 #define TARGET_PRINT_OPERAND v850_print_operand
3130 #undef TARGET_PRINT_OPERAND_ADDRESS
3131 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3132 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3133 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3135 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3136 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3138 #undef TARGET_ATTRIBUTE_TABLE
3139 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3141 #undef TARGET_INSERT_ATTRIBUTES
3142 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3144 #undef TARGET_ASM_SELECT_SECTION
3145 #define TARGET_ASM_SELECT_SECTION v850_select_section
3147 /* The assembler supports switchable .bss sections, but
3148 v850_select_section doesn't yet make use of them. */
3149 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3150 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3152 #undef TARGET_ENCODE_SECTION_INFO
3153 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3155 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3156 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3158 #undef TARGET_RTX_COSTS
3159 #define TARGET_RTX_COSTS v850_rtx_costs
3161 #undef TARGET_ADDRESS_COST
3162 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
3164 #undef TARGET_MACHINE_DEPENDENT_REORG
3165 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3167 #undef TARGET_SCHED_ISSUE_RATE
3168 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3170 #undef TARGET_FUNCTION_VALUE_REGNO_P
3171 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3172 #undef TARGET_FUNCTION_VALUE
3173 #define TARGET_FUNCTION_VALUE v850_function_value
3175 #undef TARGET_PROMOTE_PROTOTYPES
3176 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3178 #undef TARGET_RETURN_IN_MEMORY
3179 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3181 #undef TARGET_PASS_BY_REFERENCE
3182 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3184 #undef TARGET_CALLEE_COPIES
3185 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3187 #undef TARGET_ARG_PARTIAL_BYTES
3188 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3190 #undef TARGET_FUNCTION_ARG
3191 #define TARGET_FUNCTION_ARG v850_function_arg
3193 #undef TARGET_FUNCTION_ARG_ADVANCE
3194 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3196 #undef TARGET_CAN_ELIMINATE
3197 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3199 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3200 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3202 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3203 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3204 #undef TARGET_TRAMPOLINE_INIT
3205 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3207 #undef TARGET_LEGITIMATE_CONSTANT_P
3208 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
3210 struct gcc_target targetm = TARGET_INITIALIZER;
3212 #include "gt-v850.h"