1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
42 #include "target-def.h"
47 #define streq(a,b) (strcmp (a, b) == 0)
50 static void v850_print_operand_address (FILE *, rtx);
52 /* Names of the various data areas used on the v850. */
53 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
54 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
56 /* Track the current data area set by the data area pragma (which
57 can be nested). Tested by check_default_data_area. */
58 data_area_stack_element * data_area_stack = NULL;
60 /* True if we don't need to check any more if the current
61 function is an interrupt handler. */
62 static int v850_interrupt_cache_p = FALSE;
/* Operands of the pending comparison; v850_gen_float_compare reads
   v850_compare_op0 to pick the SFmode path.  */
64 rtx v850_compare_op0, v850_compare_op1;
66 /* Whether current function is an interrupt handler. */
67 static int v850_interrupt_p = FALSE;
/* GTY-marked section handles for the v850 small data areas
   (read-only sdata/zdata, tiny data, zero-page data and bss).  */
69 static GTY(()) section * rosdata_section;
70 static GTY(()) section * rozdata_section;
71 static GTY(()) section * tdata_section;
72 static GTY(()) section * zdata_section;
73 static GTY(()) section * zbss_section;
75 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
76 Specify whether to pass the argument by reference. */
79 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
80 enum machine_mode mode, const_tree type,
81 bool named ATTRIBUTE_UNUSED)
/* Argument size: taken from TYPE when a type is available, otherwise
   from MODE.  NOTE(review): the guard selecting between the two
   assignments and the final size-threshold return are elided in this
   view -- confirm against the full file.  */
83 unsigned HOST_WIDE_INT size;
86 size = int_size_in_bytes (type);
88 size = GET_MODE_SIZE (mode);
93 /* Implementing the Varargs Macros. */
/* TARGET_STRICT_ARGUMENT_NAMING hook: strict named/unnamed argument
   treatment is enabled except when targeting the GHS ABI.  */
96 v850_strict_argument_naming (cumulative_args_t ca ATTRIBUTE_UNUSED)
98 return !TARGET_GHS ? true : false;
101 /* Return an RTX to represent where an argument with mode MODE
102 and type TYPE will be passed to a function. If the result
103 is NULL_RTX, the argument will be pushed. */
106 v850_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
107 const_tree type, bool named)
109 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
110 rtx result = NULL_RTX;
/* Argument size from TYPE when available, else from MODE
   (selecting guard elided in this view).  */
117 size = int_size_in_bytes (type);
119 size = GET_MODE_SIZE (mode);
/* Round the size up to a whole number of words.  */
121 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
125 /* Once we have stopped using argument registers, do not start up again. */
126 cum->nbytes = 4 * UNITS_PER_WORD;
/* Small arguments with a type use the type's alignment.  */
130 if (size <= UNITS_PER_WORD && type)
131 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
/* Align the running argument-byte count before assigning a slot.  */
135 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
/* Only the first four words (16 bytes) are passed in registers.  */
137 if (cum->nbytes > 4 * UNITS_PER_WORD)
140 if (type == NULL_TREE
141 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Map the word offset to argument registers r6..r9.
   NOTE(review): the case labels and returns are elided here.  */
144 switch (cum->nbytes / UNITS_PER_WORD)
147 result = gen_rtx_REG (mode, 6);
150 result = gen_rtx_REG (mode, 7);
153 result = gen_rtx_REG (mode, 8);
156 result = gen_rtx_REG (mode, 9);
165 /* Return the number of bytes which must be put into registers
166 for values which are part in registers and part in memory. */
168 v850_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
169 tree type, bool named)
171 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Under the GHS ABI unnamed (variadic) arguments never go in
   registers.  */
174 if (TARGET_GHS && !named)
178 size = int_size_in_bytes (type);
180 size = GET_MODE_SIZE (mode);
186 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
190 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
/* Nothing is partial if registers are already exhausted, or if the
   whole argument fits within the four register words.  */
192 if (cum->nbytes > 4 * UNITS_PER_WORD)
195 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
198 if (type == NULL_TREE
199 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* The argument straddles the register/stack boundary: the remaining
   register bytes hold its leading part.  */
202 return 4 * UNITS_PER_WORD - cum->nbytes;
205 /* Update the data in CUM to advance over an argument
206 of mode MODE and data type TYPE.
207 (TYPE is null for libcalls where that information may not be available.) */
210 v850_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
211 const_tree type, bool named ATTRIBUTE_UNUSED)
213 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* Advance by the argument's size rounded up to a whole word; large
   (> 8 byte) typed arguments advance by a pointer's size, consistent
   with pass-by-reference.  NOTE(review): one line of the conditional
   chain is elided in this view.  */
215 cum->nbytes += (((type && int_size_in_bytes (type) > 8
216 ? GET_MODE_SIZE (Pmode)
218 ? GET_MODE_SIZE (mode)
219 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
223 /* Return the high and low words of a CONST_DOUBLE */
226 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
228 if (GET_CODE (x) == CONST_DOUBLE)
233 switch (GET_MODE (x))
/* DFmode: convert to the two-word target representation.  */
236 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
237 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
238 *p_high = t[1]; /* since v850 is little endian */
239 *p_low = t[0]; /* high is second word */
/* SFmode: a single target word, returned in *p_high
   (the *p_low assignment is elided in this view).  */
243 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
244 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
/* Integer CONST_DOUBLE (VOIDmode): use the stored halves.  */
250 *p_high = CONST_DOUBLE_HIGH (x);
251 *p_low = CONST_DOUBLE_LOW (x);
/* Anything else is a malformed operand.  */
259 fatal_insn ("const_double_split got a bad insn:", x);
263 /* Return the cost of the rtx R with code CODE. */
/* Cost of an integer constant, tiered by which constraint class
   (I, J, K) it satisfies.  NOTE(review): the returned values for
   each tier are elided in this view.  */
266 const_costs_int (HOST_WIDE_INT value, int zero_cost)
268 if (CONST_OK_FOR_I (value))
270 else if (CONST_OK_FOR_J (value))
272 else if (CONST_OK_FOR_K (value))
/* Cost of the constant rtx R with code C.  CONST_INT is costed
   directly; CONST_DOUBLE is split and each half costed (an SFmode
   value is a single word, so only the high half counts).  */
279 const_costs (rtx r, enum rtx_code c)
281 HOST_WIDE_INT high, low;
286 return const_costs_int (INTVAL (r), 0);
289 const_double_split (r, &high, &low);
290 if (GET_MODE (r) == SFmode)
291 return const_costs_int (high, 1);
293 return const_costs_int (high, 1) + const_costs_int (low, 1);
/* TARGET_RTX_COSTS hook.  Sets *TOTAL to the cost of X; constants are
   costed via const_costs, and (for V850E, when optimizing for size)
   multiplies/divides get special-cased costs by operand kind.
   NOTE(review): several case labels and cost assignments are elided
   in this view.  */
309 v850_rtx_costs (rtx x,
311 int outer_code ATTRIBUTE_UNUSED,
312 int opno ATTRIBUTE_UNUSED,
313 int * total, bool speed)
315 enum rtx_code code = (enum rtx_code) codearg;
324 *total = COSTS_N_INSNS (const_costs (x, code));
331 if (TARGET_V850E && !speed)
339 && ( GET_MODE (x) == SImode
340 || GET_MODE (x) == HImode
341 || GET_MODE (x) == QImode))
343 if (GET_CODE (XEXP (x, 1)) == REG)
345 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
347 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
349 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
358 if (outer_code == COMPARE)
367 /* Print operand X using operand code CODE to assembly language output file
371 v850_print_operand (FILE * file, rtx x, int code)
373 HOST_WIDE_INT high, low;
378 /* We use 'c' operands with symbols for .vtinherit */
379 if (GET_CODE (x) == SYMBOL_REF)
381 output_addr_const(file, x);
/* Condition codes: 'B'/'C' print the reversed condition, 'b'/'c'
   the condition itself; uppercase variants use the nz/z style.  */
388 switch ((code == 'B' || code == 'C')
389 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
392 if (code == 'c' || code == 'C')
393 fprintf (file, "nz");
395 fprintf (file, "ne");
398 if (code == 'c' || code == 'C')
404 fprintf (file, "ge");
407 fprintf (file, "gt");
410 fprintf (file, "le");
413 fprintf (file, "lt");
416 fprintf (file, "nl");
422 fprintf (file, "nh");
431 case 'F': /* high word of CONST_DOUBLE */
432 switch (GET_CODE (x))
435 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
439 const_double_split (x, &high, &low);
440 fprintf (file, "%ld", (long) high);
447 case 'G': /* low word of CONST_DOUBLE */
448 switch (GET_CODE (x))
451 fprintf (file, "%ld", (long) INTVAL (x));
455 const_double_split (x, &high, &low);
456 fprintf (file, "%ld", (long) low);
/* Low 16 bits of a CONST_INT.  NOTE(review): the emitted "\n" looks
   anomalous next to the sibling cases -- confirm in the full file.  */
464 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
467 fprintf (file, "%d", exact_log2 (INTVAL (x)));
/* Small-data-area relocation prefix for a special symbol.  */
470 gcc_assert (special_symbolref_operand (x, VOIDmode));
472 if (GET_CODE (x) == CONST)
473 x = XEXP (XEXP (x, 0), 0);
475 gcc_assert (GET_CODE (x) == SYMBOL_REF);
477 if (SYMBOL_REF_ZDA_P (x))
478 fprintf (file, "zdaoff");
479 else if (SYMBOL_REF_SDA_P (x))
480 fprintf (file, "sdaoff");
481 else if (SYMBOL_REF_TDA_P (x))
482 fprintf (file, "tdaoff");
487 gcc_assert (special_symbolref_operand (x, VOIDmode));
488 output_addr_const (file, x);
/* Base register matching the small-data area of a special symbol.  */
491 gcc_assert (special_symbolref_operand (x, VOIDmode));
493 if (GET_CODE (x) == CONST)
494 x = XEXP (XEXP (x, 0), 0);
496 gcc_assert (GET_CODE (x) == SYMBOL_REF);
498 if (SYMBOL_REF_ZDA_P (x))
499 fprintf (file, "r0");
500 else if (SYMBOL_REF_SDA_P (x))
501 fprintf (file, "gp");
502 else if (SYMBOL_REF_TDA_P (x))
503 fprintf (file, "ep");
507 case 'R': /* 2nd word of a double. */
508 switch (GET_CODE (x))
511 fprintf (file, reg_names[REGNO (x) + 1]);
/* MEM: print the address advanced by 4 bytes.  */
514 x = XEXP (adjust_address (x, SImode, 4), 0);
515 v850_print_operand_address (file, x);
516 if (GET_CODE (x) == CONST_INT)
517 fprintf (file, "[r0]");
526 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
527 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
534 /* Like an 'S' operand above, but for unsigned loads only. */
535 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
540 case 'W': /* print the instruction suffix */
541 switch (GET_MODE (x))
546 case QImode: fputs (".b", file); break;
547 case HImode: fputs (".h", file); break;
548 case SImode: fputs (".w", file); break;
549 case SFmode: fputs (".w", file); break;
552 case '.': /* register r0 */
553 fputs (reg_names[0], file);
555 case 'z': /* reg or zero */
556 if (GET_CODE (x) == REG)
557 fputs (reg_names[REGNO (x)], file);
558 else if ((GET_MODE(x) == SImode
559 || GET_MODE(x) == DFmode
560 || GET_MODE(x) == SFmode)
561 && x == CONST0_RTX(GET_MODE(x)))
562 fputs (reg_names[0], file);
565 gcc_assert (x == const0_rtx);
566 fputs (reg_names[0], file);
/* Default: print the operand by its rtx code.  */
570 switch (GET_CODE (x))
573 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
574 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
577 output_address (XEXP (x, 0));
581 fputs (reg_names[REGNO (x)], file);
584 fputs (reg_names[subreg_regno (x)], file);
591 v850_print_operand_address (file, x);
602 /* Output assembly language output for the address ADDR to FILE. */
605 v850_print_operand_address (FILE * file, rtx addr)
607 switch (GET_CODE (addr))
/* Bare register: "0[reg]" form.  */
610 fprintf (file, "0[");
611 v850_print_operand (file, addr, 0);
/* LO_SUM: "lo(sym)[reg]".  */
615 if (GET_CODE (XEXP (addr, 0)) == REG)
618 fprintf (file, "lo(");
619 v850_print_operand (file, XEXP (addr, 1), 0);
620 fprintf (file, ")[");
621 v850_print_operand (file, XEXP (addr, 0), 0);
/* PLUS: "offset[reg]" when the base is a (sub)register.  */
626 if (GET_CODE (XEXP (addr, 0)) == REG
627 || GET_CODE (XEXP (addr, 0)) == SUBREG)
630 v850_print_operand (file, XEXP (addr, 1), 0);
632 v850_print_operand (file, XEXP (addr, 0), 0);
637 v850_print_operand (file, XEXP (addr, 0), 0);
639 v850_print_operand (file, XEXP (addr, 1), 0);
/* SYMBOL_REF in a small data area: "<area>off(sym)[<base>]"
   (off_name/reg_name assignments are elided in this view).  */
644 const char *off_name = NULL;
645 const char *reg_name = NULL;
647 if (SYMBOL_REF_ZDA_P (addr))
652 else if (SYMBOL_REF_SDA_P (addr))
657 else if (SYMBOL_REF_TDA_P (addr))
664 fprintf (file, "%s(", off_name);
665 output_addr_const (file, addr);
667 fprintf (file, ")[%s]", reg_name);
/* CONST wrapping a special small-data symbol: same form as above.  */
671 if (special_symbolref_operand (addr, VOIDmode))
673 rtx x = XEXP (XEXP (addr, 0), 0);
674 const char *off_name;
675 const char *reg_name;
677 if (SYMBOL_REF_ZDA_P (x))
682 else if (SYMBOL_REF_SDA_P (x))
687 else if (SYMBOL_REF_TDA_P (x))
695 fprintf (file, "%s(", off_name);
696 output_addr_const (file, addr);
697 fprintf (file, ")[%s]", reg_name);
700 output_addr_const (file, addr);
/* Fallback: plain constant address.  */
703 output_addr_const (file, addr);
/* TARGET_PRINT_OPERAND_PUNCT_VALID_P hook.  NOTE(review): the body
   (which punctuation codes are accepted) is elided in this view.  */
709 v850_print_operand_punct_valid_p (unsigned char code)
714 /* When assemble_integer is used to emit the offsets for a switch
715 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
716 output_addr_const will normally barf at this, but it is OK to omit
717 the truncate and just emit the difference of the two labels. The
718 .hword directive will automatically handle the truncation for us.
720 Returns true if rtx was handled, false otherwise. */
723 v850_output_addr_const_extra (FILE * file, rtx x)
/* Only TRUNCATE expressions are handled here.  */
725 if (GET_CODE (x) != TRUNCATE)
730 /* We must also handle the case where the switch table was passed a
731 constant value and so has been collapsed. In this case the first
732 label will have been deleted. In such a case it is OK to emit
733 nothing, since the table will not be used.
734 (cf gcc.c-torture/compile/990801-1.c). */
735 if (GET_CODE (x) == MINUS
736 && GET_CODE (XEXP (x, 0)) == LABEL_REF
737 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
738 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
/* Otherwise emit the truncated expression's operand directly.  */
741 output_addr_const (file, x);
745 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
749 output_move_single (rtx * operands)
751 rtx dst = operands[0];
752 rtx src = operands[1];
/* Integer constants: pick the shortest encoding that fits.  */
759 else if (GET_CODE (src) == CONST_INT)
761 HOST_WIDE_INT value = INTVAL (src);
763 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
766 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
767 return "movea %1,%.,%0";
769 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
770 return "movhi hi0(%1),%.,%0";
772 /* A random constant. */
773 else if (TARGET_V850E || TARGET_V850E2_ALL)
776 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
/* SFmode constants: same tiers, applied to the single target word
   (%F1 is the high word, see v850_print_operand).  */
779 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
781 HOST_WIDE_INT high, low;
783 const_double_split (src, &high, &low);
785 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
788 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
789 return "movea %F1,%.,%0";
791 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
792 return "movhi hi0(%F1),%.,%0";
794 /* A random constant. */
795 else if (TARGET_V850E || TARGET_V850E2_ALL)
799 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
/* Loads: %S/%W pick the short-form prefix and size suffix.  */
802 else if (GET_CODE (src) == MEM)
803 return "%S1ld%W1 %1,%0";
805 else if (special_symbolref_operand (src, VOIDmode))
806 return "movea %O1(%P1),%Q1,%0";
808 else if (GET_CODE (src) == LABEL_REF
809 || GET_CODE (src) == SYMBOL_REF
810 || GET_CODE (src) == CONST)
812 if (TARGET_V850E || TARGET_V850E2_ALL)
813 return "mov hilo(%1),%0";
815 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
818 else if (GET_CODE (src) == HIGH)
819 return "movhi hi(%1),%.,%0";
821 else if (GET_CODE (src) == LO_SUM)
823 operands[2] = XEXP (src, 0);
824 operands[3] = XEXP (src, 1);
825 return "movea lo(%3),%2,%0";
/* Stores: zero sources store from r0 (%.).  */
829 else if (GET_CODE (dst) == MEM)
832 return "%S0st%W0 %1,%0";
834 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
835 return "%S0st%W0 %.,%0";
837 else if (GET_CODE (src) == CONST_DOUBLE
838 && CONST0_RTX (GET_MODE (dst)) == src)
839 return "%S0st%W0 %.,%0";
/* Any other combination is a bug in the move patterns.  */
842 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
846 /* Generate comparison code. */
/* Predicate: OP is a float comparison against the FPU CC register
   in one of the "Z-flag" modes (LT/LE/EQ).  NOTE(review): the exact
   return values per mode are elided in this view.  */
848 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
850 enum rtx_code code = GET_CODE (op);
852 if (GET_RTX_CLASS (code) != RTX_COMPARE
853 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
856 if (mode != GET_MODE (op) && mode != VOIDmode)
/* Must compare CC_REGNUM against zero.  */
859 if ((GET_CODE (XEXP (op, 0)) != REG
860 || REGNO (XEXP (op, 0)) != CC_REGNUM)
861 || XEXP (op, 1) != const0_rtx)
864 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
866 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
868 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
/* Predicate: OP is a float comparison against the FPU CC register in
   one of the "NZ-flag" modes (GT/GE/NE) -- mirror image of
   v850_float_z_comparison_operator above.  */
875 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
877 enum rtx_code code = GET_CODE (op);
879 if (GET_RTX_CLASS (code) != RTX_COMPARE
880 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
883 if (mode != GET_MODE (op) && mode != VOIDmode)
886 if ((GET_CODE (XEXP (op, 0)) != REG
887 || REGNO (XEXP (op, 0)) != CC_REGNUM)
888 || XEXP (op, 1) != const0_rtx)
891 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
893 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
895 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
/* Return the CC mode to use for comparison COND on OP0/OP1: a
   condition-specific CC_FPU_* mode for float operands (the case
   labels are elided in this view), otherwise the default CC mode.  */
902 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
904 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
909 return CC_FPU_LEmode;
911 return CC_FPU_GEmode;
913 return CC_FPU_LTmode;
915 return CC_FPU_GTmode;
917 return CC_FPU_EQmode;
919 return CC_FPU_NEmode;
/* Emit the FPU compare insn matching COND for DFmode or SFmode
   operands, then return the CC mode it sets (via
   v850_select_cc_mode).  NOTE(review): the SFmode branch tests
   v850_compare_op0 rather than op0 -- looks inconsistent with the
   DFmode branch; confirm against the full file.  */
928 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
930 if (GET_MODE(op0) == DFmode)
935 emit_insn (gen_cmpdf_le_insn (op0, op1));
938 emit_insn (gen_cmpdf_ge_insn (op0, op1));
941 emit_insn (gen_cmpdf_lt_insn (op0, op1));
944 emit_insn (gen_cmpdf_gt_insn (op0, op1));
947 emit_insn (gen_cmpdf_eq_insn (op0, op1));
950 emit_insn (gen_cmpdf_ne_insn (op0, op1));
956 else if (GET_MODE(v850_compare_op0) == SFmode)
961 emit_insn (gen_cmpsf_le_insn(op0, op1));
964 emit_insn (gen_cmpsf_ge_insn(op0, op1));
967 emit_insn (gen_cmpsf_lt_insn(op0, op1));
970 emit_insn (gen_cmpsf_gt_insn(op0, op1));
973 emit_insn (gen_cmpsf_eq_insn(op0, op1));
976 emit_insn (gen_cmpsf_ne_insn(op0, op1));
987 return v850_select_cc_mode (cond, op0, op1);
/* Emit a comparison of OP0 and OP1 and return the rtx condition to
   test.  Integer compares use cmpsi against CCmode CC_REGNUM; float
   compares go through v850_gen_float_compare and copy FCC into the
   CC register first.  */
991 v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
993 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
995 emit_insn (gen_cmpsi_insn (op0, op1));
996 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
1001 mode = v850_gen_float_compare (cond, mode, op0, op1);
1002 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1003 emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1005 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1009 /* Return maximum offset supported for a short EP memory reference of mode
1010 MODE and signedness UNSIGNEDP. */
1013 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
/* Per-mode offset limits for the short sld/sst forms; -msmall-sld
   restricts them further.  The case labels and default are elided
   in this view.  */
1020 if (TARGET_SMALL_SLD)
1021 max_offset = (1 << 4);
1022 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1024 max_offset = (1 << 4);
1026 max_offset = (1 << 7);
1030 if (TARGET_SMALL_SLD)
1031 max_offset = (1 << 5);
1032 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1034 max_offset = (1 << 5);
1036 max_offset = (1 << 8);
1041 max_offset = (1 << 8);
1051 /* Return true if OP is a valid short EP memory reference */
1054 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1060 /* If we are not using the EP register on a per-function basis
1061 then do not allow this optimization at all. This is to
1062 prevent the use of the SLD/SST instructions which cannot be
1063 guaranteed to work properly due to a hardware bug. */
1067 if (GET_CODE (op) != MEM)
/* Offsets must be aligned to the access size and within the short
   form's range for this mode.  */
1070 max_offset = ep_memory_offset (mode, unsigned_load);
1072 mask = GET_MODE_SIZE (mode) - 1;
1074 addr = XEXP (op, 0);
1075 if (GET_CODE (addr) == CONST)
1076 addr = XEXP (addr, 0);
1078 switch (GET_CODE (addr))
/* TDA symbols are addressed off ep implicitly.  */
1084 return SYMBOL_REF_TDA_P (addr);
1087 return REGNO (addr) == EP_REGNUM;
/* PLUS: base must be ep or a TDA symbol, offset in range.  */
1090 op0 = XEXP (addr, 0);
1091 op1 = XEXP (addr, 1);
1092 if (GET_CODE (op1) == CONST_INT
1093 && INTVAL (op1) < max_offset
1094 && INTVAL (op1) >= 0
1095 && (INTVAL (op1) & mask) == 0)
1097 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1100 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1109 /* Substitute memory references involving a pointer, to use the ep pointer,
1110 taking care to save and preserve the ep. */
1113 substitute_ep_register (rtx first_insn,
1120 rtx reg = gen_rtx_REG (Pmode, regno);
/* Lazily create the r1 (scratch) and r30 (ep) rtxes.  */
1125 df_set_regs_ever_live (1, true);
1126 *p_r1 = gen_rtx_REG (Pmode, 1);
1127 *p_ep = gen_rtx_REG (Pmode, 30);
/* Optional diagnostic: each substituted reference saves 2 bytes,
   minus the 3 insns of save/copy/restore overhead.  */
1132 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1133 2 * (uses - 3), uses, reg_names[regno],
1134 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1135 INSN_UID (first_insn), INSN_UID (last_insn));
1137 if (GET_CODE (first_insn) == NOTE)
1138 first_insn = next_nonnote_insn (first_insn);
1140 last_insn = next_nonnote_insn (last_insn);
1141 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1143 if (GET_CODE (insn) == INSN)
1145 rtx pattern = single_set (insn);
1147 /* Replace the memory references. */
1151 /* Memory operands are signed by default. */
1152 int unsignedp = FALSE;
/* Locate the single MEM in the SET, looking through
   sign/zero extension of the source.  */
1154 if (GET_CODE (SET_DEST (pattern)) == MEM
1155 && GET_CODE (SET_SRC (pattern)) == MEM)
1158 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1159 p_mem = &SET_DEST (pattern);
1161 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1162 p_mem = &SET_SRC (pattern);
1164 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1165 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1166 p_mem = &XEXP (SET_SRC (pattern), 0);
1168 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1169 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1171 p_mem = &XEXP (SET_SRC (pattern), 0);
/* Rewrite REGNO-based addresses to use ep instead.  */
1179 rtx addr = XEXP (*p_mem, 0);
1181 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1182 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1184 else if (GET_CODE (addr) == PLUS
1185 && GET_CODE (XEXP (addr, 0)) == REG
1186 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1187 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1188 && ((INTVAL (XEXP (addr, 1)))
1189 < ep_memory_offset (GET_MODE (*p_mem),
1191 && ((INTVAL (XEXP (addr, 1))) >= 0))
1192 *p_mem = change_address (*p_mem, VOIDmode,
1193 gen_rtx_PLUS (Pmode,
1201 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1202 insn = prev_nonnote_insn (first_insn);
1203 if (insn && GET_CODE (insn) == INSN
1204 && GET_CODE (PATTERN (insn)) == SET
1205 && SET_DEST (PATTERN (insn)) == *p_ep
1206 && SET_SRC (PATTERN (insn)) == *p_r1)
/* Save ep into r1, load the pointer into ep, and restore ep from r1
   after the last use.  */
1209 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1211 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1212 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1216 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1217 the -mep mode to copy heavily used pointers to ep to use the implicit
/* NOTE(review): the function header and several declarations are
   elided in this view; the per-register tracking table is:  */
1229 regs[FIRST_PSEUDO_REGISTER];
1238 /* If not ep mode, just return now. */
/* Reset the use-tracking table.  */
1242 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1245 regs[i].first_insn = NULL_RTX;
1246 regs[i].last_insn = NULL_RTX;
/* Scan all insns, tracking per-basic-block pointer usage.  */
1249 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1251 switch (GET_CODE (insn))
1253 /* End of basic block */
/* At a block boundary, substitute ep for the most-used pointer.  */
1260 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1262 if (max_uses < regs[i].uses)
1264 max_uses = regs[i].uses;
1270 substitute_ep_register (regs[max_regno].first_insn,
1271 regs[max_regno].last_insn,
1272 max_uses, max_regno, &r1, &ep);
1276 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1279 regs[i].first_insn = NULL_RTX;
1280 regs[i].last_insn = NULL_RTX;
1288 pattern = single_set (insn);
1290 /* See if there are any memory references we can shorten */
1293 rtx src = SET_SRC (pattern);
1294 rtx dest = SET_DEST (pattern);
1296 /* Memory operands are signed by default. */
1297 int unsignedp = FALSE;
1299 /* We might have (SUBREG (MEM)) here, so just get rid of the
1300 subregs to make this code simpler. */
1301 if (GET_CODE (dest) == SUBREG
1302 && (GET_CODE (SUBREG_REG (dest)) == MEM
1303 || GET_CODE (SUBREG_REG (dest)) == REG))
1304 alter_subreg (&dest);
1305 if (GET_CODE (src) == SUBREG
1306 && (GET_CODE (SUBREG_REG (src)) == MEM
1307 || GET_CODE (SUBREG_REG (src)) == REG))
1308 alter_subreg (&src);
/* Find the single MEM in this SET, as in substitute_ep_register.  */
1310 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1313 else if (GET_CODE (dest) == MEM)
1316 else if (GET_CODE (src) == MEM)
1319 else if (GET_CODE (src) == SIGN_EXTEND
1320 && GET_CODE (XEXP (src, 0)) == MEM)
1321 mem = XEXP (src, 0);
1323 else if (GET_CODE (src) == ZERO_EXTEND
1324 && GET_CODE (XEXP (src, 0)) == MEM)
1326 mem = XEXP (src, 0);
/* Count references whose address could be rewritten to use ep.  */
1332 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
1335 else if (!use_ep && mem
1336 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1338 rtx addr = XEXP (mem, 0);
1342 if (GET_CODE (addr) == REG)
1345 regno = REGNO (addr);
1348 else if (GET_CODE (addr) == PLUS
1349 && GET_CODE (XEXP (addr, 0)) == REG
1350 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1351 && ((INTVAL (XEXP (addr, 1)))
1352 < ep_memory_offset (GET_MODE (mem), unsignedp))
1353 && ((INTVAL (XEXP (addr, 1))) >= 0))
1356 regno = REGNO (XEXP (addr, 0));
1365 regs[regno].last_insn = insn;
1366 if (!regs[regno].first_insn)
1367 regs[regno].first_insn = insn;
1371 /* Loading up a register in the basic block zaps any savings
1373 if (GET_CODE (dest) == REG)
1375 enum machine_mode mode = GET_MODE (dest);
1379 regno = REGNO (dest);
1380 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1384 /* See if we can use the pointer before this
1389 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1391 if (max_uses < regs[i].uses)
1393 max_uses = regs[i].uses;
/* Substitute now if the clobbered register was the best one.  */
1399 && max_regno >= regno
1400 && max_regno < endregno)
1402 substitute_ep_register (regs[max_regno].first_insn,
1403 regs[max_regno].last_insn,
1404 max_uses, max_regno, &r1,
1407 /* Since we made a substitution, zap all remembered
1409 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1412 regs[i].first_insn = NULL_RTX;
1413 regs[i].last_insn = NULL_RTX;
/* Forget the clobbered registers' accumulated uses.  */
1418 for (i = regno; i < endregno; i++)
1421 regs[i].first_insn = NULL_RTX;
1422 regs[i].last_insn = NULL_RTX;
1430 /* # of registers saved by the interrupt handler. */
1431 #define INTERRUPT_FIXED_NUM 5
1433 /* # of bytes for registers saved by the interrupt handler. */
1434 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1436 /* # of words saved for other registers. */
1437 #define INTERRUPT_ALL_SAVE_NUM \
1438 (30 - INTERRUPT_FIXED_NUM)
/* Bytes for the "save everything" interrupt path (4 bytes/word).  */
1440 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
/* Return the number of bytes of stack needed to save registers for
   the current function, and (via P_REG_SAVED, when non-NULL) a bit
   mask of the registers being saved.  */
1443 compute_register_save_size (long * p_reg_saved)
1447 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1448 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1451 /* Count the return pointer if we need to save it. */
1452 if (crtl->profile && !call_p)
1454 df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
1458 /* Count space for the register saves. */
1459 if (interrupt_handler)
/* Interrupt handlers save every live register, minus the ones
   with dedicated handling below.  */
1461 for (i = 0; i <= 31; i++)
1465 if (df_regs_ever_live_p (i) || call_p)
1468 reg_saved |= 1L << i;
1472 /* We don't save/restore r0 or the stack pointer */
1474 case STACK_POINTER_REGNUM:
1477 /* For registers with fixed use, we save them, set them to the
1478 appropriate value, and then restore them.
1479 These registers are handled specially, so don't list them
1480 on the list of registers to save in the prologue. */
1481 case 1: /* temp used to hold ep */
1483 case 10: /* temp used to call interrupt save/restore */
1484 case 11: /* temp used to call interrupt save/restore (long call) */
1485 case EP_REGNUM: /* ep */
1492 /* Find the first register that needs to be saved. */
1493 for (i = 0; i <= 31; i++)
1494 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1495 || i == LINK_POINTER_REGNUM))
1498 /* If it is possible that an out-of-line helper function might be
1499 used to generate the prologue for the current function, then we
1500 need to cover the possibility that such a helper function will
1501 be used, despite the fact that there might be gaps in the list of
1502 registers that need to be saved. To detect this we note that the
1503 helper functions always push at least register r29 (provided
1504 that the function is not an interrupt handler). */
1506 if (TARGET_PROLOG_FUNCTION
1507 && (i == 2 || ((i >= 20) && (i < 30))))
1512 reg_saved |= 1L << i;
1517 /* Helper functions save all registers between the starting
1518 register and the last register, regardless of whether they
1519 are actually used by the function or not. */
1520 for (; i <= 29; i++)
1523 reg_saved |= 1L << i;
1526 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1529 reg_saved |= 1L << LINK_POINTER_REGNUM;
/* No helper function: save exactly the live call-saved registers
   (plus the link pointer).  */
1534 for (; i <= 31; i++)
1535 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1536 || i == LINK_POINTER_REGNUM))
1539 reg_saved |= 1L << i;
1545 *p_reg_saved = reg_saved;
/* Total frame size: locals (SIZE) plus register-save area plus
   outgoing argument space.  NOTE(review): the first addend and any
   rounding are elided in this view.  */
1551 compute_frame_size (int size, long * p_reg_saved)
1554 + compute_register_save_size (p_reg_saved)
1555 + crtl->outgoing_args_size);
/* Heuristic: return nonzero when calling an out-of-line prologue/
   epilogue helper is smaller (in code bytes) than saving/restoring
   NUM_SAVE registers inline with a frame of FRAME_SIZE bytes.  */
1559 use_prolog_function (int num_save, int frame_size)
1561 int alloc_stack = (4 * num_save);
1562 int unalloc_stack = frame_size - alloc_stack;
1563 int save_func_len, restore_func_len;
1564 int save_normal_len, restore_normal_len;
/* Cost of calling the helper: a callt is 2 bytes, otherwise a jarl
   (or the long-call sequence).  */
1566 if (! TARGET_DISABLE_CALLT)
1567 save_func_len = restore_func_len = 2;
1569 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
/* Plus an add/addi to adjust sp for any leftover stack.  */
1573 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1574 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1577 /* See if we would have used ep to save the stack. */
1578 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1579 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1581 save_normal_len = restore_normal_len = 4 * num_save;
1583 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1584 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1586 /* Don't bother checking if we don't actually save any space.
1587 This happens for instance if one register is saved and additional
1588 stack space is allocated. */
1589 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
/* Emit RTL for the function prologue: interrupt-handler register
   save, stack allocation, call-saved register stores (either via a
   PARALLEL matched against an out-of-line helper pattern, or one by
   one), and frame pointer setup.  */
1593 expand_prologue (void)
1596 unsigned int size = get_frame_size ();
1597 unsigned int actual_fsize;
1598 unsigned int init_stack_alloc = 0;
1601 unsigned int num_save;
1603 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
/* NOTE(review): "®_saved" below is mojibake for "&reg_saved"
   (an HTML entity corruption) -- must be repaired in the full file.  */
1606 actual_fsize = compute_frame_size (size, ®_saved);
1608 /* Save/setup global registers for interrupt functions right now. */
1609 if (interrupt_handler)
1611 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1612 emit_insn (gen_callt_save_interrupt ());
1614 emit_insn (gen_save_interrupt ());
/* The interrupt save sequences account for this space themselves.  */
1616 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1618 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1619 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1622 /* Identify all of the saved registers. */
1624 for (i = 1; i < 32; i++)
1626 if (((1L << i) & reg_saved) != 0)
1627 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1630 /* See if we have an insn that allocates stack space and saves the particular
1631 registers we want to. */
1632 save_all = NULL_RTX;
1633 if (TARGET_PROLOG_FUNCTION && num_save > 0)
1635 if (use_prolog_function (num_save, actual_fsize))
1637 int alloc_stack = 4 * num_save;
/* Build a PARALLEL: sp adjustment, one store per register,
   plus clobbers of the helper's temp registers (r10/r11).  */
1640 save_all = gen_rtx_PARALLEL
1642 rtvec_alloc (num_save + 1
1643 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1645 XVECEXP (save_all, 0, 0)
1646 = gen_rtx_SET (VOIDmode,
1648 gen_rtx_PLUS (Pmode,
1650 GEN_INT(-alloc_stack)));
1651 for (i = 0; i < num_save; i++)
1654 XVECEXP (save_all, 0, i+1)
1655 = gen_rtx_SET (VOIDmode,
1657 gen_rtx_PLUS (Pmode,
1663 if (TARGET_DISABLE_CALLT)
1665 XVECEXP (save_all, 0, num_save + 1)
1666 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1668 if (TARGET_LONG_CALLS)
1669 XVECEXP (save_all, 0, num_save + 2)
1670 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
/* Only emit the PARALLEL if some save pattern recognizes it.  */
1673 code = recog (save_all, NULL_RTX, NULL);
1676 rtx insn = emit_insn (save_all);
1677 INSN_CODE (insn) = code;
1678 actual_fsize -= alloc_stack;
1682 save_all = NULL_RTX;
1686 /* If no prolog save function is available, store the registers the old
1687 fashioned way (one by one). */
1690 /* Special case interrupt functions that save all registers for a call. */
1691 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1693 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1694 emit_insn (gen_callt_save_all_interrupt ());
1696 emit_insn (gen_save_all_interrupt ());
1701 /* If the stack is too big, allocate it in chunks so we can do the
1702 register saves. We use the register save size so we use the ep
1704 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1705 init_stack_alloc = compute_register_save_size (NULL);
1707 init_stack_alloc = actual_fsize;
1709 /* Save registers at the beginning of the stack frame. */
1710 offset = init_stack_alloc - 4;
1712 if (init_stack_alloc)
1713 emit_insn (gen_addsi3 (stack_pointer_rtx,
1715 GEN_INT (- (signed) init_stack_alloc)));
1717 /* Save the return pointer first. */
1718 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1720 emit_move_insn (gen_rtx_MEM (SImode,
1721 plus_constant (Pmode,
1724 save_regs[--num_save]);
/* Then store the remaining registers at descending offsets.  */
1728 for (i = 0; i < num_save; i++)
1730 emit_move_insn (gen_rtx_MEM (SImode,
1731 plus_constant (Pmode,
1740 /* Allocate the rest of the stack that was not allocated above (either it is
1741 > 32K or we just called a function to save the registers and needed more
1743 if (actual_fsize > init_stack_alloc)
1745 int diff = actual_fsize - init_stack_alloc;
1746 if (CONST_OK_FOR_K (-diff))
1747 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Too large for a 16-bit immediate: go through scratch r12.  */
1752 rtx reg = gen_rtx_REG (Pmode, 12);
1753 emit_move_insn (reg, GEN_INT (-diff));
1754 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1758 /* If we need a frame pointer, set it up now. */
1759 if (frame_pointer_needed)
1760 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1765 expand_epilogue (void)
/* Emit the function epilogue as RTL: restore call-saved registers,
   deallocate the stack frame, and emit the appropriate return
   (RETI-style sequence for interrupt handlers).  Mirrors
   expand_prologue; may instead emit a jump to a shared out-of-line
   "__return_*" helper when TARGET_PROLOG_FUNCTION saves space.  */
1768 unsigned int size = get_frame_size ();
/* NOTE(review): "®_saved" below is mojibake — almost certainly
   "&reg_saved"; fix the source-file encoding.  */
1770 int actual_fsize = compute_frame_size (size, ®_saved);
1771 rtx restore_regs[32];
1773 unsigned int num_restore;
1775 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1777 /* Eliminate the initial stack stored by interrupt functions.  */
1778 if (interrupt_handler)
1780 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1781 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1782 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1785 /* Cut off any dynamic stack created. */
1786 if (frame_pointer_needed)
1787 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1789 /* Identify all of the saved registers. */
1791 for (i = 1; i < 32; i++)
1793 if (((1L << i) & reg_saved) != 0)
1794 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1797 /* See if we have an insn that restores the particular registers we
1799 restore_all = NULL_RTX;
1801 if (TARGET_PROLOG_FUNCTION
1803 && !interrupt_handler)
/* Each restored register occupies one word on the stack.  */
1805 int alloc_stack = (4 * num_restore);
1807 /* Don't bother checking if we don't actually save any space. */
1808 if (use_prolog_function (num_restore, actual_fsize))
/* Build a PARALLEL of (return, SP adjust, register restores) so a
   single out-of-line "__return_*" pattern can match it.  */
1811 restore_all = gen_rtx_PARALLEL (VOIDmode,
1812 rtvec_alloc (num_restore + 2));
1813 XVECEXP (restore_all, 0, 0) = ret_rtx;
1814 XVECEXP (restore_all, 0, 1)
1815 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1816 gen_rtx_PLUS (Pmode,
1818 GEN_INT (alloc_stack)));
1820 offset = alloc_stack - 4;
1821 for (i = 0; i < num_restore; i++)
1823 XVECEXP (restore_all, 0, i+2)
1824 = gen_rtx_SET (VOIDmode,
1827 gen_rtx_PLUS (Pmode,
/* Check whether some insn pattern actually matches the PARALLEL.  */
1833 code = recog (restore_all, NULL_RTX, NULL);
1839 actual_fsize -= alloc_stack;
/* Release whatever frame space the helper does not pop itself.  */
1842 if (CONST_OK_FOR_K (actual_fsize))
1843 emit_insn (gen_addsi3 (stack_pointer_rtx,
1845 GEN_INT (actual_fsize)));
/* Amount too large for an immediate: go through scratch reg r12.  */
1848 rtx reg = gen_rtx_REG (Pmode, 12);
1849 emit_move_insn (reg, GEN_INT (actual_fsize));
1850 emit_insn (gen_addsi3 (stack_pointer_rtx,
1856 insn = emit_jump_insn (restore_all);
1857 INSN_CODE (insn) = code;
1861 restore_all = NULL_RTX;
1865 /* If no epilogue save function is available, restore the registers the
1866 old fashioned way (one by one). */
1869 unsigned int init_stack_free;
1871 /* If the stack is large, we need to cut it down in 2 pieces. */
1872 if (interrupt_handler)
1873 init_stack_free = 0;
1874 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1875 init_stack_free = 4 * num_restore;
1877 init_stack_free = (signed) actual_fsize;
1879 /* Deallocate the rest of the stack if it is > 32K. */
1880 if ((unsigned int) actual_fsize > init_stack_free)
1884 diff = actual_fsize - init_stack_free;
1886 if (CONST_OK_FOR_K (diff))
1887 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Too big for an immediate adjustment: use scratch reg r12.  */
1892 rtx reg = gen_rtx_REG (Pmode, 12);
1893 emit_move_insn (reg, GEN_INT (diff));
1894 emit_insn (gen_addsi3 (stack_pointer_rtx,
1900 /* Special case interrupt functions that save all registers
1902 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1904 if (! TARGET_DISABLE_CALLT)
1905 emit_insn (gen_callt_restore_all_interrupt ());
1907 emit_insn (gen_restore_all_interrupt ());
1911 /* Restore registers from the beginning of the stack frame. */
1912 int offset = init_stack_free - 4;
1914 /* Restore the return pointer first. */
1916 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1918 emit_move_insn (restore_regs[--num_restore],
1919 gen_rtx_MEM (SImode,
1920 plus_constant (Pmode,
1926 for (i = 0; i < num_restore; i++)
1928 emit_move_insn (restore_regs[i],
1929 gen_rtx_MEM (SImode,
1930 plus_constant (Pmode,
/* Mark the restored value live so later passes cannot delete it.  */
1934 emit_use (restore_regs[i]);
1938 /* Cut back the remainder of the stack. */
1939 if (init_stack_free)
1940 emit_insn (gen_addsi3 (stack_pointer_rtx,
1942 GEN_INT (init_stack_free)));
1945 /* And return or use reti for interrupt handlers. */
1946 if (interrupt_handler)
1948 if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
1949 emit_insn (gen_callt_return_interrupt ());
1951 emit_jump_insn (gen_return_interrupt ());
1953 else if (actual_fsize)
1954 emit_jump_insn (gen_return_internal ());
1956 emit_jump_insn (gen_return_simple ());
/* Invalidate the cached interrupt-handler state before the next
   function is compiled.  */
1959 v850_interrupt_cache_p = FALSE;
1960 v850_interrupt_p = FALSE;
1963 /* Update the condition code from the insn.  Implements NOTICE_UPDATE_CC:
   keep cc_status in sync with what INSN (whose pattern is BODY) does to
   the condition flags, so redundant compare insns can be elided.  */
1965 notice_update_cc (rtx body, rtx insn)
1967 switch (get_attr_cc (insn))
1970 /* Insn does not affect CC at all. */
1974 /* Insn does not change CC, but the 0'th operand has been changed. */
1975 if (cc_status.value1 != 0
1976 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
1977 cc_status.value1 = 0;
1981 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1982 V,C is in an unusable state. */
1984 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
1985 cc_status.value1 = recog_data.operand[0];
1989 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1990 C is in an unusable state. */
1992 cc_status.flags |= CC_NO_CARRY;
1993 cc_status.value1 = recog_data.operand[0];
1997 /* The insn is a compare instruction. */
1999 cc_status.value1 = SET_SRC (body);
2003 /* Insn doesn't leave CC in a usable state. */
2012 /* Retrieve the data area that has been chosen for the given decl:
   SDA/TDA/ZDA if the corresponding attribute is attached to it,
   otherwise DATA_AREA_NORMAL.  */
2015 v850_get_data_area (tree decl)
2017 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2018 return DATA_AREA_SDA;
2020 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2021 return DATA_AREA_TDA;
2023 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2024 return DATA_AREA_ZDA;
2026 return DATA_AREA_NORMAL;
2029 /* Store the indicated data area in the decl's attributes by consing
   the matching identifier ("sda"/"tda"/"zda") onto DECL_ATTRIBUTES.  */
2032 v850_set_data_area (tree decl, v850_data_area data_area)
2038 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2039 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2040 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2045 DECL_ATTRIBUTES (decl) = tree_cons
2046 (name, NULL, DECL_ATTRIBUTES (decl));
2049 /* Handle an "interrupt" attribute; arguments as in
2050 struct attribute_spec.handler.  The attribute is only valid on
   FUNCTION_DECLs; otherwise warn and discard it.  */
2052 v850_handle_interrupt_attribute (tree * node,
2054 tree args ATTRIBUTE_UNUSED,
2055 int flags ATTRIBUTE_UNUSED,
2056 bool * no_add_attrs)
2058 if (TREE_CODE (*node) != FUNCTION_DECL)
2060 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2062 *no_add_attrs = true;
2068 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2069 struct attribute_spec.handler.  Rejects data-area attributes used
   inside a function body and conflicting areas on one decl.  */
2071 v850_handle_data_area_attribute (tree* node,
2073 tree args ATTRIBUTE_UNUSED,
2074 int flags ATTRIBUTE_UNUSED,
2075 bool * no_add_attrs)
2077 v850_data_area data_area;
2078 v850_data_area area;
2081 /* Implement data area attribute. */
2082 if (is_attribute_p ("sda", name))
2083 data_area = DATA_AREA_SDA;
2084 else if (is_attribute_p ("tda", name))
2085 data_area = DATA_AREA_TDA;
2086 else if (is_attribute_p ("zda", name))
2087 data_area = DATA_AREA_ZDA;
2091 switch (TREE_CODE (decl))
2094 if (current_function_decl != NULL_TREE)
2096 error_at (DECL_SOURCE_LOCATION (decl),
2097 "data area attributes cannot be specified for "
2099 *no_add_attrs = true;
/* A decl may carry at most one data area; flag contradictions.  */
2105 area = v850_get_data_area (decl);
2106 if (area != DATA_AREA_NORMAL && data_area != area)
2108 error ("data area of %q+D conflicts with previous declaration",
2110 *no_add_attrs = true;
2122 /* Return nonzero if FUNC is an interrupt function as specified
2123 by the "interrupt" or "interrupt_handler" attribute.  The result is
   cached in v850_interrupt_p once it is safe to do so.  */
2126 v850_interrupt_function_p (tree func)
2131 if (v850_interrupt_cache_p)
2132 return v850_interrupt_p;
2134 if (TREE_CODE (func) != FUNCTION_DECL)
2137 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2143 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2144 ret = a != NULL_TREE;
2147 /* It's not safe to trust global variables until after function inlining has
/* NOTE(review): bitwise "|" on the two flags works since both are
   0/1-valued, but "||" would state the intent more clearly.  */
2149 if (reload_completed | reload_in_progress)
2150 v850_interrupt_p = ret;
/* Record DECL's data area in SYMBOL's SYMBOL_REF_FLAGS.  The area is
   taken from an explicit attribute, derived from an explicit section
   name, or chosen from the -m{zda,sda,tda}=n size thresholds.  */
2157 v850_encode_data_area (tree decl, rtx symbol)
2161 /* Map explicit sections into the appropriate attribute */
2162 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2164 if (DECL_SECTION_NAME (decl))
2166 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2168 if (streq (name, ".zdata") || streq (name, ".zbss"))
2169 v850_set_data_area (decl, DATA_AREA_ZDA);
2171 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2172 v850_set_data_area (decl, DATA_AREA_SDA);
2174 else if (streq (name, ".tdata"))
2175 v850_set_data_area (decl, DATA_AREA_TDA);
2178 /* If no attribute, support -m{zda,sda,tda}=n */
2181 int size = int_size_in_bytes (TREE_TYPE (decl));
2185 else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
2186 v850_set_data_area (decl, DATA_AREA_TDA);
2188 else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
2189 v850_set_data_area (decl, DATA_AREA_SDA);
2191 else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
2192 v850_set_data_area (decl, DATA_AREA_ZDA);
/* Nothing to encode for the default data area.  */
2195 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2199 flags = SYMBOL_REF_FLAGS (symbol);
2200 switch (v850_get_data_area (decl))
2202 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2203 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2204 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2205 default: gcc_unreachable ();
2207 SYMBOL_REF_FLAGS (symbol) = flags;
/* Implement TARGET_ENCODE_SECTION_INFO: after the default handling,
   tag static/external variables with their v850 data-area flags.  */
2211 v850_encode_section_info (tree decl, rtx rtl, int first)
2213 default_encode_section_info (decl, rtl, first);
2215 if (TREE_CODE (decl) == VAR_DECL
2216 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2217 v850_encode_data_area (decl, XEXP (rtl, 0));
2220 /* Construct a JR instruction to a routine that will perform the equivalent of
2221 the RTL passed in as an argument. This RTL is a function epilogue that
2222 pops registers off the stack and possibly releases some extra stack space
2223 as well. The code has already verified that the RTL matches these
   requirements.  Returns the assembler text in a static buffer, so the
   caller must consume it before the next call.  */
2227 construct_restore_jr (rtx op)
2229 int count = XVECLEN (op, 0);
2231 unsigned long int mask;
2232 unsigned long int first;
2233 unsigned long int last;
2235 static char buff [100]; /* XXX: fixed-size shared scratch buffer.  */
2239 error ("bogus JR construction: %d", count);
2243 /* Work out how many bytes to pop off the stack before retrieving
2245 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2246 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2247 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2249 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2251 /* Each pop will remove 4 bytes from the stack.... */
2252 stack_bytes -= (count - 2) * 4;
2254 /* Make sure that the amount we are popping is exactly zero: the
   out-of-line "__return_*" routines release no extra stack.  */
2255 if (stack_bytes != 0)
2257 error ("bad amount of stack space removal: %d", stack_bytes);
2261 /* Now compute the bit mask of registers to pop. */
2263 for (i = 2; i < count; i++)
2265 rtx vector_element = XVECEXP (op, 0, i);
2267 gcc_assert (GET_CODE (vector_element) == SET);
2268 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2269 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2272 mask |= 1 << REGNO (SET_DEST (vector_element));
2275 /* Scan for the first register to pop. */
2276 for (first = 0; first < 32; first++)
2278 if (mask & (1 << first))
2282 gcc_assert (first < 32);
2284 /* Discover the last register to pop. */
2285 if (mask & (1 << LINK_POINTER_REGNUM))
2287 last = LINK_POINTER_REGNUM;
2291 gcc_assert (!stack_bytes);
2292 gcc_assert (mask & (1 << 29));
2297 /* Note, it is possible to have gaps in the register mask.
2298 We ignore this here, and generate a JR anyway. We will
2299 be popping more registers than is strictly necessary, but
2300 it does save code space. */
2302 if (TARGET_LONG_CALLS)
2307 sprintf (name, "__return_%s", reg_names [first]);
2309 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
/* Long-call variant: load the helper's address into r6 and jump.  */
2311 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2317 sprintf (buff, "jr __return_%s", reg_names [first]);
2319 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2326 /* Construct a JARL instruction to a routine that will perform the equivalent
2327 of the RTL passed as a parameter. This RTL is a function prologue that
2328 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2329 some stack space as well. The code has already verified that the RTL
2330 matches these requirements.  Returns assembler text in a static buffer.  */
2332 construct_save_jarl (rtx op)
2334 int count = XVECLEN (op, 0);
2336 unsigned long int mask;
2337 unsigned long int first;
2338 unsigned long int last;
2340 static char buff [100]; /* XXX: fixed-size shared scratch buffer.  */
2342 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2344 error ("bogus JARL construction: %d", count);
2349 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2350 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2351 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2352 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2354 /* Work out how many bytes to push onto the stack after storing the
2356 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2358 /* Each push will put 4 bytes onto the stack.... */
2359 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2361 /* Make sure that the amount we are pushing is exactly zero: the
   out-of-line "__save_*" routines acquire no extra stack.  */
/* NOTE(review): the message says "removal" although this is the save
   path — rewording it would be a string change, so not done here.  */
2362 if (stack_bytes != 0)
2364 error ("bad amount of stack space removal: %d", stack_bytes);
2368 /* Now compute the bit mask of registers to push. */
2370 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2372 rtx vector_element = XVECEXP (op, 0, i);
2374 gcc_assert (GET_CODE (vector_element) == SET);
2375 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2376 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2379 mask |= 1 << REGNO (SET_SRC (vector_element));
2382 /* Scan for the first register to push. */
2383 for (first = 0; first < 32; first++)
2385 if (mask & (1 << first))
2389 gcc_assert (first < 32);
2391 /* Discover the last register to push. */
2392 if (mask & (1 << LINK_POINTER_REGNUM))
2394 last = LINK_POINTER_REGNUM;
2398 gcc_assert (!stack_bytes);
2399 gcc_assert (mask & (1 << 29));
2404 /* Note, it is possible to have gaps in the register mask.
2405 We ignore this here, and generate a JARL anyway. We will
2406 be pushing more registers than is strictly necessary, but
2407 it does save code space. */
2409 if (TARGET_LONG_CALLS)
2414 sprintf (name, "__save_%s", reg_names [first]);
2416 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
/* Long-call variant: load the helper address into r11, compute the
   return address into r10, then jump.  */
2418 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2424 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2426 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2433 /* A version of asm_output_aligned_bss() that copes with the special
2434 data areas of the v850: switch to zbss/sbss/tdata according to the
   decl's data area before emitting alignment, label and skip.  */
2436 v850_output_aligned_bss (FILE * file,
2439 unsigned HOST_WIDE_INT size,
2442 switch (v850_get_data_area (decl))
2445 switch_to_section (zbss_section);
2449 switch_to_section (sbss_section);
2453 switch_to_section (tdata_section);
2456 switch_to_section (bss_section);
2460 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2461 #ifdef ASM_DECLARE_OBJECT_NAME
2462 last_assemble_variable_decl = decl;
2463 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2465 /* Standard thing is just output label for the object. */
2466 ASM_OUTPUT_LABEL (file, name);
2467 #endif /* ASM_DECLARE_OBJECT_NAME */
/* A zero-sized object still needs one byte so the label is unique.  */
2468 ASM_OUTPUT_SKIP (file, size ? size : 1);
2471 /* Called via the macro ASM_OUTPUT_DECL_COMMON: emit the common-symbol
   directive appropriate for DECL's data area (.zcomm/.scomm/.tcomm or
   plain .comm).  */
2473 v850_output_common (FILE * file,
2479 if (decl == NULL_TREE)
2481 fprintf (file, "%s", COMMON_ASM_OP);
2485 switch (v850_get_data_area (decl))
2488 fprintf (file, "%s", ZCOMMON_ASM_OP);
2492 fprintf (file, "%s", SCOMMON_ASM_OP);
2496 fprintf (file, "%s", TCOMMON_ASM_OP);
2500 fprintf (file, "%s", COMMON_ASM_OP);
2505 assemble_name (file, name);
2506 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2509 /* Called via the macro ASM_OUTPUT_DECL_LOCAL: mark NAME as local,
   then emit it as (data-area-aware) common storage.  */
2511 v850_output_local (FILE * file,
2517 fprintf (file, "%s", LOCAL_ASM_OP);
2518 assemble_name (file, name);
2519 fprintf (file, "\n");
2521 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2524 /* Add data area to the given declaration if a ghs data area pragma is
2525 currently in effect (#pragma ghs startXXX/endXXX).  Also assigns
   GHS default/renamed section names to eligible file-scope decls.  */
2527 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2530 && data_area_stack->data_area
2531 && current_function_decl == NULL_TREE
2532 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2533 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2534 v850_set_data_area (decl, data_area_stack->data_area);
2536 /* Initialize the default names of the v850 specific sections,
2537 if this has not been done before. */
2539 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2541 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2542 = build_string (sizeof (".sdata")-1, ".sdata");
2544 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2545 = build_string (sizeof (".rosdata")-1, ".rosdata");
2547 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2548 = build_string (sizeof (".tdata")-1, ".tdata");
2550 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2551 = build_string (sizeof (".zdata")-1, ".zdata");
2553 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2554 = build_string (sizeof (".rozdata")-1, ".rozdata");
/* Only file-scope defined decls without an explicit section are
   eligible for a GHS section name.  */
2557 if (current_function_decl == NULL_TREE
2558 && (TREE_CODE (decl) == VAR_DECL
2559 || TREE_CODE (decl) == CONST_DECL
2560 || TREE_CODE (decl) == FUNCTION_DECL)
2561 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2562 && !DECL_SECTION_NAME (decl))
2564 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2565 tree chosen_section;
2567 if (TREE_CODE (decl) == FUNCTION_DECL)
2568 kind = GHS_SECTION_KIND_TEXT;
2571 /* First choose a section kind based on the data area of the decl. */
2572 switch (v850_get_data_area (decl))
2578 kind = ((TREE_READONLY (decl))
2579 ? GHS_SECTION_KIND_ROSDATA
2580 : GHS_SECTION_KIND_SDATA);
2584 kind = GHS_SECTION_KIND_TDATA;
2588 kind = ((TREE_READONLY (decl))
2589 ? GHS_SECTION_KIND_ROZDATA
2590 : GHS_SECTION_KIND_ZDATA);
2593 case DATA_AREA_NORMAL: /* default data area */
2594 if (TREE_READONLY (decl))
2595 kind = GHS_SECTION_KIND_RODATA;
2596 else if (DECL_INITIAL (decl))
2597 kind = GHS_SECTION_KIND_DATA;
2599 kind = GHS_SECTION_KIND_BSS;
2603 /* Now, if the section kind has been explicitly renamed,
2604 then attach a section attribute. */
2605 chosen_section = GHS_current_section_names [(int) kind];
2607 /* Otherwise, if this kind of section needs an explicit section
2608 attribute, then also attach one. */
2609 if (chosen_section == NULL)
2610 chosen_section = GHS_default_section_names [(int) kind];
2614 /* Only set the section name if specified by a pragma, because
2615 otherwise it will force those variables to get allocated storage
2616 in this module, rather than by the linker. */
2617 DECL_SECTION_NAME (decl) = chosen_section;
2622 /* Construct a DISPOSE instruction that is the equivalent of
2623 the given RTX. We have already verified that this should
   be possible.  Returns the assembler text in a static buffer.  */
2627 construct_dispose_instruction (rtx op)
2629 int count = XVECLEN (op, 0);
2631 unsigned long int mask;
2633 static char buff[ 100 ]; /* XXX: fixed-size shared scratch buffer.  */
2638 error ("bogus DISPOSE construction: %d", count);
2642 /* Work out how many bytes to pop off the
2643 stack before retrieving registers. */
2644 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2645 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2646 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2648 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2650 /* Each pop will remove 4 bytes from the stack.... */
2651 stack_bytes -= (count - 2) * 4;
2653 /* Make sure that the amount we are popping
2654 will fit into the DISPOSE instruction. */
2655 if (stack_bytes > 128)
2657 error ("too much stack space to dispose of: %d", stack_bytes);
2661 /* Now compute the bit mask of registers to pop. */
2664 for (i = 2; i < count; i++)
2666 rtx vector_element = XVECEXP (op, 0, i);
2668 gcc_assert (GET_CODE (vector_element) == SET);
2669 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2670 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
/* r2 restores are handled by the callt return stubs, not DISPOSE.  */
2673 if (REGNO (SET_DEST (vector_element)) == 2)
2676 mask |= 1 << REGNO (SET_DEST (vector_element));
2679 if (! TARGET_DISABLE_CALLT
2680 && (use_callt || stack_bytes == 0))
2684 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2689 for (i = 20; i < 32; i++)
2690 if (mask & (1 << i))
2694 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2696 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2697 i, (mask & (1 << 31)) ? "31c" : "29");
2702 static char regs [100]; /* XXX: shared scratch buffer.  */
2705 /* Generate the DISPOSE instruction. Note we could just issue the
2706 bit mask as a number as the assembler can cope with this, but for
2707 the sake of our readers we turn it into a textual description. */
2711 for (i = 20; i < 32; i++)
2713 if (mask & (1 << i))
2718 strcat (regs, ", ");
2723 strcat (regs, reg_names[ first ]);
/* Collapse a run of consecutive registers into "rA - rB".  */
2725 for (i++; i < 32; i++)
2726 if ((mask & (1 << i)) == 0)
2731 strcat (regs, " - ");
2732 strcat (regs, reg_names[ i - 1 ] );
2737 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2743 /* Construct a PREPARE instruction that is the equivalent of
2744 the given RTL. We have already verified that this should
   be possible.  Returns the assembler text in a static buffer.  */
2748 construct_prepare_instruction (rtx op)
2752 unsigned long int mask;
2754 static char buff[ 100 ]; /* XXX: fixed-size shared scratch buffer.  */
2757 if (XVECLEN (op, 0) <= 1)
/* NOTE(review): "PREPEARE" is a typo for "PREPARE" — fixing it is a
   user-visible string change, so it is only flagged here.  */
2759 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2763 /* Work out how many bytes to push onto
2764 the stack after storing the registers. */
2765 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2766 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2767 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2769 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2772 /* Make sure that the amount we are pushing
2773 will fit into the PREPARE instruction. */
2774 if (stack_bytes < -128)
2776 error ("too much stack space to prepare: %d", stack_bytes);
2780 /* Now compute the bit mask of registers to push. */
2783 for (i = 1; i < XVECLEN (op, 0); i++)
2785 rtx vector_element = XVECEXP (op, 0, i);
/* CLOBBERs in the PARALLEL carry no register to save.  */
2787 if (GET_CODE (vector_element) == CLOBBER)
2790 gcc_assert (GET_CODE (vector_element) == SET);
2791 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2792 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
/* r2 saves are handled by the callt save stubs, not PREPARE.  */
2795 if (REGNO (SET_SRC (vector_element)) == 2)
2798 mask |= 1 << REGNO (SET_SRC (vector_element));
2802 stack_bytes += count * 4;
2804 if ((! TARGET_DISABLE_CALLT)
2805 && (use_callt || stack_bytes == 0))
2809 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2813 for (i = 20; i < 32; i++)
2814 if (mask & (1 << i))
2818 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2820 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2821 i, (mask & (1 << 31)) ? "31c" : "29");
2825 static char regs [100]; /* XXX: shared scratch buffer.  */
2829 /* Generate the PREPARE instruction. Note we could just issue the
2830 bit mask as a number as the assembler can cope with this, but for
2831 the sake of our readers we turn it into a textual description. */
2835 for (i = 20; i < 32; i++)
2837 if (mask & (1 << i))
2842 strcat (regs, ", ");
2847 strcat (regs, reg_names[ first ]);
/* Collapse a run of consecutive registers into "rA - rB".  */
2849 for (i++; i < 32; i++)
2850 if ((mask & (1 << i)) == 0)
2855 strcat (regs, " - ");
2856 strcat (regs, reg_names[ i - 1 ] );
2861 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2867 /* Return an RTX indicating where the return address to the
2868 calling function can be found: the initial value of the link
   pointer register.  */
2871 v850_return_addr (int count)
/* NOTE(review): the handling of COUNT != 0 is not visible in this
   extract — confirm it returns const0_rtx before relying on it.  */
2876 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2879 /* Implement TARGET_ASM_INIT_SECTIONS: create the v850-specific
   rosdata/rozdata/tdata/zdata/zbss output sections.  */
2882 v850_asm_init_sections (void)
2885 = get_unnamed_section (0, output_section_asm_op,
2886 "\t.section .rosdata,\"a\"");
2889 = get_unnamed_section (0, output_section_asm_op,
2890 "\t.section .rozdata,\"a\"");
2893 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2894 "\t.section .tdata,\"aw\"");
2897 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2898 "\t.section .zdata,\"aw\"");
2901 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2902 output_section_asm_op,
2903 "\t.section .zbss,\"aw\"");
/* Implement TARGET_ASM_SELECT_SECTION: pick the output section for EXP
   from its data area and whether it is genuinely constant.  */
2907 v850_select_section (tree exp,
2908 int reloc ATTRIBUTE_UNUSED,
2909 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2911 if (TREE_CODE (exp) == VAR_DECL)
/* Writable, side-effecting, or non-constant-initialised variables do
   not count as const for section selection.  */
2914 if (!TREE_READONLY (exp)
2915 || TREE_SIDE_EFFECTS (exp)
2916 || !DECL_INITIAL (exp)
2917 || (DECL_INITIAL (exp) != error_mark_node
2918 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2923 switch (v850_get_data_area (exp))
2926 return is_const ? rozdata_section : zdata_section;
2929 return tdata_section;
2932 return is_const ? rosdata_section : sdata_section;
2935 return is_const ? readonly_data_section : data_section;
/* Non-VAR_DECL entities (e.g. constant pool data) go read-only.  */
2938 return readonly_data_section;
2941 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P:
   r10 is the sole value-return register on the v850.  */
2944 v850_function_value_regno_p (const unsigned int regno)
2946 return (regno == 10);
2949 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2952 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2954 /* Return values > 8 bytes in length (or of variable size) in memory. */
2955 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
2958 /* Worker function for TARGET_FUNCTION_VALUE: values returned in a
   register come back in r10.  */
2961 v850_function_value (const_tree valtype,
2962 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2963 bool outgoing ATTRIBUTE_UNUSED)
2965 return gen_rtx_REG (TYPE_MODE (valtype), 10);
2969 /* Worker function for TARGET_SETUP_INCOMING_VARARGS.  Merely records
   that anonymous arguments are present (suppressed under -mghs).  */
2972 v850_setup_incoming_varargs (cumulative_args_t ca,
2973 enum machine_mode mode ATTRIBUTE_UNUSED,
2974 tree type ATTRIBUTE_UNUSED,
2975 int *pretend_arg_size ATTRIBUTE_UNUSED,
2976 int second_time ATTRIBUTE_UNUSED)
2978 get_cumulative_args (ca)->anonymous_args = (!TARGET_GHS ? 1 : 0);
2981 /* Worker function for TARGET_CAN_ELIMINATE: elimination to the stack
   pointer is only possible when no frame pointer is required.  */
2984 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2986 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
2989 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
2991 If TARGET_APP_REGS is not defined then add r2 and r5 to
2992 the pool of fixed registers. See PR 14505. */
2995 v850_conditional_register_usage (void)
2997 if (TARGET_APP_REGS)
/* -mapp-regs: release r2 and r5 for general use by the compiler.  */
2999 fixed_regs[2] = 0; call_used_regs[2] = 0;
3000 fixed_regs[5] = 0; call_used_regs[5] = 1;
3004 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  The jarl puts
   the trampoline's own address in r12; the static chain (offset 12
   from r12 here) and the target address are then loaded from the two
   trailing .long slots and control jumps to the target.  */
3007 v850_asm_trampoline_template (FILE *f)
3009 fprintf (f, "\tjarl .+4,r12\n");
3010 fprintf (f, "\tld.w 12[r12],r20\n");
3011 fprintf (f, "\tld.w 16[r12],r12\n");
3012 fprintf (f, "\tjmp [r12]\n");
3013 fprintf (f, "\tnop\n");
3014 fprintf (f, "\t.long 0\n");
3015 fprintf (f, "\t.long 0\n");
3018 /* Worker function for TARGET_TRAMPOLINE_INIT: copy the template into
   place, then patch in the static chain (byte offset 16) and the
   target function address (byte offset 20).  */
3021 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3023 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3025 emit_block_move (m_tramp, assemble_trampoline_template (),
3026 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3028 mem = adjust_address (m_tramp, SImode, 16);
3029 emit_move_insn (mem, chain_value);
3030 mem = adjust_address (m_tramp, SImode, 20);
3031 emit_move_insn (mem, fnaddr);
/* Scheduler issue rate: the V850E2 family can issue two insns/cycle.  */
3035 v850_issue_rate (void)
3037 return (TARGET_V850E2_ALL? 2 : 1);
3040 /* Implement TARGET_LEGITIMATE_CONSTANT_P.  Accept everything except a
   CONST of (symbol + offset) whose offset does not satisfy the 'K'
   constraint; CONST_DOUBLEs are always legitimate.  */
3043 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3045 return (GET_CODE (x) == CONST_DOUBLE
3046 || !(GET_CODE (x) == CONST
3047 && GET_CODE (XEXP (x, 0)) == PLUS
3048 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3049 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3050 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
/* Implement TARGET_MEMORY_MOVE_COST: loads (IN) are costlier than
   stores on this target.  */
3054 v850_memory_move_cost (enum machine_mode mode,
3055 reg_class_t reg_class ATTRIBUTE_UNUSED,
3058 switch (GET_MODE_SIZE (mode))
/* Fallback for wide modes: scale the per-halfword cost.  */
3068 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3072 /* V850 specific attributes. */
3074 static const struct attribute_spec v850_attribute_table[] =
3076 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3077 affects_type_identity } */
3078 { "interrupt_handler", 0, 0, true, false, false,
3079 v850_handle_interrupt_attribute, false },
3080 { "interrupt", 0, 0, true, false, false,
3081 v850_handle_interrupt_attribute, false },
3082 { "sda", 0, 0, true, false, false,
3083 v850_handle_data_area_attribute, false },
3084 { "tda", 0, 0, true, false, false,
3085 v850_handle_data_area_attribute, false },
3086 { "zda", 0, 0, true, false, false,
3087 v850_handle_data_area_attribute, false },
/* Terminator entry — must remain last.  */
3088 { NULL, 0, 0, false, false, false, NULL, false }
3091 /* Initialize the GCC target structure.  Each #undef/#define pair
   overrides a target hook's default with the v850 worker defined in
   this file (or a generic hook_* helper).  */
3093 #undef TARGET_MEMORY_MOVE_COST
3094 #define TARGET_MEMORY_MOVE_COST v850_memory_move_cost
3096 #undef TARGET_ASM_ALIGNED_HI_OP
3097 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
3099 #undef TARGET_PRINT_OPERAND
3100 #define TARGET_PRINT_OPERAND v850_print_operand
3101 #undef TARGET_PRINT_OPERAND_ADDRESS
3102 #define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
3103 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
3104 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p
3106 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
3107 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra
3109 #undef TARGET_ATTRIBUTE_TABLE
3110 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
3112 #undef TARGET_INSERT_ATTRIBUTES
3113 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
3115 #undef TARGET_ASM_SELECT_SECTION
3116 #define TARGET_ASM_SELECT_SECTION v850_select_section
3118 /* The assembler supports switchable .bss sections, but
3119 v850_select_section doesn't yet make use of them. */
3120 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
3121 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
3123 #undef TARGET_ENCODE_SECTION_INFO
3124 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
3126 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
3127 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
3129 #undef TARGET_RTX_COSTS
3130 #define TARGET_RTX_COSTS v850_rtx_costs
3132 #undef TARGET_ADDRESS_COST
3133 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
3135 #undef TARGET_MACHINE_DEPENDENT_REORG
3136 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
3138 #undef TARGET_SCHED_ISSUE_RATE
3139 #define TARGET_SCHED_ISSUE_RATE v850_issue_rate
3141 #undef TARGET_FUNCTION_VALUE_REGNO_P
3142 #define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
3143 #undef TARGET_FUNCTION_VALUE
3144 #define TARGET_FUNCTION_VALUE v850_function_value
3146 #undef TARGET_PROMOTE_PROTOTYPES
3147 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
3149 #undef TARGET_RETURN_IN_MEMORY
3150 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
3152 #undef TARGET_PASS_BY_REFERENCE
3153 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
3155 #undef TARGET_CALLEE_COPIES
3156 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
3158 #undef TARGET_SETUP_INCOMING_VARARGS
3159 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
3161 #undef TARGET_ARG_PARTIAL_BYTES
3162 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
3164 #undef TARGET_FUNCTION_ARG
3165 #define TARGET_FUNCTION_ARG v850_function_arg
3167 #undef TARGET_FUNCTION_ARG_ADVANCE
3168 #define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance
3170 #undef TARGET_CAN_ELIMINATE
3171 #define TARGET_CAN_ELIMINATE v850_can_eliminate
3173 #undef TARGET_CONDITIONAL_REGISTER_USAGE
3174 #define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage
3176 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
3177 #define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
3178 #undef TARGET_TRAMPOLINE_INIT
3179 #define TARGET_TRAMPOLINE_INIT v850_trampoline_init
3181 #undef TARGET_STRICT_ARGUMENT_NAMING
3182 #define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming
3184 #undef TARGET_LEGITIMATE_CONSTANT_P
3185 #define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p
/* The single definition of the target hook vector for this back end.  */
3187 struct gcc_target targetm = TARGET_INITIALIZER;
3189 #include "gt-v850.h"