1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
40 #include "integrate.h"
43 #include "target-def.h"
/* Convenience predicate: true when strings A and B compare equal.  */
47 #define streq(a,b) (strcmp (a, b) == 0)
/* Forward declaration; defined later in this file.  */
50 static void v850_print_operand_address (FILE *, rtx);
52 /* Information about the various small memory areas. */
53 struct small_memory_info small_memory[ (int)SMALL_MEMORY_max ] =
55 /* Name Max Physical max. */
61 /* Names of the various data areas used on the v850. */
62 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
63 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
65 /* Track the current data area set by the data area pragma (which
66 can be nested). Tested by check_default_data_area. */
67 data_area_stack_element * data_area_stack = NULL;
69 /* True if we don't need to check any more if the current
70 function is an interrupt handler. */
71 static int v850_interrupt_cache_p = FALSE;
/* Operands of the pending comparison (set up by the compare expander,
   consumed by the branch expanders).  */
73 rtx v850_compare_op0, v850_compare_op1;
75 /* Whether current function is an interrupt handler. */
76 static int v850_interrupt_p = FALSE;
/* Cached section objects for the v850-specific data areas.
   GTY-marked so they are preserved across garbage collection.  */
78 static GTY(()) section * rosdata_section;
79 static GTY(()) section * rozdata_section;
80 static GTY(()) section * tdata_section;
81 static GTY(()) section * zdata_section;
82 static GTY(()) section * zbss_section;
84 /* Set the maximum size of small memory area TYPE to the value given
85 by VALUE. Return true if VALUE was syntactically correct. VALUE
86 starts with the argument separator: either "-" or "=". */
89 v850_handle_memory_option (enum small_memory_type type, const char *value)
/* NOTE(review): this extract is missing lines; the declarations of
   `i'/`size', the conversion of VALUE to SIZE, and the failure returns
   are not visible here.  */
93 if (*value != '-' && *value != '=')
/* Only a run of decimal digits is accepted after the separator.  */
97 for (i = 0; value[i]; i++)
98 if (!ISDIGIT (value[i]))
/* Reject sizes beyond what the hardware addressing mode can reach.  */
102 if (size > small_memory[type].physical_max)
103 error ("value passed to %<-m%s%> is too large", small_memory[type].name);
105 small_memory[type].max = size;
109 /* Implement TARGET_HANDLE_OPTION. */
112 v850_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
/* NOTE(review): the switch over CODE and its case labels are not
   visible in this extract; only selected case bodies remain.  */
117 target_flags |= MASK_EP | MASK_PROLOG_FUNCTION;
/* Clear every CPU-selection bit except the one being selected
   (MASK_CPU xor MASK_V850* yields the other CPU bits).  */
121 target_flags &= ~(MASK_CPU ^ MASK_V850);
126 target_flags &= ~(MASK_CPU ^ MASK_V850E);
/* -mtda/-msda/-mzda: delegate parsing of the size argument.  */
130 return v850_handle_memory_option (SMALL_MEMORY_TDA, arg);
133 return v850_handle_memory_option (SMALL_MEMORY_SDA, arg);
136 return v850_handle_memory_option (SMALL_MEMORY_ZDA, arg);
143 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
145 static const struct default_options v850_option_optimization_table[] =
/* -O1 and above: omit the frame pointer by default.  */
147 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
148 /* Note - we no longer enable MASK_EP when optimizing. This is
149 because of a hardware bug which stops the SLD and SST instructions
150 from correctly detecting some hazards. If the user is sure that
151 their hardware is fixed or that their program will not encounter
152 the conditions that trigger the bug then they can enable -mep by
154 { OPT_LEVELS_1_PLUS, OPT_mprolog_function, NULL, 1 },
/* Table terminator.  */
155 { OPT_LEVELS_NONE, 0, NULL, 0 }
158 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
159 Specify whether to pass the argument by reference. */
162 v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
163 enum machine_mode mode, const_tree type,
164 bool named ATTRIBUTE_UNUSED)
166 unsigned HOST_WIDE_INT size;
/* Size comes from TYPE when available, otherwise from MODE.
   NOTE(review): the surrounding if/else and the final return test
   on SIZE are missing from this extract.  */
169 size = int_size_in_bytes (type);
171 size = GET_MODE_SIZE (mode);
176 /* Implementing the Varargs Macros. */
179 v850_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
/* Strict argument naming is used for every ABI except GHS.  */
181 return !TARGET_GHS ? true : false;
184 /* Return an RTX to represent where an argument with mode MODE
185 and type TYPE will be passed to a function. If the result
186 is NULL_RTX, the argument will be pushed. */
189 v850_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
190 const_tree type, bool named)
192 rtx result = NULL_RTX;
/* NOTE(review): the declarations of `size'/`align' and several
   control-flow lines are missing from this extract.  */
199 size = int_size_in_bytes (type);
201 size = GET_MODE_SIZE (mode);
/* Round the size up to a whole number of words.  */
203 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
207 /* Once we have stopped using argument registers, do not start up again. */
208 cum->nbytes = 4 * UNITS_PER_WORD;
/* Small typed arguments use the type's own alignment.  */
212 if (size <= UNITS_PER_WORD && type)
213 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
217 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
/* Only the first four argument words are passed in registers.  */
219 if (cum->nbytes > 4 * UNITS_PER_WORD)
222 if (type == NULL_TREE
223 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Argument words 0..3 map to registers r6..r9 respectively.  */
226 switch (cum->nbytes / UNITS_PER_WORD)
229 result = gen_rtx_REG (mode, 6);
232 result = gen_rtx_REG (mode, 7);
235 result = gen_rtx_REG (mode, 8);
238 result = gen_rtx_REG (mode, 9);
247 /* Return the number of bytes which must be put into registers
248 for values which are part in registers and part in memory. */
250 v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
251 tree type, bool named)
/* GHS: unnamed (variadic) arguments never occupy registers.  */
255 if (TARGET_GHS && !named)
259 size = int_size_in_bytes (type);
261 size = GET_MODE_SIZE (mode);
267 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
271 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
/* Already past the four register words: everything goes to memory.  */
273 if (cum->nbytes > 4 * UNITS_PER_WORD)
/* Fits entirely in registers: nothing is partial.  */
276 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
279 if (type == NULL_TREE
280 && cum->nbytes + size > 4 * UNITS_PER_WORD)
/* Otherwise the argument straddles the register/memory boundary;
   the remaining register bytes hold the leading part.  */
283 return 4 * UNITS_PER_WORD - cum->nbytes;
286 /* Update the data in CUM to advance over an argument
287 of mode MODE and data type TYPE.
288 (TYPE is null for libcalls where that information may not be available.) */
291 v850_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
292 const_tree type, bool named ATTRIBUTE_UNUSED)
/* Advance by the argument size rounded up to whole words; large
   aggregates (> 8 bytes) advance by a pointer's size instead.
   NOTE(review): part of this conditional expression is missing
   from the extract.  */
294 cum->nbytes += (((type && int_size_in_bytes (type) > 8
295 ? GET_MODE_SIZE (Pmode)
297 ? GET_MODE_SIZE (mode)
298 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
302 /* Return the high and low words of a CONST_DOUBLE */
305 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
307 if (GET_CODE (x) == CONST_DOUBLE)
/* NOTE(review): the declarations of `rv'/`t' and several case
   labels of this switch are missing from the extract.  */
312 switch (GET_MODE (x))
/* DFmode: convert to the target's double image and split words.  */
315 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
316 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
317 *p_high = t[1]; /* since v850 is little endian */
318 *p_low = t[0]; /* high is second word */
/* SFmode: the whole value fits in *P_HIGH.  */
322 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
323 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
/* Integer CONST_DOUBLE: take the two halves directly.  */
329 *p_high = CONST_DOUBLE_HIGH (x);
330 *p_low = CONST_DOUBLE_LOW (x);
/* Anything else is an internal error.  */
338 fatal_insn ("const_double_split got a bad insn:", x);
342 /* Return the cost of the rtx R with code CODE. */
345 const_costs_int (HOST_WIDE_INT value, int zero_cost)
/* Cost depends on which immediate-operand constraint the value
   satisfies; the returned values are missing from this extract.  */
347 if (CONST_OK_FOR_I (value))
349 else if (CONST_OK_FOR_J (value))
351 else if (CONST_OK_FOR_K (value))
358 const_costs (rtx r, enum rtx_code c)
360 HOST_WIDE_INT high, low;
/* NOTE(review): the switch over C and its case labels are missing
   from this extract.  */
365 return const_costs_int (INTVAL (r), 0);
/* CONST_DOUBLE: cost each word separately; SFmode has one word.  */
368 const_double_split (r, &high, &low);
369 if (GET_MODE (r) == SFmode)
370 return const_costs_int (high, 1);
372 return const_costs_int (high, 1) + const_costs_int (low, 1);
388 v850_rtx_costs (rtx x,
390 int outer_code ATTRIBUTE_UNUSED,
391 int * total, bool speed)
393 enum rtx_code code = (enum rtx_code) codearg;
/* Constants: cost derived from which immediate ranges they fit.  */
402 *total = COSTS_N_INSNS (const_costs (x, code));
/* NOTE(review): the case labels of the enclosing switch are missing
   from this extract; the branches below appear to handle multiply/
   divide-style operations — confirm against the full source.  */
409 if (TARGET_V850E && !speed)
417 && ( GET_MODE (x) == SImode
418 || GET_MODE (x) == HImode
419 || GET_MODE (x) == QImode))
421 if (GET_CODE (XEXP (x, 1)) == REG)
423 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
425 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
427 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
436 if (outer_code == COMPARE)
445 /* Print operand X using operand code CODE to assembly language output file
449 v850_print_operand (FILE * file, rtx x, int code)
451 HOST_WIDE_INT high, low;
/* NOTE(review): many case labels and break statements of this large
   switch over CODE are missing from the extract; groupings below are
   inferred from the surviving comments.  */
456 /* We use 'c' operands with symbols for .vtinherit */
457 if (GET_CODE (x) == SYMBOL_REF)
459 output_addr_const(file, x);
/* Condition codes: 'B'/'C' print the reversed condition.  */
466 switch ((code == 'B' || code == 'C')
467 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
470 if (code == 'c' || code == 'C')
471 fprintf (file, "nz");
473 fprintf (file, "ne");
476 if (code == 'c' || code == 'C')
482 fprintf (file, "ge");
485 fprintf (file, "gt");
488 fprintf (file, "le");
491 fprintf (file, "lt");
494 fprintf (file, "nl");
500 fprintf (file, "nh");
509 case 'F': /* high word of CONST_DOUBLE */
510 switch (GET_CODE (x))
/* Sign-extend a CONST_INT's high word: 0 or -1.  */
513 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
517 const_double_split (x, &high, &low);
518 fprintf (file, "%ld", (long) high);
525 case 'G': /* low word of CONST_DOUBLE */
526 switch (GET_CODE (x))
529 fprintf (file, "%ld", (long) INTVAL (x));
533 const_double_split (x, &high, &low);
534 fprintf (file, "%ld", (long) low);
/* Low 16 bits of a constant.  */
542 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
/* log2 of a power-of-two constant.  */
545 fprintf (file, "%d", exact_log2 (INTVAL (x)));
/* Small-data-area offset relocation name for a special symbol.  */
548 gcc_assert (special_symbolref_operand (x, VOIDmode));
550 if (GET_CODE (x) == CONST)
551 x = XEXP (XEXP (x, 0), 0);
553 gcc_assert (GET_CODE (x) == SYMBOL_REF);
555 if (SYMBOL_REF_ZDA_P (x))
556 fprintf (file, "zdaoff");
557 else if (SYMBOL_REF_SDA_P (x))
558 fprintf (file, "sdaoff");
559 else if (SYMBOL_REF_TDA_P (x))
560 fprintf (file, "tdaoff");
565 gcc_assert (special_symbolref_operand (x, VOIDmode));
566 output_addr_const (file, x);
/* Base register corresponding to the symbol's data area.  */
569 gcc_assert (special_symbolref_operand (x, VOIDmode));
571 if (GET_CODE (x) == CONST)
572 x = XEXP (XEXP (x, 0), 0);
574 gcc_assert (GET_CODE (x) == SYMBOL_REF);
576 if (SYMBOL_REF_ZDA_P (x))
577 fprintf (file, "r0");
578 else if (SYMBOL_REF_SDA_P (x))
579 fprintf (file, "gp");
580 else if (SYMBOL_REF_TDA_P (x))
581 fprintf (file, "ep");
585 case 'R': /* 2nd word of a double. */
586 switch (GET_CODE (x))
589 fprintf (file, reg_names[REGNO (x) + 1]);
/* MEM: address of the second word (offset 4).  */
592 x = XEXP (adjust_address (x, SImode, 4), 0);
593 v850_print_operand_address (file, x);
594 if (GET_CODE (x) == CONST_INT)
595 fprintf (file, "[r0]");
604 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
605 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
612 /* Like an 'S' operand above, but for unsigned loads only. */
613 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
618 case 'W': /* print the instruction suffix */
619 switch (GET_MODE (x))
624 case QImode: fputs (".b", file); break;
625 case HImode: fputs (".h", file); break;
626 case SImode: fputs (".w", file); break;
627 case SFmode: fputs (".w", file); break;
630 case '.': /* register r0 */
631 fputs (reg_names[0], file);
633 case 'z': /* reg or zero */
634 if (GET_CODE (x) == REG)
635 fputs (reg_names[REGNO (x)], file);
636 else if ((GET_MODE(x) == SImode
637 || GET_MODE(x) == DFmode
638 || GET_MODE(x) == SFmode)
639 && x == CONST0_RTX(GET_MODE(x)))
640 fputs (reg_names[0], file);
643 gcc_assert (x == const0_rtx);
644 fputs (reg_names[0], file);
/* Default: print the operand as register, memory, or constant.  */
648 switch (GET_CODE (x))
/* Bare constant address: force an explicit r0 base.  */
651 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
652 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
655 output_address (XEXP (x, 0));
659 fputs (reg_names[REGNO (x)], file);
662 fputs (reg_names[subreg_regno (x)], file);
669 v850_print_operand_address (file, x);
680 /* Output assembly language output for the address ADDR to FILE. */
683 v850_print_operand_address (FILE * file, rtx addr)
685 switch (GET_CODE (addr))
/* NOTE(review): case labels and break statements of this switch are
   missing from the extract.  */
/* Plain register: "0[reg]".  */
688 fprintf (file, "0[");
689 v850_print_operand (file, addr, 0);
/* LO_SUM: "lo(sym)[reg]".  */
693 if (GET_CODE (XEXP (addr, 0)) == REG)
696 fprintf (file, "lo(");
697 v850_print_operand (file, XEXP (addr, 1), 0);
698 fprintf (file, ")[");
699 v850_print_operand (file, XEXP (addr, 0), 0);
/* PLUS with a register base: "offset[reg]".  */
704 if (GET_CODE (XEXP (addr, 0)) == REG
705 || GET_CODE (XEXP (addr, 0)) == SUBREG)
708 v850_print_operand (file, XEXP (addr, 1), 0);
710 v850_print_operand (file, XEXP (addr, 0), 0);
715 v850_print_operand (file, XEXP (addr, 0), 0);
717 v850_print_operand (file, XEXP (addr, 1), 0);
/* SYMBOL_REF in a small data area: "<off>(sym)[<base reg>]".  */
722 const char *off_name = NULL;
723 const char *reg_name = NULL;
725 if (SYMBOL_REF_ZDA_P (addr))
730 else if (SYMBOL_REF_SDA_P (addr))
735 else if (SYMBOL_REF_TDA_P (addr))
742 fprintf (file, "%s(", off_name);
743 output_addr_const (file, addr);
745 fprintf (file, ")[%s]", reg_name);
/* CONST wrapping a small-data symbol + offset: same form.  */
749 if (special_symbolref_operand (addr, VOIDmode))
751 rtx x = XEXP (XEXP (addr, 0), 0);
752 const char *off_name;
753 const char *reg_name;
755 if (SYMBOL_REF_ZDA_P (x))
760 else if (SYMBOL_REF_SDA_P (x))
765 else if (SYMBOL_REF_TDA_P (x))
773 fprintf (file, "%s(", off_name);
774 output_addr_const (file, addr);
775 fprintf (file, ")[%s]", reg_name);
778 output_addr_const (file, addr);
/* Fallback: emit the address as a constant expression.  */
781 output_addr_const (file, addr);
/* TARGET_PRINT_OPERAND_PUNCT_VALID_P hook.
   NOTE(review): the body is not visible in this extract.  */
787 v850_print_operand_punct_valid_p (unsigned char code)
792 /* When assemble_integer is used to emit the offsets for a switch
793 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
794 output_addr_const will normally barf at this, but it is OK to omit
795 the truncate and just emit the difference of the two labels. The
796 .hword directive will automatically handle the truncation for us.
798 Returns true if rtx was handled, false otherwise. */
801 v850_output_addr_const_extra (FILE * file, rtx x)
/* Only TRUNCATE needs special treatment here.  */
803 if (GET_CODE (x) != TRUNCATE)
/* NOTE(review): the line stripping the TRUNCATE (x = XEXP (x, 0))
   is missing from this extract.  */
808 /* We must also handle the case where the switch table was passed a
809 constant value and so has been collapsed. In this case the first
810 label will have been deleted. In such a case it is OK to emit
811 nothing, since the table will not be used.
812 (cf gcc.c-torture/compile/990801-1.c). */
813 if (GET_CODE (x) == MINUS
814 && GET_CODE (XEXP (x, 0)) == LABEL_REF
815 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
816 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
/* Otherwise emit the label difference without the truncate.  */
819 output_addr_const (file, x);
823 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
827 output_move_single (rtx * operands)
829 rtx dst = operands[0];
830 rtx src = operands[1];
/* NOTE(review): the register-destination guard and the register-to-
   register "mov" case are missing from this extract.  */
837 else if (GET_CODE (src) == CONST_INT)
839 HOST_WIDE_INT value = INTVAL (src);
841 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
844 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
845 return "movea %1,%.,%0";
847 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
848 return "movhi hi0(%1),%.,%0";
850 /* A random constant. */
851 else if (TARGET_V850E || TARGET_V850E2_ALL)
/* Older cores need a two-insn movhi/movea sequence.  */
854 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
/* SFmode constants: treat the 32-bit image like an integer.  */
857 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
859 HOST_WIDE_INT high, low;
861 const_double_split (src, &high, &low);
863 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
866 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
867 return "movea %F1,%.,%0";
869 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
870 return "movhi hi0(%F1),%.,%0";
872 /* A random constant. */
873 else if (TARGET_V850E || TARGET_V850E2_ALL)
877 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
/* Memory source: load, with %S/%W picking sld/ld and the size suffix.  */
880 else if (GET_CODE (src) == MEM)
881 return "%S1ld%W1 %1,%0";
/* Small-data-area symbol: one movea off the area's base register.  */
883 else if (special_symbolref_operand (src, VOIDmode))
884 return "movea %O1(%P1),%Q1,%0";
886 else if (GET_CODE (src) == LABEL_REF
887 || GET_CODE (src) == SYMBOL_REF
888 || GET_CODE (src) == CONST)
890 if (TARGET_V850E || TARGET_V850E2_ALL)
891 return "mov hilo(%1),%0";
893 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
896 else if (GET_CODE (src) == HIGH)
897 return "movhi hi(%1),%.,%0";
899 else if (GET_CODE (src) == LO_SUM)
901 operands[2] = XEXP (src, 0);
902 operands[3] = XEXP (src, 1);
903 return "movea lo(%3),%2,%0";
/* Memory destination: store; zero sources use r0 (%.).  */
907 else if (GET_CODE (dst) == MEM)
910 return "%S0st%W0 %1,%0";
912 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
913 return "%S0st%W0 %.,%0";
915 else if (GET_CODE (src) == CONST_DOUBLE
916 && CONST0_RTX (GET_MODE (dst)) == src)
917 return "%S0st%W0 %.,%0";
/* No pattern matched: internal error.  */
920 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
924 /* Generate comparison code. */
926 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
928 enum rtx_code code = GET_CODE (op);
/* Must be a comparison rtx.  */
930 if (GET_RTX_CLASS (code) != RTX_COMPARE
931 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
934 if (mode != GET_MODE (op) && mode != VOIDmode)
/* Must compare the condition-code register against zero.  */
937 if ((GET_CODE (XEXP (op, 0)) != REG
938 || REGNO (XEXP (op, 0)) != CC_REGNUM)
939 || XEXP (op, 1) != const0_rtx)
/* Accept only the FPU CC modes that test the Z-style conditions.
   NOTE(review): the return values are missing from this extract.  */
942 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
944 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
946 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
/* Companion to v850_float_z_comparison_operator: accepts the FPU
   comparisons that test the NZ-style conditions (GT/GE/NE).  */
953 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
955 enum rtx_code code = GET_CODE (op);
957 if (GET_RTX_CLASS (code) != RTX_COMPARE
958 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
961 if (mode != GET_MODE (op) && mode != VOIDmode)
/* Must compare the condition-code register against zero.  */
964 if ((GET_CODE (XEXP (op, 0)) != REG
965 || REGNO (XEXP (op, 0)) != CC_REGNUM)
966 || XEXP (op, 1) != const0_rtx)
/* NOTE(review): the return values are missing from this extract.  */
969 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
971 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
973 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
/* Pick the condition-code mode for comparing OP0 against OP1 with
   condition COND; float comparisons get a per-condition FPU CC mode.
   NOTE(review): the switch's case labels and the non-float fallback
   are missing from this extract.  */
980 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
982 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
987 return CC_FPU_LEmode;
989 return CC_FPU_GEmode;
991 return CC_FPU_LTmode;
993 return CC_FPU_GTmode;
995 return CC_FPU_EQmode;
997 return CC_FPU_NEmode;
/* Emit the FPU compare insn for COND on OP0/OP1 (DFmode or SFmode)
   and return the CC mode the branch should test.
   NOTE(review): the switch case labels are missing from this extract;
   each emit corresponds to one comparison condition.  */
1006 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
1008 if (GET_MODE(op0) == DFmode)
1013 emit_insn (gen_cmpdf_le_insn (op0, op1));
1016 emit_insn (gen_cmpdf_ge_insn (op0, op1));
1019 emit_insn (gen_cmpdf_lt_insn (op0, op1));
1022 emit_insn (gen_cmpdf_gt_insn (op0, op1));
1025 emit_insn (gen_cmpdf_eq_insn (op0, op1));
1028 emit_insn (gen_cmpdf_ne_insn (op0, op1));
/* NOTE(review): this tests the global v850_compare_op0 rather than
   OP0 — looks inconsistent with the DFmode branch above; confirm
   against the full source before changing.  */
1034 else if (GET_MODE(v850_compare_op0) == SFmode)
1039 emit_insn (gen_cmpsf_le_insn(op0, op1));
1042 emit_insn (gen_cmpsf_ge_insn(op0, op1));
1045 emit_insn (gen_cmpsf_lt_insn(op0, op1));
1048 emit_insn (gen_cmpsf_gt_insn(op0, op1));
1051 emit_insn (gen_cmpsf_eq_insn(op0, op1));
1054 emit_insn (gen_cmpsf_ne_insn(op0, op1));
1065 return v850_select_cc_mode (cond, op0, op1);
/* Emit a compare of OP0 against OP1 and return the condition rtx
   (COND applied to the CC register) for use in a branch.  */
1069 v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
/* Integer compare: one cmp insn, condition tests CCmode CC reg.  */
1071 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
1073 emit_insn (gen_cmpsi_insn (op0, op1));
1074 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
/* Float compare: emit the FPU compare, then copy the FPU condition
   flag (FCC) into the CC register before testing it.  */
1079 mode = v850_gen_float_compare (cond, mode, op0, op1);
1080 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1081 emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1083 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1087 /* Return maximum offset supported for a short EP memory reference of mode
1088 MODE and signedness UNSIGNEDP. */
1091 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
/* NOTE(review): the switch over MODE, its case labels and the final
   return are missing from this extract.  Offsets differ per access
   size and shrink under -msmall-sld.  */
1098 if (TARGET_SMALL_SLD)
1099 max_offset = (1 << 4);
1100 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1102 max_offset = (1 << 4);
1104 max_offset = (1 << 7);
1108 if (TARGET_SMALL_SLD)
1109 max_offset = (1 << 5);
1110 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1112 max_offset = (1 << 5);
1114 max_offset = (1 << 8);
1119 max_offset = (1 << 8);
1129 /* Return true if OP is a valid short EP memory reference */
1132 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
1138 /* If we are not using the EP register on a per-function basis
1139 then do not allow this optimization at all. This is to
1140 prevent the use of the SLD/SST instructions which cannot be
1141 guaranteed to work properly due to a hardware bug. */
1145 if (GET_CODE (op) != MEM)
/* Offsets must fit the short form and be size-aligned.  */
1148 max_offset = ep_memory_offset (mode, unsigned_load);
1150 mask = GET_MODE_SIZE (mode) - 1;
1152 addr = XEXP (op, 0);
1153 if (GET_CODE (addr) == CONST)
1154 addr = XEXP (addr, 0);
1156 switch (GET_CODE (addr))
/* Bare symbol: valid only for tiny-data-area symbols.  */
1162 return SYMBOL_REF_TDA_P (addr);
/* Bare register: must be the EP register itself.  */
1165 return REGNO (addr) == EP_REGNUM;
/* base + const: in-range, non-negative, aligned offset off EP or
   a TDA symbol.  */
1168 op0 = XEXP (addr, 0);
1169 op1 = XEXP (addr, 1);
1170 if (GET_CODE (op1) == CONST_INT
1171 && INTVAL (op1) < max_offset
1172 && INTVAL (op1) >= 0
1173 && (INTVAL (op1) & mask) == 0)
1175 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1178 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1187 /* Substitute memory references involving a pointer, to use the ep pointer,
1188 taking care to save and preserve the ep. */
1191 substitute_ep_register (rtx first_insn,
1198 rtx reg = gen_rtx_REG (Pmode, regno);
/* r1 is used as a scratch to save the old ep value (r30).  */
1203 df_set_regs_ever_live (1, true);
1204 *p_r1 = gen_rtx_REG (Pmode, 1);
1205 *p_ep = gen_rtx_REG (Pmode, 30);
/* Optional diagnostic dump of the savings achieved.  */
1210 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1211 2 * (uses - 3), uses, reg_names[regno],
1212 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1213 INSN_UID (first_insn), INSN_UID (last_insn));
1215 if (GET_CODE (first_insn) == NOTE)
1216 first_insn = next_nonnote_insn (first_insn);
1218 last_insn = next_nonnote_insn (last_insn);
/* Walk the insn range, rewriting each MEM that addresses through
   REGNO to address through ep instead.  */
1219 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1221 if (GET_CODE (insn) == INSN)
1223 rtx pattern = single_set (insn);
1225 /* Replace the memory references. */
1229 /* Memory operands are signed by default. */
1230 int unsignedp = FALSE;
/* Locate the single MEM operand (dest, src, or under an extend);
   mem-to-mem sets are left alone.  */
1232 if (GET_CODE (SET_DEST (pattern)) == MEM
1233 && GET_CODE (SET_SRC (pattern)) == MEM)
1236 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1237 p_mem = &SET_DEST (pattern);
1239 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1240 p_mem = &SET_SRC (pattern);
1242 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1243 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1244 p_mem = &XEXP (SET_SRC (pattern), 0);
1246 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1247 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1249 p_mem = &XEXP (SET_SRC (pattern), 0);
1257 rtx addr = XEXP (*p_mem, 0);
/* (reg) -> (ep); (reg+const) -> (ep+const) when the offset fits
   the short sld/sst form.  */
1259 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1260 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1262 else if (GET_CODE (addr) == PLUS
1263 && GET_CODE (XEXP (addr, 0)) == REG
1264 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1265 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1266 && ((INTVAL (XEXP (addr, 1)))
1267 < ep_memory_offset (GET_MODE (*p_mem),
1269 && ((INTVAL (XEXP (addr, 1))) >= 0))
1270 *p_mem = change_address (*p_mem, VOIDmode,
1271 gen_rtx_PLUS (Pmode,
1279 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1280 insn = prev_nonnote_insn (first_insn);
1281 if (insn && GET_CODE (insn) == INSN
1282 && GET_CODE (PATTERN (insn)) == SET
1283 && SET_DEST (PATTERN (insn)) == *p_ep
1284 && SET_SRC (PATTERN (insn)) == *p_r1)
/* Bracket the range: save ep in r1, point ep at REG, restore after.  */
1287 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1289 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1290 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1294 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1295 the -mep mode to copy heavily used pointers to ep to use the implicit
/* NOTE(review): the function header line is missing from this extract
   (presumably `static void v850_reorg (void)') along with the local
   struct declaration whose array appears below — confirm against the
   full source.  Per-register bookkeeping: use count plus the first
   and last insn of the current candidate range.  */
1307 regs[FIRST_PSEUDO_REGISTER];
1316 /* If not ep mode, just return now. */
1320 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1323 regs[i].first_insn = NULL_RTX;
1324 regs[i].last_insn = NULL_RTX;
/* Scan all insns, tracking per-register pointer usage within each
   basic block.  */
1327 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1329 switch (GET_CODE (insn))
1331 /* End of basic block */
/* At a block boundary: substitute ep for the most-used pointer
   register (if profitable), then reset the bookkeeping.  */
1338 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1340 if (max_uses < regs[i].uses)
1342 max_uses = regs[i].uses;
1348 substitute_ep_register (regs[max_regno].first_insn,
1349 regs[max_regno].last_insn,
1350 max_uses, max_regno, &r1, &ep);
1354 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1357 regs[i].first_insn = NULL_RTX;
1358 regs[i].last_insn = NULL_RTX;
1366 pattern = single_set (insn);
1368 /* See if there are any memory references we can shorten */
1371 rtx src = SET_SRC (pattern);
1372 rtx dest = SET_DEST (pattern);
1374 /* Memory operands are signed by default. */
1375 int unsignedp = FALSE;
1377 /* We might have (SUBREG (MEM)) here, so just get rid of the
1378 subregs to make this code simpler. */
1379 if (GET_CODE (dest) == SUBREG
1380 && (GET_CODE (SUBREG_REG (dest)) == MEM
1381 || GET_CODE (SUBREG_REG (dest)) == REG))
1382 alter_subreg (&dest);
1383 if (GET_CODE (src) == SUBREG
1384 && (GET_CODE (SUBREG_REG (src)) == MEM
1385 || GET_CODE (SUBREG_REG (src)) == REG))
1386 alter_subreg (&src);
/* Locate the single MEM operand, as in substitute_ep_register.  */
1388 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1391 else if (GET_CODE (dest) == MEM)
1394 else if (GET_CODE (src) == MEM)
1397 else if (GET_CODE (src) == SIGN_EXTEND
1398 && GET_CODE (XEXP (src, 0)) == MEM)
1399 mem = XEXP (src, 0);
1401 else if (GET_CODE (src) == ZERO_EXTEND
1402 && GET_CODE (XEXP (src, 0)) == MEM)
1404 mem = XEXP (src, 0);
1410 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
/* Count references through a candidate base register when the
   offset would fit the short form.  */
1413 else if (!use_ep && mem
1414 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1416 rtx addr = XEXP (mem, 0);
1420 if (GET_CODE (addr) == REG)
1423 regno = REGNO (addr);
1426 else if (GET_CODE (addr) == PLUS
1427 && GET_CODE (XEXP (addr, 0)) == REG
1428 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1429 && ((INTVAL (XEXP (addr, 1)))
1430 < ep_memory_offset (GET_MODE (mem), unsignedp))
1431 && ((INTVAL (XEXP (addr, 1))) >= 0))
1434 regno = REGNO (XEXP (addr, 0));
1443 regs[regno].last_insn = insn;
1444 if (!regs[regno].first_insn)
1445 regs[regno].first_insn = insn;
1449 /* Loading up a register in the basic block zaps any savings
1451 if (GET_CODE (dest) == REG)
1453 enum machine_mode mode = GET_MODE (dest);
1457 regno = REGNO (dest);
1458 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1462 /* See if we can use the pointer before this
/* If the clobbered register was the best candidate so far,
   substitute for the range accumulated before the clobber.  */
1467 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1469 if (max_uses < regs[i].uses)
1471 max_uses = regs[i].uses;
1477 && max_regno >= regno
1478 && max_regno < endregno)
1480 substitute_ep_register (regs[max_regno].first_insn,
1481 regs[max_regno].last_insn,
1482 max_uses, max_regno, &r1,
1485 /* Since we made a substitution, zap all remembered
1487 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1490 regs[i].first_insn = NULL_RTX;
1491 regs[i].last_insn = NULL_RTX;
/* Otherwise just forget the clobbered registers.  */
1496 for (i = regno; i < endregno; i++)
1499 regs[i].first_insn = NULL_RTX;
1500 regs[i].last_insn = NULL_RTX;
1508 /* # of registers saved by the interrupt handler. */
1509 #define INTERRUPT_FIXED_NUM 5
1511 /* # of bytes for registers saved by the interrupt handler. */
1512 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1514 /* # of words saved for other registers. */
1515 #define INTERRUPT_ALL_SAVE_NUM \
1516 (30 - INTERRUPT_FIXED_NUM)
/* Byte size of the save-everything area (4 bytes per register).  */
1518 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
/* Return the number of bytes of stack needed to save registers for
   the current function, and record the saved-register bitmask in
   *P_REG_SAVED (bit i set = register i saved).  */
1521 compute_register_save_size (long * p_reg_saved)
1525 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1526 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1529 /* Count the return pointer if we need to save it. */
1530 if (crtl->profile && !call_p)
1532 df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
1536 /* Count space for the register saves. */
1537 if (interrupt_handler)
/* Interrupt handlers: scan every register.  */
1539 for (i = 0; i <= 31; i++)
1543 if (df_regs_ever_live_p (i) || call_p)
1546 reg_saved |= 1L << i;
1550 /* We don't save/restore r0 or the stack pointer */
1552 case STACK_POINTER_REGNUM:
1555 /* For registers with fixed use, we save them, set them to the
1556 appropriate value, and then restore them.
1557 These registers are handled specially, so don't list them
1558 on the list of registers to save in the prologue. */
1559 case 1: /* temp used to hold ep */
1561 case 10: /* temp used to call interrupt save/restore */
1562 case 11: /* temp used to call interrupt save/restore (long call) */
1563 case EP_REGNUM: /* ep */
1570 /* Find the first register that needs to be saved. */
1571 for (i = 0; i <= 31; i++)
1572 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1573 || i == LINK_POINTER_REGNUM))
1576 /* If it is possible that an out-of-line helper function might be
1577 used to generate the prologue for the current function, then we
1578 need to cover the possibility that such a helper function will
1579 be used, despite the fact that there might be gaps in the list of
1580 registers that need to be saved. To detect this we note that the
1581 helper functions always push at least register r29 (provided
1582 that the function is not an interrupt handler). */
1584 if (TARGET_PROLOG_FUNCTION
1585 && (i == 2 || ((i >= 20) && (i < 30))))
1590 reg_saved |= 1L << i;
1595 /* Helper functions save all registers between the starting
1596 register and the last register, regardless of whether they
1597 are actually used by the function or not. */
1598 for (; i <= 29; i++)
1601 reg_saved |= 1L << i;
1604 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1607 reg_saved |= 1L << LINK_POINTER_REGNUM;
/* Non-helper path: count each live call-saved register (plus the
   link pointer) individually.  */
1612 for (; i <= 31; i++)
1613 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1614 || i == LINK_POINTER_REGNUM))
1617 reg_saved |= 1L << i;
1623 *p_reg_saved = reg_saved;
/* Total frame size: local variables (SIZE) plus register-save area
   plus outgoing argument area.  Also fills in *P_REG_SAVED.  */
1629 compute_frame_size (int size, long * p_reg_saved)
1632 + compute_register_save_size (p_reg_saved)
1633 + crtl->outgoing_args_size);
/* Heuristic: return true when calling an out-of-line prologue/epilogue
   helper is smaller (in code bytes) than saving/restoring NUM_SAVE
   registers inline for a frame of FRAME_SIZE bytes.  */
1637 use_prolog_function (int num_save, int frame_size)
1639 int alloc_stack = (4 * num_save);
1640 int unalloc_stack = frame_size - alloc_stack;
1641 int save_func_len, restore_func_len;
1642 int save_normal_len, restore_normal_len;
/* Helper-call cost: callt is 2 bytes; otherwise a jarl sequence,
   longer still when long calls are in force.  */
1644 if (! TARGET_DISABLE_CALLT)
1645 save_func_len = restore_func_len = 2;
1647 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
/* Any stack the helper does not allocate needs an extra add insn.  */
1651 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1652 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1655 /* See if we would have used ep to save the stack. */
1656 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1657 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1659 save_normal_len = restore_normal_len = 4 * num_save;
/* Plus the stack adjustment insns for the inline form.  */
1661 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1662 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1664 /* Don't bother checking if we don't actually save any space.
1665 This happens for instance if one register is saved and additional
1666 stack space is allocated. */
1667 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
/* Emit the RTL function prologue: save live call-saved registers
   (using a shared prologue helper function when use_prolog_function
   says it is profitable), allocate the stack frame, and set up the
   frame pointer when one is required.
   NOTE(review): this extract is missing interior lines, and
   "®_saved" below looks like a mojibake of "&reg_saved" -- confirm
   against the pristine source before editing.  */
1671 expand_prologue (void)
1674 unsigned int size = get_frame_size ();
1675 unsigned int actual_fsize;
1676 unsigned int init_stack_alloc = 0;
1679 unsigned int num_save;
1681 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1684 actual_fsize = compute_frame_size (size, ®_saved);
1686 /* Save/setup global registers for interrupt functions right now. */
1687 if (interrupt_handler)
1689 if (! TARGET_DISABLE_CALLT)
1690 emit_insn (gen_callt_save_interrupt ());
1692 emit_insn (gen_save_interrupt ());
1694 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1696 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1697 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1700 /* Identify all of the saved registers. */
1702 for (i = 1; i < 32; i++)
1704 if (((1L << i) & reg_saved) != 0)
1705 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1708 /* See if we have an insn that allocates stack space and saves the particular
1709 registers we want to. */
1710 save_all = NULL_RTX;
1711 if (TARGET_PROLOG_FUNCTION && num_save > 0)
1713 if (use_prolog_function (num_save, actual_fsize))
1715 int alloc_stack = 4 * num_save;
1718 save_all = gen_rtx_PARALLEL
/* The PARALLEL holds one SET per saved register, one SET adjusting the
   stack pointer, plus clobbers of r10/r11 when the callt instruction is
   disabled (the save routine is reached via jarl instead).  */
1720 rtvec_alloc (num_save + 1
1721 + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1723 XVECEXP (save_all, 0, 0)
1724 = gen_rtx_SET (VOIDmode,
1726 gen_rtx_PLUS (Pmode,
1728 GEN_INT(-alloc_stack)));
1729 for (i = 0; i < num_save; i++)
1732 XVECEXP (save_all, 0, i+1)
1733 = gen_rtx_SET (VOIDmode,
1735 gen_rtx_PLUS (Pmode,
1741 if (TARGET_DISABLE_CALLT)
1743 XVECEXP (save_all, 0, num_save + 1)
1744 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1746 if (TARGET_LONG_CALLS)
1747 XVECEXP (save_all, 0, num_save + 2)
1748 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
/* Only emit the PARALLEL if it matches an existing insn pattern.  */
1751 code = recog (save_all, NULL_RTX, NULL);
1754 rtx insn = emit_insn (save_all);
1755 INSN_CODE (insn) = code;
1756 actual_fsize -= alloc_stack;
1760 save_all = NULL_RTX;
1764 /* If no prolog save function is available, store the registers the old
1765 fashioned way (one by one). */
1768 /* Special case interrupt functions that save all registers for a call. */
1769 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1771 if (! TARGET_DISABLE_CALLT)
1772 emit_insn (gen_callt_save_all_interrupt ());
1774 emit_insn (gen_save_all_interrupt ());
1779 /* If the stack is too big, allocate it in chunks so we can do the
1780 register saves. We use the register save size so we use the ep
1782 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1783 init_stack_alloc = compute_register_save_size (NULL);
1785 init_stack_alloc = actual_fsize;
1787 /* Save registers at the beginning of the stack frame. */
1788 offset = init_stack_alloc - 4;
1790 if (init_stack_alloc)
1791 emit_insn (gen_addsi3 (stack_pointer_rtx,
1793 GEN_INT (- (signed) init_stack_alloc)));
1795 /* Save the return pointer first. */
1796 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1798 emit_move_insn (gen_rtx_MEM (SImode,
1799 plus_constant (stack_pointer_rtx,
1801 save_regs[--num_save]);
1805 for (i = 0; i < num_save; i++)
1807 emit_move_insn (gen_rtx_MEM (SImode,
1808 plus_constant (stack_pointer_rtx,
1816 /* Allocate the rest of the stack that was not allocated above (either it is
1817 > 32K or we just called a function to save the registers and needed more
1819 if (actual_fsize > init_stack_alloc)
1821 int diff = actual_fsize - init_stack_alloc;
1822 if (CONST_OK_FOR_K (-diff))
1823 emit_insn (gen_addsi3 (stack_pointer_rtx,
/* Offset too large for an immediate add: materialize it in r12 first.  */
1828 rtx reg = gen_rtx_REG (Pmode, 12);
1829 emit_move_insn (reg, GEN_INT (-diff));
1830 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1834 /* If we need a frame pointer, set it up now. */
1835 if (frame_pointer_needed)
1836 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
/* Emit the RTL function epilogue: tear down the frame pointer, restore
   saved registers (via a shared epilogue helper function when
   profitable), deallocate the stack frame, and emit the appropriate
   return (reti-style for interrupt handlers).
   NOTE(review): "®_saved" below looks like a mojibake of "&reg_saved"
   -- confirm against the pristine source before editing.  */
1841 expand_epilogue (void)
1844 unsigned int size = get_frame_size ();
1846 int actual_fsize = compute_frame_size (size, ®_saved);
1847 rtx restore_regs[32];
1849 unsigned int num_restore;
1851 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1853 /* Eliminate the initial stack stored by interrupt functions. */
1854 if (interrupt_handler)
1856 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1857 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1858 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1861 /* Cut off any dynamic stack created. */
1862 if (frame_pointer_needed)
1863 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1865 /* Identify all of the saved registers. */
1867 for (i = 1; i < 32; i++)
1869 if (((1L << i) & reg_saved) != 0)
1870 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1873 /* See if we have an insn that restores the particular registers we
1875 restore_all = NULL_RTX;
1877 if (TARGET_PROLOG_FUNCTION
1879 && !interrupt_handler)
1881 int alloc_stack = (4 * num_restore);
1883 /* Don't bother checking if we don't actually save any space. */
1884 if (use_prolog_function (num_restore, actual_fsize))
/* Build a PARALLEL of RETURN + stack-pointer adjust + one SET per
   restored register, and emit it only if recog accepts it.  */
1887 restore_all = gen_rtx_PARALLEL (VOIDmode,
1888 rtvec_alloc (num_restore + 2));
1889 XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
1890 XVECEXP (restore_all, 0, 1)
1891 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1892 gen_rtx_PLUS (Pmode,
1894 GEN_INT (alloc_stack)));
1896 offset = alloc_stack - 4;
1897 for (i = 0; i < num_restore; i++)
1899 XVECEXP (restore_all, 0, i+2)
1900 = gen_rtx_SET (VOIDmode,
1903 gen_rtx_PLUS (Pmode,
1909 code = recog (restore_all, NULL_RTX, NULL);
1915 actual_fsize -= alloc_stack;
1918 if (CONST_OK_FOR_K (actual_fsize))
1919 emit_insn (gen_addsi3 (stack_pointer_rtx,
1921 GEN_INT (actual_fsize)));
/* Frame size too large for an immediate add: go through r12.  */
1924 rtx reg = gen_rtx_REG (Pmode, 12);
1925 emit_move_insn (reg, GEN_INT (actual_fsize));
1926 emit_insn (gen_addsi3 (stack_pointer_rtx,
1932 insn = emit_jump_insn (restore_all);
1933 INSN_CODE (insn) = code;
1937 restore_all = NULL_RTX;
1941 /* If no epilogue save function is available, restore the registers the
1942 old fashioned way (one by one). */
1945 unsigned int init_stack_free;
1947 /* If the stack is large, we need to cut it down in 2 pieces. */
1948 if (interrupt_handler)
1949 init_stack_free = 0;
1950 else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1951 init_stack_free = 4 * num_restore;
1953 init_stack_free = (signed) actual_fsize;
1955 /* Deallocate the rest of the stack if it is > 32K. */
1956 if ((unsigned int) actual_fsize > init_stack_free)
1960 diff = actual_fsize - init_stack_free;
1962 if (CONST_OK_FOR_K (diff))
1963 emit_insn (gen_addsi3 (stack_pointer_rtx,
1968 rtx reg = gen_rtx_REG (Pmode, 12);
1969 emit_move_insn (reg, GEN_INT (diff));
1970 emit_insn (gen_addsi3 (stack_pointer_rtx,
1976 /* Special case interrupt functions that save all registers
1978 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1980 if (! TARGET_DISABLE_CALLT)
1981 emit_insn (gen_callt_restore_all_interrupt ());
1983 emit_insn (gen_restore_all_interrupt ());
1987 /* Restore registers from the beginning of the stack frame. */
1988 int offset = init_stack_free - 4;
1990 /* Restore the return pointer first. */
1992 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1994 emit_move_insn (restore_regs[--num_restore],
1995 gen_rtx_MEM (SImode,
1996 plus_constant (stack_pointer_rtx,
2001 for (i = 0; i < num_restore; i++)
2003 emit_move_insn (restore_regs[i],
2004 gen_rtx_MEM (SImode,
2005 plus_constant (stack_pointer_rtx,
2008 emit_use (restore_regs[i]);
2012 /* Cut back the remainder of the stack. */
2013 if (init_stack_free)
2014 emit_insn (gen_addsi3 (stack_pointer_rtx,
2016 GEN_INT (init_stack_free)));
2019 /* And return or use reti for interrupt handlers. */
2020 if (interrupt_handler)
2022 if (! TARGET_DISABLE_CALLT)
2023 emit_insn (gen_callt_return_interrupt ())
2025 emit_jump_insn (gen_return_interrupt ());
2027 else if (actual_fsize)
2028 emit_jump_insn (gen_return_internal ());
2030 emit_jump_insn (gen_return_simple ());
/* Reset the per-function interrupt-handler cache for the next function.  */
2033 v850_interrupt_cache_p = FALSE;
2034 v850_interrupt_p = FALSE;
2037 /* Update the condition code from the insn. */
/* Dispatches on the insn's "cc" attribute; the case labels are not
   visible in this extract (lines missing) but each arm below records
   what the insn did to the condition-code status.  */
2039 notice_update_cc (rtx body, rtx insn)
2041 switch (get_attr_cc (insn))
2044 /* Insn does not affect CC at all. */
2048 /* Insn does not change CC, but the 0'th operand has been changed. */
2049 if (cc_status.value1 != 0
2050 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
2051 cc_status.value1 = 0;
2055 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2056 V,C is in an unusable state. */
2058 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
2059 cc_status.value1 = recog_data.operand[0];
2063 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2064 C is in an unusable state. */
2066 cc_status.flags |= CC_NO_CARRY;
2067 cc_status.value1 = recog_data.operand[0];
2071 /* The insn is a compare instruction. */
2073 cc_status.value1 = SET_SRC (body);
2077 /* Insn doesn't leave CC in a usable state. */
2086 /* Retrieve the data area that has been chosen for the given decl. */
/* Checks the decl's attribute list for "sda"/"tda"/"zda" in turn and
   returns the corresponding data area, defaulting to DATA_AREA_NORMAL
   when no data-area attribute is attached.  */
2089 v850_get_data_area (tree decl)
2091 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2092 return DATA_AREA_SDA;
2094 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2095 return DATA_AREA_TDA;
2097 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2098 return DATA_AREA_ZDA;
2100 return DATA_AREA_NORMAL;
2103 /* Store the indicated data area in the decl's attributes. */
/* Prepends the matching "sda"/"tda"/"zda" attribute (no arguments) to
   DECL_ATTRIBUTES; the switch's enclosing lines are missing from this
   extract.  */
2106 v850_set_data_area (tree decl, v850_data_area data_area)
2112 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2113 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2114 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2119 DECL_ATTRIBUTES (decl) = tree_cons
2120 (name, NULL, DECL_ATTRIBUTES (decl));
2123 /* Handle an "interrupt" attribute; arguments as in
2124 struct attribute_spec.handler. */
/* Rejects the attribute (with a warning, setting *no_add_attrs) when
   applied to anything other than a function declaration.  */
2126 v850_handle_interrupt_attribute (tree * node,
2128 tree args ATTRIBUTE_UNUSED,
2129 int flags ATTRIBUTE_UNUSED,
2130 bool * no_add_attrs)
2132 if (TREE_CODE (*node) != FUNCTION_DECL)
2134 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2136 *no_add_attrs = true;
2142 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2143 struct attribute_spec.handler. */
/* Maps the attribute name to a v850_data_area, then validates the decl
   kind: data-area attributes are rejected on local declarations and on
   decls that already carry a conflicting data area.  The declarations
   of `name' and `decl' are among the lines missing from this extract.  */
2145 v850_handle_data_area_attribute (tree* node,
2147 tree args ATTRIBUTE_UNUSED,
2148 int flags ATTRIBUTE_UNUSED,
2149 bool * no_add_attrs)
2151 v850_data_area data_area;
2152 v850_data_area area;
2155 /* Implement data area attribute. */
2156 if (is_attribute_p ("sda", name))
2157 data_area = DATA_AREA_SDA;
2158 else if (is_attribute_p ("tda", name))
2159 data_area = DATA_AREA_TDA;
2160 else if (is_attribute_p ("zda", name))
2161 data_area = DATA_AREA_ZDA;
2165 switch (TREE_CODE (decl))
2168 if (current_function_decl != NULL_TREE)
2170 error_at (DECL_SOURCE_LOCATION (decl),
2171 "data area attributes cannot be specified for "
2173 *no_add_attrs = true;
2179 area = v850_get_data_area (decl);
2180 if (area != DATA_AREA_NORMAL && data_area != area)
2182 error ("data area of %q+D conflicts with previous declaration",
2184 *no_add_attrs = true;
2196 /* Return nonzero if FUNC is an interrupt function as specified
2197 by the "interrupt" attribute. */
/* Consults the file-scope cache (v850_interrupt_cache_p /
   v850_interrupt_p) first, then looks for either the
   "interrupt_handler" or the "interrupt" attribute on FUNC.  */
2200 v850_interrupt_function_p (tree func)
2205 if (v850_interrupt_cache_p)
2206 return v850_interrupt_p;
2208 if (TREE_CODE (func) != FUNCTION_DECL)
2211 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2217 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2218 ret = a != NULL_TREE;
2221 /* Its not safe to trust global variables until after function inlining has
2223 if (reload_completed | reload_in_progress)
2224 v850_interrupt_p = ret;
/* Choose a small-memory data area for DECL and record it as
   SYMBOL_FLAG_{ZDA,TDA,SDA} on the decl's SYMBOL_REF.  Explicit section
   names and the -m{zda,sda,tda}=n size thresholds both feed into the
   choice.  */
2231 v850_encode_data_area (tree decl, rtx symbol)
2235 /* Map explicit sections into the appropriate attribute */
2236 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2238 if (DECL_SECTION_NAME (decl))
2240 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2242 if (streq (name, ".zdata") || streq (name, ".zbss"))
2243 v850_set_data_area (decl, DATA_AREA_ZDA);
2245 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2246 v850_set_data_area (decl, DATA_AREA_SDA);
2248 else if (streq (name, ".tdata"))
2249 v850_set_data_area (decl, DATA_AREA_TDA);
2252 /* If no attribute, support -m{zda,sda,tda}=n */
2255 int size = int_size_in_bytes (TREE_TYPE (decl));
/* Smallest qualifying area wins: tda is checked before sda before zda.  */
2259 else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
2260 v850_set_data_area (decl, DATA_AREA_TDA);
2262 else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
2263 v850_set_data_area (decl, DATA_AREA_SDA);
2265 else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
2266 v850_set_data_area (decl, DATA_AREA_ZDA);
2269 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2273 flags = SYMBOL_REF_FLAGS (symbol);
2274 switch (v850_get_data_area (decl))
2276 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2277 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2278 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2279 default: gcc_unreachable ();
2281 SYMBOL_REF_FLAGS (symbol) = flags;
/* TARGET_ENCODE_SECTION_INFO hook: run the default encoding, then tag
   static/external variables with their v850 data-area symbol flags.  */
2285 v850_encode_section_info (tree decl, rtx rtl, int first)
2287 default_encode_section_info (decl, rtl, first);
2289 if (TREE_CODE (decl) == VAR_DECL
2290 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
2291 v850_encode_data_area (decl, XEXP (rtl, 0));
2294 /* Construct a JR instruction to a routine that will perform the equivalent of
2295 the RTL passed in as an argument. This RTL is a function epilogue that
2296 pops registers off the stack and possibly releases some extra stack space
2297 as well. The code has already verified that the RTL matches these
/* Returns assembler text naming the shared __return_<first>[_<last>]
   epilogue routine; result lives in a static buffer, so each call
   overwrites the previous result (not reentrant).  */
2301 construct_restore_jr (rtx op)
2303 int count = XVECLEN (op, 0);
2305 unsigned long int mask;
2306 unsigned long int first;
2307 unsigned long int last;
2309 static char buff [100]; /* XXX */
2313 error ("bogus JR construction: %d", count);
2317 /* Work out how many bytes to pop off the stack before retrieving
2319 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2320 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2321 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2323 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2325 /* Each pop will remove 4 bytes from the stack.... */
2326 stack_bytes -= (count - 2) * 4;
2328 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2329 if (stack_bytes != 0)
2331 error ("bad amount of stack space removal: %d", stack_bytes);
2335 /* Now compute the bit mask of registers to push. */
2337 for (i = 2; i < count; i++)
2339 rtx vector_element = XVECEXP (op, 0, i);
2341 gcc_assert (GET_CODE (vector_element) == SET);
2342 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2343 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2346 mask |= 1 << REGNO (SET_DEST (vector_element));
2349 /* Scan for the first register to pop. */
2350 for (first = 0; first < 32; first++)
2352 if (mask & (1 << first))
2356 gcc_assert (first < 32);
2358 /* Discover the last register to pop. */
2359 if (mask & (1 << LINK_POINTER_REGNUM))
2361 last = LINK_POINTER_REGNUM;
2365 gcc_assert (!stack_bytes);
2366 gcc_assert (mask & (1 << 29));
2371 /* Note, it is possible to have gaps in the register mask.
2372 We ignore this here, and generate a JR anyway. We will
2373 be popping more registers than is strictly necessary, but
2374 it does save code space. */
2376 if (TARGET_LONG_CALLS)
2381 sprintf (name, "__return_%s", reg_names [first]);
2383 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2385 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2391 sprintf (buff, "jr __return_%s", reg_names [first]);
2393 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2400 /* Construct a JARL instruction to a routine that will perform the equivalent
2401 of the RTL passed as a parameter. This RTL is a function prologue that
2402 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2403 some stack space as well. The code has already verified that the RTL
2404 matches these requirements. */
/* Mirror of construct_restore_jr for the prologue: returns assembler
   text calling __save_<first>[_<last>] via jarl (or a long-call
   sequence through r11).  Result is in a static buffer -- not
   reentrant.  */
2406 construct_save_jarl (rtx op)
2408 int count = XVECLEN (op, 0);
2410 unsigned long int mask;
2411 unsigned long int first;
2412 unsigned long int last;
2414 static char buff [100]; /* XXX */
2416 if (count <= (TARGET_LONG_CALLS ? 3 : 2))
2418 error ("bogus JARL construction: %d", count);
2423 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2424 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2425 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2426 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2428 /* Work out how many bytes to push onto the stack after storing the
2430 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2432 /* Each push will put 4 bytes from the stack.... */
2433 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2435 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2436 if (stack_bytes != 0)
2438 error ("bad amount of stack space removal: %d", stack_bytes);
2442 /* Now compute the bit mask of registers to push. */
2444 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2446 rtx vector_element = XVECEXP (op, 0, i);
2448 gcc_assert (GET_CODE (vector_element) == SET);
2449 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2450 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2453 mask |= 1 << REGNO (SET_SRC (vector_element));
2456 /* Scan for the first register to push. */
2457 for (first = 0; first < 32; first++)
2459 if (mask & (1 << first))
2463 gcc_assert (first < 32);
2465 /* Discover the last register to push. */
2466 if (mask & (1 << LINK_POINTER_REGNUM))
2468 last = LINK_POINTER_REGNUM;
2472 gcc_assert (!stack_bytes);
2473 gcc_assert (mask & (1 << 29));
2478 /* Note, it is possible to have gaps in the register mask.
2479 We ignore this here, and generate a JARL anyway. We will
2480 be pushing more registers than is strictly necessary, but
2481 it does save code space. */
2483 if (TARGET_LONG_CALLS)
2488 sprintf (name, "__save_%s", reg_names [first]);
2490 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2492 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2498 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2500 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2507 extern tree last_assemble_variable_decl;
2508 extern int size_directive_output;
2510 /* A version of asm_output_aligned_bss() that copes with the special
2511 data areas of the v850. */
/* Switches to the zbss/sbss/tdata/bss section according to the decl's
   data area, then emits alignment, the object label, and a skip for
   the object's size.  */
2513 v850_output_aligned_bss (FILE * file,
2516 unsigned HOST_WIDE_INT size,
2519 switch (v850_get_data_area (decl))
2522 switch_to_section (zbss_section);
2526 switch_to_section (sbss_section);
2530 switch_to_section (tdata_section);
2533 switch_to_section (bss_section);
2537 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2538 #ifdef ASM_DECLARE_OBJECT_NAME
2539 last_assemble_variable_decl = decl;
2540 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2542 /* Standard thing is just output label for the object. */
2543 ASM_OUTPUT_LABEL (file, name);
2544 #endif /* ASM_DECLARE_OBJECT_NAME */
2545 ASM_OUTPUT_SKIP (file, size ? size : 1);
2548 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
/* Emits the data-area-specific common directive (.zcomm/.scomm/.tcomm,
   or plain .comm) followed by "name,size,alignment".  */
2550 v850_output_common (FILE * file,
2556 if (decl == NULL_TREE)
2558 fprintf (file, "%s", COMMON_ASM_OP);
2562 switch (v850_get_data_area (decl))
2565 fprintf (file, "%s", ZCOMMON_ASM_OP);
2569 fprintf (file, "%s", SCOMMON_ASM_OP);
2573 fprintf (file, "%s", TCOMMON_ASM_OP);
2577 fprintf (file, "%s", COMMON_ASM_OP);
2582 assemble_name (file, name);
2583 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2586 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
/* Emits a .local directive for the name, then defers to the common
   output path for the actual storage directive.  */
2588 v850_output_local (FILE * file,
2594 fprintf (file, "%s", LOCAL_ASM_OP);
2595 assemble_name (file, name);
2596 fprintf (file, "\n");
2598 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2601 /* Add data area to the given declaration if a ghs data area pragma is
2602 currently in effect (#pragma ghs startXXX/endXXX). */
/* TARGET_INSERT_ATTRIBUTES hook.  Also lazily initializes the GHS
   default section-name table and, for global decls without an explicit
   section, attaches the section chosen by data area / readonly /
   initialized status -- but only when a pragma renamed that section.  */
2604 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2607 && data_area_stack->data_area
2608 && current_function_decl == NULL_TREE
2609 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2610 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2611 v850_set_data_area (decl, data_area_stack->data_area);
2613 /* Initialize the default names of the v850 specific sections,
2614 if this has not been done before. */
2616 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2618 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2619 = build_string (sizeof (".sdata")-1, ".sdata");
2621 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2622 = build_string (sizeof (".rosdata")-1, ".rosdata");
2624 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2625 = build_string (sizeof (".tdata")-1, ".tdata");
2627 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2628 = build_string (sizeof (".zdata")-1, ".zdata");
2630 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2631 = build_string (sizeof (".rozdata")-1, ".rozdata");
2634 if (current_function_decl == NULL_TREE
2635 && (TREE_CODE (decl) == VAR_DECL
2636 || TREE_CODE (decl) == CONST_DECL
2637 || TREE_CODE (decl) == FUNCTION_DECL)
2638 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2639 && !DECL_SECTION_NAME (decl))
2641 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2642 tree chosen_section;
2644 if (TREE_CODE (decl) == FUNCTION_DECL)
2645 kind = GHS_SECTION_KIND_TEXT;
2648 /* First choose a section kind based on the data area of the decl. */
2649 switch (v850_get_data_area (decl))
2655 kind = ((TREE_READONLY (decl))
2656 ? GHS_SECTION_KIND_ROSDATA
2657 : GHS_SECTION_KIND_SDATA);
2661 kind = GHS_SECTION_KIND_TDATA;
2665 kind = ((TREE_READONLY (decl))
2666 ? GHS_SECTION_KIND_ROZDATA
2667 : GHS_SECTION_KIND_ZDATA);
2670 case DATA_AREA_NORMAL: /* default data area */
2671 if (TREE_READONLY (decl))
2672 kind = GHS_SECTION_KIND_RODATA;
2673 else if (DECL_INITIAL (decl))
2674 kind = GHS_SECTION_KIND_DATA;
2676 kind = GHS_SECTION_KIND_BSS;
2680 /* Now, if the section kind has been explicitly renamed,
2681 then attach a section attribute. */
2682 chosen_section = GHS_current_section_names [(int) kind];
2684 /* Otherwise, if this kind of section needs an explicit section
2685 attribute, then also attach one. */
2686 if (chosen_section == NULL)
2687 chosen_section = GHS_default_section_names [(int) kind];
2691 /* Only set the section name if specified by a pragma, because
2692 otherwise it will force those variables to get allocated storage
2693 in this module, rather than by the linker. */
2694 DECL_SECTION_NAME (decl) = chosen_section;
2699 /* Construct a DISPOSE instruction that is the equivalent of
2700 the given RTX. We have already verified that this should
/* Emits either a "callt ctoff(__callt_return_...)" when callt is
   usable, or a textual "dispose <imm>, {reg-list}, r31".  Both buffers
   below are static -- the returned string is overwritten on the next
   call.  */
2704 construct_dispose_instruction (rtx op)
2706 int count = XVECLEN (op, 0);
2708 unsigned long int mask;
2710 static char buff[ 100 ]; /* XXX */
2715 error ("bogus DISPOSE construction: %d", count);
2719 /* Work out how many bytes to pop off the
2720 stack before retrieving registers. */
2721 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2722 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2723 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2725 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2727 /* Each pop will remove 4 bytes from the stack.... */
2728 stack_bytes -= (count - 2) * 4;
2730 /* Make sure that the amount we are popping
2731 will fit into the DISPOSE instruction. */
2732 if (stack_bytes > 128)
2734 error ("too much stack space to dispose of: %d", stack_bytes);
2738 /* Now compute the bit mask of registers to push. */
2741 for (i = 2; i < count; i++)
2743 rtx vector_element = XVECEXP (op, 0, i);
2745 gcc_assert (GET_CODE (vector_element) == SET);
2746 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2747 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2750 if (REGNO (SET_DEST (vector_element)) == 2)
2753 mask |= 1 << REGNO (SET_DEST (vector_element));
2756 if (! TARGET_DISABLE_CALLT
2757 && (use_callt || stack_bytes == 0))
2761 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2766 for (i = 20; i < 32; i++)
2767 if (mask & (1 << i))
2771 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2773 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2774 i, (mask & (1 << 31)) ? "31c" : "29");
2779 static char regs [100]; /* XXX */
2782 /* Generate the DISPOSE instruction. Note we could just issue the
2783 bit mask as a number as the assembler can cope with this, but for
2784 the sake of our readers we turn it into a textual description. */
2788 for (i = 20; i < 32; i++)
2790 if (mask & (1 << i))
2795 strcat (regs, ", ");
2800 strcat (regs, reg_names[ first ]);
/* Collapse a run of consecutive registers into "rX - rY" form.  */
2802 for (i++; i < 32; i++)
2803 if ((mask & (1 << i)) == 0)
2808 strcat (regs, " - ");
2809 strcat (regs, reg_names[ i - 1 ] );
2814 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2820 /* Construct a PREPARE instruction that is the equivalent of
2821 the given RTL. We have already verified that this should
/* Mirror of construct_dispose_instruction for the prologue: emits
   either a "callt ctoff(__callt_save_...)" or a textual
   "prepare {reg-list}, <imm>".  Static buffers -- not reentrant.
   NOTE(review): "PREPEARE" in the error string below is a typo in the
   runtime message; left untouched here since strings are behavior.  */
2825 construct_prepare_instruction (rtx op)
2829 unsigned long int mask;
2831 static char buff[ 100 ]; /* XXX */
2834 if (XVECLEN (op, 0) <= 1)
2836 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2840 /* Work out how many bytes to push onto
2841 the stack after storing the registers. */
2842 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2843 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2844 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2846 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2849 /* Make sure that the amount we are popping
2850 will fit into the DISPOSE instruction. */
2851 if (stack_bytes < -128)
2853 error ("too much stack space to prepare: %d", stack_bytes);
2857 /* Now compute the bit mask of registers to push. */
2860 for (i = 1; i < XVECLEN (op, 0); i++)
2862 rtx vector_element = XVECEXP (op, 0, i);
2864 if (GET_CODE (vector_element) == CLOBBER)
2867 gcc_assert (GET_CODE (vector_element) == SET);
2868 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2869 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2872 if (REGNO (SET_SRC (vector_element)) == 2)
2875 mask |= 1 << REGNO (SET_SRC (vector_element));
2879 stack_bytes += count * 4;
2881 if ((! TARGET_DISABLE_CALLT)
2882 && (use_callt || stack_bytes == 0))
2886 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2890 for (i = 20; i < 32; i++)
2891 if (mask & (1 << i))
2895 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2897 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2898 i, (mask & (1 << 31)) ? "31c" : "29");
2902 static char regs [100]; /* XXX */
2906 /* Generate the PREPARE instruction. Note we could just issue the
2907 bit mask as a number as the assembler can cope with this, but for
2908 the sake of our readers we turn it into a textual description. */
2912 for (i = 20; i < 32; i++)
2914 if (mask & (1 << i))
2919 strcat (regs, ", ");
2924 strcat (regs, reg_names[ first ]);
/* Collapse a run of consecutive registers into "rX - rY" form.  */
2926 for (i++; i < 32; i++)
2927 if ((mask & (1 << i)) == 0)
2932 strcat (regs, " - ");
2933 strcat (regs, reg_names[ i - 1 ] );
2938 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2944 /* Return an RTX indicating where the return address to the
2945 calling function can be found. */
/* Only the innermost frame is supported; hands back the saved initial
   value of the link pointer register.  */
2948 v850_return_addr (int count)
2953 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2956 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Creates the v850-specific output sections (.rosdata, .rozdata,
   .tdata, .zdata, .zbss) as unnamed sections with fixed section
   directives.  */
2959 v850_asm_init_sections (void)
2962 = get_unnamed_section (0, output_section_asm_op,
2963 "\t.section .rosdata,\"a\"");
2966 = get_unnamed_section (0, output_section_asm_op,
2967 "\t.section .rozdata,\"a\"");
2970 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2971 "\t.section .tdata,\"aw\"");
2974 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2975 "\t.section .zdata,\"aw\"");
2978 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2979 output_section_asm_op,
2980 "\t.section .zbss,\"aw\"");
/* TARGET_ASM_SELECT_SECTION hook: pick an output section for EXP based
   on its data area and whether it is a constant initialized object
   (read-only variants for const data).  Non-VAR_DECLs fall through to
   the read-only data section.  */
2984 v850_select_section (tree exp,
2985 int reloc ATTRIBUTE_UNUSED,
2986 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2988 if (TREE_CODE (exp) == VAR_DECL)
2991 if (!TREE_READONLY (exp)
2992 || TREE_SIDE_EFFECTS (exp)
2993 || !DECL_INITIAL (exp)
2994 || (DECL_INITIAL (exp) != error_mark_node
2995 && !TREE_CONSTANT (DECL_INITIAL (exp))))
3000 switch (v850_get_data_area (exp))
3003 return is_const ? rozdata_section : zdata_section;
3006 return tdata_section;
3009 return is_const ? rosdata_section : sdata_section;
3012 return is_const ? readonly_data_section : data_section;
3015 return readonly_data_section;
3018 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* Function values come back only in r10 on the v850.  */
3021 v850_function_value_regno_p (const unsigned int regno)
3023 return (regno == 10);
3026 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3029 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3031 /* Return values > 8 bytes in length in memory. */
3032 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
3035 /* Worker function for TARGET_FUNCTION_VALUE. */
/* All function values are returned in register r10.  */
3038 v850_function_value (const_tree valtype,
3039 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3040 bool outgoing ATTRIBUTE_UNUSED)
3042 return gen_rtx_REG (TYPE_MODE (valtype), 10);
3046 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
/* Records whether anonymous arguments are passed in registers; GHS
   mode uses the opposite convention from the default.  */
3049 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
3050 enum machine_mode mode ATTRIBUTE_UNUSED,
3051 tree type ATTRIBUTE_UNUSED,
3052 int *pretend_arg_size ATTRIBUTE_UNUSED,
3053 int second_time ATTRIBUTE_UNUSED)
3055 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
3058 /* Worker function for TARGET_CAN_ELIMINATE. */
/* Eliminating to the stack pointer is only possible when no frame
   pointer is required; every other elimination is always allowed.  */
3061 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3063 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3066 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3068 If TARGET_APP_REGS is not defined then add r2 and r5 to
3069 the pool of fixed registers. See PR 14505. */
/* NOTE(review): with TARGET_APP_REGS set this marks r2/r5 as not
   fixed; r5 stays call-used while r2 does not -- confirm the asymmetry
   is intentional against the ABI documentation.  */
3072 v850_conditional_register_usage (void)
3074 if (TARGET_APP_REGS)
3076 fixed_regs[2] = 0; call_used_regs[2] = 0;
3077 fixed_regs[5] = 0; call_used_regs[5] = 1;
3081 /* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE. */
/* Emits the static trampoline code: load the static-chain and target
   address from the two trailing .long slots (patched at runtime by
   v850_trampoline_init) and jump to the target via r12.  */
3084 v850_asm_trampoline_template (FILE *f)
3086 fprintf (f, "\tjarl .+4,r12\n");
3087 fprintf (f, "\tld.w 12[r12],r20\n");
3088 fprintf (f, "\tld.w 16[r12],r12\n");
3089 fprintf (f, "\tjmp [r12]\n");
3090 fprintf (f, "\tnop\n");
3091 fprintf (f, "\t.long 0\n");
3092 fprintf (f, "\t.long 0\n");
3095 /* Worker function for TARGET_TRAMPOLINE_INIT. */
/* Copies the trampoline template into M_TRAMP and patches the two
   .long slots (offsets 16 and 20) with the static chain and the
   target function's address.  */
3098 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3100 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3102 emit_block_move (m_tramp, assemble_trampoline_template (),
3103 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3105 mem = adjust_address (m_tramp, SImode, 16);
3106 emit_move_insn (mem, chain_value);
3107 mem = adjust_address (m_tramp, SImode, 20);
3108 emit_move_insn (mem, fnaddr);
/* TARGET_SCHED_ISSUE_RATE hook: the V850E2 family is dual-issue,
   everything else issues one insn per cycle.  */
3112 v850_issue_rate (void)
3114 return (TARGET_V850E2_ALL? 2 : 1);
3117 /* V850 specific attributes. */
/* Table consumed via TARGET_ATTRIBUTE_TABLE.  All five attributes take
   no arguments, require a decl, and are validated by the two handler
   functions defined above; the NULL row terminates the table.  */
3119 static const struct attribute_spec v850_attribute_table[] =
3121 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
3122 { "interrupt_handler", 0, 0, true, false, false, v850_handle_interrupt_attribute },
3123 { "interrupt", 0, 0, true, false, false, v850_handle_interrupt_attribute },
3124 { "sda", 0, 0, true, false, false, v850_handle_data_area_attribute },
3125 { "tda", 0, 0, true, false, false, v850_handle_data_area_attribute },
3126 { "zda", 0, 0, true, false, false, v850_handle_data_area_attribute },
3127 { NULL, 0, 0, false, false, false, NULL }
/* Initialize the GCC target structure.  Each #undef/#define pair below
   replaces the default hook from target-def.h with a V850-specific
   value or function; TARGET_INITIALIZER (used at the targetm definition
   further down) gathers them into the target hook vector.  */

/* Assembly output: emit 16-bit data with ".hword" instead of the
   generic default directive.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

/* Operand printing hooks for instruction output templates.  */
#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attribute handling: the table of machine attributes defined above,
   plus a hook that lets the backend add attributes (e.g. data area
   defaults) to declarations before they are otherwise processed.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

/* Section selection for the v850 small-data areas (sda/tda/zda).  */
#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* Command-line option handling: default flag bits and the per-option
   handler (which processes -mtda=/-msda=/-mzda= and friends).  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_DEFAULT | MASK_APP_REGS)
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION v850_handle_option

/* Cost model hooks used by the optimizers.  hook_int_rtx_bool_0
   makes every address cost zero.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Function return-value conventions.  */
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

/* Argument-passing conventions.  hook_bool_const_tree_true means
   arguments are always promoted per the prototype; the CALLEE_COPIES
   hook says callees copy arguments passed by reference.  */
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

/* Frame/register elimination and fixed-register setup.  */
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines for nested functions.  */
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming

/* Per-optimization-level default option settings.  */
#undef TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE v850_option_optimization_table
/* Instantiate the target hook vector from TARGET_INITIALIZER, which
   expands to an aggregate initializer built from the TARGET_* macros
   overridden above (defaults coming from target-def.h).  */
struct gcc_target targetm = TARGET_INITIALIZER;
3229 #include "gt-v850.h"