1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
48 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 /* Structure of this file:
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
63 + Function args in registers
64 + Handle pipeline hazards
67 + Machine-dependent Reorg
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
80 c - cb (control bus) */
82 struct GTY(()) machine_function
84 int mep_frame_pointer_needed;
91 /* Records __builtin_return address. */
95 int reg_save_slot[FIRST_PSEUDO_REGISTER];
96 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
98 /* 2 if the current function has an interrupt attribute, 1 if not, 0
99 if unknown. This is here because resource.c uses EPILOGUE_USES
101 int interrupt_handler;
103 /* Likewise, for disinterrupt attribute. */
104 int disable_interrupts;
106 /* Number of doloop tags used so far. */
109 /* True if the last tag was allocated to a doloop_end. */
110 bool doloop_tag_from_end;
112 /* True if reload changes $TP. */
113 bool reload_changes_tp;
115 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
116 We only set this if the function is an interrupt handler. */
117 int asms_without_operands;
/* Nonzero if rtx X is a hard REG whose register number is one of the
   MeP control registers (tests the rtx code first, so X may be any rtx).  */
120 #define MEP_CONTROL_REG(x) \
121 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
123 static const struct attribute_spec mep_attribute_table[11];
/* Output sections for the MeP-specific data regions (based, tiny BSS,
   far, far BSS, far rodata, small rodata).  Created lazily by the
   section-init hook; marked GTY(()) so the garbage collector keeps
   them live between passes.  */
125 static GTY(()) section * based_section;
126 static GTY(()) section * tinybss_section;
127 static GTY(()) section * far_section;
128 static GTY(()) section * farbss_section;
129 static GTY(()) section * frodata_section;
130 static GTY(()) section * srodata_section;
132 static void mep_set_leaf_registers (int);
133 static bool symbol_p (rtx);
134 static bool symbolref_p (rtx);
135 static void encode_pattern_1 (rtx);
136 static void encode_pattern (rtx);
137 static bool const_in_range (rtx, int, int);
138 static void mep_rewrite_mult (rtx, rtx);
139 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
140 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
141 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
142 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
143 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
144 static bool mep_nongeneral_reg (rtx);
145 static bool mep_general_copro_reg (rtx);
146 static bool mep_nonregister (rtx);
147 static struct machine_function* mep_init_machine_status (void);
148 static rtx mep_tp_rtx (void);
149 static rtx mep_gp_rtx (void);
150 static bool mep_interrupt_p (void);
151 static bool mep_disinterrupt_p (void);
152 static bool mep_reg_set_p (rtx, rtx);
153 static bool mep_reg_set_in_function (int);
154 static bool mep_interrupt_saved_reg (int);
155 static bool mep_call_saves_register (int);
157 static void add_constant (int, int, int, int);
158 static bool mep_function_uses_sp (void);
159 static rtx maybe_dead_move (rtx, rtx, bool);
160 static void mep_reload_pointer (int, const char *);
161 static void mep_start_function (FILE *, HOST_WIDE_INT);
162 static bool mep_function_ok_for_sibcall (tree, tree);
163 static int unique_bit_in (HOST_WIDE_INT);
164 static int bit_size_for_clip (HOST_WIDE_INT);
165 static int bytesize (const_tree, enum machine_mode);
166 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
167 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
168 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
169 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
170 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
171 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
172 static bool mep_function_attribute_inlinable_p (const_tree);
173 static bool mep_lookup_pragma_disinterrupt (const char *);
174 static int mep_multiple_address_regions (tree, bool);
175 static int mep_attrlist_to_encoding (tree, tree);
176 static void mep_insert_attributes (tree, tree *);
177 static void mep_encode_section_info (tree, rtx, int);
178 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
179 static void mep_unique_section (tree, int);
180 static unsigned int mep_section_type_flags (tree, const char *, int);
181 static void mep_asm_named_section (const char *, unsigned int, tree);
182 static bool mep_mentioned_p (rtx, rtx, int);
183 static void mep_reorg_regmove (rtx);
184 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
185 static void mep_reorg_repeat (rtx);
186 static bool mep_invertable_branch_p (rtx);
187 static void mep_invert_branch (rtx, rtx);
188 static void mep_reorg_erepeat (rtx);
189 static void mep_jmp_return_reorg (rtx);
190 static void mep_reorg_addcombine (rtx);
191 static void mep_reorg (void);
192 static void mep_init_intrinsics (void);
193 static void mep_init_builtins (void);
194 static void mep_intrinsic_unavailable (int);
195 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
196 static bool mep_get_move_insn (int, const struct cgen_insn **);
197 static rtx mep_convert_arg (enum machine_mode, rtx);
198 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
199 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
200 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
201 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
202 static int mep_adjust_cost (rtx, rtx, rtx, int);
203 static int mep_issue_rate (void);
204 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
205 static void mep_move_ready_insn (rtx *, int, rtx);
206 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
207 static rtx mep_make_bundle (rtx, rtx);
208 static void mep_bundle_insns (rtx);
209 static bool mep_rtx_cost (rtx, int, int, int *, bool);
210 static int mep_address_cost (rtx, bool);
211 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
213 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
215 static bool mep_vector_mode_supported_p (enum machine_mode);
216 static bool mep_handle_option (size_t, const char *, int);
217 static rtx mep_allocate_initial_value (rtx);
218 static void mep_asm_init_sections (void);
219 static int mep_comp_type_attributes (const_tree, const_tree);
220 static bool mep_narrow_volatile_bitfield (void);
221 static rtx mep_expand_builtin_saveregs (void);
222 static tree mep_build_builtin_va_list (void);
223 static void mep_expand_va_start (tree, rtx);
224 static tree mep_gimplify_va_arg_expr (tree, tree, tree *, tree *);
226 /* Initialize the GCC target structure. */
228 #undef TARGET_ASM_FUNCTION_PROLOGUE
229 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
230 #undef TARGET_ATTRIBUTE_TABLE
231 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
232 #undef TARGET_COMP_TYPE_ATTRIBUTES
233 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
234 #undef TARGET_INSERT_ATTRIBUTES
235 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
236 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
237 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
238 #undef TARGET_SECTION_TYPE_FLAGS
239 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
240 #undef TARGET_ASM_NAMED_SECTION
241 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
242 #undef TARGET_INIT_BUILTINS
243 #define TARGET_INIT_BUILTINS mep_init_builtins
244 #undef TARGET_EXPAND_BUILTIN
245 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
246 #undef TARGET_SCHED_ADJUST_COST
247 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
248 #undef TARGET_SCHED_ISSUE_RATE
249 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
250 #undef TARGET_SCHED_REORDER
251 #define TARGET_SCHED_REORDER mep_sched_reorder
252 #undef TARGET_STRIP_NAME_ENCODING
253 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
254 #undef TARGET_ASM_SELECT_SECTION
255 #define TARGET_ASM_SELECT_SECTION mep_select_section
256 #undef TARGET_ASM_UNIQUE_SECTION
257 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
258 #undef TARGET_ENCODE_SECTION_INFO
259 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
260 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
261 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
262 #undef TARGET_RTX_COSTS
263 #define TARGET_RTX_COSTS mep_rtx_cost
264 #undef TARGET_ADDRESS_COST
265 #define TARGET_ADDRESS_COST mep_address_cost
266 #undef TARGET_MACHINE_DEPENDENT_REORG
267 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
268 #undef TARGET_SETUP_INCOMING_VARARGS
269 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
270 #undef TARGET_PASS_BY_REFERENCE
271 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
272 #undef TARGET_VECTOR_MODE_SUPPORTED_P
273 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
274 #undef TARGET_HANDLE_OPTION
275 #define TARGET_HANDLE_OPTION mep_handle_option
276 #undef TARGET_DEFAULT_TARGET_FLAGS
277 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
278 #undef TARGET_ALLOCATE_INITIAL_VALUE
279 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
280 #undef TARGET_ASM_INIT_SECTIONS
281 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
282 #undef TARGET_RETURN_IN_MEMORY
283 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
284 #undef TARGET_NARROW_VOLATILE_BITFIELD
285 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
286 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
287 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
288 #undef TARGET_BUILD_BUILTIN_VA_LIST
289 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
290 #undef TARGET_EXPAND_BUILTIN_VA_START
291 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
292 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
293 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
/* The one instance of the target-hook vector for this back end,
   built from the TARGET_* overrides defined above.  */
295 struct gcc_target targetm = TARGET_INITIALIZER;
297 #define WANT_GCC_DEFINITIONS
298 #include "mep-intrin.h"
299 #undef WANT_GCC_DEFINITIONS
302 /* Command Line Option Support. */
/* Per-hard-register flag: nonzero when the register may be used as a
   leaf-function register (set wholesale by mep_set_leaf_registers).  */
304 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
306 /* True if we can use cmov instructions to move values back and forth
307 between core and coprocessor registers. */
308 bool mep_have_core_copro_moves_p;
310 /* True if we can use cmov instructions (or a work-alike) to move
311 values between coprocessor registers. */
312 bool mep_have_copro_copro_moves_p;
314 /* A table of all coprocessor instructions that can act like
315 a coprocessor-to-coprocessor cmov. */
316 static const int mep_cmov_insns[] = {
/* Nonzero once -mtiny= has been given explicitly on the command line;
   used by the option-override code to diagnose conflicting options.  */
329 static int option_mtiny_specified = 0;
333 mep_set_leaf_registers (int enable)
337 if (mep_leaf_registers[0] != enable)
338 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
339 mep_leaf_registers[i] = enable;
343 mep_conditional_register_usage (char *fixed_regs, char *call_used_regs)
347 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
349 fixed_regs[HI_REGNO] = 1;
350 fixed_regs[LO_REGNO] = 1;
351 call_used_regs[HI_REGNO] = 1;
352 call_used_regs[LO_REGNO] = 1;
355 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
360 mep_optimization_options (void)
362 /* The first scheduling pass often increases register pressure and tends
363 to result in more spill code. Only run it when specifically asked. */
364 flag_schedule_insns = 0;
366 /* Using $fp doesn't gain us much, even when debugging is important. */
367 flag_omit_frame_pointer = 1;
371 mep_override_options (void)
374 warning (OPT_fpic, "-fpic is not supported");
376 warning (OPT_fPIC, "-fPIC is not supported");
377 if (TARGET_S && TARGET_M)
378 error ("only one of -ms and -mm may be given");
379 if (TARGET_S && TARGET_L)
380 error ("only one of -ms and -ml may be given");
381 if (TARGET_M && TARGET_L)
382 error ("only one of -mm and -ml may be given");
383 if (TARGET_S && option_mtiny_specified)
384 error ("only one of -ms and -mtiny= may be given");
385 if (TARGET_M && option_mtiny_specified)
386 error ("only one of -mm and -mtiny= may be given");
387 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
388 warning (0, "-mclip currently has no effect without -mminmax");
390 if (mep_const_section)
392 if (strcmp (mep_const_section, "tiny") != 0
393 && strcmp (mep_const_section, "near") != 0
394 && strcmp (mep_const_section, "far") != 0)
395 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
399 mep_tiny_cutoff = 65536;
402 if (TARGET_L && ! option_mtiny_specified)
405 if (TARGET_64BIT_CR_REGS)
406 flag_split_wide_types = 0;
408 init_machine_status = mep_init_machine_status;
409 mep_init_intrinsics ();
412 /* Pattern Support - constraints, predicates, expanders. */
414 /* MEP has very few instructions that can refer to the span of
415 addresses used by symbols, so it's common to check for them. */
420 int c = GET_CODE (x);
422 return (c == CONST_INT
432 if (GET_CODE (x) != MEM)
435 c = GET_CODE (XEXP (x, 0));
436 return (c == CONST_INT
441 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
443 #define GEN_REG(R, STRICT) \
446 && ((R) == ARG_POINTER_REGNUM \
447 || (R) >= FIRST_PSEUDO_REGISTER)))
/* Scratch state for encode_pattern: PATTERN accumulates a short string
   describing the shape of an rtx, PATTERNP is the fill pointer, and
   PATTERNR records the sub-rtx seen at each position.  RTX_IS tests the
   most recently encoded pattern against a literal string.  */
449 static char pattern[12], *patternp;
450 static GTY(()) rtx patternr[12];
451 #define RTX_IS(x) (strcmp (pattern, x) == 0)
454 encode_pattern_1 (rtx x)
458 if (patternp == pattern + sizeof (pattern) - 2)
464 patternr[patternp-pattern] = x;
466 switch (GET_CODE (x))
474 encode_pattern_1 (XEXP(x, 0));
478 encode_pattern_1 (XEXP(x, 0));
479 encode_pattern_1 (XEXP(x, 1));
483 encode_pattern_1 (XEXP(x, 0));
484 encode_pattern_1 (XEXP(x, 1));
488 encode_pattern_1 (XEXP(x, 0));
502 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
503 for (i=0; i<XVECLEN (x, 0); i++)
504 encode_pattern_1 (XVECEXP (x, 0, i));
512 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
521 encode_pattern (rtx x)
524 encode_pattern_1 (x);
529 mep_section_tag (rtx x)
535 switch (GET_CODE (x))
542 x = XVECEXP (x, 0, 0);
545 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
554 if (GET_CODE (x) != SYMBOL_REF)
557 if (name[0] == '@' && name[2] == '.')
559 if (name[1] == 'i' || name[1] == 'I')
562 return 'f'; /* near */
563 return 'n'; /* far */
571 mep_regno_reg_class (int regno)
575 case SP_REGNO: return SP_REGS;
576 case TP_REGNO: return TP_REGS;
577 case GP_REGNO: return GP_REGS;
578 case 0: return R0_REGS;
579 case HI_REGNO: return HI_REGS;
580 case LO_REGNO: return LO_REGS;
581 case ARG_POINTER_REGNUM: return GENERAL_REGS;
584 if (GR_REGNO_P (regno))
585 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
586 if (CONTROL_REGNO_P (regno))
589 if (CR_REGNO_P (regno))
593 /* Search for the register amongst user-defined subclasses of
594 the coprocessor registers. */
595 for (i = USER0_REGS; i <= USER3_REGS; ++i)
597 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
599 for (j = 0; j < N_REG_CLASSES; ++j)
601 enum reg_class sub = reg_class_subclasses[i][j];
603 if (sub == LIM_REG_CLASSES)
605 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
610 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
613 if (CCR_REGNO_P (regno))
616 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
622 mep_reg_class_from_constraint (int c, const char *str)
639 return LOADABLE_CR_REGS;
641 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
643 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
670 enum reg_class which = c - 'A' + USER0_REGS;
671 return (reg_class_size[which] > 0 ? which : NO_REGS);
680 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
684 case 'I': return value >= -32768 && value < 32768;
685 case 'J': return value >= 0 && value < 65536;
686 case 'K': return value >= 0 && value < 0x01000000;
687 case 'L': return value >= -32 && value < 32;
688 case 'M': return value >= 0 && value < 32;
689 case 'N': return value >= 0 && value < 16;
693 return value >= -2147483647-1 && value <= 2147483647;
700 mep_extra_constraint (rtx value, int c)
702 encode_pattern (value);
707 /* For near symbols, like what call uses. */
708 if (GET_CODE (value) == REG)
710 return mep_call_address_operand (value, GET_MODE (value));
713 /* For signed 8-bit immediates. */
714 return (GET_CODE (value) == CONST_INT
715 && INTVAL (value) >= -128
716 && INTVAL (value) <= 127);
719 /* For tp/gp relative symbol values. */
720 return (RTX_IS ("u3s") || RTX_IS ("u2s")
721 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
724 /* Non-absolute memories. */
725 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
729 return RTX_IS ("Hs");
732 /* Register indirect. */
733 return RTX_IS ("mr");
736 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
747 const_in_range (rtx x, int minv, int maxv)
749 return (GET_CODE (x) == CONST_INT
750 && INTVAL (x) >= minv
751 && INTVAL (x) <= maxv);
754 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
755 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
756 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
757 at the end of the insn stream. */
760 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
762 if (rtx_equal_p (dest, src1))
764 else if (rtx_equal_p (dest, src2))
769 emit_insn (gen_movsi (copy_rtx (dest), src1));
771 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
776 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
777 Change the last element of PATTERN from (clobber (scratch:SI))
778 to (clobber (reg:SI HI_REGNO)). */
781 mep_rewrite_mult (rtx insn, rtx pattern)
785 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
786 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
787 PATTERN (insn) = pattern;
788 INSN_CODE (insn) = -1;
791 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
792 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
793 store the result in DEST if nonnull. */
796 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
800 lo = gen_rtx_REG (SImode, LO_REGNO);
802 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
803 mep_mulr_source (insn, dest, src1, src2));
805 pattern = gen_mulsi3_lo (lo, src1, src2);
806 mep_rewrite_mult (insn, pattern);
809 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
810 SRC3 into $lo, then use either madd or maddr. The move into $lo will
811 be deleted by a peephole2 if SRC3 is already in $lo. */
814 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
818 lo = gen_rtx_REG (SImode, LO_REGNO);
819 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
821 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
822 mep_mulr_source (insn, dest, src1, src2),
825 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
826 mep_rewrite_mult (insn, pattern);
829 /* Return true if $lo has the same value as integer register GPR when
830 instruction INSN is reached. If necessary, rewrite the instruction
831 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
832 rtx for (reg:SI LO_REGNO).
834 This function is intended to be used by the peephole2 pass. Since
835 that pass goes from the end of a basic block to the beginning, and
836 propagates liveness information on the way, there is no need to
837 update register notes here.
839 If GPR_DEAD_P is true on entry, and this function returns true,
840 then the caller will replace _every_ use of GPR in and after INSN
841 with LO. This means that if the instruction that sets $lo is a
842 mulr- or maddr-type instruction, we can rewrite it to use mul or
843 madd instead. In combination with the copy progagation pass,
844 this allows us to replace sequences like:
853 if GPR is no longer used. */
856 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
860 insn = PREV_INSN (insn);
862 switch (recog_memoized (insn))
864 case CODE_FOR_mulsi3_1:
866 if (rtx_equal_p (recog_data.operand[0], gpr))
868 mep_rewrite_mulsi3 (insn,
869 gpr_dead_p ? NULL : recog_data.operand[0],
870 recog_data.operand[1],
871 recog_data.operand[2]);
876 case CODE_FOR_maddsi3:
878 if (rtx_equal_p (recog_data.operand[0], gpr))
880 mep_rewrite_maddsi3 (insn,
881 gpr_dead_p ? NULL : recog_data.operand[0],
882 recog_data.operand[1],
883 recog_data.operand[2],
884 recog_data.operand[3]);
889 case CODE_FOR_mulsi3r:
890 case CODE_FOR_maddsi3r:
892 return rtx_equal_p (recog_data.operand[1], gpr);
895 if (reg_set_p (lo, insn)
896 || reg_set_p (gpr, insn)
897 || volatile_insn_p (PATTERN (insn)))
900 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
905 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
909 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
912 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
914 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
919 /* Return true if SET can be turned into a post-modify load or store
920 that adds OFFSET to GPR. In other words, return true if SET can be
923 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
925 It's OK to change SET to an equivalent operation in order to
929 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
932 unsigned int reg_bytes, mem_bytes;
933 enum machine_mode reg_mode, mem_mode;
935 /* Only simple SETs can be converted. */
936 if (GET_CODE (set) != SET)
939 /* Point REG to what we hope will be the register side of the set and
940 MEM to what we hope will be the memory side. */
941 if (GET_CODE (SET_DEST (set)) == MEM)
943 mem = &SET_DEST (set);
944 reg = &SET_SRC (set);
948 reg = &SET_DEST (set);
949 mem = &SET_SRC (set);
950 if (GET_CODE (*mem) == SIGN_EXTEND)
951 mem = &XEXP (*mem, 0);
954 /* Check that *REG is a suitable coprocessor register. */
955 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
958 /* Check that *MEM is a suitable memory reference. */
959 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
962 /* Get the number of bytes in each operand. */
963 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
964 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
966 /* Check that OFFSET is suitably aligned. */
967 if (INTVAL (offset) & (mem_bytes - 1))
970 /* Convert *MEM to a normal integer mode. */
971 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
972 *mem = change_address (*mem, mem_mode, NULL);
974 /* Adjust *REG as well. */
975 *reg = shallow_copy_rtx (*reg);
976 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
978 /* SET is a subword load. Convert it to an explicit extension. */
979 PUT_MODE (*reg, SImode);
980 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
984 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
985 PUT_MODE (*reg, reg_mode);
990 /* Return the effect of frame-related instruction INSN. */
993 mep_frame_expr (rtx insn)
997 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
998 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
999 RTX_FRAME_RELATED_P (expr) = 1;
1003 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
1004 new pattern in INSN1; INSN2 will be deleted by the caller. */
1007 mep_make_parallel (rtx insn1, rtx insn2)
1011 if (RTX_FRAME_RELATED_P (insn2))
1013 expr = mep_frame_expr (insn2);
1014 if (RTX_FRAME_RELATED_P (insn1))
1015 expr = gen_rtx_SEQUENCE (VOIDmode,
1016 gen_rtvec (2, mep_frame_expr (insn1), expr));
1017 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
1018 RTX_FRAME_RELATED_P (insn1) = 1;
1021 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
1022 gen_rtvec (2, PATTERN (insn1),
1024 INSN_CODE (insn1) = -1;
1027 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
1028 the basic block to see if any previous load or store instruction can
1029 be persuaded to do SET_INSN as a side-effect. Return true if so. */
1032 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
1039 insn = PREV_INSN (insn);
1042 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
1044 mep_make_parallel (insn, set_insn);
1048 if (reg_set_p (reg, insn)
1049 || reg_referenced_p (reg, PATTERN (insn))
1050 || volatile_insn_p (PATTERN (insn)))
1054 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
1058 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1061 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1063 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1064 extract_insn (insn);
1069 mep_allow_clip (rtx ux, rtx lx, int s)
1071 HOST_WIDE_INT u = INTVAL (ux);
1072 HOST_WIDE_INT l = INTVAL (lx);
1075 if (!TARGET_OPT_CLIP)
1080 for (i = 0; i < 30; i ++)
1081 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1082 && (l == - ((HOST_WIDE_INT) 1 << i)))
1090 for (i = 0; i < 30; i ++)
1091 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1098 mep_bit_position_p (rtx x, bool looking_for)
1100 if (GET_CODE (x) != CONST_INT)
1102 switch ((int) INTVAL(x) & 0xff)
1104 case 0x01: case 0x02: case 0x04: case 0x08:
1105 case 0x10: case 0x20: case 0x40: case 0x80:
1107 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1108 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1109 return !looking_for;
1115 move_needs_splitting (rtx dest, rtx src,
1116 enum machine_mode mode ATTRIBUTE_UNUSED)
1118 int s = mep_section_tag (src);
1122 if (GET_CODE (src) == CONST
1123 || GET_CODE (src) == MEM)
1124 src = XEXP (src, 0);
1125 else if (GET_CODE (src) == SYMBOL_REF
1126 || GET_CODE (src) == LABEL_REF
1127 || GET_CODE (src) == PLUS)
1133 || (GET_CODE (src) == PLUS
1134 && GET_CODE (XEXP (src, 1)) == CONST_INT
1135 && (INTVAL (XEXP (src, 1)) < -65536
1136 || INTVAL (XEXP (src, 1)) > 0xffffff))
1137 || (GET_CODE (dest) == REG
1138 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1144 mep_split_mov (rtx *operands, int symbolic)
1148 if (move_needs_splitting (operands[0], operands[1], SImode))
1153 if (GET_CODE (operands[1]) != CONST_INT)
1156 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1157 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1158 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1161 if (((!reload_completed && !reload_in_progress)
1162 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1163 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1169 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1170 it to one specific value. So the insn chosen depends on whether
1171 the source and destination modes match. */
1174 mep_vliw_mode_match (rtx tgt)
1176 bool src_vliw = mep_vliw_function_p (cfun->decl);
1177 bool tgt_vliw = INTVAL (tgt);
1179 return src_vliw == tgt_vliw;
1183 mep_multi_slot (rtx x)
1185 return get_attr_slot (x) == SLOT_MULTI;
1189 /* Be careful not to use macros that need to be compiled one way for
1190 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1193 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1197 #define DEBUG_LEGIT 0
1199 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1203 if (GET_CODE (x) == LO_SUM
1204 && GET_CODE (XEXP (x, 0)) == REG
1205 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1206 && CONSTANT_P (XEXP (x, 1)))
1208 if (GET_MODE_SIZE (mode) > 4)
1210 /* We will end up splitting this, and lo_sums are not
1211 offsettable for us. */
1213 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1218 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1223 if (GET_CODE (x) == REG
1224 && GEN_REG (REGNO (x), strict))
1227 fprintf (stderr, " - yup, [reg]\n");
1232 if (GET_CODE (x) == PLUS
1233 && GET_CODE (XEXP (x, 0)) == REG
1234 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1235 && const_in_range (XEXP (x, 1), -32768, 32767))
1238 fprintf (stderr, " - yup, [reg+const]\n");
1243 if (GET_CODE (x) == PLUS
1244 && GET_CODE (XEXP (x, 0)) == REG
1245 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1246 && GET_CODE (XEXP (x, 1)) == CONST
1247 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1248 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1249 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1250 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1253 fprintf (stderr, " - yup, [reg+unspec]\n");
1258 the_tag = mep_section_tag (x);
1263 fprintf (stderr, " - nope, [far]\n");
1268 if (mode == VOIDmode
1269 && GET_CODE (x) == SYMBOL_REF)
1272 fprintf (stderr, " - yup, call [symbol]\n");
1277 if ((mode == SImode || mode == SFmode)
1279 && LEGITIMATE_CONSTANT_P (x)
1280 && the_tag != 't' && the_tag != 'b')
1282 if (GET_CODE (x) != CONST_INT
1283 || (INTVAL (x) <= 0xfffff
1285 && (INTVAL (x) % 4) == 0))
1288 fprintf (stderr, " - yup, [const]\n");
1295 fprintf (stderr, " - nope.\n");
1301 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1302 enum reload_type type,
1303 int ind_levels ATTRIBUTE_UNUSED)
1305 if (GET_CODE (*x) == PLUS
1306 && GET_CODE (XEXP (*x, 0)) == MEM
1307 && GET_CODE (XEXP (*x, 1)) == REG)
1309 /* GCC will by default copy the MEM into a REG, which results in
1310 an invalid address. For us, the best thing to do is move the
1311 whole expression to a REG. */
1312 push_reload (*x, NULL_RTX, x, NULL,
1313 GENERAL_REGS, mode, VOIDmode,
1318 if (GET_CODE (*x) == PLUS
1319 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1320 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1322 char e = mep_section_tag (XEXP (*x, 0));
1324 if (e != 't' && e != 'b')
1326 /* GCC thinks that (sym+const) is a valid address. Well,
1327 sometimes it is, this time it isn't. The best thing to
1328 do is reload the symbol to a register, since reg+int
1329 tends to work, and we can't just add the symbol and
1331 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1332 GENERAL_REGS, mode, VOIDmode,
1341 mep_core_address_length (rtx insn, int opn)
1343 rtx set = single_set (insn);
1344 rtx mem = XEXP (set, opn);
1345 rtx other = XEXP (set, 1-opn);
1346 rtx addr = XEXP (mem, 0);
1348 if (register_operand (addr, Pmode))
1350 if (GET_CODE (addr) == PLUS)
1352 rtx addend = XEXP (addr, 1);
1354 gcc_assert (REG_P (XEXP (addr, 0)));
1356 switch (REGNO (XEXP (addr, 0)))
1358 case STACK_POINTER_REGNUM:
1359 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1360 && mep_imm7a4_operand (addend, VOIDmode))
1365 gcc_assert (REG_P (other));
1367 if (REGNO (other) >= 8)
1370 if (GET_CODE (addend) == CONST
1371 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1372 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1375 if (GET_CODE (addend) == CONST_INT
1376 && INTVAL (addend) >= 0
1377 && INTVAL (addend) <= 127
1378 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1388 mep_cop_address_length (rtx insn, int opn)
1390 rtx set = single_set (insn);
1391 rtx mem = XEXP (set, opn);
1392 rtx addr = XEXP (mem, 0);
1394 if (GET_CODE (mem) != MEM)
1396 if (register_operand (addr, Pmode))
1398 if (GET_CODE (addr) == POST_INC)
1404 #define DEBUG_EXPAND_MOV 0
/* Expand a move of MODE between operands[0] and operands[1], handling
   the MeP-specific addressing of $tp-relative ('t'-tagged) and
   $gp-relative ('b'-tagged) symbols, control registers, and far ('f')
   sections.  Returns nonzero when the move has been fully emitted
   here (so the caller's pattern should not emit anything more) --
   NOTE(review): the return statements themselves are elided in this
   listing; verify against the unelided source.  */
1406 mep_expand_mov (rtx *operands, enum machine_mode mode)
1411   int post_reload = 0;
1413   tag[0] = mep_section_tag (operands[0]);
1414   tag[1] = mep_section_tag (operands[1]);
/* Before reload we can still create pseudos, so force mem->mem moves
   through a register.  */
1416   if (!reload_in_progress
1417       && !reload_completed
1418       && GET_CODE (operands[0]) != REG
1419       && GET_CODE (operands[0]) != SUBREG
1420       && GET_CODE (operands[1]) != REG
1421       && GET_CODE (operands[1]) != SUBREG)
1422     operands[1] = copy_to_mode_reg (mode, operands[1]);
1424 #if DEBUG_EXPAND_MOV
1425   fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1426 	  reload_in_progress || reload_completed);
1427   debug_rtx (operands[0]);
1428   debug_rtx (operands[1]);
1431   if (mode == DImode || mode == DFmode)
1434   if (reload_in_progress || reload_completed)
/* Remember if reload writes $tp, since tag-based addressing then
   becomes unreliable (checked below).  */
1438       if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1439 	cfun->machine->reload_changes_tp = true;
/* 't'/'b' tags require the initial $gp/$tp values to still be
   available as hard-reg initial values.  */
1441       if (tag[0] == 't' || tag[1] == 't')
1443 	  r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1444 	  if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1447       if (tag[0] == 'b' || tag[1] == 'b')
1449 	  r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1450 	  if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1453       if (cfun->machine->reload_changes_tp == true)
/* Rewrite a tagged symbolic source as base-register + UNSPEC offset:
   UNS_TPREL for 'b' (tp-relative), UNS_GPREL for 't' (gp-relative).  */
1460   if (symbol_p (operands[1]))
1462       t = mep_section_tag (operands[1]);
1463       if (t == 'b' || t == 't')
1466 	  if (GET_CODE (operands[1]) == SYMBOL_REF)
1468 	      tpsym = operands[1];
1469 	      n = gen_rtx_UNSPEC (mode,
1470 				  gen_rtvec (1, operands[1]),
1471 				  t == 'b' ? UNS_TPREL : UNS_GPREL);
1472 	      n = gen_rtx_CONST (mode, n);
/* (const (plus (symbol_ref) (const_int))) -- keep the offset outside
   the unspec.  */
1474 	  else if (GET_CODE (operands[1]) == CONST
1475 		   && GET_CODE (XEXP (operands[1], 0)) == PLUS
1476 		   && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1477 		   && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1479 	      tpsym = XEXP (XEXP (operands[1], 0), 0);
1480 	      tpoffs = XEXP (XEXP (operands[1], 0), 1);
1481 	      n = gen_rtx_UNSPEC (mode,
1482 				  gen_rtvec (1, tpsym),
1483 				  t == 'b' ? UNS_TPREL : UNS_GPREL);
1484 	      n = gen_rtx_PLUS (mode, n, tpoffs);
1485 	      n = gen_rtx_CONST (mode, n);
1487 	  else if (GET_CODE (operands[1]) == CONST
1488 		   && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1492 	      error ("unusual TP-relative address");
1496 	  n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1497 				   : mep_gp_rtx ()), n);
1498 	  n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1499 #if DEBUG_EXPAND_MOV
1500 	  fprintf(stderr, "mep_expand_mov emitting ");
/* Rewrite tagged MEM operands the same way, on both sides.  */
1507   for (i=0; i < 2; i++)
1509       t = mep_section_tag (operands[i]);
1510       if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1515 	  sym = XEXP (operands[i], 0);
1516 	  if (GET_CODE (sym) == CONST
1517 	      && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1518 	    sym = XVECEXP (XEXP (sym, 0), 0, 0);
1531 	  n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1532 	  n = gen_rtx_CONST (Pmode, n);
1533 	  n = gen_rtx_PLUS (Pmode, r, n);
1534 	  operands[i] = replace_equiv_address (operands[i], n);
/* Control registers cannot be moved directly to/from memory; go
   through a general register.  */
1539   if ((GET_CODE (operands[1]) != REG
1540        && MEP_CONTROL_REG (operands[0]))
1541       || (GET_CODE (operands[0]) != REG
1542 	  && MEP_CONTROL_REG (operands[1])))
1545 #if DEBUG_EXPAND_MOV
1546       fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1548       temp = gen_reg_rtx (mode);
1549       emit_move_insn (temp, operands[1]);
/* Stores to far symbols (or non-word sizes) need the address in a
   register first.  */
1553   if (symbolref_p (operands[0])
1554       && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1555 	  || (GET_MODE_SIZE (mode) != 4)))
1559       gcc_assert (!reload_in_progress && !reload_completed);
1561       temp = force_reg (Pmode, XEXP (operands[0], 0));
1562       operands[0] = replace_equiv_address (operands[0], temp);
1563       emit_move_insn (operands[0], operands[1]);
1567   if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
/* Symbolic loads are built as a top/bottom half pair of insns.  */
1570   if (symbol_p (operands[1])
1571       && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1573       emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1574       emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1578   if (symbolref_p (operands[1])
1579       && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1583       if (reload_in_progress || reload_completed)
1586 	temp = gen_reg_rtx (Pmode);
1588       emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1589       emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1590       emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1597 /* Cases where the pattern can't be made to use at all.  */
/* Predicate used by the mov patterns: reject moves that can never be
   a single instruction (far sections, tagged symbols before reload,
   or moves with no general register on either side).  NOTE(review):
   return statements are elided in this listing.  */
1600 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1604 #define DEBUG_MOV_OK 0
1606   fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1607 	   mep_section_tag (operands[1]));
1608   debug_rtx (operands[0]);
1609   debug_rtx (operands[1]);
1612   /* We want the movh patterns to get these.  */
1613   if (GET_CODE (operands[1]) == HIGH)
1616   /* We can't store a register to a far variable without using a
1617      scratch register to hold the address.  Using far variables should
1618      be split by mep_emit_mov anyway.  */
1619   if (mep_section_tag (operands[0]) == 'f'
1620       || mep_section_tag (operands[1]) == 'f')
1623 	fprintf (stderr, " - no, f\n");
1627   i = mep_section_tag (operands[1]);
1628   if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1629     /* These are supposed to be generated with adds of the appropriate
1630        register.  During and after reload, however, we allow them to
1631        be accessed as normal symbols because adding a dependency on
1632        the base register now might cause problems.  */
1635 	fprintf (stderr, " - no, bt\n");
1640   /* The only moves we can allow involve at least one general
1641      register, so require it.  */
1642   for (i = 0; i < 2; i ++)
1644       /* Allow subregs too, before reload.  */
1645       rtx x = operands[i];
1647       if (GET_CODE (x) == SUBREG)
1649       if (GET_CODE (x) == REG
1650 	  && ! MEP_CONTROL_REG (x))
1653 	    fprintf (stderr, " - ok\n");
1659     fprintf (stderr, " - no, no gen reg\n");
1664 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a DImode/DFmode move into two SImode halves.  On input,
   operands[0..1] are the wide destination and source; on output,
   operands[2..3] hold the high-part move and operands[4..5] the low
   part, swapped if needed to avoid an overlapping-register early
   clobber.  */
1666 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1670 #if DEBUG_SPLIT_WIDE_MOVE
1671   fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1672   debug_rtx (operands[0]);
1673   debug_rtx (operands[1]);
1676   for (i = 0; i <= 1; i++)
1678       rtx op = operands[i], hi, lo;
1680       switch (GET_CODE (op))
1684 	    unsigned int regno = REGNO (op);
/* 64-bit coprocessor registers: low half is the SImode reg, high half
   is extracted from the DImode reg with ZERO_EXTRACT.  */
1686 	    if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1690 		lo = gen_rtx_REG (SImode, regno);
1692 		hi = gen_rtx_ZERO_EXTRACT (SImode,
1693 					   gen_rtx_REG (DImode, regno),
/* Core register pairs: endianness decides which regno holds which half.  */
1698 		hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1699 		lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1707 	  hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1708 	  lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1715       /* The high part of CR <- GPR moves must be done after the low part.  */
1716       operands [i + 4] = lo;
1717       operands [i + 2] = hi;
1720   if (reg_mentioned_p (operands[2], operands[5])
1721       || GET_CODE (operands[2]) == ZERO_EXTRACT
1722       || GET_CODE (operands[4]) == ZERO_EXTRACT)
1726       /* Overlapping register pairs -- make sure we don't
1727 	 early-clobber ourselves.  */
1729       operands[2] = operands[4];
1732       operands[3] = operands[5];
1736 #if DEBUG_SPLIT_WIDE_MOVE
1737   fprintf(stderr, "\033[34m");
1738   debug_rtx (operands[2]);
1739   debug_rtx (operands[3]);
1740   debug_rtx (operands[4]);
1741   debug_rtx (operands[5]);
1742   fprintf(stderr, "\033[0m");
1746 /* Emit a setcc instruction in its entirity.  */
/* Emit DEST = (OP1 CODE OP2) as SImode rtl.  Conditions the hardware
   lacks are rewritten in terms of LT/LTU/GT/GTU by adjusting the
   constant operand, or synthesized from a subtract + sltu sequence.
   NOTE(review): several case labels and returns are elided in this
   listing; the recursion targets show the intended mapping.  */
1749 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
/* Canonicalize so any constant ends up in op2.  */
1757       tmp = op1, op1 = op2, op2 = tmp;
1758       code = swap_condition (code);
1763       op1 = force_reg (SImode, op1);
1764       emit_insn (gen_rtx_SET (VOIDmode, dest,
1765 			      gen_rtx_fmt_ee (code, SImode, op1, op2)));
/* EQ: (op1 - op2) < 1 unsigned, i.e. sub then sltu against 1.  */
1769       if (op2 != const0_rtx)
1770 	op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1771       mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1775       /* Branchful sequence:
1777 		beq	op1, op2, Lover	16-bit (op2 < 16), 32-bit otherwise
1780 	 Branchless sequence:
1781 		add3	tmp, op1, -op2	32-bit (or mov + sub)
1782 		sltu3	tmp, tmp, 1	16-bit
1783 		xor3	dest, tmp, 1	32-bit
1785       if (optimize_size && op2 != const0_rtx)
1788       if (op2 != const0_rtx)
1789 	op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1791       op2 = gen_reg_rtx (SImode);
1792       mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
/* NE: invert the EQ result with xor 1.  */
1794       emit_insn (gen_rtx_SET (VOIDmode, dest,
1795 			      gen_rtx_XOR (SImode, op2, const1_rtx)));
/* LE x,c  ==>  LT x,c+1 (guard against overflow at INT_MAX).  */
1799       if (GET_CODE (op2) != CONST_INT
1800 	  || INTVAL (op2) == 0x7ffffff)
1802       op2 = GEN_INT (INTVAL (op2) + 1);
1803       return mep_expand_setcc_1 (LT, dest, op1, op2);
/* LEU x,c  ==>  LTU x,c+1 (guard against wrap at all-ones).  */
1806       if (GET_CODE (op2) != CONST_INT
1807 	  || INTVAL (op2) == -1)
1809       op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1810       return mep_expand_setcc_1 (LTU, dest, op1, op2);
/* GE x,c  ==>  GT x,c-1 (guard against underflow at INT_MIN).  */
1813       if (GET_CODE (op2) != CONST_INT
1814 	  || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1816       op2 = GEN_INT (INTVAL (op2) - 1);
1817       return mep_expand_setcc_1 (GT, dest, op1, op2);
/* GEU x,c  ==>  GTU x,c-1 (guard against c == 0).  */
1820       if (GET_CODE (op2) != CONST_INT
1821 	  || op2 == const0_rtx)
1823       op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1824       return mep_expand_setcc_1 (GTU, dest, op1, op2);
/* Pattern-level entry point for cstoresi: unpack the operands array
   (dest, comparison, op0, op1) and delegate to mep_expand_setcc_1.  */
1832 mep_expand_setcc (rtx *operands)
1834   rtx dest = operands[0];
1835   enum rtx_code code = GET_CODE (operands[1]);
1836   rtx op0 = operands[2];
1837   rtx op1 = operands[3];
1839   return mep_expand_setcc_1 (code, dest, op0, op1);
/* Expand a conditional-branch comparison into a form the hardware can
   branch on (beq/bne against a register or small immediate), emitting
   a setcc into a temporary where needed, and return the rtx
   comparison to branch on.  NOTE(review): case labels and some
   intermediate statements are elided in this listing.  */
1843 mep_expand_cbranch (rtx *operands)
1845   enum rtx_code code = GET_CODE (operands[0]);
1846   rtx op0 = operands[1];
1847   rtx op1 = operands[2];
/* LT with a 4-bit immediate can use the slt-then-branch idiom.  */
1854       if (mep_imm4_operand (op1, SImode))
1857       tmp = gen_reg_rtx (SImode);
1858       gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1865       if (mep_imm4_operand (op1, SImode))
1868       tmp = gen_reg_rtx (SImode);
1869       gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1878       if (! mep_reg_or_imm4_operand (op1, SImode))
1879 	op1 = force_reg (SImode, op1);
/* LE c  ==>  LT c+1 when the constant permits, else compare swapped.  */
1884       if (GET_CODE (op1) == CONST_INT
1885 	  && INTVAL (op1) != 0x7fffffff)
1887 	  op1 = GEN_INT (INTVAL (op1) + 1);
1888 	  code = (code == LE ? LT : GE);
1892       tmp = gen_reg_rtx (SImode);
1893       gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1895       code = (code == LE ? EQ : NE);
1901       if (op1 == const1_rtx)
1908       tmp = gen_reg_rtx (SImode);
1909       gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
/* Unsigned orderings: try both operand orders; at least one must
   be expressible.  */
1916       tmp = gen_reg_rtx (SImode);
1917       if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1919       else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1928       tmp = gen_reg_rtx (SImode);
1929       gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1930 		  || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1937       tmp = gen_reg_rtx (SImode);
1938       if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1940       else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1952   return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
/* Return the assembler template for a conditional branch comparing
   operands[0] with operands[1], branching to label operand 2.
   NE selects the "not equal" mnemonics; zero and small immediates get
   the bnez/beqz and bnei/beqi short forms.  */
1956 mep_emit_cbranch (rtx *operands, int ne)
1958   if (GET_CODE (operands[1]) == REG)
1959     return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1960   else if (INTVAL (operands[1]) == 0)
1961     return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1963     return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expand a call (or call_value when RETURNS_VALUE) pattern.  The $tp
   and $gp registers are passed explicitly to the call insn so the
   register allocator knows the callee may depend on them.  */
1967 mep_expand_call (rtx *operands, int returns_value)
1969   rtx addr = operands[returns_value];
1970   rtx tp = mep_tp_rtx ();
1971   rtx gp = mep_gp_rtx ();
1973   gcc_assert (GET_CODE (addr) == MEM);
1975   addr = XEXP (addr, 0);
1977   if (! mep_call_address_operand (addr, VOIDmode))
1978     addr = force_reg (SImode, addr);
/* Default the "next arg" operand to 0 when the caller omitted it.  */
1980   if (! operands[returns_value+2])
1981     operands[returns_value+2] = const0_rtx;
1984     emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1985 					     operands[3], tp, gp));
1987     emit_call_insn (gen_call_internal (addr, operands[1],
1988 				       operands[2], tp, gp));
1991 /* Aliasing Support.  */
1993 /* If X is a machine specific address (i.e. a symbol or label being
1994    referenced as a displacement from the GOT implemented using an
1995    UNSPEC), then return the base term.  Otherwise return X.  */
/* NOTE(review): interior lines are elided; the visible logic matches
   an X of the form (plus base (const (unspec (sym) UNS_TPREL/GPREL)))
   where base is the cached $tp or $gp value, and unwraps the
   SYMBOL_REF from inside the unspec.  */
1998 mep_find_base_term (rtx x)
2003   if (GET_CODE (x) != PLUS)
2008   if (has_hard_reg_initial_val(Pmode, TP_REGNO)
2009       && base == mep_tp_rtx ())
2011   else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
2012 	   && base == mep_gp_rtx ())
2017   if (GET_CODE (term) != CONST)
2019   term = XEXP (term, 0);
2021   if (GET_CODE (term) != UNSPEC
2022       || XINT (term, 1) != unspec)
2025   return XVECEXP (term, 0, 0);
2028 /* Reload Support.  */
2030 /* Return true if the registers in CLASS cannot represent the change from
2031    modes FROM to TO.  */
2034 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2035 			      enum reg_class regclass)
2040   /* 64-bit COP regs must remain 64-bit COP regs.  */
/* Reject any mode change where either side is narrower than 8 bytes
   in a 64-bit coprocessor register class.  */
2041   if (TARGET_64BIT_CR_REGS
2042       && (regclass == CR_REGS
2043 	  || regclass == LOADABLE_CR_REGS)
2044       && (GET_MODE_SIZE (to) < 8
2045 	  || GET_MODE_SIZE (from) < 8))
2051 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
/* True if X (stripped of SUBREGs) is a hard general-purpose (core)
   register.  */
2054 mep_general_reg (rtx x)
2056   while (GET_CODE (x) == SUBREG)
2058   return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
/* True if X (stripped of SUBREGs) is a hard register that is NOT a
   general-purpose register (e.g. control or coprocessor regs).  */
2062 mep_nongeneral_reg (rtx x)
2064   while (GET_CODE (x) == SUBREG)
2066   return (GET_CODE (x) == REG
2067 	  && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
/* True if X (stripped of SUBREGs) is a coprocessor register.  */
2071 mep_general_copro_reg (rtx x)
2073   while (GET_CODE (x) == SUBREG)
2075   return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
/* True if X (stripped of SUBREGs) is not a hard register at all --
   a pseudo or a non-REG rtx.  */
2079 mep_nonregister (rtx x)
2081   while (GET_CODE (x) == SUBREG)
2083   return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2086 #define DEBUG_RELOAD 0
2088 /* Return the secondary reload class needed for moving value X to or
2089 from a register in coprocessor register class CLASS. */
2091 static enum reg_class
2092 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2094 if (mep_general_reg (x))
2095 /* We can do the move directly if mep_have_core_copro_moves_p,
2096 otherwise we need to go through memory. Either way, no secondary
2097 register is needed. */
2100 if (mep_general_copro_reg (x))
2102 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2103 if (mep_have_copro_copro_moves_p)
2106 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2107 if (mep_have_core_copro_moves_p)
2108 return GENERAL_REGS;
2110 /* Otherwise we need to do it through memory. No secondary
2111 register is needed. */
2115 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2116 && constraint_satisfied_p (x, CONSTRAINT_U))
2117 /* X is a memory value that we can access directly. */
2120 /* We have to move X into a GPR first and then copy it to
2121 the coprocessor register. The move from the GPR to the
2122 coprocessor might be done directly or through memory,
2123 depending on mep_have_core_copro_moves_p. */
2124 return GENERAL_REGS;
2127 /* Copying X to register in RCLASS.  */
/* Implements SECONDARY_INPUT_RELOAD_CLASS: coprocessor targets go
   through mep_secondary_copro_reload_class; any other non-general
   class needs a GPR intermediary when X isn't itself a GPR.  */
2130 mep_secondary_input_reload_class (enum reg_class rclass,
2131 				  enum machine_mode mode ATTRIBUTE_UNUSED,
2137   fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2141   if (reg_class_subset_p (rclass, CR_REGS))
2142     rv = mep_secondary_copro_reload_class (rclass, x);
2143   else if (MEP_NONGENERAL_CLASS (rclass)
2144 	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2148     fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2153 /* Copying register in RCLASS to X.  */
/* Mirror of mep_secondary_input_reload_class for the output
   direction; the classification logic is identical.  */
2156 mep_secondary_output_reload_class (enum reg_class rclass,
2157 				   enum machine_mode mode ATTRIBUTE_UNUSED,
2163   fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2167   if (reg_class_subset_p (rclass, CR_REGS))
2168     rv = mep_secondary_copro_reload_class (rclass, x);
2169   else if (MEP_NONGENERAL_CLASS (rclass)
2170 	   && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2174     fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2180 /* Implement SECONDARY_MEMORY_NEEDED.  */
/* A stack slot is required for CR<->GPR moves when direct
   core<->copro moves are unavailable, and for CR<->CR moves when
   neither copro<->copro nor core<->copro moves exist.  */
2183 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2184 			     enum machine_mode mode ATTRIBUTE_UNUSED)
2186   if (!mep_have_core_copro_moves_p)
2188       if (reg_classes_intersect_p (rclass1, CR_REGS)
2189 	  && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2191       if (reg_classes_intersect_p (rclass2, CR_REGS)
2192 	  && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2194       if (!mep_have_copro_copro_moves_p
2195 	  && reg_classes_intersect_p (rclass1, CR_REGS)
2196 	  && reg_classes_intersect_p (rclass2, CR_REGS))
/* Expand a secondary reload.  The two-digit WHICH code encodes the
   destination kind in the tens digit and the source kind in the ones
   digit (2 = far-section mem, 1 = non-general reg, 0 = other), and
   each case routes the move through the scratch in operands[2].
   NOTE(review): the case constants below are written with a leading 0
   and are therefore octal literals; for digits 0-7 this coincides
   with the intended decimal encoding, but it is worth confirming in
   the unelided source.  */
2203 mep_expand_reload (rtx *operands, enum machine_mode mode)
2205   /* There are three cases for each direction:
2210   int s0 = mep_section_tag (operands[0]) == 'f';
2211   int s1 = mep_section_tag (operands[1]) == 'f';
2212   int c0 = mep_nongeneral_reg (operands[0]);
2213   int c1 = mep_nongeneral_reg (operands[1]);
2214   int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2217   fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2218   debug_rtx (operands[0]);
2219   debug_rtx (operands[1]);
2224     case 00: /* Don't know why this gets here.  */
2225     case 02: /* general = far */
2226       emit_move_insn (operands[0], operands[1]);
2229     case 10: /* cr = mem */
2230     case 11: /* cr = cr */
2231     case 01: /* mem = cr */
2232     case 12: /* cr = far */
/* Indirect move: source -> scratch -> destination.  */
2233       emit_move_insn (operands[2], operands[1]);
2234       emit_move_insn (operands[0], operands[2]);
2237     case 20: /* far = general */
/* Load the far address into the scratch, then store through it.  */
2238       emit_move_insn (operands[2], XEXP (operands[1], 0));
2239       emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2242     case 21: /* far = cr */
2243     case 22: /* far = far */
2245       fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2246 	       which, mode_name[mode]);
2247       debug_rtx (operands[0]);
2248       debug_rtx (operands[1]);
2253 /* Implement PREFERRED_RELOAD_CLASS.  See whether X is a constant that
2254    can be moved directly into registers 0 to 7, but not into the rest.
2255    If so, and if the required class includes registers 0 to 7, restrict
2256    it to those registers.  */
2259 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2261   switch (GET_CODE (x))
/* Constants with a nonzero low half in the 0x10000..0xffffff range
   need the mov/movh pair only available on r0-r7 (TPREL_REGS).  */
2264       if (INTVAL (x) >= 0x10000
2265 	  && INTVAL (x) < 0x01000000
2266 	  && (INTVAL (x) & 0xffff) != 0
2267 	  && reg_class_subset_p (TPREL_REGS, rclass))
2268 	rclass = TPREL_REGS;
/* Non-far symbolic constants likewise prefer r0-r7.  */
2274       if (mep_section_tag (x) != 'f'
2275 	  && reg_class_subset_p (TPREL_REGS, rclass))
2276 	rclass = TPREL_REGS;
2285 /* Implement REGISTER_MOVE_COST.  Return 2 for direct single-register
2286    moves, 4 for direct double-register moves, and 1000 for anything
2287    that requires a temporary register or temporary stack slot.  */
2290 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
/* CR <-> CR with direct copro moves available.  */
2292   if (mep_have_copro_copro_moves_p
2293       && reg_class_subset_p (from, CR_REGS)
2294       && reg_class_subset_p (to, CR_REGS))
2296       if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
/* CR <-> CR without direct moves (cost depends on elided branch).  */
2300   if (reg_class_subset_p (from, CR_REGS)
2301       && reg_class_subset_p (to, CR_REGS))
2303       if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
/* CR on exactly one side.  */
2307   if (reg_class_subset_p (from, CR_REGS)
2308       || reg_class_subset_p (to, CR_REGS))
2310       if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2314   if (mep_secondary_memory_needed (from, to, mode))
2316   if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2319   if (GET_MODE_SIZE (mode) > 4)
2326 /* Functions to save and restore machine-specific function data.  */
/* Allocate a zero-initialized, garbage-collected machine_function for
   the current function (installed via init_machine_status).  */
2328 static struct machine_function *
2329 mep_init_machine_status (void)
2331   struct machine_function *f;
2333   f = (struct machine_function *) ggc_alloc_cleared (sizeof (struct machine_function));
/* Implement ALLOCATE_INITIAL_VALUE: suggest a stack slot for the
   entry value of hard register REG, assigning a new 4-byte save slot
   on first request.  Returns an arg-pointer-relative MEM, or (in the
   elided paths) a sentinel for non-hard-registers and for $tp/$gp in
   interrupt handlers.  */
2339 mep_allocate_initial_value (rtx reg)
2343   if (GET_CODE (reg) != REG)
2346   if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2349   /* In interrupt functions, the "initial" values of $gp and $tp are
2350      provided by the prologue.  They are not necessarily the same as
2351      the values that the caller was using.  */
2352   if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2353     if (mep_interrupt_p ())
2356   if (! cfun->machine->reg_save_slot[REGNO(reg)])
2358       cfun->machine->reg_save_size += 4;
2359       cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2362   rss = cfun->machine->reg_save_slot[REGNO(reg)];
2363   return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
/* Return the rtx for the function's return address ($lp's entry
   value) for __builtin_return_address.  NOTE(review): most of this
   function is elided; the trailing $tp/$gp initial-value lines appear
   to belong to the adjacent mep_tp_rtx/mep_gp_rtx helpers -- confirm
   against the unelided source.  */
2367 mep_return_addr_rtx (int count)
2372   return get_hard_reg_initial_val (Pmode, LP_REGNO);
2378   return get_hard_reg_initial_val (Pmode, TP_REGNO);
2384   return get_hard_reg_initial_val (Pmode, GP_REGNO);
/* True if the current function has the "interrupt" attribute.  The
   result is cached in cfun->machine->interrupt_handler using the
   0 = unknown / 1 = no / 2 = yes encoding documented in
   machine_function.  */
2388 mep_interrupt_p (void)
2390   if (cfun->machine->interrupt_handler == 0)
2392       int interrupt_handler
2393 	= (lookup_attribute ("interrupt",
2394 			     DECL_ATTRIBUTES (current_function_decl))
2396       cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2398   return cfun->machine->interrupt_handler == 2;
/* True if the current function has the "disinterrupt" attribute;
   cached in cfun->machine->disable_interrupts with the same
   0/1/2 encoding as mep_interrupt_p.  */
2402 mep_disinterrupt_p (void)
2404   if (cfun->machine->disable_interrupts == 0)
2406       int disable_interrupts
2407 	= (lookup_attribute ("disinterrupt",
2408 			     DECL_ATTRIBUTES (current_function_decl))
2410       cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2412   return cfun->machine->disable_interrupts == 2;
2416 /* Frame/Epilog/Prolog Related.  */
/* True if INSN sets REG.  Unlike reg_set_p, calls are ignored and a
   no-op self-move (reg = same reg) does not count as a set.  */
2419 mep_reg_set_p (rtx reg, rtx insn)
2421   /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2424       if (FIND_REG_INC_NOTE (insn, reg))
2426       insn = PATTERN (insn);
/* Ignore (set (reg N) (reg N)) -- it leaves the value unchanged.  */
2429   if (GET_CODE (insn) == SET
2430       && GET_CODE (XEXP (insn, 0)) == REG
2431       && GET_CODE (XEXP (insn, 1)) == REG
2432       && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2435   return set_of (reg, insn) != NULL_RTX;
/* Save-state encoding used in cfun->machine->reg_saved[].  */
2439 #define MEP_SAVES_UNKNOWN 0
2440 #define MEP_SAVES_YES 1
2441 #define MEP_SAVES_MAYBE 2
2442 #define MEP_SAVES_NO 3
/* True if hard register REGNO is written anywhere in the current
   function.  Interrupt handlers trust df liveness; $lp is considered
   set when profiling is enabled (the profiler clobbers it); otherwise
   every insn is scanned with mep_reg_set_p.  */
2445 mep_reg_set_in_function (int regno)
2449   if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2452   if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2455   push_topmost_sequence ();
2456   insn = get_insns ();
2457   pop_topmost_sequence ();
2462   reg = gen_rtx_REG (SImode, regno);
2464   for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2465     if (INSN_P (insn) && mep_reg_set_p (reg, insn))
/* True if the function contains a basic (operand-less) asm statement
   (ASM_INPUT pattern).  Such asms can touch any register, so the
   interrupt-save logic must be pessimistic.  Cached in
   cfun->machine->asms_without_operands (0 unknown / 1 no / 2 yes).  */
2471 mep_asm_without_operands_p (void)
2473   if (cfun->machine->asms_without_operands == 0)
2477       push_topmost_sequence ();
2478       insn = get_insns ();
2479       pop_topmost_sequence ();
2481       cfun->machine->asms_without_operands = 1;
2485 	      && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2487 	      cfun->machine->asms_without_operands = 2;
2490 	  insn = NEXT_INSN (insn);
2494   return cfun->machine->asms_without_operands == 2;
2497 /* Interrupt functions save/restore every call-preserved register, and
2498    any call-used register it uses (or all if it calls any function,
2499    since they may get clobbered there too).  Here we check to see
2500    which call-used registers need saving.  */
/* True if register R must be saved by an interrupt handler's
   prologue.  NOTE(review): the return statements are elided in this
   listing; each condition below is a reason to save (or, where the
   comment says so, to skip) R.  */
2503 mep_interrupt_saved_reg (int r)
2505   if (!mep_interrupt_p ())
/* The control-save temporaries are always needed by the handler itself.  */
2507   if (r == REGSAVE_CONTROL_TEMP
2508       || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
/* A bare asm may clobber anything, so save repeat/link registers too.  */
2510   if (mep_asm_without_operands_p ()
2512 	  || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)))
2514   if (!current_function_is_leaf)
2515     /* Function calls mean we need to save $lp.  */
2518   if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2519     /* The interrupt handler might use these registers for repeat blocks,
2520        or it might call a function that does so.  */
2521     if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2523   if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2525   /* Functions we call might clobber these.  */
2526   if (call_used_regs[r] && !fixed_regs[r])
2528   /* Additional registers that need to be saved for IVC2.  */
2530       && (r == FIRST_CCR_REGNO + 1
2531 	  || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11)
2532 	  || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* True if the current function must save register R in its prologue.
   The result is recomputed each call and cached in
   cfun->machine->reg_saved[] (the guard above the cache is currently
   commented out in the elided source).  */
2539 mep_call_saves_register (int r)
2541   /*  if (cfun->machine->reg_saved[r] == MEP_SAVES_UNKNOWN)*/
2543       int rv = MEP_SAVES_NO;
2545       if (cfun->machine->reg_save_slot[r])
2547       else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2549       else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2551       else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2553       else if (crtl->calls_eh_return && (r == 10 || r == 11))
2554 	/* We need these to have stack slots so that they can be set during
2557       else if (mep_interrupt_saved_reg (r))
2559       cfun->machine->reg_saved[r] = rv;
2561   return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2564 /* Return true if epilogue uses register REGNO.  */
2567 mep_epilogue_uses (int regno)
2569   /* Since $lp is a call-saved register, the generic code will normally
2570      mark it used in the epilogue if it needs to be saved and restored.
2571      However, when profiling is enabled, the profiling code will implicitly
2572      clobber $11.  This case has to be handled specially both here and in
2573      mep_call_saves_register.  */
2574   if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2576   /* Interrupt functions save/restore pretty much everything.  */
2577   return (reload_completed && mep_interrupt_saved_reg (regno));
/* Size in bytes of a save slot for register REGNO: 8 for 64-bit
   coprocessor registers, otherwise (elided here) presumably 4 --
   TODO confirm against the unelided source.  */
2581 mep_reg_size (int regno)
2583   if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
/* Implement INITIAL_ELIMINATION_OFFSET: distance between registers
   FROM and TO in the frame layout.  Also computes and caches the
   8-byte alignment fillers for the register-save area and the local
   frame in cfun->machine.  */
2589 mep_elimination_offset (int from, int to)
2593   int frame_size = get_frame_size () + crtl->outgoing_args_size;
/* Invalidate the reg_saved cache so mep_call_saves_register
   recomputes with current df information.  */
2596   memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2598   /* We don't count arg_regs_to_save in the arg pointer offset, because
2599      gcc thinks the arg pointer has moved along with the saved regs.
2600      However, we do count it when we adjust $sp in the prologue.  */
2602   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2603     if (mep_call_saves_register (i))
2604       reg_save_size += mep_reg_size (i);
/* Pad the save area to an 8-byte multiple.  */
2606   if (reg_save_size % 8)
2607     cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2609     cfun->machine->regsave_filler = 0;
2611   /* This is what our total stack adjustment looks like.  */
2612   total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2615     cfun->machine->frame_filler = 8 - (total_size % 8);
2617     cfun->machine->frame_filler = 0;
2620   if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2621     return reg_save_size + cfun->machine->regsave_filler;
2623   if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2624     return cfun->machine->frame_filler + frame_size;
2626   if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2627     return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2635 RTX_FRAME_RELATED_P (x) = 1;
2639 /* Since the prologue/epilogue code is generated after optimization,
2640    we can't rely on gcc to split constants for us.  So, this code
2641    captures all the ways to add a constant to a register in one logic
2642    chunk, including optimizing away insns we just don't need.  This
2643    makes the prolog/epilog code easier to follow.  */
/* Emit DEST = SRC + VALUE, choosing mov / add3 / movh+or+add as the
   constant requires.  When MARK_FRAME, tag the insns (and, for the
   big-constant path, a REG_FRAME_RELATED_EXPR note) for DWARF CFI.  */
2645 add_constant (int dest, int src, int value, int mark_frame)
/* Nothing to do for reg += 0 onto itself.  */
2650   if (src == dest && value == 0)
/* value == 0: plain register copy.  */
2655       insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2656 			     gen_rtx_REG (SImode, src));
2658 	RTX_FRAME_RELATED_P(insn) = 1;
/* 16-bit signed constants fit directly in add3.  */
2662   if (value >= -32768 && value <= 32767)
2664       insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2665 				    gen_rtx_REG (SImode, src),
2668 	RTX_FRAME_RELATED_P(insn) = 1;
2672   /* Big constant, need to use a temp register.  We use
2673      REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2674      area is always small enough to directly add to).  */
2676   hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2677   lo = value & 0xffff;
2679   insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2684   insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2685 				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2689   insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2690 				gen_rtx_REG (SImode, src),
2691 				gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
/* The three-insn sequence needs an explicit note so the unwinder sees
   a single dest = dest + value effect.  */
2694       RTX_FRAME_RELATED_P(insn) = 1;
2695       add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2696 		    gen_rtx_SET (SImode,
2697 				 gen_rtx_REG (SImode, dest),
2698 				 gen_rtx_PLUS (SImode,
2699 					       gen_rtx_REG (SImode, dest),
/* True if any insn in the current function (walking from the
   outermost pending sequence) mentions the stack pointer.  */
2705 mep_function_uses_sp (void)
2708   struct sequence_stack *seq;
2709   rtx sp = gen_rtx_REG (SImode, SP_REGNO);
/* Start from the bottom of the sequence stack so queued prologue
   sequences are included.  */
2711   insn = get_insns ();
2712   for (seq = crtl->emit.sequence_stack;
2714        insn = seq->first, seq = seq->next);
2718       if (mep_mentioned_p (insn, sp, 0))
2720       insn = NEXT_INSN (insn);
2725 /* Move SRC to DEST.  Mark the move as being potentially dead if
/* (continued from elided line: ...MAYBE_DEAD_P) -- the REG_MAYBE_DEAD
   note lets later passes delete the save if the value is unused.  */
2729 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2731   rtx insn = emit_move_insn (dest, src);
2734     REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2739 /* Used for interrupt functions, which can't assume that $tp and $gp
2740    contain the correct pointers.  */
/* Load the address of SYMBOL into hard register REGNO via the
   top/bottom symbol pair, unless a leaf function provably never needs
   that register.  */
2743 mep_reload_pointer (int regno, const char *symbol)
2747   if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2750   reg = gen_rtx_REG (SImode, regno);
2751   sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2752   emit_insn (gen_movsi_topsym_s (reg, sym));
2753   emit_insn (gen_movsi_botsym_s (reg, reg, sym));
/* Emit the function prologue: disable interrupts if requested,
   allocate save slots, adjust $sp, store the saved registers, set up
   the frame pointer, and reload $gp/$tp in interrupt handlers.
   NOTE(review): many interior lines are elided in this listing, so
   some control flow (loop bodies, else branches) must be confirmed
   against the unelided source.  */
2757 mep_expand_prologue (void)
2759   int i, rss, sp_offset = 0;
2762   int really_need_stack_frame = frame_size;
2765   /* We must not allow register renaming in interrupt functions,
2766      because that invalidates the correctness of the set of call-used
2767      registers we're going to save/restore.  */
2768   mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2770   if (mep_disinterrupt_p ())
2771     emit_insn (gen_mep_disable_int ());
2773   cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2775   reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2776   frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2778   /* Assign save slots for any register not already saved.  DImode
2779      registers go at the end of the reg save area; the rest go at the
2780      beginning.  This is for alignment purposes.  */
2781   for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2782     if (mep_call_saves_register(i))
2784 	int regsize = mep_reg_size (i);
/* $tp/$gp/$lp saves alone don't force a real stack frame unless the
   function actually modifies them.  */
2786 	if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2787 	    || mep_reg_set_in_function (i))
2788 	  really_need_stack_frame = 1;
2790 	if (cfun->machine->reg_save_slot[i])
2795 	    cfun->machine->reg_save_size += regsize;
2796 	    cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2800 	    cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
/* Fold the frame allocation into the first $sp adjustment when the
   combined offset still fits the short addressing forms (< 128).  */
2805   sp_offset = reg_save_size;
2806   if (sp_offset + frame_size < 128)
2807     sp_offset += frame_size ;
2809   add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2811   for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2812     if (mep_call_saves_register(i))
2816 	enum machine_mode rmode;
2818 	rss = cfun->machine->reg_save_slot[i];
/* Skip $tp/$gp/$lp if nothing in a non-interrupt function changes them.  */
2820 	if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2821 	    && (!mep_reg_set_in_function (i)
2822 		&& !mep_interrupt_p ()))
2825 	if (mep_reg_size (i) == 8)
2830 	/* If there is a pseudo associated with this register's initial value,
2831 	   reload might have already spilt it to the stack slot suggested by
2832 	   ALLOCATE_INITIAL_VALUE.  The moves emitted here can then be safely
2834 	mem = gen_rtx_MEM (rmode,
2835 			   plus_constant (stack_pointer_rtx, sp_offset - rss));
2836 	maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2838 	if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2839 	  F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2840 	else if (rmode == DImode)
/* 64-bit control registers: move each 32-bit half through the
   REGSAVE_CONTROL_TEMP pair, high half via ZERO_EXTRACT.  */
2843 	    int be = TARGET_BIG_ENDIAN ? 4 : 0;
2845 	    mem = gen_rtx_MEM (SImode,
2846 			       plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2848 	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2849 			     gen_rtx_REG (SImode, i),
2851 	    maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2852 			     gen_rtx_ZERO_EXTRACT (SImode,
2853 						   gen_rtx_REG (DImode, i),
2857 	    insn = maybe_dead_move (mem,
2858 				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2860 	    RTX_FRAME_RELATED_P (insn) = 1;
2862 	      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2863 			    gen_rtx_SET (VOIDmode,
2865 					 gen_rtx_REG (rmode, i)));
2866 	    mem = gen_rtx_MEM (SImode,
2867 			       plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2868 	    insn = maybe_dead_move (mem,
2869 				    gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
/* Other control registers: stage through a single temp.  */
2875 	    maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2876 			     gen_rtx_REG (rmode, i),
2878 	    insn = maybe_dead_move (mem,
2879 				    gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2881 	    RTX_FRAME_RELATED_P (insn) = 1;
2883 	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2884 			  gen_rtx_SET (VOIDmode,
2886 				       gen_rtx_REG (rmode, i)));
2890   if (frame_pointer_needed)
2891     add_constant (FP_REGNO, SP_REGNO, sp_offset - frame_size, 1);
/* Allocate whatever frame space was not folded into the first adjust.  */
2893   add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2895   if (mep_interrupt_p ())
2897       mep_reload_pointer(GP_REGNO, "__sdabase");
2898       mep_reload_pointer(TP_REGNO, "__tpbase");
/* Emit a human-readable summary of the current function's stack frame
   layout (register save slots, alignment padding, locals, outgoing
   args) as assembly comments, and select the textual name used for the
   frame-pointer register.
   NOTE(review): the embedded original line numbers are non-contiguous,
   so parts of this function's body are elided from this excerpt.  */
2903 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2905 int local = hwi_local;
2906 int frame_size = local + crtl->outgoing_args_size;
2911 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2913 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2914 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2915 sp_offset = reg_save_size + frame_size;
2917 ffill = cfun->machine->frame_filler;
/* Call $8 "$fp" only when this function actually uses it as a frame
   pointer; otherwise keep its general-purpose name.  */
2919 if (cfun->machine->mep_frame_pointer_needed)
2920 reg_names[FP_REGNO] = "$fp";
2922 reg_names[FP_REGNO] = "$8";
2927 if (debug_info_level == DINFO_LEVEL_NONE)
2929 fprintf (file, "\t# frame: %d", sp_offset);
2931 fprintf (file, " %d regs", reg_save_size);
2933 fprintf (file, " %d locals", local);
2934 if (crtl->outgoing_args_size)
2935 fprintf (file, " %d args", crtl->outgoing_args_size);
2936 fprintf (file, "\n");
2940 fprintf (file, "\t#\n");
2941 fprintf (file, "\t# Initial Frame Information:\n");
2942 if (sp_offset || !frame_pointer_needed)
2943 fprintf (file, "\t# Entry ---------- 0\n");
2945 /* Sort registers by save slots, so they're printed in the order
2946 they appear in memory, not the order they're saved in. */
2947 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2949 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2950 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2951 if (cfun->machine->reg_save_slot[slot_map[si]]
2952 > cfun->machine->reg_save_slot[slot_map[sj]])
2954 int t = slot_map[si];
2955 slot_map[si] = slot_map[sj];
/* Walk the sorted slot map and describe each saved register, plus any
   alignment padding between consecutive slots.  */
2960 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2963 int r = slot_map[i];
2964 int rss = cfun->machine->reg_save_slot[r];
2969 rsize = mep_reg_size(r);
2970 skip = rss - (sp+rsize);
2972 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2973 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2974 rsize, reg_names[r], sp_offset - rss);
2978 skip = reg_save_size - sp;
2980 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2982 if (frame_pointer_needed)
2983 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2985 fprintf (file, "\t# %3d bytes for local vars\n", local);
2987 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2988 if (crtl->outgoing_args_size)
2989 fprintf (file, "\t# %3d bytes for outgoing args\n",
2990 crtl->outgoing_args_size);
2991 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2992 fprintf (file, "\t#\n");
/* Mode flags consulted by mep_expand_epilogue: set temporarily by the
   eh_return and sibcall epilogue entry points below.  */
2996 static int mep_prevent_lp_restore = 0;
2997 static int mep_sibcall_epilogue = 0;
/* Expand the function epilogue: restore call-saved registers from their
   frame slots, deallocate the frame, and emit the appropriate return
   (normal indirect return, interrupt "reti", or eh_return), unless this
   is a sibcall epilogue, which emits no return.
   NOTE(review): original line numbers show gaps, so some statements of
   this function are elided from this excerpt.  */
3000 mep_expand_epilogue (void)
3002 int i, sp_offset = 0;
3003 int reg_save_size = 0;
3005 int lp_temp = LP_REGNO, lp_slot = -1;
3006 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
3007 int interrupt_handler = mep_interrupt_p ();
3009 if (profile_arc_flag == 2)
3010 emit_insn (gen_mep_bb_trace_ret ());
3012 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3013 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3015 /* All save slots are set by mep_expand_prologue. */
3016 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
3017 if (mep_call_saves_register(i))
3019 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
3020 || mep_reg_set_in_function (i))
3021 really_need_stack_frame = 1;
3024 if (frame_pointer_needed)
3026 /* If we have a frame pointer, we won't have a reliable stack
3027 pointer (alloca, you know), so rebase SP from FP */
3028 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3029 gen_rtx_REG (SImode, FP_REGNO));
3030 sp_offset = reg_save_size;
3034 /* SP is right under our local variable space. Adjust it if
3036 sp_offset = reg_save_size + frame_size;
3037 if (sp_offset >= 128)
3039 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3040 sp_offset -= frame_size;
3044 /* This is backwards so that we restore the control and coprocessor
3045 registers before the temporary registers we use to restore
3047 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3048 if (mep_call_saves_register (i))
3050 enum machine_mode rmode;
3051 int rss = cfun->machine->reg_save_slot[i];
3053 if (mep_reg_size (i) == 8)
/* Skip registers that were never really saved (tp/gp/lp untouched in a
   non-interrupt function, lp when eh_return suppresses its restore).  */
3058 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3059 && !(mep_reg_set_in_function (i) || interrupt_handler))
3061 if (mep_prevent_lp_restore && i == LP_REGNO)
3063 if (!mep_prevent_lp_restore
3064 && !interrupt_handler
3065 && (i == 10 || i == 11))
3068 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3069 emit_move_insn (gen_rtx_REG (rmode, i),
3071 plus_constant (stack_pointer_rtx,
3075 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3076 /* Defer this one so we can jump indirect rather than
3077 copying the RA to $lp and "ret". EH epilogues
3078 automatically skip this anyway. */
3079 lp_slot = sp_offset-rss;
/* Non-loadable (control/coprocessor) registers are restored through a
   core temporary register.  */
3082 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3084 plus_constant (stack_pointer_rtx,
3086 emit_move_insn (gen_rtx_REG (rmode, i),
3087 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3093 /* Restore this one last so we know it will be in the temp
3094 register when we return by jumping indirectly via the temp. */
3095 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3096 gen_rtx_MEM (SImode,
3097 plus_constant (stack_pointer_rtx,
3099 lp_temp = REGSAVE_CONTROL_TEMP;
3103 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
/* For eh_return, apply the extra stack adjustment computed by
   mep_emit_eh_epilogue before returning.  */
3105 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3106 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3107 gen_rtx_REG (SImode, SP_REGNO),
3108 cfun->machine->eh_stack_adjust));
3110 if (mep_sibcall_epilogue)
3113 if (mep_disinterrupt_p ())
3114 emit_insn (gen_mep_enable_int ());
3116 if (mep_prevent_lp_restore)
3118 emit_jump_insn (gen_eh_return_internal ());
3121 else if (interrupt_handler)
3122 emit_jump_insn (gen_mep_reti ());
3124 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
/* Expand the eh_return pattern: force the handler address into $lp if
   operand 0 is not already that register, then emit the EH epilogue.  */
3128 mep_expand_eh_return (rtx *operands)
3130 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3132 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3133 emit_move_insn (ra, operands[0]);
3137 emit_insn (gen_eh_epilogue (operands[0]));
/* Emit the epilogue used for EH returns: record the stack adjustment
   register and run the normal epilogue expander with the $lp restore
   suppressed (the handler address is already in $lp).  */
3141 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3143 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3144 mep_prevent_lp_restore = 1;
3145 mep_expand_epilogue ();
3146 mep_prevent_lp_restore = 0;
/* Epilogue variant emitted before a sibling call; the flag tells
   mep_expand_epilogue not to emit a return.  */
3150 mep_expand_sibcall_epilogue (void)
3152 mep_sibcall_epilogue = 1;
3153 mep_expand_epilogue ();
3154 mep_sibcall_epilogue = 0;
/* TARGET_FUNCTION_OK_FOR_SIBCALL: far ('f'-tagged) callees and
   interrupt/disinterrupt callers cannot use sibling calls.  */
3158 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3163 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3166 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3167 if (mep_interrupt_p () || mep_disinterrupt_p ())
/* Register holding the EH stack adjustment ($10).  */
3174 mep_return_stackadj_rtx (void)
3176 return gen_rtx_REG (SImode, 10);
/* Register holding the EH return handler address ($lp).  */
3180 mep_return_handler_rtx (void)
3182 return gen_rtx_REG (SImode, LP_REGNO);
/* Emit the profiling prologue: save $0 and $lp on the stack around a
   call to __mep_mcount, then restore both.  */
3186 mep_function_profiler (FILE *file)
3188 /* Always right at the beginning of the function. */
3189 fprintf (file, "\t# mep function profiler\n");
3190 fprintf (file, "\tadd\t$sp, -8\n");
3191 fprintf (file, "\tsw\t$0, ($sp)\n");
3192 fprintf (file, "\tldc\t$0, $lp\n");
3193 fprintf (file, "\tsw\t$0, 4($sp)\n");
3194 fprintf (file, "\tbsr\t__mep_mcount\n");
3195 fprintf (file, "\tlw\t$0, 4($sp)\n");
3196 fprintf (file, "\tstc\t$0, $lp\n");
3197 fprintf (file, "\tlw\t$0, ($sp)\n");
3198 fprintf (file, "\tadd\t$sp, 8\n\n");
/* Emit the basic-block-profiling exit sequence: same save/restore dance
   as the profiler, calling __bb_trace_ret instead.  */
3202 mep_emit_bb_trace_ret (void)
3204 fprintf (asm_out_file, "\t# end of block profiling\n");
3205 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3206 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3207 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3208 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3209 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3210 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3211 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3212 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3213 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3220 /* Operand Printing. */
/* Print a memory address operand.  Only a bare register is accepted
   here (asserted below); all richer address forms are handled via the
   conversions[] pattern table in mep_print_operand.  */
3223 mep_print_operand_address (FILE *stream, rtx address)
3225 if (GET_CODE (address) == MEM)
3226 address = XEXP (address, 0);
3228 /* cf: gcc.dg/asm-4.c. */
3229 gcc_assert (GET_CODE (address) == REG);
3231 mep_print_operand (stream, address, 0);
/* Table mapping an operand "shape" string (derived from the rtx tree;
   e.g. 'm' = mem, 'r' = reg, 'i'/'s' = int/symbol) to an output
   template.  Digits in the template index into the matched sub-rtxes;
   '\\' escapes a literal character.  A zero code matches any operand
   code; 'h', 'I', 'P' entries apply only to those modifier letters.  */
3237 const char *pattern;
3240 const conversions[] =
3243 { 0, "m+ri", "3(2)" },
3246 { 0, "mLrs", "%lo(3)(2)" },
3247 { 0, "mLr+si", "%lo(4+5)(2)" },
3248 { 0, "m+ru2s", "%tpoff(5)(2)" },
3249 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3250 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3251 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3252 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3253 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3255 { 0, "m+si", "(2+3)" },
3256 { 0, "m+li", "(2+3)" },
3259 { 0, "+si", "1+2" },
3260 { 0, "+u2si", "%tpoff(3+4)" },
3261 { 0, "+u3si", "%sdaoff(3+4)" },
3267 { 'h', "Hs", "%hi(1)" },
3269 { 'I', "u2s", "%tpoff(2)" },
3270 { 'I', "u3s", "%sdaoff(2)" },
3271 { 'I', "+u2si", "%tpoff(3+4)" },
3272 { 'I', "+u3si", "%sdaoff(3+4)" },
3274 { 'P', "mr", "(1\\+),\\0" },
/* Map a byte constant with exactly one bit set (left column) or exactly
   one bit clear (right column) to that bit's index 0..7, for printing
   bit-manipulation operands.  */
3280 unique_bit_in (HOST_WIDE_INT i)
3284 case 0x01: case 0xfe: return 0;
3285 case 0x02: case 0xfd: return 1;
3286 case 0x04: case 0xfb: return 2;
3287 case 0x08: case 0xf7: return 3;
3288 case 0x10: case 0x7f: return 4;
3289 case 0x20: case 0xbf: return 5;
3290 case 0x40: case 0xdf: return 6;
3291 case 0x80: case 0xef: return 7;
/* Smallest bit width RV such that (1 << RV) > I, i.e. the field size a
   "clip" instruction needs to hold values 0..I.  */
3298 bit_size_for_clip (HOST_WIDE_INT i)
3302 for (rv = 0; rv < 31; rv ++)
3303 if (((HOST_WIDE_INT) 1 << rv) > i)
3308 /* Print an operand to an assembler instruction. */
/* Implements PRINT_OPERAND.  CODE is the '%' modifier letter; special
   cases ('M', 'L', 'b', ...) are handled first, then the operand shape
   is matched against the conversions[] table above.
   NOTE(review): original line numbers are non-contiguous; several case
   labels and statements of this function are elided in this excerpt.  */
3311 mep_print_operand (FILE *file, rtx x, int code)
3314 const char *real_name;
3318 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3319 we're using, then skip over the "mep_" part of its name. */
3320 const struct cgen_insn *insn;
3322 if (mep_get_move_insn (mep_cmov, &insn))
3323 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3325 mep_intrinsic_unavailable (mep_cmov);
/* %L: print the bit operation (clr/set/not) implied by the rtx code.  */
3330 switch (GET_CODE (x))
3333 fputs ("clr", file);
3336 fputs ("set", file);
3339 fputs ("not", file);
3342 output_operand_lossage ("invalid %%L code");
3347 /* Print the second operand of a CR <- CR move. If we're using
3348 a two-operand instruction (i.e., a real cmov), then just print
3349 the operand normally. If we're using a "reg, reg, immediate"
3350 instruction such as caddi3, print the operand followed by a
3351 zero field. If we're using a three-register instruction,
3352 print the operand twice. */
3353 const struct cgen_insn *insn;
3355 mep_print_operand (file, x, 0);
3356 if (mep_get_move_insn (mep_cmov, &insn)
3357 && insn_data[insn->icode].n_operands == 3)
3360 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3361 mep_print_operand (file, x, 0);
3363 mep_print_operand (file, const0_rtx, 0);
/* General case: find the conversions[] entry whose code and pattern
   match this operand, then interpret its output template.  */
3369 for (i = 0; conversions[i].pattern; i++)
3370 if (conversions[i].code == code
3371 && strcmp(conversions[i].pattern, pattern) == 0)
3373 for (j = 0; conversions[i].format[j]; j++)
3374 if (conversions[i].format[j] == '\\')
3376 fputc (conversions[i].format[j+1], file);
3379 else if (ISDIGIT(conversions[i].format[j]))
3381 rtx r = patternr[conversions[i].format[j] - '0'];
3382 switch (GET_CODE (r))
3385 fprintf (file, "%s", reg_names [REGNO (r)]);
3391 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3394 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3397 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3400 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3403 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
/* Large round constants print in hex for readability; otherwise
   decimal, with a hex comment appended when the value is wide.  */
3406 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3407 && !(INTVAL (r) & 0xff))
3408 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3410 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3413 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3414 && conversions[i].format[j+1] == 0)
3416 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3417 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3420 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3423 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3428 fprintf(file, "[const_double 0x%lx]",
3429 (unsigned long) CONST_DOUBLE_HIGH(r));
3432 real_name = TARGET_STRIP_NAME_ENCODING (XSTR (r, 0));
3433 assemble_name (file, real_name);
3436 output_asm_label (r);
3439 fprintf (stderr, "don't know how to print this operand:");
/* A literal '+' in the template is suppressed when the constant that
   follows is negative, since its '-' sign serves as the separator —
   the skip (continue) appears to be on an elided line; TODO confirm.  */
3446 if (conversions[i].format[j] == '+'
3447 && (!code || code == 'I')
3448 && ISDIGIT (conversions[i].format[j+1])
3449 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3450 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3452 fputc(conversions[i].format[j], file);
3456 if (!conversions[i].pattern)
3458 error ("unconvertible operand %c %qs", code?code:'-', pattern);
/* FINAL_PRESCAN_INSN: emit a '+' bundle marker before VLIW-slotted
   jumps, re-checked here because bundling flags on jumps are stale.  */
3466 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3467 int noperands ATTRIBUTE_UNUSED)
3469 /* Despite the fact that MeP is perfectly capable of branching and
3470 doing something else in the same bundle, gcc does jump
3471 optimization *after* scheduling, so we cannot trust the bundling
3472 flags on jump instructions. */
3473 if (GET_MODE (insn) == BImode
3474 && get_attr_slots (insn) != SLOTS_CORE)
3475 fputc ('+', asm_out_file);
3478 /* Function args in registers. */
/* TARGET_SETUP_INCOMING_VARARGS: record how many argument registers
   remain unnamed and must be dumped by __builtin_saveregs; each costs
   4 bytes of pretend-args space.  */
3481 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3482 enum machine_mode mode ATTRIBUTE_UNUSED,
3483 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3484 int second_time ATTRIBUTE_UNUSED)
3486 int nsave = 4 - (cum->nregs + 1);
3489 cfun->machine->arg_regs_to_save = nsave;
3490 *pretend_size = nsave * 4;
/* Size of TYPE/MODE in bytes; BLKmode defers to the tree-level size.  */
3494 bytesize (const_tree type, enum machine_mode mode)
3496 if (mode == BLKmode)
3497 return int_size_in_bytes (type);
3498 return GET_MODE_SIZE (mode);
/* __builtin_saveregs: spill the remaining unnamed argument registers
   (plus, for IVC2, 8-byte coprocessor registers starting at 49) into a
   stack buffer and return the buffer's address.  */
3502 mep_expand_builtin_saveregs (void)
3507 ns = cfun->machine->arg_regs_to_save;
3508 bufsize = ns * (TARGET_IVC2 ? 12 : 4);
3509 regbuf = assign_stack_local (SImode, bufsize, 32);
3511 move_block_from_reg (5-ns, regbuf, ns);
3515 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3518 for (i=0; i<ns; i++)
3520 int rn = (4-ns) + i + 49;
3523 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3524 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3528 return XEXP (regbuf, 0);
3531 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
/* TARGET_BUILD_BUILTIN_VA_LIST: the MeP va_list is a record of four
   pointers — next GP-register arg, its limit, next coprocessor arg,
   and next stack arg.  */
3534 mep_build_builtin_va_list (void)
3536 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3540 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3542 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3543 get_identifier ("__va_next_gp"), ptr_type_node);
3544 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3545 get_identifier ("__va_next_gp_limit"),
3547 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3549 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3552 DECL_FIELD_CONTEXT (f_next_gp) = record;
3553 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3554 DECL_FIELD_CONTEXT (f_next_cop) = record;
3555 DECL_FIELD_CONTEXT (f_next_stack) = record;
3557 TYPE_FIELDS (record) = f_next_gp;
3558 TREE_CHAIN (f_next_gp) = f_next_gp_limit;
3559 TREE_CHAIN (f_next_gp_limit) = f_next_cop;
3560 TREE_CHAIN (f_next_cop) = f_next_stack;
3562 layout_type (record);
/* TARGET_EXPAND_BUILTIN_VA_START: initialize all four va_list fields.
   next_gp points at the register-save buffer, next_gp_limit just past
   the saved GP args, next_cop follows that, and next_stack is the first
   stack-passed argument (NEXTARG).  */
3568 mep_expand_va_start (tree valist, rtx nextarg)
3570 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3571 tree next_gp, next_gp_limit, next_cop, next_stack;
3575 ns = cfun->machine->arg_regs_to_save;
3577 f_next_gp = TYPE_FIELDS (va_list_type_node);
3578 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3579 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3580 f_next_stack = TREE_CHAIN (f_next_cop);
3582 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3584 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3585 valist, f_next_gp_limit, NULL_TREE);
3586 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3588 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3589 valist, f_next_stack, NULL_TREE);
3591 /* va_list.next_gp = expand_builtin_saveregs (); */
3592 u = make_tree (sizetype, expand_builtin_saveregs ());
3593 u = fold_convert (ptr_type_node, u);
3594 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3595 TREE_SIDE_EFFECTS (t) = 1;
3596 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3598 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3599 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3601 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3602 TREE_SIDE_EFFECTS (t) = 1;
3603 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3605 /* va_list.next_cop = va_list.next_gp_limit; */
3606 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3607 TREE_SIDE_EFFECTS (t) = 1;
3608 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3610 /* va_list.next_stack = nextarg; */
3611 u = make_tree (ptr_type_node, nextarg);
3612 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3613 TREE_SIDE_EFFECTS (t) = 1;
3614 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* TARGET_GIMPLIFY_VA_ARG_EXPR: emit gimple that fetches the next vararg
   from the GP buffer, the coprocessor buffer (IVC2 vectors), or the
   stack, advancing the corresponding va_list pointer.  Large or
   unsized values are passed by reference.  */
3618 mep_gimplify_va_arg_expr (tree valist, tree type,
3619 tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
3621 HOST_WIDE_INT size, rsize;
3622 bool by_reference, ivc2_vec;
3623 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3624 tree next_gp, next_gp_limit, next_cop, next_stack;
3625 tree label_sover, label_selse;
3628 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3630 size = int_size_in_bytes (type);
3631 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3635 type = build_pointer_type (type);
3638 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3640 f_next_gp = TYPE_FIELDS (va_list_type_node);
3641 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3642 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3643 f_next_stack = TREE_CHAIN (f_next_cop);
3645 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3647 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3648 valist, f_next_gp_limit, NULL_TREE);
3649 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3651 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3652 valist, f_next_stack, NULL_TREE);
3654 /* if f_next_gp < f_next_gp_limit
3655 IF (VECTOR_P && IVC2)
3663 val = *f_next_stack;
3664 f_next_stack += rsize;
3668 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3669 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3670 res_addr = create_tmp_var (ptr_type_node, NULL);
/* If the register area is exhausted, jump to the stack case.  */
3672 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3673 unshare_expr (next_gp_limit));
3674 tmp = build3 (COND_EXPR, void_type_node, tmp,
3675 build1 (GOTO_EXPR, void_type_node,
3676 unshare_expr (label_selse)),
3678 gimplify_and_add (tmp, pre_p);
3682 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3683 gimplify_and_add (tmp, pre_p);
3687 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3688 gimplify_and_add (tmp, pre_p);
/* Advance both register pointers (4 bytes GP, 8 bytes coprocessor).  */
3691 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3692 unshare_expr (next_gp), size_int (4));
3693 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3695 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3696 unshare_expr (next_cop), size_int (8));
3697 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3699 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3700 gimplify_and_add (tmp, pre_p);
/* Stack case: take the value from next_stack and advance it.  */
3704 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3705 gimplify_and_add (tmp, pre_p);
3707 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3708 gimplify_and_add (tmp, pre_p);
3710 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3711 unshare_expr (next_stack), size_int (rsize));
3712 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3716 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3717 gimplify_and_add (tmp, pre_p);
/* Dereference once for the value; twice when passed by reference.  */
3719 res_addr = fold_convert (build_pointer_type (type), res_addr);
3722 res_addr = build_va_arg_indirect_ref (res_addr);
3724 return build_va_arg_indirect_ref (res_addr);
/* INIT_CUMULATIVE_ARGS: note whether the callee is a VLIW function so
   mep_function_arg can select "jsr" vs "jsrv".  */
3728 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3729 rtx libname ATTRIBUTE_UNUSED,
3730 tree fndecl ATTRIBUTE_UNUSED)
3734 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
/* FUNCTION_ARG: arguments go in $1..$4 (coprocessor regs from 49 for
   IVC2 vectors); a VOIDmode query returns the vliw flag instead.  */
3741 mep_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
3742 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3744 /* VOIDmode is a signal for the backend to pass data to the call
3745 expander via the second operand to the call pattern. We use
3746 this to determine whether to use "jsr" or "jsrv". */
3747 if (mode == VOIDmode)
3748 return GEN_INT (cum.vliw);
3750 /* If we haven't run out of argument registers, return the next. */
3753 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3754 return gen_rtx_REG (mode, cum.nregs + 49);
3756 return gen_rtx_REG (mode, cum.nregs + 1);
3759 /* Otherwise the argument goes on the stack. */
/* TARGET_PASS_BY_REFERENCE: pass by reference anything larger than a
   register (8 bytes for IVC2 vectors still in registers, 4 otherwise)
   or of unknown size.  */
3764 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3765 enum machine_mode mode,
3767 bool named ATTRIBUTE_UNUSED)
3769 int size = bytesize (type, mode);
3770 if (type && TARGET_IVC2 && cum->nregs < 4 && VECTOR_TYPE_P (type))
3771 return size <= 0 || size > 8;
3772 return size <= 0 || size > 4;
/* FUNCTION_ARG_ADVANCE counterpart for the cumulative-args record.  */
3776 mep_arg_advance (CUMULATIVE_ARGS *pcum,
3777 enum machine_mode mode ATTRIBUTE_UNUSED,
3778 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
/* TARGET_RETURN_IN_MEMORY: values wider than a return register (8 bytes
   for IVC2 vectors, else 4) or of unknown size come back in memory.  */
3784 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3786 int size = bytesize (type, BLKmode);
3787 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3788 return size >= 0 && size <= 8 ? 0 : 1;
3789 return size >= 0 && size <= 4 ? 0 : 1;
/* TARGET_NARROW_VOLATILE_BITFIELD hook.  */
3793 mep_narrow_volatile_bitfield (void)
3799 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3802 mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
3804 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3805 return gen_rtx_REG (TYPE_MODE (type), 48);
3806 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3809 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3812 mep_libcall_value (enum machine_mode mode)
3814 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3817 /* Handle pipeline hazards. */
/* Opcode classes we track across consecutive instructions, and their
   printable names (indexed by op_num).  */
3819 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3820 static const char *opnames[] = { "", "stc", "fsft", "ret" };
/* Class of the previously emitted opcode; persists across calls.  */
3822 static int prev_opcode = 0;
3824 /* This isn't as optimal as it could be, because we don't know what
3825 control register the STC opcode is storing in. We only need to add
3826 the nop if it's the relevant register, but we add it for irrelevant
/* ASM_OUTPUT_OPCODE hook: classify the opcode at PTR and, when the
   previous instruction creates a pipeline hazard with it (stc→fsft,
   stc→ret), emit a filler instruction with an explanatory comment.  */
3830 mep_asm_output_opcode (FILE *file, const char *ptr)
3832 int this_opcode = op_none;
3833 const char *hazard = 0;
/* Classify by mnemonic; ISGRAPH rejects partial matches like "retx".  */
3838 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3839 this_opcode = op_fsft;
3842 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3843 this_opcode = op_ret;
3846 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3847 this_opcode = op_stc;
3851 if (prev_opcode == op_stc && this_opcode == op_fsft)
3853 if (prev_opcode == op_stc && this_opcode == op_ret)
3857 fprintf(file, "%s\t# %s-%s hazard\n\t",
3858 hazard, opnames[prev_opcode], opnames[this_opcode]);
3860 prev_opcode = this_opcode;
3863 /* Handle attributes. */
/* Attribute handler for __attribute__((based)) / ((tiny)): only valid
   on (static-storage) variables, and meaningless on pointed-to types.  */
3866 mep_validate_based_tiny (tree *node, tree name, tree args,
3867 int flags ATTRIBUTE_UNUSED, bool *no_add)
3869 if (TREE_CODE (*node) != VAR_DECL
3870 && TREE_CODE (*node) != POINTER_TYPE
3871 && TREE_CODE (*node) != TYPE_DECL)
3873 warning (0, "%qE attribute only applies to variables", name);
3876 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3878 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3880 warning (0, "address region attributes not allowed with auto storage class");
3883 /* Ignore storage attribute of pointed to variable: char __far * x; */
3884 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3886 warning (0, "address region attributes on pointed-to types ignored");
/* Count address-region attributes (based/tiny/near/far/io) in LIST.
   With CHECK_SECTION_ATTR, instead count "section" attributes so the
   caller can detect a region/section conflict.  */
3895 mep_multiple_address_regions (tree list, bool check_section_attr)
3898 int count_sections = 0;
3899 int section_attr_count = 0;
3901 for (a = list; a; a = TREE_CHAIN (a))
3903 if (is_attribute_p ("based", TREE_PURPOSE (a))
3904 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3905 || is_attribute_p ("near", TREE_PURPOSE (a))
3906 || is_attribute_p ("far", TREE_PURPOSE (a))
3907 || is_attribute_p ("io", TREE_PURPOSE (a)))
3909 if (check_section_attr)
3910 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3913 if (check_section_attr)
3914 return section_attr_count;
3916 return count_sections;
/* Attribute list for DECL: type attributes for a type, otherwise the
   decl's own attributes, falling back to its type's.  */
3919 #define MEP_ATTRIBUTES(decl) \
3920 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3921 : DECL_ATTRIBUTES (decl) \
3922 ? (DECL_ATTRIBUTES (decl)) \
3923 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
/* Attribute handler for __attribute__((near)) / ((far)): valid on
   variables and functions; rejects duplicates of any address-region
   attribute on the same declaration.  */
3926 mep_validate_near_far (tree *node, tree name, tree args,
3927 int flags ATTRIBUTE_UNUSED, bool *no_add)
3929 if (TREE_CODE (*node) != VAR_DECL
3930 && TREE_CODE (*node) != FUNCTION_DECL
3931 && TREE_CODE (*node) != METHOD_TYPE
3932 && TREE_CODE (*node) != POINTER_TYPE
3933 && TREE_CODE (*node) != TYPE_DECL)
3935 warning (0, "%qE attribute only applies to variables and functions",
3939 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3941 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3943 warning (0, "address region attributes not allowed with auto storage class");
3946 /* Ignore storage attribute of pointed to variable: char __far * x; */
3947 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3949 warning (0, "address region attributes on pointed-to types ignored");
3953 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3955 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3956 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3957 DECL_ATTRIBUTES (*node) = NULL_TREE;
/* Attribute handler for __attribute__((disinterrupt)): functions only.  */
3963 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3964 int flags ATTRIBUTE_UNUSED, bool *no_add)
3966 if (TREE_CODE (*node) != FUNCTION_DECL
3967 && TREE_CODE (*node) != METHOD_TYPE)
3969 warning (0, "%qE attribute only applies to functions", name);
/* Attribute handler for __attribute__((interrupt)): the function must
   not be inline, must return void, and must take no arguments.  */
3976 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3977 int flags ATTRIBUTE_UNUSED, bool *no_add)
3981 if (TREE_CODE (*node) != FUNCTION_DECL)
3983 warning (0, "%qE attribute only applies to functions", name);
3988 if (DECL_DECLARED_INLINE_P (*node))
3989 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3990 DECL_UNINLINABLE (*node) = 1;
3992 function_type = TREE_TYPE (*node);
3994 if (TREE_TYPE (function_type) != void_type_node)
3995 error ("interrupt function must have return type of void");
3997 if (TYPE_ARG_TYPES (function_type)
3998 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
3999 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4000 error ("interrupt function must have no arguments");
/* Attribute handler for __attribute__((io)) / ((cb)): variables only;
   an optional argument must be an integer constant (the I/O address).
   Unless -mio-volatile is disabled, io/cb variables become volatile.  */
4006 mep_validate_io_cb (tree *node, tree name, tree args,
4007 int flags ATTRIBUTE_UNUSED, bool *no_add)
4009 if (TREE_CODE (*node) != VAR_DECL)
4011 warning (0, "%qE attribute only applies to variables", name);
4015 if (args != NULL_TREE)
4017 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4018 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4019 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4021 warning (0, "%qE attribute allows only an integer constant argument",
4027 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4028 TREE_THIS_VOLATILE (*node) = 1;
/* Attribute handler for __attribute__((vliw)): function types only,
   with one-time informational notes steering users away from putting
   it on pointers or arrays directly; rejected entirely when no VLIW
   configuration is available.  */
4034 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4035 int flags ATTRIBUTE_UNUSED, bool *no_add)
4037 if (TREE_CODE (*node) != FUNCTION_TYPE
4038 && TREE_CODE (*node) != FUNCTION_DECL
4039 && TREE_CODE (*node) != METHOD_TYPE
4040 && TREE_CODE (*node) != FIELD_DECL
4041 && TREE_CODE (*node) != TYPE_DECL)
4043 static int gave_pointer_note = 0;
4044 static int gave_array_note = 0;
4045 static const char * given_type = NULL;
4047 given_type = tree_code_name[TREE_CODE (*node)];
4048 if (TREE_CODE (*node) == POINTER_TYPE)
4049 given_type = "pointers";
4050 if (TREE_CODE (*node) == ARRAY_TYPE)
4051 given_type = "arrays";
4054 warning (0, "%qE attribute only applies to functions, not %s",
4057 warning (0, "%qE attribute only applies to functions",
4061 if (TREE_CODE (*node) == POINTER_TYPE
4062 && !gave_pointer_note)
4064 inform (input_location, "To describe a pointer to a VLIW function, use syntax like this:");
4065 inform (input_location, " typedef int (__vliw *vfuncptr) ();");
4066 gave_pointer_note = 1;
4069 if (TREE_CODE (*node) == ARRAY_TYPE
4070 && !gave_array_note)
4072 inform (input_location, "To describe an array of VLIW function pointers, use syntax like this:");
4073 inform (input_location, " typedef int (__vliw *vfuncptr[]) ();");
4074 gave_array_note = 1;
4078 error ("VLIW functions are not allowed without a VLIW configuration");
/* Table of machine-specific attributes, terminated by a NULL entry.  */
4082 static const struct attribute_spec mep_attribute_table[11] =
4084 /* name min max decl type func handler */
4085 { "based", 0, 0, false, false, false, mep_validate_based_tiny },
4086 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny },
4087 { "near", 0, 0, false, false, false, mep_validate_near_far },
4088 { "far", 0, 0, false, false, false, mep_validate_near_far },
4089 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4090 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt },
4091 { "io", 0, 1, false, false, false, mep_validate_io_cb },
4092 { "cb", 0, 1, false, false, false, mep_validate_io_cb },
4093 { "vliw", 0, 0, false, true, false, mep_validate_vliw },
4094 { NULL, 0, 0, false, false, false, NULL }
/* TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P: interrupt and disinterrupt
   functions must never be inlined.  */
4098 mep_function_attribute_inlinable_p (const_tree callee)
4100 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4101 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4102 return (lookup_attribute ("disinterrupt", attrs) == 0
4103 && lookup_attribute ("interrupt", attrs) == 0);
/* Per-function flag recorded from #pragma disinterrupt (and FUNC_CALL,
   defined on an elided line).  */
4107 #define FUNC_DISINTERRUPT 2
/* One entry per function name mentioned in a relevant #pragma; FLAG
   records which pragmas were seen, USED which were matched.  */
4110 struct GTY(()) pragma_entry {
4113 const char *funcname;
4115 typedef struct pragma_entry pragma_entry;
4117 /* Hash table of farcall-tagged sections. */
/* NOTE(review): despite the comment above, this table appears to record
   per-function #pragma flags (see mep_note_pragma_flag below) — the
   comment may be stale; confirm against full source.  */
4118 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
/* htab equality callback: compare a stored entry against a raw name.  */
4121 pragma_entry_eq (const void *p1, const void *p2)
4123 const pragma_entry *old = (const pragma_entry *) p1;
4124 const char *new_name = (const char *) p2;
4126 return strcmp (old->funcname, new_name) == 0;
/* htab hash callback: hash the stored function name.  */
4130 pragma_entry_hash (const void *p)
4132 const pragma_entry *old = (const pragma_entry *) p;
4133 return htab_hash_string (old->funcname);
/* Record FLAG for FUNCNAME, creating the hash table and the entry on
   first use (GC-allocated so it survives per-function cleanup).  */
4137 mep_note_pragma_flag (const char *funcname, int flag)
4139 pragma_entry **slot;
4142 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4143 pragma_entry_eq, NULL);
4145 slot = (pragma_entry **)
4146 htab_find_slot_with_hash (pragma_htab, funcname,
4147 htab_hash_string (funcname), INSERT);
4151 *slot = GGC_NEW (pragma_entry);
4154 (*slot)->funcname = ggc_strdup (funcname);
4156 (*slot)->flag |= flag;
/* Return nonzero if FLAG was recorded for FUNCNAME, marking the pragma
   as used; encoded '@<char>.' name prefixes are skipped first.  */
4160 mep_lookup_pragma_flag (const char *funcname, int flag)
4162 pragma_entry **slot;
4167 if (funcname[0] == '@' && funcname[2] == '.')
4170 slot = (pragma_entry **)
4171 htab_find_slot_with_hash (pragma_htab, funcname,
4172 htab_hash_string (funcname), NO_INSERT);
4173 if (slot && *slot && ((*slot)->flag & flag))
4175 (*slot)->used |= flag;
/* Thin wrappers for the two pragma kinds.  */
4182 mep_lookup_pragma_call (const char *funcname)
4184 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4188 mep_note_pragma_call (const char *funcname)
4190 mep_note_pragma_flag (funcname, FUNC_CALL);
4194 mep_lookup_pragma_disinterrupt (const char *funcname)
4196 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4200 mep_note_pragma_disinterrupt (const char *funcname)
4202 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
/* htab_traverse callback: warn about any #pragma disinterrupt that
   never matched a function definition.  */
4206 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4208 const pragma_entry *d = (const pragma_entry *)(*slot);
4210 if ((d->flag & FUNC_DISINTERRUPT)
4211 && !(d->used & FUNC_DISINTERRUPT))
4212 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
/* End-of-compilation check over all recorded pragmas.  */
4217 mep_file_cleanups (void)
4220 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4225 mep_attrlist_to_encoding (tree list, tree decl)
4227 if (mep_multiple_address_regions (list, false) > 1)
4229 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4230 TREE_PURPOSE (TREE_CHAIN (list)),
4232 DECL_SOURCE_LINE (decl));
4233 TREE_CHAIN (list) = NULL_TREE;
4238 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4240 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4242 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4244 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4246 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4248 if (TREE_VALUE (list)
4249 && TREE_VALUE (TREE_VALUE (list))
4250 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4252 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4254 && location <= 0x1000000)
4259 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4261 list = TREE_CHAIN (list);
4264 && TREE_CODE (decl) == FUNCTION_DECL
4265 && DECL_SECTION_NAME (decl) == 0)
4271 mep_comp_type_attributes (const_tree t1, const_tree t2)
4275 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4276 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4285 mep_insert_attributes (tree decl, tree *attributes)
4288 const char *secname = 0;
4289 tree attrib, attrlist;
4292 if (TREE_CODE (decl) == FUNCTION_DECL)
4294 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4296 if (mep_lookup_pragma_disinterrupt (funcname))
4298 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4299 *attributes = chainon (*attributes, attrib);
4303 if (TREE_CODE (decl) != VAR_DECL
4304 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4307 if (TREE_READONLY (decl) && TARGET_DC)
4308 /* -mdc means that const variables default to the near section,
4309 regardless of the size cutoff. */
4312 /* User specified an attribute, so override the default.
4313 Ignore storage attribute of pointed to variable. char __far * x; */
4314 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4316 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4317 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4318 else if (DECL_ATTRIBUTES (decl) && *attributes)
4319 DECL_ATTRIBUTES (decl) = NULL_TREE;
4322 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4323 encoding = mep_attrlist_to_encoding (attrlist, decl);
4324 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4326 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4327 encoding = mep_attrlist_to_encoding (attrlist, decl);
4331 /* This means that the declaration has a specific section
4332 attribute, so we should not apply the default rules. */
4334 if (encoding == 'i' || encoding == 'I')
4336 tree attr = lookup_attribute ("io", attrlist);
4338 && TREE_VALUE (attr)
4339 && TREE_VALUE (TREE_VALUE(attr)))
4341 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4342 static tree previous_value = 0;
4343 static int previous_location = 0;
4344 static tree previous_name = 0;
4346 /* We take advantage of the fact that gcc will reuse the
4347 same tree pointer when applying an attribute to a
4348 list of decls, but produce a new tree for attributes
4349 on separate source lines, even when they're textually
4350 identical. This is the behavior we want. */
4351 if (TREE_VALUE (attr) == previous_value
4352 && location == previous_location)
4354 warning(0, "__io address 0x%x is the same for %qE and %qE",
4355 location, previous_name, DECL_NAME (decl));
4357 previous_name = DECL_NAME (decl);
4358 previous_location = location;
4359 previous_value = TREE_VALUE (attr);
4366 /* Declarations of arrays can change size. Don't trust them. */
4367 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4370 size = int_size_in_bytes (TREE_TYPE (decl));
4372 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4374 if (TREE_PUBLIC (decl)
4375 || DECL_EXTERNAL (decl)
4376 || TREE_STATIC (decl))
4378 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4402 if (size <= mep_based_cutoff && size > 0)
4404 else if (size <= mep_tiny_cutoff && size > 0)
4410 if (mep_const_section && TREE_READONLY (decl))
4412 if (strcmp (mep_const_section, "tiny") == 0)
4414 else if (strcmp (mep_const_section, "near") == 0)
4416 else if (strcmp (mep_const_section, "far") == 0)
4423 if (!mep_multiple_address_regions (*attributes, true)
4424 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4426 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4428 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4429 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4430 and mep_validate_based_tiny. */
4431 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4436 mep_encode_section_info (tree decl, rtx rtl, int first)
4439 const char *oldname;
4440 const char *secname;
4446 tree mep_attributes;
4451 if (TREE_CODE (decl) != VAR_DECL
4452 && TREE_CODE (decl) != FUNCTION_DECL)
4455 rtlname = XEXP (rtl, 0);
4456 if (GET_CODE (rtlname) == SYMBOL_REF)
4457 oldname = XSTR (rtlname, 0);
4458 else if (GET_CODE (rtlname) == MEM
4459 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4460 oldname = XSTR (XEXP (rtlname, 0), 0);
4464 type = TREE_TYPE (decl);
4465 if (type == error_mark_node)
4467 mep_attributes = MEP_ATTRIBUTES (decl);
4469 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4473 newname = (char *) alloca (strlen (oldname) + 4);
4474 sprintf (newname, "@%c.%s", encoding, oldname);
4475 idp = get_identifier (newname);
4477 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4490 maxsize = 0x1000000;
4498 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4500 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4502 (long) int_size_in_bytes (TREE_TYPE (decl)),
4508 /* Functions do not go through select_section, so we force it here
4509 by using the DECL_SECTION_NAME as if the user specified the
4510 .vtext or .ftext sections. */
4511 if (! DECL_SECTION_NAME (decl)
4512 && TREE_CODE (decl) == FUNCTION_DECL)
4516 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4518 if (encoding == 'f')
4519 DECL_SECTION_NAME (decl) = build_string (7, ".vftext");
4521 DECL_SECTION_NAME (decl) = build_string (6, ".vtext");
4523 else if (encoding == 'f')
4525 if (flag_function_sections || DECL_ONE_ONLY (decl))
4526 mep_unique_section (decl, 0);
4528 DECL_SECTION_NAME (decl) = build_string (6, ".ftext");
4531 /* This is so we can control inlining. It does not matter what
4532 attribute we add, just that it has one. */
4533 secname = build_tree_list (get_identifier ("section"), DECL_SECTION_NAME (decl));
4535 TYPE_ATTRIBUTES (decl) = chainon (TYPE_ATTRIBUTES (decl), secname);
4537 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), secname);
/* Implement TARGET_STRIP_NAME_ENCODING.  Strip any leading '*'
   markers and any "@<char>." section-encoding prefixes from SYM and
   return the user-visible name.  */

static const char *
mep_strip_name_encoding (const char *sym)
{
  while (1)
    {
      if (*sym == '*')
	sym++;
      else if (*sym == '@' && sym[2] == '.')
	sym += 3;
      else
	return sym;
    }
}
4556 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4557 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4561 switch (TREE_CODE (decl))
4564 if (!TREE_READONLY (decl)
4565 || TREE_SIDE_EFFECTS (decl)
4566 || !DECL_INITIAL (decl)
4567 || (DECL_INITIAL (decl) != error_mark_node
4568 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4572 if (! TREE_CONSTANT (decl))
4580 if (TREE_CODE (decl) == VAR_DECL)
4582 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4584 if (name[0] == '@' && name[2] == '.')
4588 return based_section;
4592 return srodata_section;
4593 if (DECL_INITIAL (decl))
4594 return sdata_section;
4595 return tinybss_section;
4599 return frodata_section;
4604 error_at (DECL_SOURCE_LOCATION (decl),
4605 "variable %D of type %<io%> must be uninitialized", decl);
4606 return data_section;
4609 error_at (DECL_SOURCE_LOCATION (decl),
4610 "variable %D of type %<cb%> must be uninitialized", decl);
4611 return data_section;
4616 return readonly_data_section;
4618 return data_section;
4622 mep_unique_section (tree decl, int reloc)
4624 static const char *prefixes[][2] =
4626 { ".text.", ".gnu.linkonce.t." },
4627 { ".rodata.", ".gnu.linkonce.r." },
4628 { ".data.", ".gnu.linkonce.d." },
4629 { ".based.", ".gnu.linkonce.based." },
4630 { ".sdata.", ".gnu.linkonce.s." },
4631 { ".far.", ".gnu.linkonce.far." },
4632 { ".ftext.", ".gnu.linkonce.ft." },
4633 { ".frodata.", ".gnu.linkonce.frd." },
4634 { ".srodata.", ".gnu.linkonce.srd." }
4636 int sec = 2; /* .data */
4638 const char *name, *prefix;
4641 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4642 if (DECL_RTL (decl))
4643 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4645 if (TREE_CODE (decl) == FUNCTION_DECL)
4646 sec = 0; /* .text */
4647 else if (decl_readonly_section (decl, reloc))
4648 sec = 1; /* .rodata */
4650 if (name[0] == '@' && name[2] == '.')
4655 sec = 3; /* .based */
4659 sec = 8; /* .srodata */
4661 sec = 4; /* .sdata */
4665 sec = 6; /* .ftext */
4667 sec = 7; /* .frodata */
4669 sec = 5; /* .far. */
4675 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4676 len = strlen (name) + strlen (prefix);
4677 string = (char *) alloca (len + 1);
4679 sprintf (string, "%s%s", prefix, name);
4681 DECL_SECTION_NAME (decl) = build_string (len, string);
4684 /* Given a decl, a section name, and whether the decl initializer
4685 has relocs, choose attributes for the section. */
4687 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4690 mep_section_type_flags (tree decl, const char *name, int reloc)
4692 unsigned int flags = default_section_type_flags (decl, name, reloc);
4694 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4695 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4696 flags |= SECTION_MEP_VLIW;
4701 /* Switch to an arbitrary section NAME with attributes as specified
4702 by FLAGS. ALIGN specifies any known alignment requirements for
4703 the section; 0 if the default should be used.
4705 Differs from the standard ELF version only in support of VLIW mode. */
4708 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4710 char flagchars[8], *f = flagchars;
4713 if (!(flags & SECTION_DEBUG))
4715 if (flags & SECTION_WRITE)
4717 if (flags & SECTION_CODE)
4719 if (flags & SECTION_SMALL)
4721 if (flags & SECTION_MEP_VLIW)
4725 if (flags & SECTION_BSS)
4730 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4731 name, flagchars, type);
4733 if (flags & SECTION_CODE)
4734 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4739 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4740 int size, int align, int global)
4742 /* We intentionally don't use mep_section_tag() here. */
4744 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4748 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4749 DECL_ATTRIBUTES (decl));
4751 && TREE_VALUE (attr)
4752 && TREE_VALUE (TREE_VALUE(attr)))
4753 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4758 fprintf (stream, "\t.globl\t");
4759 assemble_name (stream, name);
4760 fprintf (stream, "\n");
4762 assemble_name (stream, name);
4763 fprintf (stream, " = %d\n", location);
4766 if (name[0] == '@' && name[2] == '.')
4768 const char *sec = 0;
4772 switch_to_section (based_section);
4776 switch_to_section (tinybss_section);
4780 switch_to_section (farbss_section);
4789 while (align > BITS_PER_UNIT)
4794 name2 = TARGET_STRIP_NAME_ENCODING (name);
4796 fprintf (stream, "\t.globl\t%s\n", name2);
4797 fprintf (stream, "\t.p2align %d\n", p2align);
4798 fprintf (stream, "\t.type\t%s,@object\n", name2);
4799 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4800 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4807 fprintf (stream, "\t.local\t");
4808 assemble_name (stream, name);
4809 fprintf (stream, "\n");
4811 fprintf (stream, "\t.comm\t");
4812 assemble_name (stream, name);
4813 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4819 mep_init_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
4821 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4822 LCT_NORMAL, VOIDmode, 3,
4825 static_chain, Pmode);
4828 /* Experimental Reorg. */
4831 mep_mentioned_p (rtx in,
4832 rtx reg, /* NULL for mem */
4833 int modes_too) /* if nonzero, modes must match also. */
4841 if (reg && GET_CODE (reg) != REG)
4844 if (GET_CODE (in) == LABEL_REF)
4847 code = GET_CODE (in);
4853 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4859 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4861 return (REGNO (in) == REGNO (reg));
4874 /* Set's source should be read-only. */
4875 if (code == SET && !reg)
4876 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4878 fmt = GET_RTX_FORMAT (code);
4880 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4885 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4886 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4889 else if (fmt[i] == 'e'
4890 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4896 #define EXPERIMENTAL_REGMOVE_REORG 1
4898 #if EXPERIMENTAL_REGMOVE_REORG
4901 mep_compatible_reg_class (int r1, int r2)
4903 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4905 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
4911 mep_reorg_regmove (rtx insns)
4913 rtx insn, next, pat, follow, *where;
4914 int count = 0, done = 0, replace, before = 0;
4917 for (insn = insns; insn; insn = NEXT_INSN (insn))
4918 if (GET_CODE (insn) == INSN)
4921 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4922 set that uses the r2 and r2 dies there. We replace r2 with r1
4923 and see if it's still a valid insn. If so, delete the first set.
4924 Copied from reorg.c. */
4929 for (insn = insns; insn; insn = next)
4931 next = NEXT_INSN (insn);
4932 if (GET_CODE (insn) != INSN)
4934 pat = PATTERN (insn);
4938 if (GET_CODE (pat) == SET
4939 && GET_CODE (SET_SRC (pat)) == REG
4940 && GET_CODE (SET_DEST (pat)) == REG
4941 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4942 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4944 follow = next_nonnote_insn (insn);
4946 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
4948 while (follow && GET_CODE (follow) == INSN
4949 && GET_CODE (PATTERN (follow)) == SET
4950 && !dead_or_set_p (follow, SET_SRC (pat))
4951 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4952 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4955 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4956 follow = next_nonnote_insn (follow);
4960 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4961 if (follow && GET_CODE (follow) == INSN
4962 && GET_CODE (PATTERN (follow)) == SET
4963 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4965 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4967 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4970 where = & SET_SRC (PATTERN (follow));
4973 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4975 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
4978 where = & PATTERN (follow);
4984 /* If so, follow is the corresponding insn */
4991 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
4992 for (x = insn; x ;x = NEXT_INSN (x))
4994 print_rtl_single (dump_file, x);
4997 fprintf (dump_file, "\n");
5001 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5005 next = delete_insn (insn);
5008 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5009 print_rtl_single (dump_file, follow);
5019 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5020 fprintf (dump_file, "=====\n");
5026 /* Figure out where to put LABEL, which is the label for a repeat loop.
5027 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5028 the loop ends just before LAST_INSN. If SHARED, insns other than the
5029 "repeat" might use LABEL to jump to the loop's continuation point.
5031 Return the last instruction in the adjusted loop. */
5034 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5038 int count = 0, code, icode;
5041 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5042 INSN_UID (last_insn));
5044 /* Set PREV to the last insn in the loop. */
5047 prev = PREV_INSN (prev);
5049 /* Set NEXT to the next insn after the repeat label. */
5054 code = GET_CODE (prev);
5055 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5060 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5061 prev = XVECEXP (PATTERN (prev), 0, 1);
5063 /* Other insns that should not be in the last two opcodes. */
5064 icode = recog_memoized (prev);
5066 || icode == CODE_FOR_repeat
5067 || icode == CODE_FOR_erepeat
5068 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5071 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5072 is the second instruction in a VLIW bundle. In that case,
5073 loop again: if the first instruction also satisfies the
5074 conditions above then we will reach here again and put
5075 both of them into the repeat epilogue. Otherwise both
5076 should remain outside. */
5077 if (GET_MODE (prev) != BImode)
5082 print_rtl_single (dump_file, next);
5087 prev = PREV_INSN (prev);
5090 /* See if we're adding the label immediately after the repeat insn.
5091 If so, we need to separate them with a nop. */
5092 prev = prev_real_insn (next);
5094 switch (recog_memoized (prev))
5096 case CODE_FOR_repeat:
5097 case CODE_FOR_erepeat:
5099 fprintf (dump_file, "Adding nop inside loop\n");
5100 emit_insn_before (gen_nop (), next);
5107 /* Insert the label. */
5108 emit_label_before (label, next);
5110 /* Insert the nops. */
5111 if (dump_file && count < 2)
5112 fprintf (dump_file, "Adding %d nop%s\n\n",
5113 2 - count, count == 1 ? "" : "s");
5115 for (; count < 2; count++)
5117 last_insn = emit_insn_after (gen_nop (), last_insn);
5119 emit_insn_before (gen_nop (), last_insn);
5126 mep_emit_doloop (rtx *operands, int is_end)
5130 if (cfun->machine->doloop_tags == 0
5131 || cfun->machine->doloop_tag_from_end == is_end)
5133 cfun->machine->doloop_tags++;
5134 cfun->machine->doloop_tag_from_end = is_end;
5137 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5139 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5141 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
/* Code for converting doloop_begins and doloop_ends into valid
   MeP instructions.  A doloop_begin is just a placeholder:

	$count = unspec ($count)

   where $count is initially the number of iterations - 1.
   doloop_end has the form:

	if ($count-- == 0) goto label

   The counter variable is private to the doloop insns, nothing else
   relies on its value.

   There are three cases, in decreasing order of preference:

      1. A loop has exactly one doloop_begin and one doloop_end.
	 The doloop_end branches to the first instruction after
	 the doloop_begin.

	 In this case we can replace the doloop_begin with a repeat
	 instruction and remove the doloop_end.  I.e.:

		$count1 = unspec ($count1)
	    label:
		...
		if ($count2-- == 0) goto label

	  becomes:

		repeat $count1,repeat_label
	    label:
		...
	    repeat_label:

      2. As for (1), except there are several doloop_ends.  One of them
	 (call it X) falls through to a label L.  All the others fall
	 through to branches to L.

	 In this case, we remove X and replace the other doloop_ends
	 with branches to the repeat label.  For example:

		$count1 = unspec ($count1)
	    start:
		...
		if ($count2-- == 0) goto label
	    end:
		...
		if ($count3-- == 0) goto label
		goto end

	  becomes:

		repeat $count1,repeat_label
	    start:
		...
	    repeat_label:
	    end:
		...
		goto repeat_label

      3. The fallback case.  Replace doloop_begins with:

		$count = $count + 1

	 Replace doloop_ends with the equivalent of:

		$count = $count - 1
		if ($count == 0) goto label

	 Note that this might need a scratch register if $count
	 is stored in memory.  */
5225 /* A structure describing one doloop_begin. */
5226 struct mep_doloop_begin {
5227 /* The next doloop_begin with the same tag. */
5228 struct mep_doloop_begin *next;
5230 /* The instruction itself. */
5233 /* The initial counter value. This is known to be a general register. */
5237 /* A structure describing a doloop_end. */
5238 struct mep_doloop_end {
5239 /* The next doloop_end with the same loop tag. */
5240 struct mep_doloop_end *next;
5242 /* The instruction itself. */
5245 /* The first instruction after INSN when the branch isn't taken. */
5248 /* The location of the counter value. Since doloop_end_internal is a
5249 jump instruction, it has to allow the counter to be stored anywhere
5250 (any non-fixed register or memory location). */
5253 /* The target label (the place where the insn branches when the counter
5257 /* A scratch register. Only available when COUNTER isn't stored
5258 in a general register. */
/* One do-while loop.  */
struct mep_doloop {
  /* All the doloop_begins for this loop (in no particular order).  */
  struct mep_doloop_begin *begin;

  /* All the doloop_ends.  When there is more than one, arrange things
     so that the first one is the most likely to be X in case (2) above.  */
  struct mep_doloop_end *end;
};
5274 /* Return true if LOOP can be converted into repeat/repeat_end form
5275 (that is, if it matches cases (1) or (2) above). */
5278 mep_repeat_loop_p (struct mep_doloop *loop)
5280 struct mep_doloop_end *end;
5283 /* There must be exactly one doloop_begin and at least one doloop_end. */
5284 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5287 /* The first doloop_end (X) must branch back to the insn after
5288 the doloop_begin. */
5289 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5292 /* All the other doloop_ends must branch to the same place as X.
5293 When the branch isn't taken, they must jump to the instruction
5295 fallthrough = loop->end->fallthrough;
5296 for (end = loop->end->next; end != 0; end = end->next)
5297 if (end->label != loop->end->label
5298 || !simplejump_p (end->fallthrough)
5299 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5306 /* The main repeat reorg function. See comment above for details. */
5309 mep_reorg_repeat (rtx insns)
5312 struct mep_doloop *loops, *loop;
5313 struct mep_doloop_begin *begin;
5314 struct mep_doloop_end *end;
5316 /* Quick exit if we haven't created any loops. */
5317 if (cfun->machine->doloop_tags == 0)
5320 /* Create an array of mep_doloop structures. */
5321 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5322 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5324 /* Search the function for do-while insns and group them by loop tag. */
5325 for (insn = insns; insn; insn = NEXT_INSN (insn))
5327 switch (recog_memoized (insn))
5329 case CODE_FOR_doloop_begin_internal:
5330 insn_extract (insn);
5331 loop = &loops[INTVAL (recog_data.operand[2])];
5333 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5334 begin->next = loop->begin;
5336 begin->counter = recog_data.operand[0];
5338 loop->begin = begin;
5341 case CODE_FOR_doloop_end_internal:
5342 insn_extract (insn);
5343 loop = &loops[INTVAL (recog_data.operand[2])];
5345 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5347 end->fallthrough = next_real_insn (insn);
5348 end->counter = recog_data.operand[0];
5349 end->label = recog_data.operand[1];
5350 end->scratch = recog_data.operand[3];
5352 /* If this insn falls through to an unconditional jump,
5353 give it a lower priority than the others. */
5354 if (loop->end != 0 && simplejump_p (end->fallthrough))
5356 end->next = loop->end->next;
5357 loop->end->next = end;
5361 end->next = loop->end;
5367 /* Convert the insns for each loop in turn. */
5368 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5369 if (mep_repeat_loop_p (loop))
5371 /* Case (1) or (2). */
5372 rtx repeat_label, label_ref;
5374 /* Create a new label for the repeat insn. */
5375 repeat_label = gen_label_rtx ();
5377 /* Replace the doloop_begin with a repeat. */
5378 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5379 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5381 delete_insn (loop->begin->insn);
5383 /* Insert the repeat label before the first doloop_end.
5384 Fill the gap with nops if there are other doloop_ends. */
5385 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5386 false, loop->end->next != 0);
5388 /* Emit a repeat_end (to improve the readability of the output). */
5389 emit_insn_before (gen_repeat_end (), loop->end->insn);
5391 /* Delete the first doloop_end. */
5392 delete_insn (loop->end->insn);
5394 /* Replace the others with branches to REPEAT_LABEL. */
5395 for (end = loop->end->next; end != 0; end = end->next)
5397 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5398 delete_insn (end->insn);
5399 delete_insn (end->fallthrough);
5404 /* Case (3). First replace all the doloop_begins with increment
5406 for (begin = loop->begin; begin != 0; begin = begin->next)
5408 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5409 begin->counter, const1_rtx),
5411 delete_insn (begin->insn);
5414 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5415 for (end = loop->end; end != 0; end = end->next)
5421 /* Load the counter value into a general register. */
5423 if (!REG_P (reg) || REGNO (reg) > 15)
5426 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5429 /* Decrement the counter. */
5430 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5433 /* Copy it back to its original location. */
5434 if (reg != end->counter)
5435 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5437 /* Jump back to the start label. */
5438 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5440 JUMP_LABEL (insn) = end->label;
5441 LABEL_NUSES (end->label)++;
5443 /* Emit the whole sequence before the doloop_end. */
5444 insn = get_insns ();
5446 emit_insn_before (insn, end->insn);
5448 /* Delete the doloop_end. */
5449 delete_insn (end->insn);
5456 mep_invertable_branch_p (rtx insn)
5459 enum rtx_code old_code;
5462 set = PATTERN (insn);
5463 if (GET_CODE (set) != SET)
5465 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5467 cond = XEXP (XEXP (set, 1), 0);
5468 old_code = GET_CODE (cond);
5472 PUT_CODE (cond, NE);
5475 PUT_CODE (cond, EQ);
5478 PUT_CODE (cond, GE);
5481 PUT_CODE (cond, LT);
5486 INSN_CODE (insn) = -1;
5487 i = recog_memoized (insn);
5488 PUT_CODE (cond, old_code);
5489 INSN_CODE (insn) = -1;
5494 mep_invert_branch (rtx insn, rtx after)
5496 rtx cond, set, label;
5499 set = PATTERN (insn);
5501 gcc_assert (GET_CODE (set) == SET);
5502 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5504 cond = XEXP (XEXP (set, 1), 0);
5505 switch (GET_CODE (cond))
5508 PUT_CODE (cond, NE);
5511 PUT_CODE (cond, EQ);
5514 PUT_CODE (cond, GE);
5517 PUT_CODE (cond, LT);
5522 label = gen_label_rtx ();
5523 emit_label_after (label, after);
5524 for (i=1; i<=2; i++)
5525 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5527 rtx ref = XEXP (XEXP (set, 1), i);
5528 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5529 delete_insn (XEXP (ref, 0));
5530 XEXP (ref, 0) = label;
5531 LABEL_NUSES (label) ++;
5532 JUMP_LABEL (insn) = label;
5534 INSN_CODE (insn) = -1;
5535 i = recog_memoized (insn);
5536 gcc_assert (i >= 0);
5540 mep_reorg_erepeat (rtx insns)
5542 rtx insn, prev, label_before, l, x;
5545 for (insn = insns; insn; insn = NEXT_INSN (insn))
5547 && ! JUMP_TABLE_DATA_P (insn)
5548 && mep_invertable_branch_p (insn))
5552 fprintf (dump_file, "\n------------------------------\n");
5553 fprintf (dump_file, "erepeat: considering this jump:\n");
5554 print_rtl_single (dump_file, insn);
5556 count = simplejump_p (insn) ? 0 : 1;
5558 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5560 if (GET_CODE (prev) == CALL_INSN
5561 || BARRIER_P (prev))
5564 if (prev == JUMP_LABEL (insn))
5568 fprintf (dump_file, "found loop top, %d insns\n", count);
5570 if (LABEL_NUSES (prev) == 1)
5571 /* We're the only user, always safe */ ;
5572 else if (LABEL_NUSES (prev) == 2)
5574 /* See if there's a barrier before this label. If
5575 so, we know nobody inside the loop uses it.
5576 But we must be careful to put the erepeat
5577 *after* the label. */
5579 for (barrier = PREV_INSN (prev);
5580 barrier && GET_CODE (barrier) == NOTE;
5581 barrier = PREV_INSN (barrier))
5583 if (barrier && GET_CODE (barrier) != BARRIER)
5588 /* We don't know who else, within or without our loop, uses this */
5590 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5594 /* Generate a label to be used by the erepat insn. */
5595 l = gen_label_rtx ();
5597 /* Insert the erepeat after INSN's target label. */
5598 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5600 emit_insn_after (x, prev);
5602 /* Insert the erepeat label. */
5603 newlast = (mep_insert_repeat_label_last
5604 (insn, l, !simplejump_p (insn), false));
5605 if (simplejump_p (insn))
5607 emit_insn_before (gen_erepeat_end (), insn);
5612 mep_invert_branch (insn, newlast);
5613 emit_insn_after (gen_erepeat_end (), newlast);
5620 /* A label is OK if there is exactly one user, and we
5621 can find that user before the next label. */
5624 if (LABEL_NUSES (prev) == 1)
5626 for (user = PREV_INSN (prev);
5627 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5628 user = PREV_INSN (user))
5629 if (GET_CODE (user) == JUMP_INSN
5630 && JUMP_LABEL (user) == prev)
5632 safe = INSN_UID (user);
5639 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5640 safe, INSN_UID (prev));
5647 label_before = prev;
5652 fprintf (dump_file, "\n==============================\n");
5655 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5656 always do this on its own. */
5659 mep_jmp_return_reorg (rtx insns)
5661 rtx insn, label, ret;
5664 for (insn = insns; insn; insn = NEXT_INSN (insn))
5665 if (simplejump_p (insn))
5667 /* Find the fist real insn the jump jumps to. */
5668 label = ret = JUMP_LABEL (insn);
5670 && (GET_CODE (ret) == NOTE
5671 || GET_CODE (ret) == CODE_LABEL
5672 || GET_CODE (PATTERN (ret)) == USE))
5673 ret = NEXT_INSN (ret);
5677 /* Is it a return? */
5678 ret_code = recog_memoized (ret);
5679 if (ret_code == CODE_FOR_return_internal
5680 || ret_code == CODE_FOR_eh_return_internal)
5682 /* It is. Replace the jump with a return. */
5683 LABEL_NUSES (label) --;
5684 if (LABEL_NUSES (label) == 0)
5685 delete_insn (label);
5686 PATTERN (insn) = copy_rtx (PATTERN (ret));
5687 INSN_CODE (insn) = -1;
5695 mep_reorg_addcombine (rtx insns)
5699 for (i = insns; i; i = NEXT_INSN (i))
5701 && INSN_CODE (i) == CODE_FOR_addsi3
5702 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5703 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5704 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5705 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5709 && INSN_CODE (n) == CODE_FOR_addsi3
5710 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5711 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5712 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5713 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5715 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5716 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5717 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5719 && ic + nc > -32768)
5721 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5722 NEXT_INSN (i) = NEXT_INSN (n);
5724 PREV_INSN (NEXT_INSN (i)) = i;
5730 /* If this insn adjusts the stack, return the adjustment, else return
5733 add_sp_insn_p (rtx insn)
5737 if (! single_set (insn))
5739 pat = PATTERN (insn);
5740 if (GET_CODE (SET_DEST (pat)) != REG)
5742 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5744 if (GET_CODE (SET_SRC (pat)) != PLUS)
5746 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5748 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5750 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5752 return INTVAL (XEXP (SET_SRC (pat), 1));
5755 /* Check for trivial functions that set up an unneeded stack
/* If the function's only frame manipulation is an opening
   "$sp = $sp + N" matched by a closing "$sp = $sp - N", and nothing in
   between mentions $sp or makes a call, both frame insns can be
   deleted.  */
5758 mep_reorg_noframe (rtx insns)
5760 rtx start_frame_insn;
5761 rtx end_frame_insn = 0;
/* Skip over notes etc. to reach the first real insn.  */
5765 /* The first insn should be $sp = $sp + N */
5766 while (insns && ! INSN_P (insns))
5767 insns = NEXT_INSN (insns);
5771 sp_adjust = add_sp_insn_p (insns);
5775 start_frame_insn = insns;
5776 sp = SET_DEST (PATTERN (start_frame_insn));
5778 insns = next_real_insn (insns);
/* Walk the body looking for the matching closing adjustment; bail
   out if $sp is used or a call intervenes.  */
5782 rtx next = next_real_insn (insns);
5786 sp2 = add_sp_insn_p (insns);
5791 end_frame_insn = insns;
5792 if (sp2 != -sp_adjust)
5795 else if (mep_mentioned_p (insns, sp, 0))
5797 else if (CALL_P (insns))
/* The frame proved unnecessary: drop both adjustments.  */
5805 delete_insn (start_frame_insn);
5806 delete_insn (end_frame_insn);
/* Body of the machine-dependent reorg pass (the function header is
   outside this view; presumably the TARGET_MACHINE_DEPENDENT_REORG
   hook).  Runs the MeP-specific late transformations in a fixed
   order.  */
5813 rtx insns = get_insns ();
5814 mep_reorg_addcombine (insns);
5815 #if EXPERIMENTAL_REGMOVE_REORG
5816 /* VLIW packing has been done already, so we can't just delete things. */
5817 if (!mep_vliw_function_p (cfun->decl))
5818 mep_reorg_regmove (insns);
5820 mep_jmp_return_reorg (insns);
5821 mep_bundle_insns (insns);
5822 mep_reorg_repeat (insns);
/* erepeat uses RPB/RPE; in interrupt handlers it is only safe if the
   repeat-begin register is saved.  */
5825 && !profile_arc_flag
5826 && TARGET_OPT_REPEAT
5827 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5828 mep_reorg_erepeat (insns);
5830 /* This may delete *insns so make sure it's last. */
5831 mep_reorg_noframe (insns);
5836 /*----------------------------------------------------------------------*/
5838 /*----------------------------------------------------------------------*/
5840 /* Element X gives the index into cgen_insns[] of the most general
5841 implementation of intrinsic X. Unimplemented intrinsics are
5843 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5845 /* Element X gives the index of another instruction that is mapped to
5846 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5849 Things are set up so that mep_intrinsic_chain[X] < X. */
5850 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5852 /* The bitmask for the current ISA. The ISA masks are declared
5854 unsigned int mep_selected_isa;
/* Maps a -mconfig= name to its ISA bitmask; the table itself comes
   from the generated COPROC_SELECTION_TABLE, when available.  */
5857 const char *config_name;
5861 static struct mep_config mep_configs[] = {
5862 #ifdef COPROC_SELECTION_TABLE
5863 COPROC_SELECTION_TABLE,
5868 /* Initialize the global intrinsics variables above. */
5871 mep_init_intrinsics (void)
5875 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5876 mep_selected_isa = mep_configs[0].isa;
5877 if (mep_config_string != 0)
5878 for (i = 0; mep_configs[i].config_name; i++)
5879 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5881 mep_selected_isa = mep_configs[i].isa;
5885 /* Assume all intrinsics are unavailable. */
5886 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5887 mep_intrinsic_insn[i] = -1;
5889 /* Build up the global intrinsic tables. */
5890 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5891 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
/* Later entries take over as the "most general" implementation;
   the previous holder is chained so mep_intrinsic_chain[X] < X.  */
5893 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5894 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5896 /* See whether we can directly move values between one coprocessor
5897 register and another. */
5898 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5899 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5900 mep_have_copro_copro_moves_p = true;
5902 /* See whether we can directly move values between core and
5903 coprocessor registers. */
5904 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
5905 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
5907 mep_have_core_copro_moves_p = 1;
5910 /* Declare all available intrinsic functions. Called once only. */
/* Type nodes for the coprocessor builtin types; built lazily in
   mep_init_builtins and mapped from cgen regnum operand types by
   mep_cgen_regnum_to_type.  */
5912 static tree cp_data_bus_int_type_node;
5913 static tree opaque_vector_type_node;
5914 static tree v8qi_type_node;
5915 static tree v4hi_type_node;
5916 static tree v2si_type_node;
5917 static tree v8uqi_type_node;
5918 static tree v4uhi_type_node;
5919 static tree v2usi_type_node;
/* Map a cgen regnum operand type onto the tree type node used to
   declare the corresponding builtin's parameter or return value.
   Unknown types fall through to void.  */
5922 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5926 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5927 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5928 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5929 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5930 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5931 case cgen_regnum_operand_type_CHAR: return char_type_node;
5932 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5933 case cgen_regnum_operand_type_SI: return intSI_type_node;
5934 case cgen_regnum_operand_type_DI: return intDI_type_node;
5935 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5936 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5937 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5938 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5939 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5940 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5941 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5942 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5944 return void_type_node;
/* TARGET_INIT_BUILTINS: build the coprocessor vector/scalar type
   nodes, push typedefs for them into the language's scope, and
   register one builtin per non-GROUP_KNOWN_CODE intrinsic.  */
5949 mep_init_builtins (void)
/* cp_data_bus_int is as wide as the coprocessor data bus.  */
5953 if (TARGET_64BIT_CR_REGS)
5954 cp_data_bus_int_type_node = long_long_integer_type_node;
5956 cp_data_bus_int_type_node = long_integer_type_node;
5958 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
5959 v8qi_type_node = build_vector_type (intQI_type_node, 8);
5960 v4hi_type_node = build_vector_type (intHI_type_node, 4);
5961 v2si_type_node = build_vector_type (intSI_type_node, 2);
5962 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
5963 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
5964 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
/* Expose each type to the user under a cp_* typedef name.  */
5966 (*lang_hooks.decls.pushdecl)
5967 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
5968 cp_data_bus_int_type_node));
5970 (*lang_hooks.decls.pushdecl)
5971 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
5972 opaque_vector_type_node));
5974 (*lang_hooks.decls.pushdecl)
5975 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
5977 (*lang_hooks.decls.pushdecl)
5978 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
5980 (*lang_hooks.decls.pushdecl)
5981 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
5984 (*lang_hooks.decls.pushdecl)
5985 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
5987 (*lang_hooks.decls.pushdecl)
5988 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
5990 (*lang_hooks.decls.pushdecl)
5991 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
5994 /* Intrinsics like mep_cadd3 are implemented with two groups of
5995 instructions, one which uses UNSPECs and one which uses a specific
5996 rtl code such as PLUS. Instructions in the latter group belong
5997 to GROUP_KNOWN_CODE.
5999 In such cases, the intrinsic will have two entries in the global
6000 tables above. The unspec form is accessed using builtin functions
6001 while the specific form is accessed using the mep_* enum in
6004 The idea is that __cop arithmetic and builtin functions have
6005 different optimization requirements. If mep_cadd3() appears in
6006 the source code, the user will surely expect gcc to use cadd3
6007 rather than a work-alike such as add3. However, if the user
6008 just writes "a + b", where a or b are __cop variables, it is
6009 reasonable for gcc to choose a core instruction rather than
6010 cadd3 if it believes that is more optimal. */
6011 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6012 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6013 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6015 tree ret_type = void_type_node;
/* Skip duplicate entries for the same intrinsic.  */
6018 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6021 if (cgen_insns[i].cret_p)
6022 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6024 bi_type = build_function_type (ret_type, 0);
6025 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6027 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6031 /* Report the unavailability of the given intrinsic. */
/* The static flag array ensures each intrinsic is complained about
   at most once per compilation.  The message distinguishes "not in
   this configuration" from "not in (non-)VLIW functions".  */
6035 mep_intrinsic_unavailable (int intrinsic)
6037 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6039 if (already_reported_p[intrinsic])
6042 if (mep_intrinsic_insn[intrinsic] < 0)
6043 error ("coprocessor intrinsic %qs is not available in this configuration",
6044 cgen_intrinsics[intrinsic]);
6045 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6046 error ("%qs is not available in VLIW functions",
6047 cgen_intrinsics[intrinsic]);
6049 error ("%qs is not available in non-VLIW functions",
6050 cgen_intrinsics[intrinsic]);
6052 already_reported_p[intrinsic] = 1;
6057 /* See if any implementation of INTRINSIC is available to the
6058 current function. If so, store the most general implementation
6059 in *INSN_PTR and return true. Return false otherwise. */
6062 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
/* Walk the chain from the most general implementation down until one
   is enabled in the current (VLIW/non-VLIW) context.  */
6066 i = mep_intrinsic_insn[intrinsic];
6067 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6068 i = mep_intrinsic_chain[i];
6072 *insn_ptr = &cgen_insns[i];
6079 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6080 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6081 try using a work-alike instead. In this case, the returned insn
6082 may have three operands rather than two. */
6085 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
/* For CR<-CR moves, accept the first available entry from the list
   of cmov work-alikes.  */
6089 if (intrinsic == mep_cmov)
6091 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6092 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6096 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6100 /* If ARG is a register operand that is the same size as MODE, convert it
6101 to MODE using a subreg. Otherwise return ARG as-is. */
6104 mep_convert_arg (enum machine_mode mode, rtx arg)
6106 if (GET_MODE (arg) != mode
6107 && register_operand (arg, VOIDmode)
6108 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6109 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6114 /* Apply regnum conversions to ARG using the description given by REGNUM.
6115 Return the new argument on success and null on failure. */
6118 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
/* count == 0 means no regnum conversion applies; pass ARG through.
   Otherwise ARG must be a constant index in [0, count) and is turned
   into the corresponding hard register.  */
6120 if (regnum->count == 0)
6123 if (GET_CODE (arg) != CONST_INT
6125 || INTVAL (arg) >= regnum->count)
6128 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6132 /* Try to make intrinsic argument ARG match the given operand.
6133 UNSIGNED_P is true if the argument has an unsigned type.
   Returns the legitimized rtx, or null (on the elided failure paths)
   when ARG cannot be made to satisfy OPERAND's predicate.  */
6136 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6139 if (GET_CODE (arg) == CONST_INT)
6141 /* CONST_INTs can only be bound to integer operands. */
6142 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6145 else if (GET_CODE (arg) == CONST_DOUBLE)
6146 /* These hold vector constants. */;
6147 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6149 /* If the argument is a different size from what's expected, we must
6150 have a value in the right mode class in order to convert it. */
6151 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6154 /* If the operand is an rvalue, promote or demote it to match the
6155 operand's size. This might not need extra instructions when
6156 ARG is a register value. */
6157 if (operand->constraint[0] != '=')
6158 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6161 /* If the operand is an lvalue, bind the operand to a new register.
6162 The caller will copy this value into ARG after the main
6163 instruction. By doing this always, we produce slightly more
6165 /* But not for control registers. */
6166 if (operand->constraint[0] == '='
6168 || ! (CCR_REGNO_P (REGNO (arg)) || CR_REGNO_P (REGNO (arg)))
6170 return gen_reg_rtx (operand->mode);
6172 /* Try simple mode punning. */
6173 arg = mep_convert_arg (operand->mode, arg);
6174 if (operand->predicate (arg, operand->mode))
6177 /* See if forcing the argument into a register will make it match. */
6178 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6179 arg = force_reg (operand->mode, arg);
6181 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6182 if (operand->predicate (arg, operand->mode))
6189 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6190 function FNNAME. OPERAND describes the operand to which ARGNUM
/* For constant arguments that failed a known immediate predicate,
   emit a precise range or alignment diagnostic; otherwise fall back
   to a generic incompatible-type error.  */
6194 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6195 int argnum, tree fnname)
6199 if (GET_CODE (arg) == CONST_INT)
6200 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6201 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6203 const struct cgen_immediate_predicate *predicate;
6204 HOST_WIDE_INT argval;
6206 predicate = &cgen_immediate_predicates[i];
6207 argval = INTVAL (arg);
6208 if (argval < predicate->lower || argval >= predicate->upper)
6209 error ("argument %d of %qE must be in the range %d...%d",
6210 argnum, fnname, predicate->lower, predicate->upper - 1);
6212 error ("argument %d of %qE must be a multiple of %d",
6213 argnum, fnname, predicate->align);
6217 error ("incompatible type for argument %d of %qE", argnum, fnname);
/* TARGET_EXPAND_BUILTIN: expand a call to a MeP intrinsic builtin.
   Looks up the cgen insn for the builtin, evaluates and legitimizes
   each argument, emits the instruction, and copies output operands
   back to their destinations.  Returns error_mark_node on user
   errors (diagnosed via error ()).  */
6221 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6222 rtx subtarget ATTRIBUTE_UNUSED,
6223 enum machine_mode mode ATTRIBUTE_UNUSED,
6224 int ignore ATTRIBUTE_UNUSED)
6226 rtx pat, op[10], arg[10];
6228 int opindex, unsigned_p[10];
6230 unsigned int n_args;
6232 const struct cgen_insn *cgen_insn;
6233 const struct insn_data *idata;
/* NOTE(review): return_type holds a tree node yet is declared int --
   looks like extraction damage or a latent bug; confirm against the
   upstream source.  */
6235 int return_type = void_type_node;
6238 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6239 fnname = DECL_NAME (fndecl);
6241 /* Find out which instruction we should emit. Note that some coprocessor
6242 intrinsics may only be available in VLIW mode, or only in normal mode. */
6243 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6245 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6246 return error_mark_node;
6248 idata = &insn_data[cgen_insn->icode];
6250 builtin_n_args = cgen_insn->num_args;
6252 if (cgen_insn->cret_p)
6254 if (cgen_insn->cret_p > 1)
6257 return_type = mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6261 /* Evaluate each argument. */
6262 n_args = call_expr_nargs (exp);
6264 if (n_args < builtin_n_args)
6266 error ("too few arguments to %qE", fnname);
6267 return error_mark_node;
6269 if (n_args > builtin_n_args)
6271 error ("too many arguments to %qE", fnname);
6272 return error_mark_node;
6275 for (a = first_arg; a < builtin_n_args+first_arg; a++)
6279 args = CALL_EXPR_ARG (exp, a-first_arg);
/* Reference operands must be passed as &object.  */
6284 if (cgen_insn->regnums[a].reference_p)
6286 if (TREE_CODE (value) != ADDR_EXPR)
6289 error ("argument %d of %qE must be an address", a+1, fnname);
6290 return error_mark_node;
6292 value = TREE_OPERAND (value, 0);
6296 /* If the argument has been promoted to int, get the unpromoted
6297 value. This is necessary when sub-int memory values are bound
6298 to reference parameters. */
6299 if (TREE_CODE (value) == NOP_EXPR
6300 && TREE_TYPE (value) == integer_type_node
6301 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6302 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6303 < TYPE_PRECISION (TREE_TYPE (value))))
6304 value = TREE_OPERAND (value, 0);
6306 /* If the argument has been promoted to double, get the unpromoted
6307 SFmode value. This is necessary for FMAX support, for example. */
6308 if (TREE_CODE (value) == NOP_EXPR
6309 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6310 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6311 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6312 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6313 value = TREE_OPERAND (value, 0);
6315 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6316 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6317 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6318 if (cgen_insn->regnums[a].reference_p)
6320 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6321 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6323 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
/* mep_convert_regnum returned null: index out of range.  */
6327 error ("argument %d of %qE must be in the range %d...%d",
6328 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6329 return error_mark_node;
/* Allocate the return-value operand(s), reusing TARGET if its mode
   fits.  */
6333 for (a=0; a<first_arg; a++)
6335 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6338 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6341 /* Convert the arguments into a form suitable for the intrinsic.
6342 Report an error if this isn't possible. */
6343 for (opindex = 0; opindex < idata->n_operands; opindex++)
6345 a = cgen_insn->op_mapping[opindex];
6346 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6347 arg[a], unsigned_p[a]);
6348 if (op[opindex] == 0)
6350 mep_incompatible_arg (&idata->operand[opindex],
6351 arg[a], a + 1 - first_arg, fnname);
6352 return error_mark_node;
6356 /* Emit the instruction. */
6357 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6358 op[5], op[6], op[7], op[8], op[9]);
6360 if (GET_CODE (pat) == SET
6361 && GET_CODE (SET_DEST (pat)) == PC
6362 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6363 emit_jump_insn (pat);
6367 /* Copy lvalues back to their final locations. */
6368 for (opindex = 0; opindex < idata->n_operands; opindex++)
6369 if (idata->operand[opindex].constraint[0] == '=')
6371 a = cgen_insn->op_mapping[opindex];
/* Mode-class mismatch: copy via a lowpart subreg instead of a
   value-changing conversion.  */
6374 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6375 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6376 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6380 /* First convert the operand to the right mode, then copy it
6381 into the destination. Doing the conversion as a separate
6382 step (rather than using convert_move) means that we can
6383 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6384 refer to the same register. */
6385 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6386 op[opindex], unsigned_p[a]);
6387 if (!rtx_equal_p (arg[a], op[opindex]))
6388 emit_move_insn (arg[a], op[opindex]);
6393 if (first_arg > 0 && target && target != op[0])
6395 emit_move_insn (target, op[0]);
/* TARGET_VECTOR_MODE_SUPPORTED_P handler; the body is elided from
   this view.  */
6402 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6407 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6408 a global register. */
/* for_each_rtx callback: checks hard registers (directly or through a
   SUBREG) against the global_regs[] table.  */
6411 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6419 switch (GET_CODE (x))
6422 if (REG_P (SUBREG_REG (x)))
6424 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6425 && global_regs[subreg_regno (x)])
6433 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6447 /* A non-constant call might use a global register. */
6457 /* Returns nonzero if X mentions a global register. */
6460 global_reg_mentioned_p (rtx x)
/* For calls that are not const/pure, scan the function-usage list
   instead of the call pattern itself.  */
6466 if (! RTL_CONST_OR_PURE_CALL_P (x))
6468 x = CALL_INSN_FUNCTION_USAGE (x);
6476 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6478 /* Scheduling hooks for VLIW mode.
6480 Conceptually this is very simple: we have a two-pack architecture
6481 that takes one core insn and one coprocessor insn to make up either
6482 a 32- or 64-bit instruction word (depending on the option bit set in
6483 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6484 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6485 and one 48-bit cop insn or two 32-bit core/cop insns.
6487 In practice, instruction selection will be a bear. Consider in
6488 VL64 mode the following insns
6493 these cannot pack, since the add is a 16-bit core insn and cmov
6494 is a 32-bit cop insn. However,
6499 packs just fine. For good VLIW code generation in VL64 mode, we
6500 will have to have 32-bit alternatives for many of the common core
6501 insns. Not implemented. */
/* TARGET_SCHED_ADJUST_COST handler.  */
6504 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6508 if (REG_NOTE_KIND (link) != 0)
6510 /* See whether INSN and DEP_INSN are intrinsics that set the same
6511 hard register. If so, it is more important to free up DEP_INSN
6512 than it is to free up INSN.
6514 Note that intrinsics like mep_mulr are handled differently from
6515 the equivalent mep.md patterns. In mep.md, if we don't care
6516 about the value of $lo and $hi, the pattern will just clobber
6517 the registers, not set them. Since clobbers don't count as
6518 output dependencies, it is often possible to reorder two mulrs,
6521 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6522 so any pair of mep_mulr()s will be inter-dependent. We should
6523 therefore give the first mep_mulr() a higher priority. */
6524 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6525 && global_reg_mentioned_p (PATTERN (insn))
6526 && global_reg_mentioned_p (PATTERN (dep_insn)))
6529 /* If the dependence is an anti or output dependence, assume it
6534 /* If we can't recognize the insns, we can't really do anything. */
6535 if (recog_memoized (dep_insn) < 0)
6538 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6539 attribute instead. */
6542 cost_specified = get_attr_latency (dep_insn);
6543 if (cost_specified != 0)
6544 return cost_specified;
6550 /* ??? We don't properly compute the length of a load/store insn,
6551 taking into account the addressing mode. */
/* TARGET_SCHED_ISSUE_RATE: IVC2 can issue 3 insns per cycle, other
   configurations 2.  */
6554 mep_issue_rate (void)
6556 return TARGET_IVC2 ? 3 : 2;
6559 /* Return true if function DECL was declared with the vliw attribute. */
6562 mep_vliw_function_p (tree decl)
6564 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
/* Scan the ready list from lowest to highest priority for an insn
   that occupies SLOT and encodes to LENGTH bytes; the (elided)
   returns yield the match or a null rtx.  */
6568 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6572 for (i = nready - 1; i >= 0; --i)
6574 rtx insn = ready[i];
6575 if (recog_memoized (insn) >= 0
6576 && get_attr_slot (insn) == slot
6577 && get_attr_length (insn) == length)
/* Move INSN to the head of the ready list (index nready-1) by
   shifting the intervening entries down one slot.  */
6585 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6589 for (i = 0; i < nready; ++i)
6590 if (ready[i] == insn)
6592 for (; i < nready - 1; ++i)
6593 ready[i] = ready[i + 1];
/* Debug helper: print one line describing INSN (uid, code, slot
   class, pattern name) to the scheduler DUMP file.  */
6602 mep_print_sched_insn (FILE *dump, rtx insn)
6604 const char *slots = "none";
6605 const char *name = NULL;
6609 if (GET_CODE (PATTERN (insn)) == SET
6610 || GET_CODE (PATTERN (insn)) == PARALLEL)
6612 switch (get_attr_slots (insn))
6614 case SLOTS_CORE: slots = "core"; break;
6615 case SLOTS_C3: slots = "c3"; break;
6616 case SLOTS_P0: slots = "p0"; break;
6617 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6618 case SLOTS_P0_P1: slots = "p0,p1"; break;
6619 case SLOTS_P0S: slots = "p0s"; break;
6620 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6621 case SLOTS_P1: slots = "p1"; break;
/* Unknown slot class: fall back to the raw enum value.  */
6623 sprintf(buf, "%d", get_attr_slots (insn));
6628 if (GET_CODE (PATTERN (insn)) == USE)
6631 code = INSN_CODE (insn);
6633 name = get_insn_name (code);
6638 "insn %4d %4d %8s %s\n",
/* TARGET_SCHED_REORDER: for VLIW functions, try to move a packable
   core/cop insn pair to the head of the ready list so they issue
   together.  IVC2 is left to the DFA; non-VLIW functions are left
   untouched.  */
6646 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6647 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6648 int *pnready, int clock ATTRIBUTE_UNUSED)
6650 int nready = *pnready;
6651 rtx core_insn, cop_insn;
6654 if (dump && sched_verbose > 1)
6656 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6657 for (i=0; i<nready; i++)
6658 mep_print_sched_insn (dump, ready[i]);
6659 fprintf (dump, "\n");
6662 if (!mep_vliw_function_p (cfun->decl))
6667 /* IVC2 uses a DFA to determine what's ready and what's not. */
6671 /* We can issue either a core or coprocessor instruction.
6672 Look for a matched pair of insns to reorder. If we don't
6673 find any, don't second-guess the scheduler's priorities. */
/* First try 16-bit core + (VL64 ? 48-bit : 16-bit) cop; then, in
   VL64 mode, 32-bit core + 32-bit cop.  */
6675 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6676 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6677 TARGET_OPT_VL64 ? 6 : 2)))
6679 else if (TARGET_OPT_VL64
6680 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6681 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6684 /* We didn't find a pair. Issue the single insn at the head
6685 of the ready list. */
6688 /* Reorder the two insns first. */
6689 mep_move_ready_insn (ready, nready, core_insn);
6690 mep_move_ready_insn (ready, nready - 1, cop_insn);
6694 /* A for_each_rtx callback. Return true if *X is a register that is
6695 set by insn PREV. */
6698 mep_store_find_set (rtx *x, void *prev)
6700 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6703 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6704 not the containing insn. */
6707 mep_store_data_bypass_1 (rtx prev, rtx pat)
6709 /* Cope with intrinsics like swcpa. */
6710 if (GET_CODE (pat) == PARALLEL)
/* A parallel bypasses if any of its member sets does.  */
6714 for (i = 0; i < XVECLEN (pat, 0); i++)
6715 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6721 /* Check for some sort of store. */
6722 if (GET_CODE (pat) != SET
6723 || GET_CODE (SET_DEST (pat)) != MEM)
6726 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6727 The first operand to the unspec is the store data and the other operands
6728 are used to calculate the address. */
6729 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
/* Operand 0 (the store data) is deliberately skipped: only the
   address operands (1..n) must be independent of PREV.  */
6734 src = SET_SRC (pat);
6735 for (i = 1; i < XVECLEN (src, 0); i++)
6736 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6742 /* Otherwise just check that PREV doesn't modify any register mentioned
6743 in the memory destination. */
6744 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6747 /* Return true if INSN is a store instruction and if the store address
6748 has no true dependence on PREV. */
6751 mep_store_data_bypass_p (rtx prev, rtx insn)
6753 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6756 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6757 is a register other than LO or HI and if PREV sets *X. */
6760 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6763 && REGNO (*x) != LO_REGNO
6764 && REGNO (*x) != HI_REGNO
6765 && reg_set_p (*x, (const_rtx) prev));
6768 /* Return true if, apart from HI/LO, there are no true dependencies
6769 between multiplication instructions PREV and INSN. */
6772 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6776 pat = PATTERN (insn);
/* Look at the first member set of a parallel multiply pattern.  */
6777 if (GET_CODE (pat) == PARALLEL)
6778 pat = XVECEXP (pat, 0, 0);
6779 return (GET_CODE (pat) == SET
6780 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6783 /* Return true if INSN is an ldc instruction that issues to the
6784 MeP-h1 integer pipeline. This is true for instructions that
6785 read from PSW, LP, SAR, HI and LO. */
6788 mep_ipipe_ldc_p (rtx insn)
6792 pat = PATTERN (insn);
6794 /* Cope with intrinsics that set both a hard register and its shadow.
6795 The set of the hard register comes first. */
6796 if (GET_CODE (pat) == PARALLEL)
6797 pat = XVECEXP (pat, 0, 0);
6799 if (GET_CODE (pat) == SET)
6801 src = SET_SRC (pat);
6803 /* Cope with intrinsics. The first operand to the unspec is
6804 the source register. */
6805 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6806 src = XVECEXP (src, 0, 0);
/* Dispatch on which control register is being read (the case
   labels are elided from this view).  */
6809 switch (REGNO (src))
6822 /* Create a VLIW bundle from core instruction CORE and coprocessor
6823 instruction COP. COP always satisfies INSN_P, but CORE can be
6824 either a new pattern or an existing instruction.
6826 Emit the bundle in place of COP and return it. */
6829 mep_make_bundle (rtx core, rtx cop)
6833 /* If CORE is an existing instruction, remove it, otherwise put
6834 the new pattern in an INSN harness. */
6838 core = make_insn_raw (core);
6840 /* Generate the bundle sequence and replace COP with it. */
6841 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6842 insn = emit_insn_after (insn, cop);
6845 /* Set up the links of the insns inside the SEQUENCE. */
6846 PREV_INSN (core) = PREV_INSN (insn);
6847 NEXT_INSN (core) = cop;
6848 PREV_INSN (cop) = core;
6849 NEXT_INSN (cop) = NEXT_INSN (insn);
6851 /* Set the VLIW flag for the coprocessor instruction. */
6852 PUT_MODE (core, VOIDmode);
6853 PUT_MODE (cop, BImode);
6855 /* Derive a location for the bundle. Individual instructions cannot
6856 have their own location because there can be no assembler labels
6857 between CORE and COP. */
6858 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6859 INSN_LOCATOR (core) = 0;
6860 INSN_LOCATOR (cop) = 0;
6865 /* A helper routine for mep_insn_dependent_p called through note_stores. */
/* DATA points at the rtx being tested; clear it (on the elided line)
   once X is found to be mentioned, so the caller sees NULL.  */
6868 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6870 rtx * pinsn = (rtx *) data;
6872 if (*pinsn && reg_mentioned_p (x, *pinsn))
6876 /* Return true if anything in insn X is (anti,output,true) dependent on
6877 anything in insn Y. */
6880 mep_insn_dependent_p (rtx x, rtx y)
6884 gcc_assert (INSN_P (x));
6885 gcc_assert (INSN_P (y));
/* Check both directions: stores in X against Y, then stores in Y
   against X.  TMP is cleared by the callback on a hit.  */
6888 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6889 if (tmp == NULL_RTX)
6893 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6894 if (tmp == NULL_RTX)
/* Return true if INSN issues to the core slot (USEs excluded).  */
6901 core_insn_p (rtx insn)
6903 if (GET_CODE (PATTERN (insn)) == USE)
6905 if (get_attr_slot (insn) == SLOT_CORE)
6910 /* Mark coprocessor instructions that can be bundled together with
6911 the immediately preceding core instruction. This is later used
6912 to emit the "+" that tells the assembler to create a VLIW insn.
6914 For unbundled insns, the assembler will automatically add coprocessor
6915 nops, and 16-bit core nops. Due to an apparent oversight in the
6916 spec, the assembler will _not_ automatically add 32-bit core nops,
6917 so we have to emit those here.
6919 Called from mep_insn_reorg. */
6922 mep_bundle_insns (rtx insns)
6924 rtx insn, last = NULL_RTX, first = NULL_RTX;
6925 int saw_scheduling = 0;
6927 /* Only do bundling if we're in vliw mode. */
6928 if (!mep_vliw_function_p (cfun->decl))
6931 /* The first insn in a bundle are TImode, the remainder are
6932 VOIDmode. After this function, the first has VOIDmode and the
6933 rest have BImode. */
6935 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6937 /* First, move any NOTEs that are within a bundle, to the beginning
6939 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6941 if (NOTE_P (insn) && first)
6942 /* Don't clear FIRST. */;
6944 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
6947 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
6951 /* INSN is part of a bundle; FIRST is the first insn in that
6952 bundle. Move all intervening notes out of the bundle.
6953 In addition, since the debug pass may insert a label
6954 whenever the current line changes, set the location info
6955 for INSN to match FIRST. */
6957 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
6959 note = PREV_INSN (insn);
6960 while (note && note != first)
6962 prev = PREV_INSN (note);
6966 /* Remove NOTE from here... */
6967 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
6968 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
6969 /* ...and put it in here. */
6970 NEXT_INSN (note) = first;
6971 PREV_INSN (note) = PREV_INSN (first);
6972 NEXT_INSN (PREV_INSN (note)) = note;
6973 PREV_INSN (NEXT_INSN (note)) = note;
6980 else if (!NONJUMP_INSN_P (insn))
6984 /* Now fix up the bundles. */
6985 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6990 if (!NONJUMP_INSN_P (insn))
6996 /* If we're not optimizing enough, there won't be scheduling
6997 info. We detect that here. */
6998 if (GET_MODE (insn) == TImode)
7000 if (!saw_scheduling)
7005 rtx core_insn = NULL_RTX;
7007 /* IVC2 slots are scheduled by DFA, so we just accept
7008 whatever the scheduler gives us. However, we must make
7009 sure the core insn (if any) is the first in the bundle.
7010 The IVC2 assembler can insert whatever NOPs are needed,
7011 and allows a COP insn to be first. */
7013 if (NONJUMP_INSN_P (insn)
7014 && GET_CODE (PATTERN (insn)) != USE
7015 && GET_MODE (insn) == TImode)
7019 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7020 && NONJUMP_INSN_P (NEXT_INSN (last));
7021 last = NEXT_INSN (last))
7023 if (core_insn_p (last))
7026 if (core_insn_p (last))
7029 if (core_insn && core_insn != insn)
7031 /* Swap core insn to first in the bundle. */
7033 /* Remove core insn. */
7034 if (PREV_INSN (core_insn))
7035 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7036 if (NEXT_INSN (core_insn))
7037 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7039 /* Re-insert core insn. */
7040 PREV_INSN (core_insn) = PREV_INSN (insn);
7041 NEXT_INSN (core_insn) = insn;
7043 if (PREV_INSN (core_insn))
7044 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7045 PREV_INSN (insn) = core_insn;
7047 PUT_MODE (core_insn, TImode);
7048 PUT_MODE (insn, VOIDmode);
7052 /* The first insn has TImode, the rest have VOIDmode */
7053 if (GET_MODE (insn) == TImode)
7054 PUT_MODE (insn, VOIDmode);
7056 PUT_MODE (insn, BImode);
7060 PUT_MODE (insn, VOIDmode);
7061 if (recog_memoized (insn) >= 0
7062 && get_attr_slot (insn) == SLOT_COP)
/* A COP insn that cannot pair with the preceding core insn
   (wrong slot, wrong combined length, or a dependency) gets a
   NOP partner of the complementary size instead.  */
7064 if (GET_CODE (insn) == JUMP_INSN
7066 || recog_memoized (last) < 0
7067 || get_attr_slot (last) != SLOT_CORE
7068 || (get_attr_length (insn)
7069 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7070 || mep_insn_dependent_p (insn, last))
7072 switch (get_attr_length (insn))
7077 insn = mep_make_bundle (gen_nop (), insn);
7080 if (TARGET_OPT_VL64)
7081 insn = mep_make_bundle (gen_nop32 (), insn);
7084 if (TARGET_OPT_VL64)
7085 error ("2 byte cop instructions are"
7086 " not allowed in 64-bit VLIW mode");
7088 insn = mep_make_bundle (gen_nop (), insn);
7091 error ("unexpected %d byte cop instruction",
7092 get_attr_length (insn));
/* Otherwise pair the COP insn with the preceding core insn.  */
7097 insn = mep_make_bundle (last, insn);
7105 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7106 Return true on success. This function can fail if the intrinsic
7107 is unavailable or if the operands don't satisfy their predicates. */
/* NOTE(review): this excerpt is missing lines (the return-type line,
   braces, local declarations of `i' and `newop', and the early
   `return false'/final `return true' statements) -- presumably the
   function returns bool; verify against the complete source.  */
7110 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7112 const struct cgen_insn *cgen_insn;
7113 const struct insn_data *idata;
/* Map the intrinsic number to the insn that implements it; in the
   full source this returns false when the insn is unavailable on the
   selected target configuration.  */
7117 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7120 idata = &insn_data[cgen_insn->icode];
/* Convert each caller-supplied operand to the mode the pattern
   expects, then reject the whole expansion if any converted operand
   fails its predicate.  */
7121 for (i = 0; i < idata->n_operands; i++)
7123 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7124 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
/* All predicates passed: emit the generated insn.  Nine operand
   slots are passed unconditionally; presumably genfun only reads the
   first n_operands of them -- confirm against insn-emit.c.  */
7128 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7129 newop[3], newop[4], newop[5],
7130 newop[6], newop[7], newop[8]));
7136 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7137 OPERANDS[0]. Report an error if the instruction could not
7138 be synthesized. OPERANDS[1] is a register_operand. For sign
7139 and zero extensions, it may be smaller than SImode. */
/* NOTE(review): the function body is elided in this excerpt; both
   parameters are marked ATTRIBUTE_UNUSED, which suggests the visible
   body is (or reduces to) a stub -- verify against the full source.  */
7142 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7143 rtx * operands ATTRIBUTE_UNUSED)
7149 /* Likewise, but apply a binary operation to OPERANDS[1] and
7150 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7151 can be a general_operand.
7153 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7154 third operand. REG and REG3 take register operands only. */
/* NOTE(review): body elided in this excerpt; all parameters are
   ATTRIBUTE_UNUSED, suggesting a stub that selects among the four
   intrinsic variants in the full source -- confirm before relying on
   this description.  */
7157 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7158 int ATTRIBUTE_UNUSED immediate3,
7159 int ATTRIBUTE_UNUSED reg,
7160 int ATTRIBUTE_UNUSED reg3,
7161 rtx * operands ATTRIBUTE_UNUSED)
/* Implementation of the TARGET_RTX_COSTS hook: set *TOTAL to the cost
   of rtx X.  NOTE(review): the surrounding switch on the rtx code and
   several case bodies are elided from this excerpt; the comments below
   describe only what the visible lines establish.  */
7167 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
/* Constants reachable by short encodings are cheaper.  NOTE(review):
   the bounds are asymmetric -- `>= -128' pairs with `< 127', not
   `<= 127'.  If the intent is the signed 8-bit range [-128, 127] this
   is an off-by-one that misprices INTVAL == 127; likewise `< 65536'
   with `>= -32768' mixes signed-16 and unsigned-16 ranges.  Confirm
   against the MeP immediate encodings before changing.  */
7172 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7174 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
/* When optimizing for size this node is treated as free.  */
7181 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
/* An operation with a constant second operand is cheaper than one
   with two variable operands (the elided arm presumably supplies the
   constant-operand cost).  */
7185 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7187 : COSTS_N_INSNS (2));
/* Implementation of the TARGET_ADDRESS_COST hook.  NOTE(review): the
   body is elided here; both parameters are ATTRIBUTE_UNUSED, so the
   full source presumably returns a fixed cost -- verify.  */
7194 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
/* Implementation of the TARGET_HANDLE_OPTION hook: process one
   command-line option identified by CODE.  NOTE(review): the switch
   on CODE and its case labels are elided from this excerpt; the
   groupings below are inferred from the visible flag assignments and
   should be confirmed against the full source and mep.opt.  */
7200 mep_handle_option (size_t code,
7201 const char *arg ATTRIBUTE_UNUSED,
7202 int value ATTRIBUTE_UNUSED)
/* Presumably -mall-opts: enable every optional-hardware flag.  */
7209 target_flags |= MEP_ALL_OPTS;
/* Presumably -mno-opts: clear the same flag set.  */
7213 target_flags &= ~ MEP_ALL_OPTS;
/* Presumably -mcop64: coprocessor present with 64-bit CR regs.  */
7217 target_flags |= MASK_COP;
7218 target_flags |= MASK_64BIT_CR_REGS;
/* Remember that -mtiny= was given explicitly so later configuration
   code does not override it.  */
7222 option_mtiny_specified = 1;
/* Presumably -mivc2: the IVC2 coprocessor implies COP, 64-bit CR
   registers, VLIW scheduling and 64-bit VLIW bundles.  */
7225 target_flags |= MASK_COP;
7226 target_flags |= MASK_64BIT_CR_REGS;
7227 target_flags |= MASK_VLIW;
7228 target_flags |= MASK_OPT_VL64;
7229 target_flags |= MASK_IVC2;
/* Make the 32 coprocessor registers starting at hard reg 48
   allocatable, and mark them call-used (an elided branch apparently
   sets some of them call-saved -- see line 7236).  */
7231 for (i=0; i<32; i++)
7232 fixed_regs[i+48] = 0;
7233 for (i=0; i<32; i++)
7234 call_used_regs[i+48] = 1;
7236 call_used_regs[i+48] = 0;
/* Helper macro for renaming the coprocessor control registers to
   their IVC2 names; the RN(...) invocations are elided here.  */
7238 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7275 mep_asm_init_sections (void)
7278 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7279 "\t.section .based,\"aw\"");
7282 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7283 "\t.section .sbss,\"aw\"");
7286 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7287 "\t.section .sdata,\"aw\",@progbits");
7290 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7291 "\t.section .far,\"aw\"");
7294 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7295 "\t.section .farbss,\"aw\"");
7298 = get_unnamed_section (0, output_section_asm_op,
7299 "\t.section .frodata,\"a\"");
7302 = get_unnamed_section (0, output_section_asm_op,
7303 "\t.section .srodata,\"a\"");