1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
48 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 /* Structure of this file:
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
63 + Function args in registers
64 + Handle pipeline hazards
67 + Machine-dependent Reorg
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
80 c - cb (control bus) */
/* Per-function machine-dependent state (GC-managed via GTY).
   NOTE(review): this view of the file is elided; the enclosing braces
   and some members of this struct are not visible here.  */
82 struct GTY(()) machine_function
/* Nonzero if this function requires a frame pointer.  */
84 int mep_frame_pointer_needed;
91 /* Records __builtin_return address. */
/* Frame slot assigned to each hard register, plus a per-register flag
   recording whether that register is saved.  */
95 int reg_save_slot[FIRST_PSEUDO_REGISTER];
96 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
98 /* 2 if the current function has an interrupt attribute, 1 if not, 0
99 if unknown. This is here because resource.c uses EPILOGUE_USES
101 int interrupt_handler;
103 /* Likewise, for disinterrupt attribute. */
104 int disable_interrupts;
106 /* Number of doloop tags used so far. */
109 /* True if the last tag was allocated to a doloop_end. */
110 bool doloop_tag_from_end;
112 /* True if reload changes $TP. */
113 bool reload_changes_tp;
115 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
116 We only set this if the function is an interrupt handler. */
117 int asms_without_operands;
120 #define MEP_CONTROL_REG(x) \
121 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
123 static const struct attribute_spec mep_attribute_table[11];
125 static GTY(()) section * based_section;
126 static GTY(()) section * tinybss_section;
127 static GTY(()) section * far_section;
128 static GTY(()) section * farbss_section;
129 static GTY(()) section * frodata_section;
130 static GTY(()) section * srodata_section;
132 static void mep_set_leaf_registers (int);
133 static bool symbol_p (rtx);
134 static bool symbolref_p (rtx);
135 static void encode_pattern_1 (rtx);
136 static void encode_pattern (rtx);
137 static bool const_in_range (rtx, int, int);
138 static void mep_rewrite_mult (rtx, rtx);
139 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
140 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
141 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
142 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
143 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
144 static bool mep_nongeneral_reg (rtx);
145 static bool mep_general_copro_reg (rtx);
146 static bool mep_nonregister (rtx);
147 static struct machine_function* mep_init_machine_status (void);
148 static rtx mep_tp_rtx (void);
149 static rtx mep_gp_rtx (void);
150 static bool mep_interrupt_p (void);
151 static bool mep_disinterrupt_p (void);
152 static bool mep_reg_set_p (rtx, rtx);
153 static bool mep_reg_set_in_function (int);
154 static bool mep_interrupt_saved_reg (int);
155 static bool mep_call_saves_register (int);
157 static void add_constant (int, int, int, int);
158 static bool mep_function_uses_sp (void);
159 static rtx maybe_dead_move (rtx, rtx, bool);
160 static void mep_reload_pointer (int, const char *);
161 static void mep_start_function (FILE *, HOST_WIDE_INT);
162 static bool mep_function_ok_for_sibcall (tree, tree);
163 static int unique_bit_in (HOST_WIDE_INT);
164 static int bit_size_for_clip (HOST_WIDE_INT);
165 static int bytesize (const_tree, enum machine_mode);
166 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
167 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
168 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
169 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
170 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
171 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
172 static bool mep_function_attribute_inlinable_p (const_tree);
173 static bool mep_option_can_inline_p (tree, tree);
174 static bool mep_lookup_pragma_disinterrupt (const char *);
175 static int mep_multiple_address_regions (tree, bool);
176 static int mep_attrlist_to_encoding (tree, tree);
177 static void mep_insert_attributes (tree, tree *);
178 static void mep_encode_section_info (tree, rtx, int);
179 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
180 static void mep_unique_section (tree, int);
181 static unsigned int mep_section_type_flags (tree, const char *, int);
182 static void mep_asm_named_section (const char *, unsigned int, tree);
183 static bool mep_mentioned_p (rtx, rtx, int);
184 static void mep_reorg_regmove (rtx);
185 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
186 static void mep_reorg_repeat (rtx);
187 static bool mep_invertable_branch_p (rtx);
188 static void mep_invert_branch (rtx, rtx);
189 static void mep_reorg_erepeat (rtx);
190 static void mep_jmp_return_reorg (rtx);
191 static void mep_reorg_addcombine (rtx);
192 static void mep_reorg (void);
193 static void mep_init_intrinsics (void);
194 static void mep_init_builtins (void);
195 static void mep_intrinsic_unavailable (int);
196 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
197 static bool mep_get_move_insn (int, const struct cgen_insn **);
198 static rtx mep_convert_arg (enum machine_mode, rtx);
199 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
200 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
201 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
202 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
203 static int mep_adjust_cost (rtx, rtx, rtx, int);
204 static int mep_issue_rate (void);
205 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
206 static void mep_move_ready_insn (rtx *, int, rtx);
207 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
208 static rtx mep_make_bundle (rtx, rtx);
209 static void mep_bundle_insns (rtx);
210 static bool mep_rtx_cost (rtx, int, int, int *, bool);
211 static int mep_address_cost (rtx, bool);
212 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
214 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
216 static bool mep_vector_mode_supported_p (enum machine_mode);
217 static bool mep_handle_option (size_t, const char *, int);
218 static rtx mep_allocate_initial_value (rtx);
219 static void mep_asm_init_sections (void);
220 static int mep_comp_type_attributes (const_tree, const_tree);
221 static bool mep_narrow_volatile_bitfield (void);
222 static rtx mep_expand_builtin_saveregs (void);
223 static tree mep_build_builtin_va_list (void);
224 static void mep_expand_va_start (tree, rtx);
225 static tree mep_gimplify_va_arg_expr (tree, tree, tree *, tree *);
227 /* Initialize the GCC target structure. */
229 #undef TARGET_ASM_FUNCTION_PROLOGUE
230 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
231 #undef TARGET_ATTRIBUTE_TABLE
232 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
233 #undef TARGET_COMP_TYPE_ATTRIBUTES
234 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
235 #undef TARGET_INSERT_ATTRIBUTES
236 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
237 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
238 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
239 #undef TARGET_OPTION_CAN_INLINE_P
240 #define TARGET_OPTION_CAN_INLINE_P mep_option_can_inline_p
241 #undef TARGET_SECTION_TYPE_FLAGS
242 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
243 #undef TARGET_ASM_NAMED_SECTION
244 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
245 #undef TARGET_INIT_BUILTINS
246 #define TARGET_INIT_BUILTINS mep_init_builtins
247 #undef TARGET_EXPAND_BUILTIN
248 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
249 #undef TARGET_SCHED_ADJUST_COST
250 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
251 #undef TARGET_SCHED_ISSUE_RATE
252 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
253 #undef TARGET_SCHED_REORDER
254 #define TARGET_SCHED_REORDER mep_sched_reorder
255 #undef TARGET_STRIP_NAME_ENCODING
256 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
257 #undef TARGET_ASM_SELECT_SECTION
258 #define TARGET_ASM_SELECT_SECTION mep_select_section
259 #undef TARGET_ASM_UNIQUE_SECTION
260 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
261 #undef TARGET_ENCODE_SECTION_INFO
262 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
263 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
264 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
265 #undef TARGET_RTX_COSTS
266 #define TARGET_RTX_COSTS mep_rtx_cost
267 #undef TARGET_ADDRESS_COST
268 #define TARGET_ADDRESS_COST mep_address_cost
269 #undef TARGET_MACHINE_DEPENDENT_REORG
270 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
271 #undef TARGET_SETUP_INCOMING_VARARGS
272 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
273 #undef TARGET_PASS_BY_REFERENCE
274 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
275 #undef TARGET_VECTOR_MODE_SUPPORTED_P
276 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
277 #undef TARGET_HANDLE_OPTION
278 #define TARGET_HANDLE_OPTION mep_handle_option
279 #undef TARGET_DEFAULT_TARGET_FLAGS
280 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
281 #undef TARGET_ALLOCATE_INITIAL_VALUE
282 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
283 #undef TARGET_ASM_INIT_SECTIONS
284 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
285 #undef TARGET_RETURN_IN_MEMORY
286 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
287 #undef TARGET_NARROW_VOLATILE_BITFIELD
288 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
289 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
290 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
291 #undef TARGET_BUILD_BUILTIN_VA_LIST
292 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
293 #undef TARGET_EXPAND_BUILTIN_VA_START
294 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
295 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
296 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
298 struct gcc_target targetm = TARGET_INITIALIZER;
300 #define WANT_GCC_DEFINITIONS
301 #include "mep-intrin.h"
302 #undef WANT_GCC_DEFINITIONS
305 /* Command Line Option Support. */
307 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
309 /* True if we can use cmov instructions to move values back and forth
310 between core and coprocessor registers. */
311 bool mep_have_core_copro_moves_p;
313 /* True if we can use cmov instructions (or a work-alike) to move
314 values between coprocessor registers. */
315 bool mep_have_copro_copro_moves_p;
317 /* A table of all coprocessor instructions that can act like
318 a coprocessor-to-coprocessor cmov. */
319 static const int mep_cmov_insns[] = {
332 static int option_mtiny_specified = 0;
/* Fill mep_leaf_registers with ENABLE.  Element 0 doubles as an
   "already set" flag, so repeated calls with the same value are
   no-ops.  */
336 mep_set_leaf_registers (int enable)
340 if (mep_leaf_registers[0] != enable)
341 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
342 mep_leaf_registers[i] = enable;
346 mep_conditional_register_usage (char *fixed_regs, char *call_used_regs)
350 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
352 fixed_regs[HI_REGNO] = 1;
353 fixed_regs[LO_REGNO] = 1;
354 call_used_regs[HI_REGNO] = 1;
355 call_used_regs[LO_REGNO] = 1;
358 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
363 mep_optimization_options (void)
365 /* The first scheduling pass often increases register pressure and tends
366 to result in more spill code. Only run it when specifically asked. */
367 flag_schedule_insns = 0;
369 /* Using $fp doesn't gain us much, even when debugging is important. */
370 flag_omit_frame_pointer = 1;
374 mep_override_options (void)
377 warning (OPT_fpic, "-fpic is not supported");
379 warning (OPT_fPIC, "-fPIC is not supported");
380 if (TARGET_S && TARGET_M)
381 error ("only one of -ms and -mm may be given");
382 if (TARGET_S && TARGET_L)
383 error ("only one of -ms and -ml may be given");
384 if (TARGET_M && TARGET_L)
385 error ("only one of -mm and -ml may be given");
386 if (TARGET_S && option_mtiny_specified)
387 error ("only one of -ms and -mtiny= may be given");
388 if (TARGET_M && option_mtiny_specified)
389 error ("only one of -mm and -mtiny= may be given");
390 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
391 warning (0, "-mclip currently has no effect without -mminmax");
393 if (mep_const_section)
395 if (strcmp (mep_const_section, "tiny") != 0
396 && strcmp (mep_const_section, "near") != 0
397 && strcmp (mep_const_section, "far") != 0)
398 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
402 mep_tiny_cutoff = 65536;
405 if (TARGET_L && ! option_mtiny_specified)
408 if (TARGET_64BIT_CR_REGS)
409 flag_split_wide_types = 0;
411 init_machine_status = mep_init_machine_status;
412 mep_init_intrinsics ();
415 /* Pattern Support - constraints, predicates, expanders. */
417 /* MEP has very few instructions that can refer to the span of
418 addresses used by symbols, so it's common to check for them. */
423 int c = GET_CODE (x);
425 return (c == CONST_INT
435 if (GET_CODE (x) != MEM)
438 c = GET_CODE (XEXP (x, 0));
439 return (c == CONST_INT
444 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
446 #define GEN_REG(R, STRICT) \
449 && ((R) == ARG_POINTER_REGNUM \
450 || (R) >= FIRST_PSEUDO_REGISTER)))
452 static char pattern[12], *patternp;
453 static GTY(()) rtx patternr[12];
454 #define RTX_IS(x) (strcmp (pattern, x) == 0)
457 encode_pattern_1 (rtx x)
461 if (patternp == pattern + sizeof (pattern) - 2)
467 patternr[patternp-pattern] = x;
469 switch (GET_CODE (x))
477 encode_pattern_1 (XEXP(x, 0));
481 encode_pattern_1 (XEXP(x, 0));
482 encode_pattern_1 (XEXP(x, 1));
486 encode_pattern_1 (XEXP(x, 0));
487 encode_pattern_1 (XEXP(x, 1));
491 encode_pattern_1 (XEXP(x, 0));
505 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
506 for (i=0; i<XVECLEN (x, 0); i++)
507 encode_pattern_1 (XVECEXP (x, 0, i));
515 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
524 encode_pattern (rtx x)
527 encode_pattern_1 (x);
532 mep_section_tag (rtx x)
538 switch (GET_CODE (x))
545 x = XVECEXP (x, 0, 0);
548 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
557 if (GET_CODE (x) != SYMBOL_REF)
560 if (name[0] == '@' && name[2] == '.')
562 if (name[1] == 'i' || name[1] == 'I')
565 return 'f'; /* near */
566 return 'n'; /* far */
574 mep_regno_reg_class (int regno)
578 case SP_REGNO: return SP_REGS;
579 case TP_REGNO: return TP_REGS;
580 case GP_REGNO: return GP_REGS;
581 case 0: return R0_REGS;
582 case HI_REGNO: return HI_REGS;
583 case LO_REGNO: return LO_REGS;
584 case ARG_POINTER_REGNUM: return GENERAL_REGS;
587 if (GR_REGNO_P (regno))
588 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
589 if (CONTROL_REGNO_P (regno))
592 if (CR_REGNO_P (regno))
596 /* Search for the register amongst user-defined subclasses of
597 the coprocessor registers. */
598 for (i = USER0_REGS; i <= USER3_REGS; ++i)
600 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
602 for (j = 0; j < N_REG_CLASSES; ++j)
604 enum reg_class sub = reg_class_subclasses[i][j];
606 if (sub == LIM_REG_CLASSES)
608 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
613 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
616 if (CCR_REGNO_P (regno))
619 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
625 mep_reg_class_from_constraint (int c, const char *str)
642 return LOADABLE_CR_REGS;
644 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
646 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
673 enum reg_class which = c - 'A' + USER0_REGS;
674 return (reg_class_size[which] > 0 ? which : NO_REGS);
/* Return true if VALUE satisfies the constant-constraint letter C
   (CONST_OK_FOR_LETTER_P-style check).  */
683 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
687 case 'I': return value >= -32768 && value < 32768; /* signed 16-bit */
688 case 'J': return value >= 0 && value < 65536; /* unsigned 16-bit */
689 case 'K': return value >= 0 && value < 0x01000000; /* unsigned 24-bit */
690 case 'L': return value >= -32 && value < 32; /* signed 6-bit */
691 case 'M': return value >= 0 && value < 32; /* unsigned 5-bit */
692 case 'N': return value >= 0 && value < 16; /* unsigned 4-bit */
/* Full signed 32-bit range, written to avoid overflowing HOST_WIDE_INT
   literals.  */
696 return value >= -2147483647-1 && value <= 2147483647;
703 mep_extra_constraint (rtx value, int c)
705 encode_pattern (value);
710 /* For near symbols, like what call uses. */
711 if (GET_CODE (value) == REG)
713 return mep_call_address_operand (value, GET_MODE (value));
716 /* For signed 8-bit immediates. */
717 return (GET_CODE (value) == CONST_INT
718 && INTVAL (value) >= -128
719 && INTVAL (value) <= 127);
722 /* For tp/gp relative symbol values. */
723 return (RTX_IS ("u3s") || RTX_IS ("u2s")
724 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
727 /* Non-absolute memories. */
728 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
732 return RTX_IS ("Hs");
735 /* Register indirect. */
736 return RTX_IS ("mr");
739 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
/* Return true if X is a CONST_INT within the inclusive range
   [MINV, MAXV].  */
750 const_in_range (rtx x, int minv, int maxv)
752 return (GET_CODE (x) == CONST_INT
753 && INTVAL (x) >= minv
754 && INTVAL (x) <= maxv);
757 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
758 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
759 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
760 at the end of the insn stream. */
763 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
765 if (rtx_equal_p (dest, src1))
767 else if (rtx_equal_p (dest, src2))
772 emit_insn (gen_movsi (copy_rtx (dest), src1));
774 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
779 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
780 Change the last element of PATTERN from (clobber (scratch:SI))
781 to (clobber (reg:SI HI_REGNO)). */
784 mep_rewrite_mult (rtx insn, rtx pattern)
/* The scratch clobber is the last element of the PARALLEL; redirect
   it at the hard $hi register.  */
788 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
789 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
790 PATTERN (insn) = pattern;
/* The pattern changed, so force re-recognition.  */
791 INSN_CODE (insn) = -1;
794 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
795 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
796 store the result in DEST if nonnull. */
799 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
803 lo = gen_rtx_REG (SImode, LO_REGNO);
805 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
806 mep_mulr_source (insn, dest, src1, src2));
808 pattern = gen_mulsi3_lo (lo, src1, src2);
809 mep_rewrite_mult (insn, pattern);
812 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
813 SRC3 into $lo, then use either madd or maddr. The move into $lo will
814 be deleted by a peephole2 if SRC3 is already in $lo. */
817 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
821 lo = gen_rtx_REG (SImode, LO_REGNO);
822 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
824 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
825 mep_mulr_source (insn, dest, src1, src2),
828 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
829 mep_rewrite_mult (insn, pattern);
832 /* Return true if $lo has the same value as integer register GPR when
833 instruction INSN is reached. If necessary, rewrite the instruction
834 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
835 rtx for (reg:SI LO_REGNO).
837 This function is intended to be used by the peephole2 pass. Since
838 that pass goes from the end of a basic block to the beginning, and
839 propagates liveness information on the way, there is no need to
840 update register notes here.
842 If GPR_DEAD_P is true on entry, and this function returns true,
843 then the caller will replace _every_ use of GPR in and after INSN
844 with LO. This means that if the instruction that sets $lo is a
845 mulr- or maddr-type instruction, we can rewrite it to use mul or
846 madd instead. In combination with the copy propagation pass,
847 this allows us to replace sequences like:
856 if GPR is no longer used. */
859 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
/* Walk backwards from INSN toward the head of the basic block,
   looking for the instruction that established GPR/$lo.  */
863 insn = PREV_INSN (insn);
865 switch (recog_memoized (insn))
867 case CODE_FOR_mulsi3_1:
/* A mul whose result went to GPR: rewrite it to also store in $lo
   (dropping the GPR destination when GPR is dead afterwards).  */
869 if (rtx_equal_p (recog_data.operand[0], gpr))
871 mep_rewrite_mulsi3 (insn,
872 gpr_dead_p ? NULL : recog_data.operand[0],
873 recog_data.operand[1],
874 recog_data.operand[2]);
879 case CODE_FOR_maddsi3:
/* Likewise for a madd.  */
881 if (rtx_equal_p (recog_data.operand[0], gpr))
883 mep_rewrite_maddsi3 (insn,
884 gpr_dead_p ? NULL : recog_data.operand[0],
885 recog_data.operand[1],
886 recog_data.operand[2],
887 recog_data.operand[3]);
892 case CODE_FOR_mulsi3r:
893 case CODE_FOR_maddsi3r:
/* These already write $lo; $lo matches GPR iff the GPR operand of
   the insn is our GPR.  */
895 return rtx_equal_p (recog_data.operand[1], gpr);
/* Any intervening set of $lo or GPR, or a volatile insn, kills the
   equivalence.  */
898 if (reg_set_p (lo, insn)
899 || reg_set_p (gpr, insn)
900 || volatile_insn_p (PATTERN (insn)))
/* If the caller intends to replace every later use of GPR, any other
   reference to GPR in between blocks the transformation.  */
903 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
908 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
912 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
915 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
917 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
922 /* Return true if SET can be turned into a post-modify load or store
923 that adds OFFSET to GPR. In other words, return true if SET can be
926 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
928 It's OK to change SET to an equivalent operation in order to
/* NOTE(review): this view is elided; some early-return lines of this
   function are not visible here.  On success the function mutates SET
   in place (via *MEM / *REG) to its post-modify-ready form.  */
932 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
935 unsigned int reg_bytes, mem_bytes;
936 enum machine_mode reg_mode, mem_mode;
938 /* Only simple SETs can be converted. */
939 if (GET_CODE (set) != SET)
942 /* Point REG to what we hope will be the register side of the set and
943 MEM to what we hope will be the memory side. */
944 if (GET_CODE (SET_DEST (set)) == MEM)
946 mem = &SET_DEST (set);
947 reg = &SET_SRC (set);
951 reg = &SET_DEST (set);
952 mem = &SET_SRC (set);
/* Look through a sign extension around the load's memory operand.  */
953 if (GET_CODE (*mem) == SIGN_EXTEND)
954 mem = &XEXP (*mem, 0);
957 /* Check that *REG is a suitable coprocessor register. */
958 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
961 /* Check that *MEM is a suitable memory reference. */
962 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
965 /* Get the number of bytes in each operand. */
966 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
967 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
969 /* Check that OFFSET is suitably aligned. */
970 if (INTVAL (offset) & (mem_bytes - 1))
973 /* Convert *MEM to a normal integer mode. */
974 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
975 *mem = change_address (*mem, mem_mode, NULL);
977 /* Adjust *REG as well. */
978 *reg = shallow_copy_rtx (*reg);
979 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
981 /* SET is a subword load. Convert it to an explicit extension. */
982 PUT_MODE (*reg, SImode);
983 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
987 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
988 PUT_MODE (*reg, reg_mode);
993 /* Return the effect of frame-related instruction INSN. */
996 mep_frame_expr (rtx insn)
/* Prefer an explicit REG_FRAME_RELATED_EXPR note; otherwise a copy of
   the insn's own pattern describes the frame effect.  */
1000 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
1001 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
/* Mark the expression itself frame-related before returning it.  */
1002 RTX_FRAME_RELATED_P (expr) = 1;
1006 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
1007 new pattern in INSN1; INSN2 will be deleted by the caller. */
1010 mep_make_parallel (rtx insn1, rtx insn2)
/* If INSN2 is frame-related, migrate its frame effect onto INSN1,
   combining with INSN1's own effect via a SEQUENCE when both apply.  */
1014 if (RTX_FRAME_RELATED_P (insn2))
1016 expr = mep_frame_expr (insn2);
1017 if (RTX_FRAME_RELATED_P (insn1))
1018 expr = gen_rtx_SEQUENCE (VOIDmode,
1019 gen_rtvec (2, mep_frame_expr (insn1), expr));
1020 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
1021 RTX_FRAME_RELATED_P (insn1) = 1;
/* Fuse both patterns into one PARALLEL on INSN1 and force
   re-recognition.  */
1024 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
1025 gen_rtvec (2, PATTERN (insn1),
1027 INSN_CODE (insn1) = -1;
1030 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
1031 the basic block to see if any previous load or store instruction can
1032 be persuaded to do SET_INSN as a side-effect. Return true if so. */
1035 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
1042 insn = PREV_INSN (insn);
1045 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
1047 mep_make_parallel (insn, set_insn);
1051 if (reg_set_p (reg, insn)
1052 || reg_referenced_p (reg, PATTERN (insn))
1053 || volatile_insn_p (PATTERN (insn)))
1057 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
1061 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1064 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1066 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1067 extract_insn (insn);
/* Decide whether a clip instruction can implement the bounds pair
   UX/LX (S presumably selects signedness -- TODO confirm; the return
   statements of this function are elided from this view).  */
1072 mep_allow_clip (rtx ux, rtx lx, int s)
1074 HOST_WIDE_INT u = INTVAL (ux);
1075 HOST_WIDE_INT l = INTVAL (lx);
/* Clip is only available with -mclip.  */
1078 if (!TARGET_OPT_CLIP)
/* Symmetric signed range [-2^i, 2^i - 1].  */
1083 for (i = 0; i < 30; i ++)
1084 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1085 && (l == - ((HOST_WIDE_INT) 1 << i)))
/* Upper bound of form 2^i - 1 (lower-bound check not visible here).  */
1093 for (i = 0; i < 30; i ++)
1094 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
/* Classify the low 8 bits of X as a single-bit mask.  Masks with
   exactly one bit clear return !LOOKING_FOR; the one-bit-set arm's
   return is elided from this view but presumably returns LOOKING_FOR
   -- TODO confirm against the full source.  */
1101 mep_bit_position_p (rtx x, bool looking_for)
1103 if (GET_CODE (x) != CONST_INT)
1105 switch ((int) INTVAL(x) & 0xff)
/* Exactly one bit set.  */
1107 case 0x01: case 0x02: case 0x04: case 0x08:
1108 case 0x10: case 0x20: case 0x40: case 0x80:
/* Exactly one bit clear.  */
1110 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1111 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1112 return !looking_for;
1118 move_needs_splitting (rtx dest, rtx src,
1119 enum machine_mode mode ATTRIBUTE_UNUSED)
1121 int s = mep_section_tag (src);
1125 if (GET_CODE (src) == CONST
1126 || GET_CODE (src) == MEM)
1127 src = XEXP (src, 0);
1128 else if (GET_CODE (src) == SYMBOL_REF
1129 || GET_CODE (src) == LABEL_REF
1130 || GET_CODE (src) == PLUS)
1136 || (GET_CODE (src) == PLUS
1137 && GET_CODE (XEXP (src, 1)) == CONST_INT
1138 && (INTVAL (XEXP (src, 1)) < -65536
1139 || INTVAL (XEXP (src, 1)) > 0xffffff))
1140 || (GET_CODE (dest) == REG
1141 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1147 mep_split_mov (rtx *operands, int symbolic)
1151 if (move_needs_splitting (operands[0], operands[1], SImode))
1156 if (GET_CODE (operands[1]) != CONST_INT)
1159 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1160 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1161 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1164 if (((!reload_completed && !reload_in_progress)
1165 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1166 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1172 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1173 it to one specific value. So the insn chosen depends on whether
1174 the source and destination modes match. */
1177 mep_vliw_mode_match (rtx tgt)
/* Compare the VLIW-ness of the current (source) function against the
   flag requested by TGT, a CONST_INT treated as boolean.  */
1179 bool src_vliw = mep_vliw_function_p (cfun->decl);
1180 bool tgt_vliw = INTVAL (tgt);
1182 return src_vliw == tgt_vliw;
/* Return true if insn X carries the MULTI slot attribute.  */
1186 mep_multi_slot (rtx x)
1188 return get_attr_slot (x) == SLOT_MULTI;
1192 /* Be careful not to use macros that need to be compiled one way for
1193 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
/* Validate address X for MODE accesses (strict or non-strict register
   checks per STRICT).  Accepted forms visible here: %lo(sym)[reg],
   [reg], [reg+const16], [reg+unspec], bare call symbols, and certain
   absolute constants.  NOTE(review): the return statements themselves
   are elided from this view.  */
1196 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1200 #define DEBUG_LEGIT 0
1202 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
/* %lo(sym)[reg] -- a LO_SUM of a base register and a constant.  */
1206 if (GET_CODE (x) == LO_SUM
1207 && GET_CODE (XEXP (x, 0)) == REG
1208 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1209 && CONSTANT_P (XEXP (x, 1)))
/* Multi-word accesses must be rejected here: they get split, and
   a LO_SUM address cannot be offset to reach the second word.  */
1211 if (GET_MODE_SIZE (mode) > 4)
1213 /* We will end up splitting this, and lo_sums are not
1214 offsettable for us. */
1216 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1221 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
/* Plain register indirect.  */
1226 if (GET_CODE (x) == REG
1227 && GEN_REG (REGNO (x), strict))
1230 fprintf (stderr, " - yup, [reg]\n");
/* Base register plus signed 16-bit displacement.  */
1235 if (GET_CODE (x) == PLUS
1236 && GET_CODE (XEXP (x, 0)) == REG
1237 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1238 && const_in_range (XEXP (x, 1), -32768, 32767))
1241 fprintf (stderr, " - yup, [reg+const]\n");
/* Base register plus a (possibly offset) UNSPEC relocation.  */
1246 if (GET_CODE (x) == PLUS
1247 && GET_CODE (XEXP (x, 0)) == REG
1248 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1249 && GET_CODE (XEXP (x, 1)) == CONST
1250 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1251 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1252 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1253 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1256 fprintf (stderr, " - yup, [reg+unspec]\n");
/* Symbolic addresses are filtered by their section tag.  */
1261 the_tag = mep_section_tag (x);
1266 fprintf (stderr, " - nope, [far]\n");
/* VOIDmode symbol: a call target.  */
1271 if (mode == VOIDmode
1272 && GET_CODE (x) == SYMBOL_REF)
1275 fprintf (stderr, " - yup, call [symbol]\n");
/* Absolute constant addresses for word-size accesses, excluding
   tiny/based sections.  */
1280 if ((mode == SImode || mode == SFmode)
1282 && LEGITIMATE_CONSTANT_P (x)
1283 && the_tag != 't' && the_tag != 'b')
1285 if (GET_CODE (x) != CONST_INT
1286 || (INTVAL (x) <= 0xfffff
1288 && (INTVAL (x) % 4) == 0))
1291 fprintf (stderr, " - yup, [const]\n");
1298 fprintf (stderr, " - nope.\n");
1304 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1305 enum reload_type type,
1306 int ind_levels ATTRIBUTE_UNUSED)
1308 if (GET_CODE (*x) == PLUS
1309 && GET_CODE (XEXP (*x, 0)) == MEM
1310 && GET_CODE (XEXP (*x, 1)) == REG)
1312 /* GCC will by default copy the MEM into a REG, which results in
1313 an invalid address. For us, the best thing to do is move the
1314 whole expression to a REG. */
1315 push_reload (*x, NULL_RTX, x, NULL,
1316 GENERAL_REGS, mode, VOIDmode,
1321 if (GET_CODE (*x) == PLUS
1322 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1323 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1325 char e = mep_section_tag (XEXP (*x, 0));
1327 if (e != 't' && e != 'b')
1329 /* GCC thinks that (sym+const) is a valid address. Well,
1330 sometimes it is, this time it isn't. The best thing to
1331 do is reload the symbol to a register, since reg+int
1332 tends to work, and we can't just add the symbol and
1334 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1335 GENERAL_REGS, mode, VOIDmode,
/* Compute the encoded length of the core load/store whose memory
   operand is operand OPN of INSN.  NOTE(review): the actual return
   statements (byte counts) are elided from this view.  */
1344 mep_core_address_length (rtx insn, int opn)
1346 rtx set = single_set (insn);
1347 rtx mem = XEXP (set, opn);
/* OTHER is the non-memory operand of the single SET.  */
1348 rtx other = XEXP (set, 1-opn);
1349 rtx addr = XEXP (mem, 0);
1351 if (register_operand (addr, Pmode))
1353 if (GET_CODE (addr) == PLUS)
1355 rtx addend = XEXP (addr, 1);
1357 gcc_assert (REG_P (XEXP (addr, 0)));
1359 switch (REGNO (XEXP (addr, 0)))
1361 case STACK_POINTER_REGNUM:
/* sp-relative word access with a 7-bit word-aligned offset.  */
1362 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1363 && mep_imm7a4_operand (addend, VOIDmode))
1368 gcc_assert (REG_P (other));
/* Short forms only reach the first eight general registers.  */
1370 if (REGNO (other) >= 8)
/* $tp-relative UNSPEC addend.  */
1373 if (GET_CODE (addend) == CONST
1374 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1375 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
/* Small positive offset, aligned to the access size.  */
1378 if (GET_CODE (addend) == CONST_INT
1379 && INTVAL (addend) >= 0
1380 && INTVAL (addend) <= 127
1381 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
/* Likewise for coprocessor loads/stores: register-indirect and
   post-increment addressing are distinguished.  NOTE(review): the
   return values are elided from this view.  */
1391 mep_cop_address_length (rtx insn, int opn)
1393 rtx set = single_set (insn);
1394 rtx mem = XEXP (set, opn);
1395 rtx addr = XEXP (mem, 0);
1397 if (GET_CODE (mem) != MEM)
1399 if (register_operand (addr, Pmode))
1401 if (GET_CODE (addr) == POST_INC)
1407 #define DEBUG_EXPAND_MOV 0
/* Expand a move of MODE between OPERANDS[0] and OPERANDS[1].
   Handles forcing mem->mem moves through a register, rewriting
   tp-relative ('b') and gp-relative ('t') symbol references into
   UNSPEC+base-register addresses, control-register restrictions, and
   far ('f') symbolic destinations.  Returns whether further expansion
   is needed (exact return points not visible in this view).  */
1409 mep_expand_mov (rtx *operands, enum machine_mode mode)
1414 int post_reload = 0;
1416 tag[0] = mep_section_tag (operands[0]);
1417 tag[1] = mep_section_tag (operands[1]);
/* Before reload, a mem->mem move must go through a register.  */
1419 if (!reload_in_progress
1420 && !reload_completed
1421 && GET_CODE (operands[0]) != REG
1422 && GET_CODE (operands[0]) != SUBREG
1423 && GET_CODE (operands[1]) != REG
1424 && GET_CODE (operands[1]) != SUBREG)
1425 operands[1] = copy_to_mode_reg (mode, operands[1]);
1427 #if DEBUG_EXPAND_MOV
1428 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1429 reload_in_progress || reload_completed);
1430 debug_rtx (operands[0]);
1431 debug_rtx (operands[1]);
1434 if (mode == DImode || mode == DFmode)
1437 if (reload_in_progress || reload_completed)
/* Remember when reload itself writes to $tp, so the tp-relative
   shortcuts below can be disabled.  */
1441 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1442 cfun->machine->reload_changes_tp = true;
/* 't' (gp-relative) symbols need the incoming value of $gp;
   'b' (tp-relative) symbols need the incoming value of $tp.  */
1444 if (tag[0] == 't' || tag[1] == 't')
1446 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1447 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1450 if (tag[0] == 'b' || tag[1] == 'b')
1452 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1453 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1456 if (cfun->machine->reload_changes_tp == true)
/* Rewrite a based/tiny symbolic source as base-register + UNSPEC.  */
1463 if (symbol_p (operands[1]))
1465 t = mep_section_tag (operands[1]);
1466 if (t == 'b' || t == 't')
1469 if (GET_CODE (operands[1]) == SYMBOL_REF)
1471 tpsym = operands[1];
1472 n = gen_rtx_UNSPEC (mode,
1473 gen_rtvec (1, operands[1]),
1474 t == 'b' ? UNS_TPREL : UNS_GPREL);
1475 n = gen_rtx_CONST (mode, n);
/* (const (plus (symbol_ref) (const_int))) -- keep the offset
   outside the UNSPEC.  */
1477 else if (GET_CODE (operands[1]) == CONST
1478 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1479 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1480 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1482 tpsym = XEXP (XEXP (operands[1], 0), 0);
1483 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1484 n = gen_rtx_UNSPEC (mode,
1485 gen_rtvec (1, tpsym),
1486 t == 'b' ? UNS_TPREL : UNS_GPREL);
1487 n = gen_rtx_PLUS (mode, n, tpoffs);
1488 n = gen_rtx_CONST (mode, n);
/* Already-wrapped UNSPECs fall through; anything else is a form we
   do not know how to make base-relative.  */
1490 else if (GET_CODE (operands[1]) == CONST
1491 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1495 error ("unusual TP-relative address");
1499 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1500 : mep_gp_rtx ()), n);
1501 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1502 #if DEBUG_EXPAND_MOV
1503 fprintf(stderr, "mep_expand_mov emitting ");
/* Rewrite based/tiny MEM operands (either side) the same way.  */
1510 for (i=0; i < 2; i++)
1512 t = mep_section_tag (operands[i]);
1513 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1518 sym = XEXP (operands[i], 0);
1519 if (GET_CODE (sym) == CONST
1520 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1521 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1534 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1535 n = gen_rtx_CONST (Pmode, n);
1536 n = gen_rtx_PLUS (Pmode, r, n);
1537 operands[i] = replace_equiv_address (operands[i], n);
/* Control registers cannot be moved directly to/from memory; go
   through a general-register temporary.  */
1542 if ((GET_CODE (operands[1]) != REG
1543 && MEP_CONTROL_REG (operands[0]))
1544 || (GET_CODE (operands[0]) != REG
1545 && MEP_CONTROL_REG (operands[1])))
1548 #if DEBUG_EXPAND_MOV
1549 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1551 temp = gen_reg_rtx (mode);
1552 emit_move_insn (temp, operands[1]);
/* Far ('f') symbolic destinations, or non-word-sized symbolic
   stores, need the address in a register first.  */
1556 if (symbolref_p (operands[0])
1557 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1558 || (GET_MODE_SIZE (mode) != 4)))
1562 gcc_assert (!reload_in_progress && !reload_completed);
1564 temp = force_reg (Pmode, XEXP (operands[0], 0));
1565 operands[0] = replace_equiv_address (operands[0], temp);
1566 emit_move_insn (operands[0], operands[1]);
1570 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
/* Symbolic constants are built with a movh/add (top/bottom) pair.  */
1573 if (symbol_p (operands[1])
1574 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1576 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1577 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1581 if (symbolref_p (operands[1])
1582 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1586 if (reload_in_progress || reload_completed)
1589 temp = gen_reg_rtx (Pmode);
1591 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1592 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1593 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
/* Reject operand combinations that the mov patterns cannot handle at
   all.  Returns nonzero when the move is acceptable as-is.  */
1603 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1607 #define DEBUG_MOV_OK 0
1609 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1610 mep_section_tag (operands[1]));
1611 debug_rtx (operands[0]);
1612 debug_rtx (operands[1]);
1615 /* We want the movh patterns to get these. */
1616 if (GET_CODE (operands[1]) == HIGH)
1619 /* We can't store a register to a far variable without using a
1620 scratch register to hold the address. Using far variables should
1621 be split by mep_emit_mov anyway. */
1622 if (mep_section_tag (operands[0]) == 'f'
1623 || mep_section_tag (operands[1]) == 'f')
1626 fprintf (stderr, " - no, f\n");
1630 i = mep_section_tag (operands[1]);
1631 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1632 /* These are supposed to be generated with adds of the appropriate
1633 register. During and after reload, however, we allow them to
1634 be accessed as normal symbols because adding a dependency on
1635 the base register now might cause problems. */
1638 fprintf (stderr, " - no, bt\n");
1643 /* The only moves we can allow involve at least one general
1644 register, so require it. */
1645 for (i = 0; i < 2; i ++)
1647 /* Allow subregs too, before reload. */
1648 rtx x = operands[i];
1650 if (GET_CODE (x) == SUBREG)
/* A non-control hard or pseudo register on either side is enough.  */
1652 if (GET_CODE (x) == REG
1653 && ! MEP_CONTROL_REG (x))
1656 fprintf (stderr, " - ok\n");
1662 fprintf (stderr, " - no, no gen reg\n");
1667 #define DEBUG_SPLIT_WIDE_MOVE 0
/* Split a doubleword move into two word moves.  On entry OPERANDS[0]
   and OPERANDS[1] are the destination and source; on exit
   OPERANDS[2..5] hold the two half-moves (hi halves in 2/3, lo halves
   in 4/5), possibly swapped to avoid an early-clobber overlap.  */
1669 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1673 #if DEBUG_SPLIT_WIDE_MOVE
1674 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1675 debug_rtx (operands[0]);
1676 debug_rtx (operands[1]);
1679 for (i = 0; i <= 1; i++)
1681 rtx op = operands[i], hi, lo;
1683 switch (GET_CODE (op))
1687 unsigned int regno = REGNO (op);
/* A 64-bit coprocessor register holds the whole value; its high
   word is accessed via ZERO_EXTRACT on the DImode register.  */
1689 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1693 lo = gen_rtx_REG (SImode, regno);
1695 hi = gen_rtx_ZERO_EXTRACT (SImode,
1696 gen_rtx_REG (DImode, regno),
/* Otherwise use a register pair; endianness picks which regno is
   the high word.  */
1701 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1702 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1710 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1711 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1718 /* The high part of CR <- GPR moves must be done after the low part. */
1719 operands [i + 4] = lo;
1720 operands [i + 2] = hi;
1723 if (reg_mentioned_p (operands[2], operands[5])
1724 || GET_CODE (operands[2]) == ZERO_EXTRACT
1725 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1729 /* Overlapping register pairs -- make sure we don't
1730 early-clobber ourselves. */
1732 operands[2] = operands[4];
1735 operands[3] = operands[5];
1739 #if DEBUG_SPLIT_WIDE_MOVE
1740 fprintf(stderr, "\033[34m");
1741 debug_rtx (operands[2]);
1742 debug_rtx (operands[3]);
1743 debug_rtx (operands[4]);
1744 debug_rtx (operands[5]);
1745 fprintf(stderr, "\033[0m");
/* Emit a setcc instruction in its entirety: set DEST to 1 if
   CODE(OP1, OP2) holds, else 0.  Only LT/LTU (and NE via the
   subtract trick) are native; the other codes are rewritten in terms
   of those, adjusting constant operands by one where needed.  */
1752 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
/* Canonicalize operand order, inverting the comparison.  */
1760 tmp = op1, op1 = op2, op2 = tmp;
1761 code = swap_condition (code);
1766 op1 = force_reg (SImode, op1);
1767 emit_insn (gen_rtx_SET (VOIDmode, dest,
1768 gen_rtx_fmt_ee (code, SImode, op1, op2)));
/* NE: (op1 - op2) != 0 <=> (op1 - op2) >= 1 unsigned... inverted:
   sltu against 1 after the subtraction.  */
1772 if (op2 != const0_rtx)
1773 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1774 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1778 /* Branchful sequence:
1780 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1783 Branchless sequence:
1784 add3 tmp, op1, -op2 32-bit (or mov + sub)
1785 sltu3 tmp, tmp, 1 16-bit
1786 xor3 dest, tmp, 1 32-bit
1788 if (optimize_size && op2 != const0_rtx)
1791 if (op2 != const0_rtx)
1792 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1794 op2 = gen_reg_rtx (SImode);
1795 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1797 emit_insn (gen_rtx_SET (VOIDmode, dest,
1798 gen_rtx_XOR (SImode, op2, const1_rtx)));
/* LE: rewrite as LT with op2+1, unless op2+1 would overflow.
   NOTE(review): 0x7ffffff below has only seven f's -- almost
   certainly meant to be 0x7fffffff (INT_MAX); as written the
   overflow guard triggers on the wrong value.  Confirm and fix in
   the full source.  */
1802 if (GET_CODE (op2) != CONST_INT
1803 || INTVAL (op2) == 0x7ffffff)
1805 op2 = GEN_INT (INTVAL (op2) + 1);
1806 return mep_expand_setcc_1 (LT, dest, op1, op2);
/* LEU: rewrite as LTU with op2+1, guarding against wraparound.  */
1809 if (GET_CODE (op2) != CONST_INT
1810 || INTVAL (op2) == -1)
1812 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1813 return mep_expand_setcc_1 (LTU, dest, op1, op2);
/* GE: rewrite as GT with op2-1, guarding against INT_MIN.  */
1816 if (GET_CODE (op2) != CONST_INT
1817 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1819 op2 = GEN_INT (INTVAL (op2) - 1);
1820 return mep_expand_setcc_1 (GT, dest, op1, op2);
/* GEU: rewrite as GTU with op2-1, guarding against zero.  */
1823 if (GET_CODE (op2) != CONST_INT
1824 || op2 == const0_rtx)
1826 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1827 return mep_expand_setcc_1 (GTU, dest, op1, op2);
/* Expander entry point for the cstore patterns: OPERANDS[0] is the
   destination, OPERANDS[1] the comparison rtx, OPERANDS[2]/[3] its
   arguments.  Delegates to mep_expand_setcc_1.  */
1835 mep_expand_setcc (rtx *operands)
1837 rtx dest = operands[0];
1838 enum rtx_code code = GET_CODE (operands[1]);
1839 rtx op0 = operands[2];
1840 rtx op1 = operands[3];
1842 return mep_expand_setcc_1 (code, dest, op0, op1);
/* Expand a conditional branch comparison.  OPERANDS[0] is the
   comparison rtx, OPERANDS[1]/[2] its arguments.  Comparisons the
   hardware cannot branch on directly are materialized into a
   register with mep_expand_setcc_1 and then compared against zero;
   returns the rtx comparison to branch on.  */
1846 mep_expand_cbranch (rtx *operands)
1848 enum rtx_code code = GET_CODE (operands[0]);
1849 rtx op0 = operands[1];
1850 rtx op1 = operands[2];
/* Small immediates can use the setcc form directly.  */
1857 if (mep_imm4_operand (op1, SImode))
1860 tmp = gen_reg_rtx (SImode);
1861 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1868 if (mep_imm4_operand (op1, SImode))
1871 tmp = gen_reg_rtx (SImode);
1872 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1881 if (! mep_reg_or_imm4_operand (op1, SImode))
1882 op1 = force_reg (SImode, op1);
/* LE/GE against a constant: bump the constant and use LT/GE,
   avoiding overflow at INT_MAX.  */
1887 if (GET_CODE (op1) == CONST_INT
1888 && INTVAL (op1) != 0x7fffffff)
1890 op1 = GEN_INT (INTVAL (op1) + 1);
1891 code = (code == LE ? LT : GE);
1895 tmp = gen_reg_rtx (SImode);
1896 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1898 code = (code == LE ? EQ : NE);
1904 if (op1 == const1_rtx)
1911 tmp = gen_reg_rtx (SImode);
1912 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
/* Unsigned cases: try each setcc form, swapping operands when the
   first form is not available.  */
1919 tmp = gen_reg_rtx (SImode);
1920 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1922 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1931 tmp = gen_reg_rtx (SImode);
1932 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1933 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1940 tmp = gen_reg_rtx (SImode);
1941 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1943 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1955 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
/* Return the assembler template for a conditional branch.
   OPERANDS[0]/[1] are the comparison arguments, OPERANDS[2] the
   label; NE nonzero selects the branch-on-not-equal forms.  Picks
   the register (bne/beq), zero (bnez/beqz) or immediate (bnei/beqi)
   variant.  */
1959 mep_emit_cbranch (rtx *operands, int ne)
1961 if (GET_CODE (operands[1]) == REG)
1962 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1963 else if (INTVAL (operands[1]) == 0)
1964 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1966 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
/* Expand a call (RETURNS_VALUE == 0) or call_value (== 1) pattern.
   The call address is OPERANDS[RETURNS_VALUE]; $tp and $gp are
   passed explicitly so the call patterns can track their uses.  */
1970 mep_expand_call (rtx *operands, int returns_value)
1972 rtx addr = operands[returns_value];
1973 rtx tp = mep_tp_rtx ();
1974 rtx gp = mep_gp_rtx ();
1976 gcc_assert (GET_CODE (addr) == MEM);
1978 addr = XEXP (addr, 0);
1980 if (! mep_call_address_operand (addr, VOIDmode))
1981 addr = force_reg (SImode, addr);
/* Default the optional third operand (presumably the number of
   registers used -- TODO confirm) to zero.  */
1983 if (! operands[returns_value+2])
1984 operands[returns_value+2] = const0_rtx;
1987 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
1988 operands[3], tp, gp));
1990 emit_call_insn (gen_call_internal (addr, operands[1],
1991 operands[2], tp, gp));
1994 /* Aliasing Support. */
1996 /* If X is a machine specific address (i.e. a symbol or label being
1997 referenced as a displacement from the GOT implemented using an
1998 UNSPEC), then return the base term. Otherwise return X. */
2001 mep_find_base_term (rtx x)
2006 if (GET_CODE (x) != PLUS)
/* Match $tp- and $gp-relative addresses against the incoming base
   register values.  */
2011 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
2012 && base == mep_tp_rtx ())
2014 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
2015 && base == mep_gp_rtx ())
2020 if (GET_CODE (term) != CONST)
2022 term = XEXP (term, 0);
/* The displacement must be the matching UNS_TPREL/UNS_GPREL unspec;
   its first element is the underlying symbol.  */
2024 if (GET_CODE (term) != UNSPEC
2025 || XINT (term, 1) != unspec)
2028 return XVECEXP (term, 0, 0);
2031 /* Reload Support. */
2033 /* Return true if the registers in CLASS cannot represent the change from
2034 modes FROM to TO. */
2037 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2038 enum reg_class regclass)
2043 /* 64-bit COP regs must remain 64-bit COP regs. */
2044 if (TARGET_64BIT_CR_REGS
2045 && (regclass == CR_REGS
2046 || regclass == LOADABLE_CR_REGS)
2047 && (GET_MODE_SIZE (to) < 8
2048 || GET_MODE_SIZE (from) < 8))
2054 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
/* True if X (stripping any SUBREGs) is a core general-purpose
   register.  */
2057 mep_general_reg (rtx x)
2059 while (GET_CODE (x) == SUBREG)
2061 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
/* True if X (stripping any SUBREGs) is a hard register that is NOT a
   core general-purpose register (control or coprocessor regs).  */
2065 mep_nongeneral_reg (rtx x)
2067 while (GET_CODE (x) == SUBREG)
2069 return (GET_CODE (x) == REG
2070 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
/* True if X (stripping any SUBREGs) is a coprocessor register.  */
2074 mep_general_copro_reg (rtx x)
2076 while (GET_CODE (x) == SUBREG)
2078 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
/* True if X (stripping any SUBREGs) is not a hard register at all --
   a pseudo or a non-register rtx.  */
2082 mep_nonregister (rtx x)
2084 while (GET_CODE (x) == SUBREG)
2086 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2089 #define DEBUG_RELOAD 0
2091 /* Return the secondary reload class needed for moving value X to or
2092 from a register in coprocessor register class CLASS. */
2094 static enum reg_class
2095 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2097 if (mep_general_reg (x))
2098 /* We can do the move directly if mep_have_core_copro_moves_p,
2099 otherwise we need to go through memory. Either way, no secondary
2100 register is needed. */
2103 if (mep_general_copro_reg (x))
2105 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2106 if (mep_have_copro_copro_moves_p)
2109 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2110 if (mep_have_core_copro_moves_p)
2111 return GENERAL_REGS;
2113 /* Otherwise we need to do it through memory. No secondary
2114 register is needed. */
2118 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2119 && constraint_satisfied_p (x, CONSTRAINT_U))
2120 /* X is a memory value that we can access directly. */
2123 /* We have to move X into a GPR first and then copy it to
2124 the coprocessor register. The move from the GPR to the
2125 coprocessor might be done directly or through memory,
2126 depending on mep_have_core_copro_moves_p. */
2127 return GENERAL_REGS;
2130 /* Copying X to register in RCLASS. */
2133 mep_secondary_input_reload_class (enum reg_class rclass,
2134 enum machine_mode mode ATTRIBUTE_UNUSED,
2140 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
/* Coprocessor destinations have their own logic; any other
   non-general class needs a GPR intermediary for non-GPR sources.  */
2144 if (reg_class_subset_p (rclass, CR_REGS))
2145 rv = mep_secondary_copro_reload_class (rclass, x);
2146 else if (MEP_NONGENERAL_CLASS (rclass)
2147 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2151 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2156 /* Copying register in RCLASS to X. */
2159 mep_secondary_output_reload_class (enum reg_class rclass,
2160 enum machine_mode mode ATTRIBUTE_UNUSED,
2166 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
/* Mirror image of mep_secondary_input_reload_class.  */
2170 if (reg_class_subset_p (rclass, CR_REGS))
2171 rv = mep_secondary_copro_reload_class (rclass, x);
2172 else if (MEP_NONGENERAL_CLASS (rclass)
2173 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2177 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2183 /* Implement SECONDARY_MEMORY_NEEDED. */
2186 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2187 enum machine_mode mode ATTRIBUTE_UNUSED)
/* Without direct core<->copro moves, any GPR/CR transfer must go
   through memory.  */
2189 if (!mep_have_core_copro_moves_p)
2191 if (reg_classes_intersect_p (rclass1, CR_REGS)
2192 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2194 if (reg_classes_intersect_p (rclass2, CR_REGS)
2195 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
/* Likewise CR<->CR when copro-to-copro moves are unavailable.  */
2197 if (!mep_have_copro_copro_moves_p
2198 && reg_classes_intersect_p (rclass1, CR_REGS)
2199 && reg_classes_intersect_p (rclass2, CR_REGS))
/* Expand a secondary reload: move OPERANDS[1] to OPERANDS[0] using
   scratch OPERANDS[2].  WHICH encodes the combination as two decimal
   digits -- tens digit: destination is far ('f' section, 2) or
   non-general register (1); ones digit: same for the source.
   NOTE(review): the 00/01/02 case labels have leading zeros and are
   therefore octal literals; all values are < 8 so they equal their
   decimal readings, but the style is fragile.  */
2206 mep_expand_reload (rtx *operands, enum machine_mode mode)
2208 /* There are three cases for each direction:
2213 int s0 = mep_section_tag (operands[0]) == 'f';
2214 int s1 = mep_section_tag (operands[1]) == 'f';
2215 int c0 = mep_nongeneral_reg (operands[0]);
2216 int c1 = mep_nongeneral_reg (operands[1]);
2217 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2220 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2221 debug_rtx (operands[0]);
2222 debug_rtx (operands[1]);
2227 case 00: /* Don't know why this gets here. */
2228 case 02: /* general = far */
2229 emit_move_insn (operands[0], operands[1]);
/* Cases involving a control/coprocessor register bounce through the
   scratch register.  */
2232 case 10: /* cr = mem */
2233 case 11: /* cr = cr */
2234 case 01: /* mem = cr */
2235 case 12: /* cr = far */
2236 emit_move_insn (operands[2], operands[1]);
2237 emit_move_insn (operands[0], operands[2]);
/* Far destination: load the address into the scratch first.  */
2240 case 20: /* far = general */
2241 emit_move_insn (operands[2], XEXP (operands[1], 0));
2242 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2245 case 21: /* far = cr */
2246 case 22: /* far = far */
2248 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2249 which, mode_name[mode]);
2250 debug_rtx (operands[0]);
2251 debug_rtx (operands[1]);
2256 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2257 can be moved directly into registers 0 to 7, but not into the rest.
2258 If so, and if the required class includes registers 0 to 7, restrict
2259 it to those registers. */
2262 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2264 switch (GET_CODE (x))
/* Integers in [0x10000, 0x1000000) with a nonzero low half cannot be
   built by the mov/movh-only sequences available to high regs.  */
2267 if (INTVAL (x) >= 0x10000
2268 && INTVAL (x) < 0x01000000
2269 && (INTVAL (x) & 0xffff) != 0
2270 && reg_class_subset_p (TPREL_REGS, rclass))
2271 rclass = TPREL_REGS;
/* Non-far symbols likewise prefer the low registers.  */
2277 if (mep_section_tag (x) != 'f'
2278 && reg_class_subset_p (TPREL_REGS, rclass))
2279 rclass = TPREL_REGS;
2288 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2289 moves, 4 for direct double-register moves, and 1000 for anything
2290 that requires a temporary register or temporary stack slot. */
2293 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
/* CR <-> CR with direct copro moves available.  */
2295 if (mep_have_copro_copro_moves_p
2296 && reg_class_subset_p (from, CR_REGS)
2297 && reg_class_subset_p (to, CR_REGS))
2299 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
/* CR <-> CR without direct moves (goes through core/memory).  */
2303 if (reg_class_subset_p (from, CR_REGS)
2304 && reg_class_subset_p (to, CR_REGS))
2306 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
/* CR <-> core.  */
2310 if (reg_class_subset_p (from, CR_REGS)
2311 || reg_class_subset_p (to, CR_REGS))
2313 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2317 if (mep_secondary_memory_needed (from, to, mode))
2319 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2322 if (GET_MODE_SIZE (mode) > 4)
2329 /* Functions to save and restore machine-specific function data. */
/* Allocate a zero-initialized, garbage-collected machine_function for
   the current function (INIT_MACHINE_STATUS hook).  */
2331 static struct machine_function *
2332 mep_init_machine_status (void)
2334 struct machine_function *f;
2336 f = (struct machine_function *) ggc_alloc_cleared (sizeof (struct machine_function));
/* Implement ALLOCATE_INITIAL_VALUE: return a stack slot (relative to
   the arg pointer) where the entry value of hard register REG is
   saved, assigning a new slot on first use.  Returns NULL for cases
   with no slot (pseudos, or $tp/$gp in interrupt handlers).  */
2342 mep_allocate_initial_value (rtx reg)
2346 if (GET_CODE (reg) != REG)
2349 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2352 /* In interrupt functions, the "initial" values of $gp and $tp are
2353 provided by the prologue. They are not necessarily the same as
2354 the values that the caller was using. */
2355 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2356 if (mep_interrupt_p ())
/* First request for this register: grow the save area by a word and
   record the slot offset.  */
2359 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2361 cfun->machine->reg_save_size += 4;
2362 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2365 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2366 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
/* Implement RETURN_ADDR_RTX: the return address lives in $lp; only
   COUNT == 0 (the current frame) is supported.  */
2370 mep_return_addr_rtx (int count)
2375 return get_hard_reg_initial_val (Pmode, LP_REGNO);
/* NOTE(review): these two returns appear to be the bodies of the
   $tp and $gp accessors (presumably mep_tp_rtx and mep_gp_rtx --
   their signatures are not visible at this point); each yields the
   incoming value of the respective base register.  */
2381 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2387 return get_hard_reg_initial_val (Pmode, GP_REGNO);
/* True if the current function has the "interrupt" attribute.  The
   result is cached in cfun->machine->interrupt_handler (0 = unknown,
   1 = no, 2 = yes -- see the machine_function comment).  */
2391 mep_interrupt_p (void)
2393 if (cfun->machine->interrupt_handler == 0)
2395 int interrupt_handler
2396 = (lookup_attribute ("interrupt",
2397 DECL_ATTRIBUTES (current_function_decl))
2399 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2401 return cfun->machine->interrupt_handler == 2;
/* True if the current function has the "disinterrupt" attribute,
   cached in cfun->machine->disable_interrupts the same way as
   mep_interrupt_p caches its result.  */
2405 mep_disinterrupt_p (void)
2407 if (cfun->machine->disable_interrupts == 0)
2409 int disable_interrupts
2410 = (lookup_attribute ("disinterrupt",
2411 DECL_ATTRIBUTES (current_function_decl))
2413 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2415 return cfun->machine->disable_interrupts == 2;
2419 /* Frame/Epilog/Prolog Related. */
/* True if INSN sets REG.  */
2422 mep_reg_set_p (rtx reg, rtx insn)
2424 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2427 if (FIND_REG_INC_NOTE (insn, reg))
2429 insn = PATTERN (insn);
/* A no-op register self-copy does not count as a real set.  */
2432 if (GET_CODE (insn) == SET
2433 && GET_CODE (XEXP (insn, 0)) == REG
2434 && GET_CODE (XEXP (insn, 1)) == REG
2435 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2438 return set_of (reg, insn) != NULL_RTX;
2442 #define MEP_SAVES_UNKNOWN 0
2443 #define MEP_SAVES_YES 1
2444 #define MEP_SAVES_MAYBE 2
2445 #define MEP_SAVES_NO 3
/* True if hard register REGNO is written anywhere in the current
   function (scanning the full insn stream), or is implicitly
   clobbered ($lp by profiling, live regs in interrupt handlers).  */
2448 mep_reg_set_in_function (int regno)
2452 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
/* Profiling code clobbers $lp behind our back.  */
2455 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2458 push_topmost_sequence ();
2459 insn = get_insns ();
2460 pop_topmost_sequence ();
2465 reg = gen_rtx_REG (SImode, regno);
2467 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2468 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
/* True if the current function contains a basic asm (ASM_INPUT)
   statement -- such asm can touch any register, so interrupt
   handlers must save everything.  Cached in
   cfun->machine->asms_without_operands (0 unknown / 1 no / 2 yes).  */
2474 mep_asm_without_operands_p (void)
2476 if (cfun->machine->asms_without_operands == 0)
2480 push_topmost_sequence ();
2481 insn = get_insns ();
2482 pop_topmost_sequence ();
2484 cfun->machine->asms_without_operands = 1;
2488 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2490 cfun->machine->asms_without_operands = 2;
2493 insn = NEXT_INSN (insn);
2497 return cfun->machine->asms_without_operands == 2;
2500 /* Interrupt functions save/restore every call-preserved register, and
2501 any call-used register it uses (or all if it calls any function,
2502 since they may get clobbered there too). Here we check to see
2503 which call-used registers need saving. */
/* Extra IVC2 coprocessor control registers that interrupt handlers
   must preserve.  */
2505 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2506 && (r == FIRST_CCR_REGNO + 1 \
2507 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2508 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
/* True if interrupt-handler save/restore rules require register R to
   be saved in the current (interrupt) function.  */
2511 mep_interrupt_saved_reg (int r)
2513 if (!mep_interrupt_p ())
/* The prologue/epilogue always need the control-save temporaries.  */
2515 if (r == REGSAVE_CONTROL_TEMP
2516 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
/* A basic asm may clobber anything, so save aggressively.  */
2518 if (mep_asm_without_operands_p ()
2520 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2521 || IVC2_ISAVED_REG (r))
2523 if (!current_function_is_leaf)
2524 /* Function calls mean we need to save $lp. */
2525 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2527 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2528 /* The interrupt handler might use these registers for repeat blocks,
2529 or it might call a function that does so. */
2530 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2532 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2534 /* Functions we call might clobber these. */
2535 if (call_used_regs[r] && !fixed_regs[r])
2537 /* Additional registers that need to be saved for IVC2. */
2538 if (IVC2_ISAVED_REG (r))
/* True if the prologue must save (and the epilogue restore) hard
   register R in the current function.  The verdict is cached in
   cfun->machine->reg_saved[] using the MEP_SAVES_* values.  */
2545 mep_call_saves_register (int r)
2547 /* if (cfun->machine->reg_saved[r] == MEP_SAVES_UNKNOWN)*/
2549 int rv = MEP_SAVES_NO;
/* An already-assigned save slot (from ALLOCATE_INITIAL_VALUE)
   forces a save.  */
2551 if (cfun->machine->reg_save_slot[r])
2553 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2555 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2557 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
/* $10/$11 are the EH return data registers.  */
2559 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2560 /* We need these to have stack slots so that they can be set during
2563 else if (mep_interrupt_saved_reg (r))
2565 cfun->machine->reg_saved[r] = rv;
2567 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2570 /* Return true if epilogue uses register REGNO. */
2573 mep_epilogue_uses (int regno)
2575 /* Since $lp is a call-saved register, the generic code will normally
2576 mark it used in the epilogue if it needs to be saved and restored.
2577 However, when profiling is enabled, the profiling code will implicitly
2578 clobber $11. This case has to be handled specially both here and in
2579 mep_call_saves_register. */
2580 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2582 /* Interrupt functions save/restore pretty much everything. */
2583 return (reload_completed && mep_interrupt_saved_reg (regno));
/* Save-area size in bytes of hard register REGNO: 8 for 64-bit
   coprocessor registers, otherwise the word size.  */
2587 mep_reg_size (int regno)
2589 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
/* Implement INITIAL_ELIMINATION_OFFSET: distance between eliminable
   registers FROM and TO.  Also computes and caches the 8-byte
   alignment fillers for the register-save area and the whole frame
   in cfun->machine.  */
2595 mep_elimination_offset (int from, int to)
2599 int frame_size = get_frame_size () + crtl->outgoing_args_size;
/* Reset the cache so mep_call_saves_register recomputes from the
   current df information.  */
2602 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2604 /* We don't count arg_regs_to_save in the arg pointer offset, because
2605 gcc thinks the arg pointer has moved along with the saved regs.
2606 However, we do count it when we adjust $sp in the prologue. */
2608 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2609 if (mep_call_saves_register (i))
2610 reg_save_size += mep_reg_size (i);
/* Pad the register-save area to an 8-byte boundary.  */
2612 if (reg_save_size % 8)
2613 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2615 cfun->machine->regsave_filler = 0;
2617 /* This is what our total stack adjustment looks like. */
2618 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2621 cfun->machine->frame_filler = 8 - (total_size % 8);
2623 cfun->machine->frame_filler = 0;
2626 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2627 return reg_save_size + cfun->machine->regsave_filler;
2629 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2630 return cfun->machine->frame_filler + frame_size;
2632 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2633 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2641 RTX_FRAME_RELATED_P (x) = 1;
2645 /* Since the prologue/epilogue code is generated after optimization,
2646 we can't rely on gcc to split constants for us. So, this code
2647 captures all the ways to add a constant to a register in one logic
2648 chunk, including optimizing away insns we just don't need. This
2649 makes the prolog/epilog code easier to follow. */
/* Emit DEST = SRC + VALUE, choosing mov / add / movh+or+add as the
   constant requires.  MARK_FRAME nonzero tags the emitted insns as
   frame-related for DWARF CFI.  */
2651 add_constant (int dest, int src, int value, int mark_frame)
/* Nothing to do at all.  */
2656 if (src == dest && value == 0)
/* Zero addend: a plain register copy suffices.  */
2661 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2662 gen_rtx_REG (SImode, src));
2664 RTX_FRAME_RELATED_P(insn) = 1;
/* Addend fits in a signed 16-bit immediate: single add.  */
2668 if (value >= -32768 && value <= 32767)
2670 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2671 gen_rtx_REG (SImode, src),
2674 RTX_FRAME_RELATED_P(insn) = 1;
2678 /* Big constant, need to use a temp register. We use
2679 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2680 area is always small enough to directly add to). */
2682 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2683 lo = value & 0xffff;
2685 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2690 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2691 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2695 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2696 gen_rtx_REG (SImode, src),
2697 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
/* Describe the three-insn sequence to the unwinder as a single
   DEST = DEST + VALUE operation.  */
2700 RTX_FRAME_RELATED_P(insn) = 1;
2701 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2702 gen_rtx_SET (SImode,
2703 gen_rtx_REG (SImode, dest),
2704 gen_rtx_PLUS (SImode,
2705 gen_rtx_REG (SImode, dest),
/* True if any insn in the current function (including pending
   sequences) mentions the stack pointer.  */
2711 mep_function_uses_sp (void)
2714 struct sequence_stack *seq;
2715 rtx sp = gen_rtx_REG (SImode, SP_REGNO);
/* Start from the outermost sequence on the emit stack.  */
2717 insn = get_insns ();
2718 for (seq = crtl->emit.sequence_stack;
2720 insn = seq->first, seq = seq->next);
2724 if (mep_mentioned_p (insn, sp, 0))
2726 insn = NEXT_INSN (insn);
2731 /* Move SRC to DEST. Mark the move as being potentially dead if
   MAYBE_DEAD_P is set (via a REG_MAYBE_DEAD note), so later passes
   may delete it when the value turns out to be unused. */
2735 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2737 rtx insn = emit_move_insn (dest, src);
2740 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2745 /* Used for interrupt functions, which can't assume that $tp and $gp
2746 contain the correct pointers. */
/* Load the address of SYMBOL into hard register REGNO with a
   movh/add (top/bottom) pair; skipped for leaf functions that never
   use the register.  */
2749 mep_reload_pointer (int regno, const char *symbol)
2753 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2756 reg = gen_rtx_REG (SImode, regno);
2757 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2758 emit_insn (gen_movsi_topsym_s (reg, sym));
2759 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
/* Expand the function prologue: disable interrupts if requested,
   assign save slots, adjust $sp, store the saved registers, set up
   the frame pointer, and reload $gp/$tp in interrupt handlers.  */
2763 mep_expand_prologue (void)
2765 int i, rss, sp_offset = 0;
2768 int really_need_stack_frame = frame_size;
2771 /* We must not allow register renaming in interrupt functions,
2772 because that invalidates the correctness of the set of call-used
2773 registers we're going to save/restore. */
2774 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2776 if (mep_disinterrupt_p ())
2777 emit_insn (gen_mep_disable_int ());
2779 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2781 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2782 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2784 /* Assign save slots for any register not already saved. DImode
2785 registers go at the end of the reg save area; the rest go at the
2786 beginning. This is for alignment purposes. */
2787 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2788 if (mep_call_saves_register(i))
2790 int regsize = mep_reg_size (i);
/* Saving anything other than untouched $tp/$gp/$lp means we really
   do need the stack adjustment.  */
2792 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2793 || mep_reg_set_in_function (i))
2794 really_need_stack_frame = 1;
2796 if (cfun->machine->reg_save_slot[i])
2801 cfun->machine->reg_save_size += regsize;
2802 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2806 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
/* Fold a small frame into the initial $sp adjustment so the saves
   below still reach their slots with 7-bit offsets.  */
2811 sp_offset = reg_save_size;
2812 if (sp_offset + frame_size < 128)
2813 sp_offset += frame_size ;
2815 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
/* Store each register that needs saving into its slot.  */
2817 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2818 if (mep_call_saves_register(i))
2822 enum machine_mode rmode;
2824 rss = cfun->machine->reg_save_slot[i];
/* $tp/$gp/$lp that are never written (outside interrupts) keep
   their entry values; no store needed.  */
2826 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2827 && (!mep_reg_set_in_function (i)
2828 && !mep_interrupt_p ()))
2831 if (mep_reg_size (i) == 8)
2836 /* If there is a pseudo associated with this register's initial value,
2837 reload might have already spilt it to the stack slot suggested by
2838 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2840 mem = gen_rtx_MEM (rmode,
2841 plus_constant (stack_pointer_rtx, sp_offset - rss));
2842 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2844 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2845 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
/* 64-bit coprocessor registers are stored as two words via the
   control temporaries, with endian-dependent slot halves.  */
2846 else if (rmode == DImode)
2849 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2851 mem = gen_rtx_MEM (SImode,
2852 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2854 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2855 gen_rtx_REG (SImode, i),
2857 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2858 gen_rtx_ZERO_EXTRACT (SImode,
2859 gen_rtx_REG (DImode, i),
2863 insn = maybe_dead_move (mem,
2864 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2866 RTX_FRAME_RELATED_P (insn) = 1;
/* Tell the unwinder the real register being saved, not the temp.  */
2868 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2869 gen_rtx_SET (VOIDmode,
2871 gen_rtx_REG (rmode, i)));
2872 mem = gen_rtx_MEM (SImode,
2873 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2874 insn = maybe_dead_move (mem,
2875 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
/* Other control registers go through a single temp.  */
2881 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2882 gen_rtx_REG (rmode, i),
2884 insn = maybe_dead_move (mem,
2885 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2887 RTX_FRAME_RELATED_P (insn) = 1;
2889 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2890 gen_rtx_SET (VOIDmode,
2892 gen_rtx_REG (rmode, i)));
/* Establish the frame pointer, or do the remaining $sp drop if the
   frame was too large to fold into the first adjustment.  */
2896 if (frame_pointer_needed)
2897 add_constant (FP_REGNO, SP_REGNO, sp_offset - frame_size, 1);
2899 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2901 if (mep_interrupt_p ())
2903 mep_reload_pointer(GP_REGNO, "__sdabase");
2904 mep_reload_pointer(TP_REGNO, "__tpbase");
/* Emit a human-readable summary of the stack-frame layout (register
   save slots, alignment padding, locals, outgoing args) as assembler
   comments at the start of each function.  Purely cosmetic output;
   does not affect code generation.
   NOTE(review): this extract is missing interior lines (braces, some
   declarations); code lines kept byte-identical.  */
2909 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2911 int local = hwi_local;
2912 int frame_size = local + crtl->outgoing_args_size;
2917 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
/* Recompute the same frame geometry the prologue used.  */
2919 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2920 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2921 sp_offset = reg_save_size + frame_size;
2923 ffill = cfun->machine->frame_filler;
/* Give $8 its frame-pointer alias only when it is actually used as one.  */
2925 if (cfun->machine->mep_frame_pointer_needed)
2926 reg_names[FP_REGNO] = "$fp";
2928 reg_names[FP_REGNO] = "$8";
2933 if (debug_info_level == DINFO_LEVEL_NONE)
2935 fprintf (file, "\t# frame: %d", sp_offset);
2937 fprintf (file, " %d regs", reg_save_size);
2939 fprintf (file, " %d locals", local);
2940 if (crtl->outgoing_args_size)
2941 fprintf (file, " %d args", crtl->outgoing_args_size);
2942 fprintf (file, "\n");
2946 fprintf (file, "\t#\n");
2947 fprintf (file, "\t# Initial Frame Information:\n");
2948 if (sp_offset || !frame_pointer_needed)
2949 fprintf (file, "\t# Entry ---------- 0\n");
2951 /* Sort registers by save slots, so they're printed in the order
2952 they appear in memory, not the order they're saved in. */
/* Simple O(n^2) selection/exchange sort over the slot_map index array;
   fine for the fixed, small register count.  */
2953 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2955 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2956 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2957 if (cfun->machine->reg_save_slot[slot_map[si]]
2958 > cfun->machine->reg_save_slot[slot_map[sj]])
2960 int t = slot_map[si];
2961 slot_map[si] = slot_map[sj];
/* Walk the sorted slots and describe each saved register plus any
   alignment padding between slots.  */
2966 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2969 int r = slot_map[i];
2970 int rss = cfun->machine->reg_save_slot[r];
2975 rsize = mep_reg_size(r);
2976 skip = rss - (sp+rsize);
2978 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2979 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
2980 rsize, reg_names[r], sp_offset - rss);
2984 skip = reg_save_size - sp;
2986 fprintf (file, "\t# %3d bytes for alignment\n", skip);
2988 if (frame_pointer_needed)
2989 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
2991 fprintf (file, "\t# %3d bytes for local vars\n", local);
2993 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
2994 if (crtl->outgoing_args_size)
2995 fprintf (file, "\t# %3d bytes for outgoing args\n",
2996 crtl->outgoing_args_size);
2997 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
2998 fprintf (file, "\t#\n");
/* Mode flags consulted by mep_expand_epilogue.  mep_prevent_lp_restore
   is set while expanding an eh_return epilogue (leave $lp alone so the
   EH machinery can redirect the return); mep_sibcall_epilogue is set
   while expanding a sibcall epilogue (no final return jump needed).  */
3002 static int mep_prevent_lp_restore = 0;
3003 static int mep_sibcall_epilogue = 0;
/* Emit RTL for the function epilogue: restore call-saved registers from
   their stack slots, deallocate the frame, and emit the appropriate
   return (reti for interrupt handlers, indirect jump through a temp for
   normal returns, nothing extra for sibcalls).
   NOTE(review): extract is missing interior lines; code kept byte-identical.  */
3006 mep_expand_epilogue (void)
3008 int i, sp_offset = 0;
3009 int reg_save_size = 0;
3011 int lp_temp = LP_REGNO, lp_slot = -1;
3012 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
3013 int interrupt_handler = mep_interrupt_p ();
3015 if (profile_arc_flag == 2)
3016 emit_insn (gen_mep_bb_trace_ret ());
3018 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3019 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3021 /* All save slots are set by mep_expand_prologue. */
3022 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
3023 if (mep_call_saves_register(i))
3025 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
3026 || mep_reg_set_in_function (i))
3027 really_need_stack_frame = 1;
3030 if (frame_pointer_needed)
3032 /* If we have a frame pointer, we won't have a reliable stack
3033 pointer (alloca, you know), so rebase SP from FP */
3034 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3035 gen_rtx_REG (SImode, FP_REGNO));
3036 sp_offset = reg_save_size;
3040 /* SP is right under our local variable space. Adjust it if
3042 sp_offset = reg_save_size + frame_size;
/* Keep restore offsets within the short (7-bit) displacement range by
   popping the locals first when the frame is large.  */
3043 if (sp_offset >= 128)
3045 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3046 sp_offset -= frame_size;
3050 /* This is backwards so that we restore the control and coprocessor
3051 registers before the temporary registers we use to restore
3053 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3054 if (mep_call_saves_register (i))
3056 enum machine_mode rmode;
3057 int rss = cfun->machine->reg_save_slot[i];
3059 if (mep_reg_size (i) == 8)
3064 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3065 && !(mep_reg_set_in_function (i) || interrupt_handler))
3067 if (mep_prevent_lp_restore && i == LP_REGNO)
3069 if (!mep_prevent_lp_restore
3070 && !interrupt_handler
3071 && (i == 10 || i == 11))
3074 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3075 emit_move_insn (gen_rtx_REG (rmode, i),
3077 plus_constant (stack_pointer_rtx,
3081 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3082 /* Defer this one so we can jump indirect rather than
3083 copying the RA to $lp and "ret". EH epilogues
3084 automatically skip this anyway. */
3085 lp_slot = sp_offset-rss;
/* Registers not directly loadable go via the control temp register.  */
3088 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3090 plus_constant (stack_pointer_rtx,
3092 emit_move_insn (gen_rtx_REG (rmode, i),
3093 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3099 /* Restore this one last so we know it will be in the temp
3100 register when we return by jumping indirectly via the temp. */
3101 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3102 gen_rtx_MEM (SImode,
3103 plus_constant (stack_pointer_rtx,
3105 lp_temp = REGSAVE_CONTROL_TEMP;
3109 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3111 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3112 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3113 gen_rtx_REG (SImode, SP_REGNO),
3114 cfun->machine->eh_stack_adjust));
3116 if (mep_sibcall_epilogue)
3119 if (mep_disinterrupt_p ())
3120 emit_insn (gen_mep_enable_int ());
3122 if (mep_prevent_lp_restore)
3124 emit_jump_insn (gen_eh_return_internal ());
3127 else if (interrupt_handler)
3128 emit_jump_insn (gen_mep_reti ());
3130 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
/* Expand the eh_return pattern: make sure the handler address ends up
   in $lp, then emit the EH epilogue.  */
3134 mep_expand_eh_return (rtx *operands)
3136 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3138 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3139 emit_move_insn (ra, operands[0]);
3143 emit_insn (gen_eh_epilogue (operands[0]));
/* Emit the EH epilogue: a normal epilogue, except that $lp is left
   untouched (mep_prevent_lp_restore) and SP is additionally adjusted
   by the EH stack adjustment held in register 0.  */
3147 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3149 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3150 mep_prevent_lp_restore = 1;
3151 mep_expand_epilogue ();
3152 mep_prevent_lp_restore = 0;
/* Emit the epilogue used before a sibling call: restore registers and
   deallocate the frame, but emit no return jump.  */
3156 mep_expand_sibcall_epilogue (void)
3158 mep_sibcall_epilogue = 1;
3159 mep_expand_epilogue ();
3160 mep_sibcall_epilogue = 0;
/* Decide whether a call may be turned into a sibling (tail) call.  */
3164 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
/* 'f' tag means a far section; presumably out of sibcall branch range --
   TODO(review) confirm tag semantics against mep_section_tag.  */
3169 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3172 /* Can't make a sibcall from an interrupt or disinterrupt function. */
3173 if (mep_interrupt_p () || mep_disinterrupt_p ())
/* EH_RETURN_STACKADJ_RTX: the EH stack adjustment lives in $10.  */
3180 mep_return_stackadj_rtx (void)
3182 return gen_rtx_REG (SImode, 10);
/* EH_RETURN_HANDLER_RTX: the handler address is delivered in $lp.  */
3186 mep_return_handler_rtx (void)
3188 return gen_rtx_REG (SImode, LP_REGNO);
/* FUNCTION_PROFILER: emit the -p instrumentation call.  Saves $0 and
   $lp on the stack, calls __mep_mcount, then restores both, so the
   profiled function sees no clobbers.  */
3192 mep_function_profiler (FILE *file)
3194 /* Always right at the beginning of the function. */
3195 fprintf (file, "\t# mep function profiler\n");
3196 fprintf (file, "\tadd\t$sp, -8\n");
3197 fprintf (file, "\tsw\t$0, ($sp)\n");
3198 fprintf (file, "\tldc\t$0, $lp\n");
3199 fprintf (file, "\tsw\t$0, 4($sp)\n");
3200 fprintf (file, "\tbsr\t__mep_mcount\n");
3201 fprintf (file, "\tlw\t$0, 4($sp)\n");
3202 fprintf (file, "\tstc\t$0, $lp\n");
3203 fprintf (file, "\tlw\t$0, ($sp)\n");
3204 fprintf (file, "\tadd\t$sp, 8\n\n");
/* Emit the basic-block trace call used by -fprofile-arcs level 2;
   same save/call/restore sequence as the profiler, but targeting
   __bb_trace_ret.  Written directly to asm_out_file.  */
3208 mep_emit_bb_trace_ret (void)
3210 fprintf (asm_out_file, "\t# end of block profiling\n");
3211 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3212 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3213 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3214 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3215 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3216 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3217 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3218 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3219 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3226 /* Operand Printing. */
/* PRINT_OPERAND_ADDRESS: only plain register addresses survive to this
   point; anything else was folded by the conversions table below.  */
3229 mep_print_operand_address (FILE *stream, rtx address)
3231 if (GET_CODE (address) == MEM)
3232 address = XEXP (address, 0);
3234 /* cf: gcc.dg/asm-4.c. */
3235 gcc_assert (GET_CODE (address) == REG);
3237 mep_print_operand (stream, address, 0);
/* Table mapping a flattened RTL shape string ("pattern", built from
   one character per RTX code, digits indexing into patternr[]) plus a
   print-code letter to an output format.  Digits in "format" print the
   corresponding captured sub-rtx; "\\" escapes a literal character.  */
3243 const char *pattern;
3246 const conversions[] =
3249 { 0, "m+ri", "3(2)" },
3252 { 0, "mLrs", "%lo(3)(2)" },
3253 { 0, "mLr+si", "%lo(4+5)(2)" },
3254 { 0, "m+ru2s", "%tpoff(5)(2)" },
3255 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3256 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3257 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3258 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3259 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3261 { 0, "m+si", "(2+3)" },
3262 { 0, "m+li", "(2+3)" },
3265 { 0, "+si", "1+2" },
3266 { 0, "+u2si", "%tpoff(3+4)" },
3267 { 0, "+u3si", "%sdaoff(3+4)" },
3273 { 'h', "Hs", "%hi(1)" },
3275 { 'I', "u2s", "%tpoff(2)" },
3276 { 'I', "u3s", "%sdaoff(2)" },
3277 { 'I', "+u2si", "%tpoff(3+4)" },
3278 { 'I', "+u3si", "%sdaoff(3+4)" },
3280 { 'P', "mr", "(1\\+),\\0" },
/* Return the index of the single set (or single clear) bit in the low
   byte of I; used for bset/bclr/bnot style operands.  */
3286 unique_bit_in (HOST_WIDE_INT i)
3290 case 0x01: case 0xfe: return 0;
3291 case 0x02: case 0xfd: return 1;
3292 case 0x04: case 0xfb: return 2;
3293 case 0x08: case 0xf7: return 3;
3294 case 0x10: case 0x7f: return 4;
3295 case 0x20: case 0xbf: return 5;
3296 case 0x40: case 0xdf: return 6;
3297 case 0x80: case 0xef: return 7;
/* Return the smallest bit width whose range covers I, for the clip
   instruction's size operand.  */
3304 bit_size_for_clip (HOST_WIDE_INT i)
3308 for (rv = 0; rv < 31; rv ++)
3309 if (((HOST_WIDE_INT) 1 << rv) > i)
3314 /* Print an operand to an assembler instruction. */
/* PRINT_OPERAND: handle the MeP-specific %-codes, then fall back to
   matching the operand's flattened shape against the conversions[]
   table and emitting its format string.
   NOTE(review): extract is missing interior lines (case labels, braces);
   code kept byte-identical.  */
3317 mep_print_operand (FILE *file, rtx x, int code)
3320 const char *real_name;
3324 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3325 we're using, then skip over the "mep_" part of its name. */
3326 const struct cgen_insn *insn;
3328 if (mep_get_move_insn (mep_cmov, &insn))
3329 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3331 mep_intrinsic_unavailable (mep_cmov);
3336 switch (GET_CODE (x))
3339 fputs ("clr", file);
3342 fputs ("set", file);
3345 fputs ("not", file);
3348 output_operand_lossage ("invalid %%L code");
3353 /* Print the second operand of a CR <- CR move. If we're using
3354 a two-operand instruction (i.e., a real cmov), then just print
3355 the operand normally. If we're using a "reg, reg, immediate"
3356 instruction such as caddi3, print the operand followed by a
3357 zero field. If we're using a three-register instruction,
3358 print the operand twice. */
3359 const struct cgen_insn *insn;
3361 mep_print_operand (file, x, 0);
3362 if (mep_get_move_insn (mep_cmov, &insn)
3363 && insn_data[insn->icode].n_operands == 3)
3366 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3367 mep_print_operand (file, x, 0);
3369 mep_print_operand (file, const0_rtx, 0);
/* Generic path: find the conversions[] entry whose pattern and code
   match, then interpret its format string character by character.  */
3375 for (i = 0; conversions[i].pattern; i++)
3376 if (conversions[i].code == code
3377 && strcmp(conversions[i].pattern, pattern) == 0)
3379 for (j = 0; conversions[i].format[j]; j++)
3380 if (conversions[i].format[j] == '\\')
3382 fputc (conversions[i].format[j+1], file);
3385 else if (ISDIGIT(conversions[i].format[j]))
3387 rtx r = patternr[conversions[i].format[j] - '0'];
3388 switch (GET_CODE (r))
3391 fprintf (file, "%s", reg_names [REGNO (r)]);
3397 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3400 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3403 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3406 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3409 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3412 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3413 && !(INTVAL (r) & 0xff))
3414 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3416 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3419 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3420 && conversions[i].format[j+1] == 0)
3422 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3423 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3426 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3429 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3434 fprintf(file, "[const_double 0x%lx]",
3435 (unsigned long) CONST_DOUBLE_HIGH(r));
3438 real_name = TARGET_STRIP_NAME_ENCODING (XSTR (r, 0));
3439 assemble_name (file, real_name);
3442 output_asm_label (r);
3445 fprintf (stderr, "don't know how to print this operand:");
/* Suppress the '+' when the following constant is negative (it will
   print its own '-'), except for %I which wants the explicit form.  */
3452 if (conversions[i].format[j] == '+'
3453 && (!code || code == 'I')
3454 && ISDIGIT (conversions[i].format[j+1])
3455 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3456 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3458 fputc(conversions[i].format[j], file);
3462 if (!conversions[i].pattern)
3464 error ("unconvertible operand %c %qs", code?code:'-', pattern);
/* FINAL_PRESCAN_INSN: emit the '+' VLIW-bundling prefix where safe.  */
3472 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3473 int noperands ATTRIBUTE_UNUSED)
3475 /* Despite the fact that MeP is perfectly capable of branching and
3476 doing something else in the same bundle, gcc does jump
3477 optimization *after* scheduling, so we cannot trust the bundling
3478 flags on jump instructions. */
3479 if (GET_MODE (insn) == BImode
3480 && get_attr_slots (insn) != SLOTS_CORE)
3481 fputc ('+', asm_out_file);
3484 /* Function args in registers. */
/* TARGET_SETUP_INCOMING_VARARGS: record how many of the four argument
   registers still need to be spilled for a stdarg function.  */
3487 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3488 enum machine_mode mode ATTRIBUTE_UNUSED,
3489 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3490 int second_time ATTRIBUTE_UNUSED)
3492 int nsave = 4 - (cum->nregs + 1);
3495 cfun->machine->arg_regs_to_save = nsave;
3496 *pretend_size = nsave * 4;
/* Size in bytes of a value of TYPE/MODE; falls back to the tree size
   for BLKmode aggregates.  */
3499 bytesize (const_tree type, enum machine_mode mode)
3502 if (mode == BLKmode)
3503 return int_size_in_bytes (type);
3504 return GET_MODE_SIZE (mode);
/* EXPAND_BUILTIN_SAVEREGS: dump the unnamed argument registers into a
   stack buffer (12 bytes per slot on IVC2 to hold the coprocessor
   copies too) and return its address.  */
3508 mep_expand_builtin_saveregs (void)
3513 ns = cfun->machine->arg_regs_to_save;
3514 bufsize = ns * (TARGET_IVC2 ? 12 : 4);
3515 regbuf = assign_stack_local (SImode, bufsize, 32);
3517 move_block_from_reg (5-ns, regbuf, ns);
3521 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
/* IVC2: also save the 64-bit coprocessor argument registers ($c49+).  */
3524 for (i=0; i<ns; i++)
3526 int rn = (4-ns) + i + 49;
3529 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3530 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3534 return XEXP (regbuf, 0);
3537 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
/* TARGET_BUILD_BUILTIN_VA_LIST: va_list is a four-pointer record
   tracking the next GP-register slot, its limit, the next coprocessor
   slot, and the next stack argument.  */
3540 mep_build_builtin_va_list (void)
3542 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3546 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3548 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3549 get_identifier ("__va_next_gp"), ptr_type_node);
3550 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3551 get_identifier ("__va_next_gp_limit"),
3553 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3555 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3558 DECL_FIELD_CONTEXT (f_next_gp) = record;
3559 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3560 DECL_FIELD_CONTEXT (f_next_cop) = record;
3561 DECL_FIELD_CONTEXT (f_next_stack) = record;
3563 TYPE_FIELDS (record) = f_next_gp;
3564 TREE_CHAIN (f_next_gp) = f_next_gp_limit;
3565 TREE_CHAIN (f_next_gp_limit) = f_next_cop;
3566 TREE_CHAIN (f_next_cop) = f_next_stack;
3568 layout_type (record);
/* TARGET_EXPAND_BUILTIN_VA_START: initialize all four va_list fields
   from the saveregs buffer and the first stack argument.  */
3574 mep_expand_va_start (tree valist, rtx nextarg)
3576 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3577 tree next_gp, next_gp_limit, next_cop, next_stack;
3581 ns = cfun->machine->arg_regs_to_save;
3583 f_next_gp = TYPE_FIELDS (va_list_type_node);
3584 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3585 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3586 f_next_stack = TREE_CHAIN (f_next_cop);
3588 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3590 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3591 valist, f_next_gp_limit, NULL_TREE);
3592 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3594 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3595 valist, f_next_stack, NULL_TREE);
3597 /* va_list.next_gp = expand_builtin_saveregs (); */
3598 u = make_tree (sizetype, expand_builtin_saveregs ());
3599 u = fold_convert (ptr_type_node, u);
3600 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3601 TREE_SIDE_EFFECTS (t) = 1;
3602 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3604 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3605 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3607 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3608 TREE_SIDE_EFFECTS (t) = 1;
3609 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3611 /* va_list.next_cop = va_list.next_gp_limit; */
3612 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3613 TREE_SIDE_EFFECTS (t) = 1;
3614 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3616 /* va_list.next_stack = nextarg; */
3617 u = make_tree (ptr_type_node, nextarg);
3618 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3619 TREE_SIDE_EFFECTS (t) = 1;
3620 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* TARGET_GIMPLIFY_VA_ARG_EXPR: emit GIMPLE that fetches the next
   vararg of TYPE.  Small values come from the GP-register save area
   (or the coprocessor area for IVC2 vectors); once that is exhausted,
   or for large values passed by reference, from the stack.
   NOTE(review): extract is missing interior lines; code kept byte-identical.  */
3624 mep_gimplify_va_arg_expr (tree valist, tree type,
3625 tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
3627 HOST_WIDE_INT size, rsize;
3628 bool by_reference, ivc2_vec;
3629 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3630 tree next_gp, next_gp_limit, next_cop, next_stack;
3631 tree label_sover, label_selse;
3634 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3636 size = int_size_in_bytes (type);
3637 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
/* By-reference args are fetched as a pointer, then dereferenced.  */
3641 type = build_pointer_type (type);
3644 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3646 f_next_gp = TYPE_FIELDS (va_list_type_node);
3647 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3648 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3649 f_next_stack = TREE_CHAIN (f_next_cop);
3651 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3653 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3654 valist, f_next_gp_limit, NULL_TREE);
3655 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3657 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3658 valist, f_next_stack, NULL_TREE);
3660 /* if f_next_gp < f_next_gp_limit
3661 IF (VECTOR_P && IVC2)
3669 val = *f_next_stack;
3670 f_next_stack += rsize;
3674 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3675 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3676 res_addr = create_tmp_var (ptr_type_node, NULL);
/* Guard: register area exhausted -> jump to the stack path.  */
3678 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3679 unshare_expr (next_gp_limit));
3680 tmp = build3 (COND_EXPR, void_type_node, tmp,
3681 build1 (GOTO_EXPR, void_type_node,
3682 unshare_expr (label_selse)),
3684 gimplify_and_add (tmp, pre_p);
3688 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3689 gimplify_and_add (tmp, pre_p);
3693 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3694 gimplify_and_add (tmp, pre_p);
/* Always advance both cursors so GP and COP areas stay in lockstep.  */
3697 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3698 unshare_expr (next_gp), size_int (4));
3699 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3701 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3702 unshare_expr (next_cop), size_int (8));
3703 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3705 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3706 gimplify_and_add (tmp, pre_p);
/* Stack path: take the value from next_stack and bump it.  */
3710 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3711 gimplify_and_add (tmp, pre_p);
3713 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3714 gimplify_and_add (tmp, pre_p);
3716 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3717 unshare_expr (next_stack), size_int (rsize));
3718 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3722 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3723 gimplify_and_add (tmp, pre_p);
3725 res_addr = fold_convert (build_pointer_type (type), res_addr);
/* Extra dereference for the by-reference case.  */
3728 res_addr = build_va_arg_indirect_ref (res_addr);
3730 return build_va_arg_indirect_ref (res_addr);
/* INIT_CUMULATIVE_ARGS: reset the cumulative-args record; also note
   whether the called function is VLIW (affects jsr vs jsrv).  */
3734 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3735 rtx libname ATTRIBUTE_UNUSED,
3736 tree fndecl ATTRIBUTE_UNUSED)
3740 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
/* FUNCTION_ARG: pick the register (or stack) for the next argument.  */
3747 mep_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
3748 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3750 /* VOIDmode is a signal for the backend to pass data to the call
3751 expander via the second operand to the call pattern. We use
3752 this to determine whether to use "jsr" or "jsrv". */
3753 if (mode == VOIDmode)
3754 return GEN_INT (cum.vliw);
3756 /* If we haven't run out of argument registers, return the next. */
/* IVC2 vector args go in the coprocessor registers ($c49+), scalar
   args in core registers $1..$4.  */
3759 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3760 return gen_rtx_REG (mode, cum.nregs + 49);
3762 return gen_rtx_REG (mode, cum.nregs + 1);
3765 /* Otherwise the argument goes on the stack. */
/* TARGET_PASS_BY_REFERENCE: values wider than one register (two for
   IVC2 vectors) are passed by reference.  */
3770 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3771 enum machine_mode mode,
3773 bool named ATTRIBUTE_UNUSED)
3775 int size = bytesize (type, mode);
3776 if (type && TARGET_IVC2 && cum->nregs < 4 && VECTOR_TYPE_P (type))
3777 return size <= 0 || size > 8;
3778 return size <= 0 || size > 4;
/* FUNCTION_ARG_ADVANCE: body not visible in this extract.  */
3782 mep_arg_advance (CUMULATIVE_ARGS *pcum,
3783 enum machine_mode mode ATTRIBUTE_UNUSED,
3784 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
/* TARGET_RETURN_IN_MEMORY: anything that doesn't fit the return
   register(s) is returned in memory.  */
3790 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3792 int size = bytesize (type, BLKmode);
3793 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3794 return size >= 0 && size <= 8 ? 0 : 1;
3795 return size >= 0 && size <= 4 ? 0 : 1;
/* TARGET_NARROW_VOLATILE_BITFIELD hook; body not visible here.  */
3799 mep_narrow_volatile_bitfield (void)
3805 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3808 mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
3810 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3811 return gen_rtx_REG (TYPE_MODE (type), 48);
3812 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3815 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3818 mep_libcall_value (enum machine_mode mode)
3820 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3823 /* Handle pipeline hazards. */
3825 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3826 static const char *opnames[] = { "", "stc", "fsft", "ret" };
/* Last interesting opcode emitted; persists across calls so we can see
   stc->fsft and stc->ret pairs.  */
3828 static int prev_opcode = 0;
3830 /* This isn't as optimal as it could be, because we don't know what
3831 control register the STC opcode is storing in. We only need to add
3832 the nop if it's the relevant register, but we add it for irrelevant
/* ASM_OUTPUT_OPCODE: textual peephole over the final assembly stream;
   inserts a nop between an stc and a following fsft/ret to avoid the
   pipeline hazard.  */
3836 mep_asm_output_opcode (FILE *file, const char *ptr)
3838 int this_opcode = op_none;
3839 const char *hazard = 0;
/* !ISGRAPH(next char) ensures we match whole mnemonics only.  */
3844 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3845 this_opcode = op_fsft;
3848 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3849 this_opcode = op_ret;
3852 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3853 this_opcode = op_stc;
3857 if (prev_opcode == op_stc && this_opcode == op_fsft)
3859 if (prev_opcode == op_stc && this_opcode == op_ret)
3863 fprintf(file, "%s\t# %s-%s hazard\n\t",
3864 hazard, opnames[prev_opcode], opnames[this_opcode]);
3866 prev_opcode = this_opcode;
3869 /* Handle attributes. */
/* Attribute handler for "based"/"tiny": only meaningful on variables
   (or types/pointers thereto) with static storage duration.  */
3872 mep_validate_based_tiny (tree *node, tree name, tree args,
3873 int flags ATTRIBUTE_UNUSED, bool *no_add)
3875 if (TREE_CODE (*node) != VAR_DECL
3876 && TREE_CODE (*node) != POINTER_TYPE
3877 && TREE_CODE (*node) != TYPE_DECL)
3879 warning (0, "%qE attribute only applies to variables", name);
3882 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3884 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3886 warning (0, "address region attributes not allowed with auto storage class");
3889 /* Ignore storage attribute of pointed to variable: char __far * x; */
3890 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3892 warning (0, "address region attributes on pointed-to types ignored");
/* Count address-region attributes (based/tiny/near/far/io) in LIST;
   with CHECK_SECTION_ATTR set, count "section" attributes instead.
   Used to diagnose conflicting region attributes.  */
3901 mep_multiple_address_regions (tree list, bool check_section_attr)
3904 int count_sections = 0;
3905 int section_attr_count = 0;
3907 for (a = list; a; a = TREE_CHAIN (a))
3909 if (is_attribute_p ("based", TREE_PURPOSE (a))
3910 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3911 || is_attribute_p ("near", TREE_PURPOSE (a))
3912 || is_attribute_p ("far", TREE_PURPOSE (a))
3913 || is_attribute_p ("io", TREE_PURPOSE (a)))
3915 if (check_section_attr)
3916 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3919 if (check_section_attr)
3920 return section_attr_count;
3922 return count_sections;
/* Fetch the attribute list for DECL whether it is a type or a decl
   (falling back to the decl's type when the decl has none).  */
3925 #define MEP_ATTRIBUTES(decl) \
3926 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3927 : DECL_ATTRIBUTES (decl) \
3928 ? (DECL_ATTRIBUTES (decl)) \
3929 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
/* Attribute handler for "near"/"far": allowed on variables and
   functions; rejects auto storage and duplicate region attributes.  */
3932 mep_validate_near_far (tree *node, tree name, tree args,
3933 int flags ATTRIBUTE_UNUSED, bool *no_add)
3935 if (TREE_CODE (*node) != VAR_DECL
3936 && TREE_CODE (*node) != FUNCTION_DECL
3937 && TREE_CODE (*node) != METHOD_TYPE
3938 && TREE_CODE (*node) != POINTER_TYPE
3939 && TREE_CODE (*node) != TYPE_DECL)
3941 warning (0, "%qE attribute only applies to variables and functions",
3945 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3947 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3949 warning (0, "address region attributes not allowed with auto storage class");
3952 /* Ignore storage attribute of pointed to variable: char __far * x; */
3953 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3955 warning (0, "address region attributes on pointed-to types ignored");
/* A second region attribute wins: drop the earlier ones entirely.  */
3959 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3961 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3962 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3963 DECL_ATTRIBUTES (*node) = NULL_TREE;
/* Attribute handler for "disinterrupt": functions only.  */
3969 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3970 int flags ATTRIBUTE_UNUSED, bool *no_add)
3972 if (TREE_CODE (*node) != FUNCTION_DECL
3973 && TREE_CODE (*node) != METHOD_TYPE)
3975 warning (0, "%qE attribute only applies to functions", name);
/* Attribute handler for "interrupt": functions only; an interrupt
   handler must be non-inline, return void, and take no arguments.  */
3982 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
3983 int flags ATTRIBUTE_UNUSED, bool *no_add)
3987 if (TREE_CODE (*node) != FUNCTION_DECL)
3989 warning (0, "%qE attribute only applies to functions", name);
3994 if (DECL_DECLARED_INLINE_P (*node))
3995 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
3996 DECL_UNINLINABLE (*node) = 1;
3998 function_type = TREE_TYPE (*node);
4000 if (TREE_TYPE (function_type) != void_type_node)
4001 error ("interrupt function must have return type of void");
4003 if (TYPE_ARG_TYPES (function_type)
4004 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4005 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4006 error ("interrupt function must have no arguments");
/* Attribute handler for "io"/"cb": variables only; the optional
   argument is the fixed I/O or control-bus address and must be an
   integer constant.  io variables are implicitly volatile unless
   -mio-no-volatile.  */
4012 mep_validate_io_cb (tree *node, tree name, tree args,
4013 int flags ATTRIBUTE_UNUSED, bool *no_add)
4015 if (TREE_CODE (*node) != VAR_DECL)
4017 warning (0, "%qE attribute only applies to variables", name);
4021 if (args != NULL_TREE)
/* Strip a NON_LVALUE_EXPR wrapper so a parenthesized constant works.  */
4023 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4024 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4025 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4027 warning (0, "%qE attribute allows only an integer constant argument",
4033 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4034 TREE_THIS_VOLATILE (*node) = 1;
/* Attribute handler for "vliw": function types only, and only when a
   VLIW configuration is selected.  Gives one-time hints for the two
   common declarator mistakes (pointers and arrays).  */
4040 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4041 int flags ATTRIBUTE_UNUSED, bool *no_add)
4043 if (TREE_CODE (*node) != FUNCTION_TYPE
4044 && TREE_CODE (*node) != FUNCTION_DECL
4045 && TREE_CODE (*node) != METHOD_TYPE
4046 && TREE_CODE (*node) != FIELD_DECL
4047 && TREE_CODE (*node) != TYPE_DECL)
4049 static int gave_pointer_note = 0;
4050 static int gave_array_note = 0;
4051 static const char * given_type = NULL;
4053 given_type = tree_code_name[TREE_CODE (*node)];
4054 if (TREE_CODE (*node) == POINTER_TYPE)
4055 given_type = "pointers";
4056 if (TREE_CODE (*node) == ARRAY_TYPE)
4057 given_type = "arrays";
4060 warning (0, "%qE attribute only applies to functions, not %s",
4063 warning (0, "%qE attribute only applies to functions",
4067 if (TREE_CODE (*node) == POINTER_TYPE
4068 && !gave_pointer_note)
4070 inform (input_location, "To describe a pointer to a VLIW function, use syntax like this:");
4071 inform (input_location, "  typedef int (__vliw *vfuncptr) ();");
4072 gave_pointer_note = 1;
4075 if (TREE_CODE (*node) == ARRAY_TYPE
4076 && !gave_array_note)
4078 inform (input_location, "To describe an array of VLIW function pointers, use syntax like this:");
4079 inform (input_location, "  typedef int (__vliw *vfuncptr[]) ();");
4080 gave_array_note = 1;
4084 error ("VLIW functions are not allowed without a VLIW configuration");
/* TARGET_ATTRIBUTE_TABLE: the MeP-specific attributes and their
   validators.  NULL-terminated.  */
4088 static const struct attribute_spec mep_attribute_table[11] =
4090 /* name min max decl type func handler */
4091 { "based", 0, 0, false, false, false, mep_validate_based_tiny },
4092 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny },
4093 { "near", 0, 0, false, false, false, mep_validate_near_far },
4094 { "far", 0, 0, false, false, false, mep_validate_near_far },
4095 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4096 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt },
4097 { "io", 0, 1, false, false, false, mep_validate_io_cb },
4098 { "cb", 0, 1, false, false, false, mep_validate_io_cb },
4099 { "vliw", 0, 0, false, true, false, mep_validate_vliw },
4100 { NULL, 0, 0, false, false, false, NULL }
/* TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P: interrupt and disinterrupt
   functions must never be inlined (their prologues differ).  */
4104 mep_function_attribute_inlinable_p (const_tree callee)
4106 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4107 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4108 return (lookup_attribute ("disinterrupt", attrs) == 0
4109 && lookup_attribute ("interrupt", attrs) == 0);
/* Reject inlining a VLIW callee into a core-mode caller.  */
4113 mep_option_can_inline_p (tree caller, tree callee)
4115 if (TREE_CODE (callee) == ADDR_EXPR)
4116 callee = TREE_OPERAND (callee, 0);
4118 if (TREE_CODE (callee) == FUNCTION_DECL
4119 && DECL_DECLARED_INLINE_P (callee)
4120 && !mep_vliw_function_p (caller)
4121 && mep_vliw_function_p (callee))
4123 error ("cannot call inline VLIW functions from core functions");
4130 #define FUNC_DISINTERRUPT 2
4133 struct GTY(()) pragma_entry {
4136 const char *funcname;
4138 typedef struct pragma_entry pragma_entry;
4140 /* Hash table of farcall-tagged sections. */
4141 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4144 pragma_entry_eq (const void *p1, const void *p2)
4146 const pragma_entry *old = (const pragma_entry *) p1;
4147 const char *new_name = (const char *) p2;
4149 return strcmp (old->funcname, new_name) == 0;
4153 pragma_entry_hash (const void *p)
4155 const pragma_entry *old = (const pragma_entry *) p;
4156 return htab_hash_string (old->funcname);
4160 mep_note_pragma_flag (const char *funcname, int flag)
4162 pragma_entry **slot;
4165 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4166 pragma_entry_eq, NULL);
4168 slot = (pragma_entry **)
4169 htab_find_slot_with_hash (pragma_htab, funcname,
4170 htab_hash_string (funcname), INSERT);
4174 *slot = GGC_NEW (pragma_entry);
4177 (*slot)->funcname = ggc_strdup (funcname);
4179 (*slot)->flag |= flag;
4183 mep_lookup_pragma_flag (const char *funcname, int flag)
4185 pragma_entry **slot;
4190 if (funcname[0] == '@' && funcname[2] == '.')
4193 slot = (pragma_entry **)
4194 htab_find_slot_with_hash (pragma_htab, funcname,
4195 htab_hash_string (funcname), NO_INSERT);
4196 if (slot && *slot && ((*slot)->flag & flag))
4198 (*slot)->used |= flag;
/* Thin wrappers binding the generic pragma note/lookup helpers to the
   two specific pragma kinds (call and disinterrupt).  */
4205 mep_lookup_pragma_call (const char *funcname)
4207 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4211 mep_note_pragma_call (const char *funcname)
4213 mep_note_pragma_flag (funcname, FUNC_CALL);
4217 mep_lookup_pragma_disinterrupt (const char *funcname)
4219 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4223 mep_note_pragma_disinterrupt (const char *funcname)
4225 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
/* htab_traverse callback: warn about a "#pragma disinterrupt" that
   named a function which was never actually seen/used.  */
4229 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4231 const pragma_entry *d = (const pragma_entry *)(*slot);
4233 if ((d->flag & FUNC_DISINTERRUPT)
4234 && !(d->used & FUNC_DISINTERRUPT))
4235 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
/* End-of-compilation hook: walk the pragma table and emit "not used"
   warnings for stale disinterrupt pragmas.  */
4240 mep_file_cleanups (void)
4243 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
/* Map an attribute LIST on DECL to the single section-encoding
   character used in "@<char>.<name>" symbol prefixes (see the table in
   the file header: b/t/n/f/i/c...).  Warns and truncates the list when
   more than one address-region attribute is present.  NOTE(review):
   the returned characters themselves are on lines outside this view.  */
4248 mep_attrlist_to_encoding (tree list, tree decl)
4250 if (mep_multiple_address_regions (list, false) > 1)
4252 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4253 TREE_PURPOSE (TREE_CHAIN (list)),
4255 DECL_SOURCE_LINE (decl));
/* Keep only the first region attribute; drop the duplicates.  */
4256 TREE_CHAIN (list) = NULL_TREE;
4261 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4263 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4265 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4267 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4269 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4271 if (TREE_VALUE (list)
4272 && TREE_VALUE (TREE_VALUE (list))
4273 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
/* An io attribute may carry an explicit fixed address argument.  */
4275 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4277 && location <= 0x1000000)
4282 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4284 list = TREE_CHAIN (list);
4287 && TREE_CODE (decl) == FUNCTION_DECL
4288 && DECL_SECTION_NAME (decl) == 0)
/* TARGET_COMP_TYPE_ATTRIBUTES: two function types are only compatible
   if they agree on the presence of the "vliw" attribute.  */
4294 mep_comp_type_attributes (const_tree t1, const_tree t2)
4298 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4299 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
/* TARGET_INSERT_ATTRIBUTES: apply MeP defaults to DECL before its
   attributes are finalized.  Adds "disinterrupt" to functions named in
   a pragma, then -- for variables with linkage -- picks a default
   section (based/tiny/near/far) from size cutoffs and command-line
   options unless the user gave an explicit region attribute.  Also
   warns when two __io variables share the same fixed address.  */
4308 mep_insert_attributes (tree decl, tree *attributes)
4311 const char *secname = 0;
4312 tree attrib, attrlist;
4315 if (TREE_CODE (decl) == FUNCTION_DECL)
4317 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4319 if (mep_lookup_pragma_disinterrupt (funcname))
/* Honor "#pragma disinterrupt NAME" by attaching the attribute.  */
4321 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4322 *attributes = chainon (*attributes, attrib);
/* Only variables with linkage get default section placement.  */
4326 if (TREE_CODE (decl) != VAR_DECL
4327 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4330 if (TREE_READONLY (decl) && TARGET_DC)
4331 /* -mdc means that const variables default to the near section,
4332 regardless of the size cutoff. */
4335 /* User specified an attribute, so override the default.
4336 Ignore storage attribute of pointed to variable. char __far * x; */
4337 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4339 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4340 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4341 else if (DECL_ATTRIBUTES (decl) && *attributes)
4342 DECL_ATTRIBUTES (decl) = NULL_TREE;
/* Prefer incoming attributes; fall back to the decl's, then the
   type's, when computing the section encoding character.  */
4345 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4346 encoding = mep_attrlist_to_encoding (attrlist, decl);
4347 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4349 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4350 encoding = mep_attrlist_to_encoding (attrlist, decl);
4354 /* This means that the declaration has a specific section
4355 attribute, so we should not apply the default rules. */
4357 if (encoding == 'i' || encoding == 'I')
4359 tree attr = lookup_attribute ("io", attrlist);
4361 && TREE_VALUE (attr)
4362 && TREE_VALUE (TREE_VALUE(attr)))
4364 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4365 static tree previous_value = 0;
4366 static int previous_location = 0;
4367 static tree previous_name = 0;
4369 /* We take advantage of the fact that gcc will reuse the
4370 same tree pointer when applying an attribute to a
4371 list of decls, but produce a new tree for attributes
4372 on separate source lines, even when they're textually
4373 identical. This is the behavior we want. */
4374 if (TREE_VALUE (attr) == previous_value
4375 && location == previous_location)
4377 warning(0, "__io address 0x%x is the same for %qE and %qE",
4378 location, previous_name, DECL_NAME (decl));
4380 previous_name = DECL_NAME (decl);
4381 previous_location = location;
4382 previous_value = TREE_VALUE (attr);
4389 /* Declarations of arrays can change size. Don't trust them. */
4390 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4393 size = int_size_in_bytes (TREE_TYPE (decl));
4395 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4397 if (TREE_PUBLIC (decl)
4398 || DECL_EXTERNAL (decl)
4399 || TREE_STATIC (decl))
4401 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
/* Default placement by size: based below mep_based_cutoff, else
   tiny below mep_tiny_cutoff.  */
4425 if (size <= mep_based_cutoff && size > 0)
4427 else if (size <= mep_tiny_cutoff && size > 0)
/* -mconst-section= overrides the default for read-only data.  */
4433 if (mep_const_section && TREE_READONLY (decl))
4435 if (strcmp (mep_const_section, "tiny") == 0)
4437 else if (strcmp (mep_const_section, "near") == 0)
4439 else if (strcmp (mep_const_section, "far") == 0)
4446 if (!mep_multiple_address_regions (*attributes, true)
4447 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4449 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4451 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4452 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4453 and mep_validate_based_tiny. */
4454 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
/* TARGET_ENCODE_SECTION_INFO: rewrite DECL's symbol name to the
   "@<char>.<name>" form that records its address region, warn when the
   object is too large for that region, and force functions into
   .ftext/.vtext/.vftext as needed.  Also tags the decl with a "section"
   attribute so the inlining logic can see it.  */
4459 mep_encode_section_info (tree decl, rtx rtl, int first)
4462 const char *oldname;
4463 const char *secname;
4469 tree mep_attributes;
4474 if (TREE_CODE (decl) != VAR_DECL
4475 && TREE_CODE (decl) != FUNCTION_DECL)
/* Fetch the current assembler name from the decl's RTL; it may be a
   bare SYMBOL_REF or one wrapped in a MEM.  */
4478 rtlname = XEXP (rtl, 0);
4479 if (GET_CODE (rtlname) == SYMBOL_REF)
4480 oldname = XSTR (rtlname, 0);
4481 else if (GET_CODE (rtlname) == MEM
4482 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4483 oldname = XSTR (XEXP (rtlname, 0), 0);
4487 type = TREE_TYPE (decl);
4488 if (type == error_mark_node)
4490 mep_attributes = MEP_ATTRIBUTES (decl);
4492 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
/* Build the encoded name: '@', region char, '.', original name.  */
4496 newname = (char *) alloca (strlen (oldname) + 4);
4497 sprintf (newname, "@%c.%s", encoding, oldname);
4498 idp = get_identifier (newname);
4500 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4513 maxsize = 0x1000000;
/* Diagnose objects larger than their target section allows.  */
4521 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4523 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4525 (long) int_size_in_bytes (TREE_TYPE (decl)),
4531 /* Functions do not go through select_section, so we force it here
4532 by using the DECL_SECTION_NAME as if the user specified the
4533 .vtext or .ftext sections. */
4534 if (! DECL_SECTION_NAME (decl)
4535 && TREE_CODE (decl) == FUNCTION_DECL)
4539 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4541 if (encoding == 'f')
4542 DECL_SECTION_NAME (decl) = build_string (7, ".vftext")
4544 DECL_SECTION_NAME (decl) = build_string (6, ".vtext");
4546 else if (encoding == 'f')
4548 if (flag_function_sections || DECL_ONE_ONLY (decl))
4549 mep_unique_section (decl, 0);
4551 DECL_SECTION_NAME (decl) = build_string (6, ".ftext");
4554 /* This is so we can control inlining. It does not matter what
4555 attribute we add, just that it has one. */
4556 secname = build_tree_list (get_identifier ("section"), DECL_SECTION_NAME (decl));
4558 TYPE_ATTRIBUTES (decl) = chainon (TYPE_ATTRIBUTES (decl), secname);
4560 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), secname);
/* TARGET_STRIP_NAME_ENCODING: return SYM with any "@<char>." region
   prefix removed so the user-visible name is recovered.  */
4565 mep_strip_name_encoding (const char *sym)
4571 else if (*sym == '@' && sym[2] == '.')
/* TARGET_ASM_SELECT_SECTION: choose an output section for DECL based on
   its "@<char>." name encoding (based/tiny/far/io/cb) and whether it is
   initialized read-only data.  Variables tagged io or cb must be
   uninitialized -- they name hardware locations, not storage.  */
4579 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4580 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4584 switch (TREE_CODE (decl))
/* A decl only counts as constant data if it is readonly, has no side
   effects, and carries a constant initializer.  */
4587 if (!TREE_READONLY (decl)
4588 || TREE_SIDE_EFFECTS (decl)
4589 || !DECL_INITIAL (decl)
4590 || (DECL_INITIAL (decl) != error_mark_node
4591 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4595 if (! TREE_CONSTANT (decl))
4603 if (TREE_CODE (decl) == VAR_DECL)
4605 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
/* Dispatch on the region character of an encoded "@c.name".  */
4607 if (name[0] == '@' && name[2] == '.')
4611 return based_section;
4615 return srodata_section;
4616 if (DECL_INITIAL (decl))
4617 return sdata_section;
4618 return tinybss_section;
4622 return frodata_section;
4627 error_at (DECL_SOURCE_LOCATION (decl),
4628 "variable %D of type %<io%> must be uninitialized", decl);
4629 return data_section;
4632 error_at (DECL_SOURCE_LOCATION (decl),
4633 "variable %D of type %<cb%> must be uninitialized", decl);
4634 return data_section;
4639 return readonly_data_section;
4641 return data_section;
/* TARGET_ASM_UNIQUE_SECTION: build a per-decl section name (for
   -ffunction-sections / one-only decls).  The prefix table is indexed
   by section kind (column 0) or its .gnu.linkonce variant (column 1,
   chosen by DECL_ONE_ONLY).  The kind is refined by the decl's
   "@<char>." name encoding.  */
4645 mep_unique_section (tree decl, int reloc)
4647 static const char *prefixes[][2] =
4649 { ".text.", ".gnu.linkonce.t." },
4650 { ".rodata.", ".gnu.linkonce.r." },
4651 { ".data.", ".gnu.linkonce.d." },
4652 { ".based.", ".gnu.linkonce.based." },
4653 { ".sdata.", ".gnu.linkonce.s." },
4654 { ".far.", ".gnu.linkonce.far." },
4655 { ".ftext.", ".gnu.linkonce.ft." },
4656 { ".frodata.", ".gnu.linkonce.frd." },
4657 { ".srodata.", ".gnu.linkonce.srd." }
4659 int sec = 2; /* .data */
4661 const char *name, *prefix;
4664 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
/* Prefer the (possibly region-encoded) name from the RTL.  */
4665 if (DECL_RTL (decl))
4666 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4668 if (TREE_CODE (decl) == FUNCTION_DECL)
4669 sec = 0; /* .text */
4670 else if (decl_readonly_section (decl, reloc))
4671 sec = 1; /* .rodata */
/* Refine by the "@c." region character when present.  */
4673 if (name[0] == '@' && name[2] == '.')
4678 sec = 3; /* .based */
4682 sec = 8; /* .srodata */
4684 sec = 4; /* .sdata */
4688 sec = 6; /* .ftext */
4690 sec = 7; /* .frodata */
4692 sec = 5; /* .far. */
4698 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4699 len = strlen (name) + strlen (prefix);
4700 string = (char *) alloca (len + 1);
4702 sprintf (string, "%s%s", prefix, name);
4704 DECL_SECTION_NAME (decl) = build_string (len, string);
4707 /* Given a decl, a section name, and whether the decl initializer
4708 has relocs, choose attributes for the section. */
/* Machine-specific section flag marking sections that hold VLIW code.  */
4710 #define SECTION_MEP_VLIW SECTION_MACH_DEP
/* TARGET_SECTION_TYPE_FLAGS: as the default, plus SECTION_MEP_VLIW for
   sections holding functions with the "vliw" type attribute.  */
4713 mep_section_type_flags (tree decl, const char *name, int reloc)
4715 unsigned int flags = default_section_type_flags (decl, name, reloc);
4717 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4718 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4719 flags |= SECTION_MEP_VLIW;
4724 /* Switch to an arbitrary section NAME with attributes as specified
4725 by FLAGS. ALIGN specifies any known alignment requirements for
4726 the section; 0 if the default should be used.
4728 Differs from the standard ELF version only in support of VLIW mode. */
/* TARGET_ASM_NAMED_SECTION: emit a .section directive with ELF flag
   characters, then switch the assembler into .vliw or .core mode for
   code sections, depending on SECTION_MEP_VLIW.  */
4731 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4733 char flagchars[8], *f = flagchars;
/* Build the "awx..." flag string one character at a time.  */
4736 if (!(flags & SECTION_DEBUG))
4738 if (flags & SECTION_WRITE)
4740 if (flags & SECTION_CODE)
4742 if (flags & SECTION_SMALL)
4744 if (flags & SECTION_MEP_VLIW)
4748 if (flags & SECTION_BSS)
4753 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4754 name, flagchars, type);
/* Tell the assembler which instruction set the section contains.  */
4756 if (flags & SECTION_CODE)
4757 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
/* Emit assembly for a common (uninitialized) variable NAME of SIZE
   bytes with ALIGN bits alignment; GLOBAL selects .globl vs .local.
   io/cb variables with a fixed address become simple symbol
   assignments; based/tiny/far variables are laid out explicitly in
   their BSS-like sections; everything else falls back to .comm.  */
4762 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4763 int size, int align, int global)
4765 /* We intentionally don't use mep_section_tag() here. */
4767 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4771 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4772 DECL_ATTRIBUTES (decl));
4774 && TREE_VALUE (attr)
4775 && TREE_VALUE (TREE_VALUE(attr)))
4776 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4781 fprintf (stream, "\t.globl\t");
4782 assemble_name (stream, name);
4783 fprintf (stream, "\n");
/* Bind the symbol directly to its fixed hardware address.  */
4785 assemble_name (stream, name);
4786 fprintf (stream, " = %d\n", location);
4789 if (name[0] == '@' && name[2] == '.')
4791 const char *sec = 0;
4795 switch_to_section (based_section);
4799 switch_to_section (tinybss_section);
4803 switch_to_section (farbss_section);
/* Convert ALIGN (bits) to a power-of-two byte alignment.  */
4812 while (align > BITS_PER_UNIT)
4817 name2 = TARGET_STRIP_NAME_ENCODING (name);
4819 fprintf (stream, "\t.globl\t%s\n", name2);
4820 fprintf (stream, "\t.p2align %d\n", p2align);
4821 fprintf (stream, "\t.type\t%s,@object\n", name2);
4822 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4823 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4830 fprintf (stream, "\t.local\t");
4831 assemble_name (stream, name);
4832 fprintf (stream, "\n");
4834 fprintf (stream, "\t.comm\t");
4835 assemble_name (stream, name);
4836 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
/* Initialize a trampoline at ADDR for calling FNADDR with
   STATIC_CHAIN.  Delegates all the work to the runtime helper
   __mep_trampoline_helper via a normal library call.  */
4842 mep_init_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
4844 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4845 LCT_NORMAL, VOIDmode, 3,
4848 static_chain, Pmode);
4851 /* Experimental Reorg. */
/* Return nonzero if REG (or, when REG is NULL, any MEM) is mentioned
   anywhere inside the rtx IN.  With MODES_TOO nonzero, a register only
   matches when its machine mode matches as well.  Recurses over the
   rtx format string, skipping a SET's destination address computation
   appropriately.  */
4854 mep_mentioned_p (rtx in,
4855 rtx reg, /* NULL for mem */
4856 int modes_too) /* if nonzero, modes must match also. */
4864 if (reg && GET_CODE (reg) != REG)
4867 if (GET_CODE (in) == LABEL_REF)
4870 code = GET_CODE (in);
4876 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4882 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4884 return (REGNO (in) == REGNO (reg));
4897 /* A SET's source should be read-only; only scan its destination.  */
4898 if (code == SET && !reg)
4899 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
/* Generic walk over every operand and vector element of IN.  */
4901 fmt = GET_RTX_FORMAT (code);
4903 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4908 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4909 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4912 else if (fmt[i] == 'e'
4913 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4919 #define EXPERIMENTAL_REGMOVE_REORG 1
4921 #if EXPERIMENTAL_REGMOVE_REORG
/* True if hard regs R1 and R2 live in the same class for the purposes
   of the regmove reorg: both general regs or both coprocessor regs.  */
4924 mep_compatible_reg_class (int r1, int r2)
4926 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4928 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
/* Reorg pass: delete superfluous register-to-register moves.  For each
   (set r2 r1) where r1 dies, look forward for the insn that consumes r2
   (and where r2 dies), substitute r1 for r2 there, and if the result is
   still a recognizable insn, delete the move.  Pattern adapted from
   reorg.c.  */
4934 mep_reorg_regmove (rtx insns)
4936 rtx insn, next, pat, follow, *where;
4937 int count = 0, done = 0, replace, before = 0;
4940 for (insn = insns; insn; insn = NEXT_INSN (insn))
4941 if (GET_CODE (insn) == INSN)
4944 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4945 set that uses the r2 and r2 dies there. We replace r2 with r1
4946 and see if it's still a valid insn. If so, delete the first set.
4947 Copied from reorg.c. */
4952 for (insn = insns; insn; insn = next)
4954 next = NEXT_INSN (insn);
4955 if (GET_CODE (insn) != INSN)
4957 pat = PATTERN (insn);
/* Candidate: a reg->reg copy whose source dies here, within one
   register class (GR->GR or CR->CR).  */
4961 if (GET_CODE (pat) == SET
4962 && GET_CODE (SET_SRC (pat)) == REG
4963 && GET_CODE (SET_DEST (pat)) == REG
4964 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4965 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4967 follow = next_nonnote_insn (insn);
4969 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
/* Skip forward past insns that touch neither source nor dest.  */
4971 while (follow && GET_CODE (follow) == INSN
4972 && GET_CODE (PATTERN (follow)) == SET
4973 && !dead_or_set_p (follow, SET_SRC (pat))
4974 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
4975 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
4978 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
4979 follow = next_nonnote_insn (follow);
4983 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
4984 if (follow && GET_CODE (follow) == INSN
4985 && GET_CODE (PATTERN (follow)) == SET
4986 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
4988 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
4990 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
4993 where = & SET_SRC (PATTERN (follow));
4996 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
4998 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5001 where = & PATTERN (follow);
5007 /* If so, follow is the corresponding insn */
5014 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5015 for (x = insn; x ;x = NEXT_INSN (x))
5017 print_rtl_single (dump_file, x);
5020 fprintf (dump_file, "\n");
/* Try the substitution; only commit if the insn still recognizes.  */
5024 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5028 next = delete_insn (insn);
5031 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5032 print_rtl_single (dump_file, follow);
5042 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5043 fprintf (dump_file, "=====\n");
5049 /* Figure out where to put LABEL, which is the label for a repeat loop.
5050 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5051 the loop ends just before LAST_INSN. If SHARED, insns other than the
5052 "repeat" might use LABEL to jump to the loop's continuation point.
5054 Return the last instruction in the adjusted loop. */
/* Place LABEL (the repeat-loop continuation label) near the end of the
   loop ending at LAST_INSN, keeping the final two opcode slots free of
   insns that may not appear there (calls, repeats, trapping insns...)
   and padding with nops when fewer than two safe insns are available.
   See the comment above (file line 5049) for the INCLUDING/SHARED
   semantics.  Returns the last insn of the adjusted loop.  */
5057 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5061 int count = 0, code, icode;
5064 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5065 INSN_UID (last_insn));
5067 /* Set PREV to the last insn in the loop. */
5070 prev = PREV_INSN (prev);
5072 /* Set NEXT to the next insn after the repeat label. */
5077 code = GET_CODE (prev);
/* These insn kinds may never occupy the repeat epilogue slots.  */
5078 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5083 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5084 prev = XVECEXP (PATTERN (prev), 0, 1);
5086 /* Other insns that should not be in the last two opcodes. */
5087 icode = recog_memoized (prev);
5089 || icode == CODE_FOR_repeat
5090 || icode == CODE_FOR_erepeat
5091 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5094 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5095 is the second instruction in a VLIW bundle. In that case,
5096 loop again: if the first instruction also satisfies the
5097 conditions above then we will reach here again and put
5098 both of them into the repeat epilogue. Otherwise both
5099 should remain outside. */
5100 if (GET_MODE (prev) != BImode)
5105 print_rtl_single (dump_file, next);
5110 prev = PREV_INSN (prev);
5113 /* See if we're adding the label immediately after the repeat insn.
5114 If so, we need to separate them with a nop. */
5115 prev = prev_real_insn (next);
5117 switch (recog_memoized (prev))
5119 case CODE_FOR_repeat:
5120 case CODE_FOR_erepeat:
5122 fprintf (dump_file, "Adding nop inside loop\n");
5123 emit_insn_before (gen_nop (), next);
5130 /* Insert the label. */
5131 emit_label_before (label, next);
5133 /* Insert the nops. */
5134 if (dump_file && count < 2)
5135 fprintf (dump_file, "Adding %d nop%s\n\n",
5136 2 - count, count == 1 ? "" : "s");
/* Pad the epilogue out to exactly two opcode slots.  */
5138 for (; count < 2; count++)
5140 last_insn = emit_insn_after (gen_nop (), last_insn);
5142 emit_insn_before (gen_nop (), last_insn);
/* Emit a doloop_begin (IS_END == 0) or doloop_end (IS_END != 0)
   placeholder insn.  A fresh loop tag is allocated whenever the insn
   kind repeats or no tag exists yet, so a begin/end pair for the same
   loop shares one tag; mep_reorg_repeat later matches them up by tag.  */
5149 mep_emit_doloop (rtx *operands, int is_end)
5153 if (cfun->machine->doloop_tags == 0
5154 || cfun->machine->doloop_tag_from_end == is_end)
5156 cfun->machine->doloop_tags++;
5157 cfun->machine->doloop_tag_from_end = is_end;
5160 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5162 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5164 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5168 /* Code for converting doloop_begins and doloop_ends into valid
5169 MeP instructions. A doloop_begin is just a placeholder:
5171 $count = unspec ($count)
5173 where $count is initially the number of iterations - 1.
5174 doloop_end has the form:
5176 if ($count-- == 0) goto label
5178 The counter variable is private to the doloop insns, nothing else
5179 relies on its value.
5181 There are three cases, in decreasing order of preference:
5183 1. A loop has exactly one doloop_begin and one doloop_end.
5184 The doloop_end branches to the first instruction after
5187 In this case we can replace the doloop_begin with a repeat
5188 instruction and remove the doloop_end. I.e.:
5190 $count1 = unspec ($count1)
5195 if ($count2-- == 0) goto label
5199 repeat $count1,repeat_label
5207 2. As for (1), except there are several doloop_ends. One of them
5208 (call it X) falls through to a label L. All the others fall
5209 through to branches to L.
5211 In this case, we remove X and replace the other doloop_ends
5212 with branches to the repeat label. For example:
5214 $count1 = unspec ($count1)
5217 if ($count2-- == 0) goto label
5220 if ($count3-- == 0) goto label
5225 repeat $count1,repeat_label
5236 3. The fallback case. Replace doloop_begins with:
5240 Replace doloop_ends with the equivalent of:
5243 if ($count == 0) goto label
5245 Note that this might need a scratch register if $count
5246 is stored in memory. */
5248 /* A structure describing one doloop_begin. */
/* A structure describing one doloop_begin. */
5249 struct mep_doloop_begin {
5250 /* The next doloop_begin with the same tag. */
5251 struct mep_doloop_begin *next;
5253 /* The instruction itself. */
5256 /* The initial counter value. This is known to be a general register. */
5260 /* A structure describing a doloop_end. */
5261 struct mep_doloop_end {
5262 /* The next doloop_end with the same loop tag. */
5263 struct mep_doloop_end *next;
5265 /* The instruction itself. */
5268 /* The first instruction after INSN when the branch isn't taken. */
5271 /* The location of the counter value. Since doloop_end_internal is a
5272 jump instruction, it has to allow the counter to be stored anywhere
5273 (any non-fixed register or memory location). */
5276 /* The target label (the place where the insn branches when the counter
5280 /* A scratch register. Only available when COUNTER isn't stored
5281 in a general register. */
5286 /* One do-while loop.  Groups all begins/ends sharing one loop tag;
   see the three conversion cases described above (file line 5181). */
5288 /* All the doloop_begins for this loop (in no particular order). */
5289 struct mep_doloop_begin *begin;
5291 /* All the doloop_ends. When there is more than one, arrange things
5292 so that the first one is the most likely to be X in case (2) above. */
5293 struct mep_doloop_end *end;
5297 /* Return true if LOOP can be converted into repeat/repeat_end form
5298 (that is, if it matches cases (1) or (2) above). */
/* Return true if LOOP can be converted into repeat/repeat_end form
   (that is, if it matches cases (1) or (2) above): one doloop_begin,
   a first doloop_end that branches straight back to it, and any other
   doloop_ends agreeing on label and fallthrough target.  */
5301 mep_repeat_loop_p (struct mep_doloop *loop)
5303 struct mep_doloop_end *end;
5306 /* There must be exactly one doloop_begin and at least one doloop_end. */
5307 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5310 /* The first doloop_end (X) must branch back to the insn after
5311 the doloop_begin. */
5312 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5315 /* All the other doloop_ends must branch to the same place as X.
5316 When the branch isn't taken, they must jump to the instruction
5318 fallthrough = loop->end->fallthrough;
5319 for (end = loop->end->next; end != 0; end = end->next)
5320 if (end->label != loop->end->label
5321 || !simplejump_p (end->fallthrough)
5322 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5329 /* The main repeat reorg function. See comment above for details. */
/* The main repeat reorg function. See comment above for details.
   Collects all doloop_begin/doloop_end placeholders by loop tag, then
   converts each loop either to hardware repeat form (cases 1/2) or to
   an explicit decrement-and-branch sequence (case 3).  */
5332 mep_reorg_repeat (rtx insns)
5335 struct mep_doloop *loops, *loop;
5336 struct mep_doloop_begin *begin;
5337 struct mep_doloop_end *end;
5339 /* Quick exit if we haven't created any loops. */
5340 if (cfun->machine->doloop_tags == 0)
5343 /* Create an array of mep_doloop structures. */
5344 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5345 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5347 /* Search the function for do-while insns and group them by loop tag. */
5348 for (insn = insns; insn; insn = NEXT_INSN (insn))
5350 switch (recog_memoized (insn))
5352 case CODE_FOR_doloop_begin_internal:
5353 insn_extract (insn);
5354 loop = &loops[INTVAL (recog_data.operand[2])];
5356 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5357 begin->next = loop->begin;
5359 begin->counter = recog_data.operand[0];
5361 loop->begin = begin;
5364 case CODE_FOR_doloop_end_internal:
5365 insn_extract (insn);
5366 loop = &loops[INTVAL (recog_data.operand[2])];
5368 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5370 end->fallthrough = next_real_insn (insn);
5371 end->counter = recog_data.operand[0];
5372 end->label = recog_data.operand[1];
5373 end->scratch = recog_data.operand[3];
5375 /* If this insn falls through to an unconditional jump,
5376 give it a lower priority than the others. */
5377 if (loop->end != 0 && simplejump_p (end->fallthrough))
5379 end->next = loop->end->next;
5380 loop->end->next = end;
5384 end->next = loop->end;
5390 /* Convert the insns for each loop in turn. */
5391 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5392 if (mep_repeat_loop_p (loop))
5394 /* Case (1) or (2). */
5395 rtx repeat_label, label_ref;
5397 /* Create a new label for the repeat insn. */
5398 repeat_label = gen_label_rtx ();
5400 /* Replace the doloop_begin with a repeat. */
5401 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5402 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5404 delete_insn (loop->begin->insn);
5406 /* Insert the repeat label before the first doloop_end.
5407 Fill the gap with nops if there are other doloop_ends. */
5408 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5409 false, loop->end->next != 0);
5411 /* Emit a repeat_end (to improve the readability of the output). */
5412 emit_insn_before (gen_repeat_end (), loop->end->insn);
5414 /* Delete the first doloop_end. */
5415 delete_insn (loop->end->insn);
5417 /* Replace the others with branches to REPEAT_LABEL. */
5418 for (end = loop->end->next; end != 0; end = end->next)
5420 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5421 delete_insn (end->insn);
5422 delete_insn (end->fallthrough);
5427 /* Case (3). First replace all the doloop_begins with increment
5429 for (begin = loop->begin; begin != 0; begin = begin->next)
5431 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5432 begin->counter, const1_rtx),
5434 delete_insn (begin->insn);
5437 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5438 for (end = loop->end; end != 0; end = end->next)
5444 /* Load the counter value into a general register. */
5446 if (!REG_P (reg) || REGNO (reg) > 15)
5449 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5452 /* Decrement the counter. */
5453 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5456 /* Copy it back to its original location. */
5457 if (reg != end->counter)
5458 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5460 /* Jump back to the start label. */
5461 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5463 JUMP_LABEL (insn) = end->label;
5464 LABEL_NUSES (end->label)++;
5466 /* Emit the whole sequence before the doloop_end. */
5467 insn = get_insns ();
5469 emit_insn_before (insn, end->insn);
5471 /* Delete the doloop_end. */
5472 delete_insn (end->insn);
/* Return nonzero if the conditional branch INSN can have its condition
   inverted and still be recognized.  Tries the inversion in place,
   runs recog, then restores the original condition code; the insn is
   left unmodified either way.  */
5479 mep_invertable_branch_p (rtx insn)
5482 enum rtx_code old_code;
5485 set = PATTERN (insn);
5486 if (GET_CODE (set) != SET)
5488 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5490 cond = XEXP (XEXP (set, 1), 0);
5491 old_code = GET_CODE (cond);
/* Swap each condition for its inverse (EQ<->NE, LT<->GE).  */
5495 PUT_CODE (cond, NE);
5498 PUT_CODE (cond, EQ);
5501 PUT_CODE (cond, GE);
5504 PUT_CODE (cond, LT);
/* Force re-recognition with the inverted condition, then undo.  */
5509 INSN_CODE (insn) = -1;
5510 i = recog_memoized (insn);
5511 PUT_CODE (cond, old_code);
5512 INSN_CODE (insn) = -1;
/* Destructively invert the condition of branch INSN and retarget it to
   a fresh label emitted after AFTER.  The old target label is deleted
   when INSN was its only user.  Asserts that the inverted insn still
   recognizes.  */
5517 mep_invert_branch (rtx insn, rtx after)
5519 rtx cond, set, label;
5522 set = PATTERN (insn);
5524 gcc_assert (GET_CODE (set) == SET);
5525 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5527 cond = XEXP (XEXP (set, 1), 0);
5528 switch (GET_CODE (cond))
5531 PUT_CODE (cond, NE);
5534 PUT_CODE (cond, EQ);
5537 PUT_CODE (cond, GE);
5540 PUT_CODE (cond, LT);
5545 label = gen_label_rtx ();
5546 emit_label_after (label, after);
/* Find whichever arm of the IF_THEN_ELSE is the LABEL_REF and point
   it at the new label.  */
5547 for (i=1; i<=2; i++)
5548 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5550 rtx ref = XEXP (XEXP (set, 1), i);
5551 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5552 delete_insn (XEXP (ref, 0));
5553 XEXP (ref, 0) = label;
5554 LABEL_NUSES (label) ++;
5555 JUMP_LABEL (insn) = label;
5557 INSN_CODE (insn) = -1;
5558 i = recog_memoized (insn);
5559 gcc_assert (i >= 0);
/* Reorg pass: convert suitable backward conditional branches into
   erepeat loops.  For each invertible branch whose target label lies
   earlier in the stream (a loop top), and whose label usage can be
   proven safe, emit an erepeat after the label, invert/remove the
   branch, and close the loop with erepeat_end.  */
5563 mep_reorg_erepeat (rtx insns)
5565 rtx insn, prev, label_before, l, x;
5568 for (insn = insns; insn; insn = NEXT_INSN (insn))
5570 && ! JUMP_TABLE_DATA_P (insn)
5571 && mep_invertable_branch_p (insn))
5575 fprintf (dump_file, "\n------------------------------\n");
5576 fprintf (dump_file, "erepeat: considering this jump:\n");
5577 print_rtl_single (dump_file, insn);
5579 count = simplejump_p (insn) ? 0 : 1;
/* Scan backwards from the branch, looking for its target label and
   giving up on anything that can't sit inside an erepeat body.  */
5581 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5583 if (GET_CODE (prev) == CALL_INSN
5584 || BARRIER_P (prev))
5587 if (prev == JUMP_LABEL (insn))
5591 fprintf (dump_file, "found loop top, %d insns\n", count);
5593 if (LABEL_NUSES (prev) == 1)
5594 /* We're the only user, always safe */ ;
5595 else if (LABEL_NUSES (prev) == 2)
5597 /* See if there's a barrier before this label. If
5598 so, we know nobody inside the loop uses it.
5599 But we must be careful to put the erepeat
5600 *after* the label. */
5602 for (barrier = PREV_INSN (prev);
5603 barrier && GET_CODE (barrier) == NOTE;
5604 barrier = PREV_INSN (barrier))
5606 if (barrier && GET_CODE (barrier) != BARRIER)
5611 /* We don't know who else, within or without our loop, uses this */
5613 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5617 /* Generate a label to be used by the erepeat insn. */
5618 l = gen_label_rtx ();
5620 /* Insert the erepeat after INSN's target label. */
5621 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5623 emit_insn_after (x, prev);
5625 /* Insert the erepeat label. */
5626 newlast = (mep_insert_repeat_label_last
5627 (insn, l, !simplejump_p (insn), false));
5628 if (simplejump_p (insn))
5630 emit_insn_before (gen_erepeat_end (), insn);
/* Conditional loop branch: invert it past the new loop end.  */
5635 mep_invert_branch (insn, newlast);
5636 emit_insn_after (gen_erepeat_end (), newlast);
5643 /* A label is OK if there is exactly one user, and we
5644 can find that user before the next label. */
5647 if (LABEL_NUSES (prev) == 1)
5649 for (user = PREV_INSN (prev);
5650 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5651 user = PREV_INSN (user))
5652 if (GET_CODE (user) == JUMP_INSN
5653 && JUMP_LABEL (user) == prev)
5655 safe = INSN_UID (user);
5662 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5663 safe, INSN_UID (prev));
5670 label_before = prev;
5675 fprintf (dump_file, "\n==============================\n");
5678 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5679 always do this on its own. */
/* Replace a jump to a return, with a copy of the return. GCC doesn't
   always do this on its own.  Saves a branch when a simple jump lands
   directly on a return insn.  */
5682 mep_jmp_return_reorg (rtx insns)
5684 rtx insn, label, ret;
5687 for (insn = insns; insn; insn = NEXT_INSN (insn))
5688 if (simplejump_p (insn))
5690 /* Find the first real insn the jump jumps to. */
5691 label = ret = JUMP_LABEL (insn);
5693 && (GET_CODE (ret) == NOTE
5694 || GET_CODE (ret) == CODE_LABEL
5695 || GET_CODE (PATTERN (ret)) == USE))
5696 ret = NEXT_INSN (ret);
5700 /* Is it a return? */
5701 ret_code = recog_memoized (ret);
5702 if (ret_code == CODE_FOR_return_internal
5703 || ret_code == CODE_FOR_eh_return_internal)
5705 /* It is. Replace the jump with a return. */
5706 LABEL_NUSES (label) --;
5707 if (LABEL_NUSES (label) == 0)
5708 delete_insn (label);
5709 PATTERN (insn) = copy_rtx (PATTERN (ret));
5710 INSN_CODE (insn) = -1;
/* Reorg pass: merge two adjacent "reg = reg + const" insns operating on
   the same register into one add with the summed constant, provided the
   sum stays within the signed 16-bit range checked below.  The second
   insn is unlinked from the chain directly.  */
5718 mep_reorg_addcombine (rtx insns)
5722 for (i = insns; i; i = NEXT_INSN (i))
5724 && INSN_CODE (i) == CODE_FOR_addsi3
5725 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5726 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5727 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5728 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5732 && INSN_CODE (n) == CODE_FOR_addsi3
5733 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5734 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5735 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5736 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5738 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5739 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5740 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5742 && ic + nc > -32768)
/* Fold the second constant into the first insn and splice the
   second insn out of the doubly-linked insn chain.  */
5744 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5745 NEXT_INSN (i) = NEXT_INSN (n);
5747 PREV_INSN (NEXT_INSN (i)) = i;
5753 /* If this insn adjusts the stack, return the adjustment, else return
/* (continuation of the comment is missing from this extract --
   presumably "zero"; the early-return values after each failed
   check are likewise on lines not visible here.)
   Recognizes the exact shape $sp = $sp + CONST_INT.  */
5756 add_sp_insn_p (rtx insn)
5760 if (! single_set (insn))
5762 pat = PATTERN (insn);
5763 if (GET_CODE (SET_DEST (pat)) != REG)
5765 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5767 if (GET_CODE (SET_SRC (pat)) != PLUS)
5769 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5771 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5773 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5775 return INTVAL (XEXP (SET_SRC (pat), 1));
5778 /* Check for trivial functions that set up an unneeded stack
/* frame (comment truncated by extraction).  If the first real insn
   allocates the frame ($sp = $sp + N) and a matching deallocation
   ($sp = $sp - N) is found with no use of $sp and no call in
   between (checks visible below), both frame insns are deleted.  */
5781 mep_reorg_noframe (rtx insns)
5783 rtx start_frame_insn;
5784 rtx end_frame_insn = 0;
5788 /* The first insn should be $sp = $sp + N */
5789 while (insns && ! INSN_P (insns))
5790 insns = NEXT_INSN (insns);
5794 sp_adjust = add_sp_insn_p (insns);
5798 start_frame_insn = insns;
5799 sp = SET_DEST (PATTERN (start_frame_insn));
5801 insns = next_real_insn (insns);
5805 rtx next = next_real_insn (insns);
5809 sp2 = add_sp_insn_p (insns);
/* Candidate deallocation found; it must exactly undo the
   allocation, else give up (bail-out bodies are in lines missing
   from this extract).  */
5814 end_frame_insn = insns;
5815 if (sp2 != -sp_adjust)
5818 else if (mep_mentioned_p (insns, sp, 0))
5820 else if (CALL_P (insns))
5828 delete_insn (start_frame_insn);
5829 delete_insn (end_frame_insn);
/* Machine-dependent reorg driver (the function header is on lines
   missing from this extract).  Runs the MeP reorg sub-passes in a
   deliberate order; mep_reorg_noframe must come last because it can
   delete insns.  */
5836 rtx insns = get_insns ();
5837 mep_reorg_addcombine (insns);
5838 #if EXPERIMENTAL_REGMOVE_REORG
5839 /* VLIW packing has been done already, so we can't just delete things. */
5840 if (!mep_vliw_function_p (cfun->decl))
5841 mep_reorg_regmove (insns);
5843 mep_jmp_return_reorg (insns);
5844 mep_bundle_insns (insns);
5845 mep_reorg_repeat (insns);
/* erepeat only when repeat optimization is on and, for interrupt
   handlers, only if RPB is saved (condition head is on a missing line).  */
5848 && !profile_arc_flag
5849 && TARGET_OPT_REPEAT
5850 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5851 mep_reorg_erepeat (insns);
5853 /* This may delete *insns so make sure it's last. */
5854 mep_reorg_noframe (insns);
5859 /*----------------------------------------------------------------------*/
/* Intrinsic support: global tables mapping cgen intrinsics to insns.  */
5861 /*----------------------------------------------------------------------*/
5863 /* Element X gives the index into cgen_insns[] of the most general
5864 implementation of intrinsic X. Unimplemented intrinsics are
/* ... mapped to -1 (initialization visible in mep_init_intrinsics).  */
5866 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5868 /* Element X gives the index of another instruction that is mapped to
5869 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5872 Things are set up so that mep_intrinsic_chain[X] < X. */
5873 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5875 /* The bitmask for the current ISA. The ISA masks are declared
/* ... elsewhere (rest of comment missing from this extract).  */
5877 unsigned int mep_selected_isa;
/* struct mep_config: name/ISA pairs; the struct header and isa field
   are on lines missing from this extract.  */
5880 const char *config_name;
5884 static struct mep_config mep_configs[] = {
5885 #ifdef COPROC_SELECTION_TABLE
5886 COPROC_SELECTION_TABLE,
5891 /* Initialize the global intrinsics variables above. */
5894 mep_init_intrinsics (void)
5898 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5899 mep_selected_isa = mep_configs[0].isa;
5900 if (mep_config_string != 0)
5901 for (i = 0; mep_configs[i].config_name; i++)
5902 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5904 mep_selected_isa = mep_configs[i].isa;
5908 /* Assume all intrinsics are unavailable. */
5909 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5910 mep_intrinsic_insn[i] = -1;
5912 /* Build up the global intrinsic tables. */
/* Later table entries overwrite earlier ones, and the chain records
   the previous entry, so mep_intrinsic_chain[X] < X holds.  */
5913 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5914 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
5916 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5917 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5919 /* See whether we can directly move values between one coprocessor
5920 register and another. */
5921 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5922 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5923 mep_have_copro_copro_moves_p = true;
5925 /* See whether we can directly move values between core and
5926 coprocessor registers. */
5927 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
5928 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
/* NOTE(review): unconditional override below -- the guard (if any)
   is on a line missing from this extract; confirm against original.  */
5930 mep_have_core_copro_moves_p = 1;
5933 /* Declare all available intrinsic functions. Called once only. */
/* Cached tree type nodes for the coprocessor builtin types; built in
   mep_init_builtins and used by mep_cgen_regnum_to_type.  */
5935 static tree cp_data_bus_int_type_node;
5936 static tree opaque_vector_type_node;
5937 static tree v8qi_type_node;
5938 static tree v4hi_type_node;
5939 static tree v2si_type_node;
5940 static tree v8uqi_type_node;
5941 static tree v4uhi_type_node;
5942 static tree v2usi_type_node;
/* Map a cgen regnum operand type to the tree type node used to
   declare the corresponding builtin argument/return.  Falls through
   to void_type_node for unknown values.  */
5945 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5949 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5950 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5951 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5952 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5953 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5954 case cgen_regnum_operand_type_CHAR: return char_type_node;
5955 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5956 case cgen_regnum_operand_type_SI: return intSI_type_node;
5957 case cgen_regnum_operand_type_DI: return intDI_type_node;
5958 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5959 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5960 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5961 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5962 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5963 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5964 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5965 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5967 return void_type_node;
/* Build the coprocessor type nodes, push typedefs for them into the
   language scope, and register one builtin function per available
   unspec-form intrinsic.  */
5972 mep_init_builtins (void)
/* cp_data_bus_int width tracks the coprocessor register width.  */
5976 if (TARGET_64BIT_CR_REGS)
5977 cp_data_bus_int_type_node = long_long_integer_type_node;
5979 cp_data_bus_int_type_node = long_integer_type_node;
5981 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
5982 v8qi_type_node = build_vector_type (intQI_type_node, 8);
5983 v4hi_type_node = build_vector_type (intHI_type_node, 4);
5984 v2si_type_node = build_vector_type (intSI_type_node, 2);
5985 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
5986 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
5987 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
/* Make each type visible to the user as a typedef (cp_data_bus_int,
   cp_vector, cp_v8qi, ...).  Some closing arguments of these calls
   are on lines missing from this extract.  */
5989 (*lang_hooks.decls.pushdecl)
5990 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
5991 cp_data_bus_int_type_node));
5993 (*lang_hooks.decls.pushdecl)
5994 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
5995 opaque_vector_type_node));
5997 (*lang_hooks.decls.pushdecl)
5998 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6000 (*lang_hooks.decls.pushdecl)
6001 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6003 (*lang_hooks.decls.pushdecl)
6004 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6007 (*lang_hooks.decls.pushdecl)
6008 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6010 (*lang_hooks.decls.pushdecl)
6011 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6013 (*lang_hooks.decls.pushdecl)
6014 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6017 /* Intrinsics like mep_cadd3 are implemented with two groups of
6018 instructions, one which uses UNSPECs and one which uses a specific
6019 rtl code such as PLUS. Instructions in the latter group belong
6020 to GROUP_KNOWN_CODE.
6022 In such cases, the intrinsic will have two entries in the global
6023 tables above. The unspec form is accessed using builtin functions
6024 while the specific form is accessed using the mep_* enum in
6027 The idea is that __cop arithmetic and builtin functions have
6028 different optimization requirements. If mep_cadd3() appears in
6029 the source code, the user will surely expect gcc to use cadd3
6030 rather than a work-alike such as add3. However, if the user
6031 just writes "a + b", where a or b are __cop variables, it is
6032 reasonable for gcc to choose a core instruction rather than
6033 cadd3 if it believes that is more optimal. */
6034 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6035 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6036 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6038 tree ret_type = void_type_node;
/* Skip duplicates: only register each intrinsic name once.  */
6041 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6044 if (cgen_insns[i].cret_p)
6045 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6047 bi_type = build_function_type (ret_type, 0);
6048 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6050 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6054 /* Report the unavailability of the given intrinsic. */
/* Emits at most one diagnostic per intrinsic per compilation,
   distinguishing "not in this configuration" from VLIW/non-VLIW
   mismatches.  */
6058 mep_intrinsic_unavailable (int intrinsic)
6060 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6062 if (already_reported_p[intrinsic])
6065 if (mep_intrinsic_insn[intrinsic] < 0)
6066 error ("coprocessor intrinsic %qs is not available in this configuration",
6067 cgen_intrinsics[intrinsic]);
6068 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6069 error ("%qs is not available in VLIW functions",
6070 cgen_intrinsics[intrinsic]);
6072 error ("%qs is not available in non-VLIW functions",
6073 cgen_intrinsics[intrinsic]);
6075 already_reported_p[intrinsic] = 1;
6080 /* See if any implementation of INTRINSIC is available to the
6081 current function. If so, store the most general implementation
6082 in *INSN_PTR and return true. Return false otherwise. */
6085 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
/* Walk the chain of alternative implementations until one is
   enabled for the current (VLIW or core) group.  */
6089 i = mep_intrinsic_insn[intrinsic];
6090 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6091 i = mep_intrinsic_chain[i];
6095 *insn_ptr = &cgen_insns[i];
6102 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6103 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6104 try using a work-alike instead. In this case, the returned insn
6105 may have three operands rather than two. */
6108 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6112 if (intrinsic == mep_cmov)
6114 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6115 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
/* Fall back to the plain lookup for non-cmov intrinsics (and,
   presumably, when no cmov work-alike matched -- the intervening
   return is on a line missing from this extract).  */
6119 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6123 /* If ARG is a register operand that is the same size as MODE, convert it
6124 to MODE using a subreg. Otherwise return ARG as-is. */
6127 mep_convert_arg (enum machine_mode mode, rtx arg)
6129 if (GET_MODE (arg) != mode
6130 && register_operand (arg, VOIDmode)
6131 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6132 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6137 /* Apply regnum conversions to ARG using the description given by REGNUM.
6138 Return the new argument on success and null on failure. */
6141 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
/* count == 0 means "not a regnum operand": pass ARG through
   (the return is on a line missing from this extract).  */
6143 if (regnum->count == 0)
/* Otherwise ARG must be a CONST_INT index within [0, count).  */
6146 if (GET_CODE (arg) != CONST_INT
6148 || INTVAL (arg) >= regnum->count)
6151 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6155 /* Try to make intrinsic argument ARG match the given operand.
6156 UNSIGNED_P is true if the argument has an unsigned type. */
/* Returns the legitimized rtx, or null (via paths on lines missing
   from this extract) when no conversion makes ARG acceptable.  */
6159 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6162 if (GET_CODE (arg) == CONST_INT)
6164 /* CONST_INTs can only be bound to integer operands. */
6165 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6168 else if (GET_CODE (arg) == CONST_DOUBLE)
6169 /* These hold vector constants. */;
6170 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6172 /* If the argument is a different size from what's expected, we must
6173 have a value in the right mode class in order to convert it. */
6174 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6177 /* If the operand is an rvalue, promote or demote it to match the
6178 operand's size. This might not need extra instructions when
6179 ARG is a register value. */
6180 if (operand->constraint[0] != '=')
6181 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6184 /* If the operand is an lvalue, bind the operand to a new register.
6185 The caller will copy this value into ARG after the main
6186 instruction. By doing this always, we produce slightly more
6188 /* But not for control registers. */
6189 if (operand->constraint[0] == '='
6191 || ! (CCR_REGNO_P (REGNO (arg)) || CR_REGNO_P (REGNO (arg)))
6193 return gen_reg_rtx (operand->mode);
6195 /* Try simple mode punning. */
6196 arg = mep_convert_arg (operand->mode, arg);
6197 if (operand->predicate (arg, operand->mode))
6200 /* See if forcing the argument into a register will make it match. */
6201 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6202 arg = force_reg (operand->mode, arg);
6204 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6205 if (operand->predicate (arg, operand->mode))
6212 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6213 function FNNAME. OPERAND describes the operand to which ARGNUM
/* ... was bound (comment truncated by extraction).  Produces a
   range/alignment-specific diagnostic for integer immediates, and a
   generic type error otherwise.  */
6217 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6218 int argnum, tree fnname)
6222 if (GET_CODE (arg) == CONST_INT)
6223 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6224 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6226 const struct cgen_immediate_predicate *predicate;
6227 HOST_WIDE_INT argval;
6229 predicate = &cgen_immediate_predicates[i];
6230 argval = INTVAL (arg);
6231 if (argval < predicate->lower || argval >= predicate->upper)
6232 error ("argument %d of %qE must be in the range %d...%d",
6233 argnum, fnname, predicate->lower, predicate->upper - 1);
6235 error ("argument %d of %qE must be a multiple of %d",
6236 argnum, fnname, predicate->align);
6240 error ("incompatible type for argument %d of %qE", argnum, fnname);
/* Expand a call to a MeP coprocessor builtin.  Looks up the cgen
   insn for the intrinsic, evaluates and legitimizes each argument,
   emits the instruction, and copies output operands back.  Returns
   error_mark_node (cast to rtx by the caller's convention) on any
   user error.  */
6244 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6245 rtx subtarget ATTRIBUTE_UNUSED,
6246 enum machine_mode mode ATTRIBUTE_UNUSED,
6247 int ignore ATTRIBUTE_UNUSED)
6249 rtx pat, op[10], arg[10];
6251 int opindex, unsigned_p[10];
6253 unsigned int n_args;
6255 const struct cgen_insn *cgen_insn;
6256 const struct insn_data *idata;
/* NOTE(review): 'int return_type' initialized from a tree node looks
   wrong -- likely should be 'tree'; confirm against the original file.  */
6258 int return_type = void_type_node;
6261 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6262 fnname = DECL_NAME (fndecl);
6264 /* Find out which instruction we should emit. Note that some coprocessor
6265 intrinsics may only be available in VLIW mode, or only in normal mode. */
6266 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6268 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6269 return error_mark_node;
6271 idata = &insn_data[cgen_insn->icode];
6273 builtin_n_args = cgen_insn->num_args;
/* cret_p: the insn's first operand is a return value rather than a
   user argument (first_arg bookkeeping is on missing lines).  */
6275 if (cgen_insn->cret_p)
6277 if (cgen_insn->cret_p > 1)
6280 return_type = mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6284 /* Evaluate each argument. */
6285 n_args = call_expr_nargs (exp);
6287 if (n_args < builtin_n_args)
6289 error ("too few arguments to %qE", fnname);
6290 return error_mark_node;
6292 if (n_args > builtin_n_args)
6294 error ("too many arguments to %qE", fnname);
6295 return error_mark_node;
6298 for (a = first_arg; a < builtin_n_args+first_arg; a++)
6302 args = CALL_EXPR_ARG (exp, a-first_arg);
/* Reference parameters must be passed as &object.  */
6307 if (cgen_insn->regnums[a].reference_p)
6309 if (TREE_CODE (value) != ADDR_EXPR)
6312 error ("argument %d of %qE must be an address", a+1, fnname);
6313 return error_mark_node;
6315 value = TREE_OPERAND (value, 0);
6319 /* If the argument has been promoted to int, get the unpromoted
6320 value. This is necessary when sub-int memory values are bound
6321 to reference parameters. */
6322 if (TREE_CODE (value) == NOP_EXPR
6323 && TREE_TYPE (value) == integer_type_node
6324 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6325 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6326 < TYPE_PRECISION (TREE_TYPE (value))))
6327 value = TREE_OPERAND (value, 0);
6329 /* If the argument has been promoted to double, get the unpromoted
6330 SFmode value. This is necessary for FMAX support, for example. */
6331 if (TREE_CODE (value) == NOP_EXPR
6332 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6333 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6334 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6335 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6336 value = TREE_OPERAND (value, 0);
6338 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6339 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6340 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6341 if (cgen_insn->regnums[a].reference_p)
6343 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6344 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6346 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
/* mep_convert_regnum returned null: index out of range.  */
6350 error ("argument %d of %qE must be in the range %d...%d",
6351 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6352 return error_mark_node;
/* Allocate the output (cret) operands, reusing TARGET when its mode
   already matches.  */
6356 for (a=0; a<first_arg; a++)
6358 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6361 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6364 /* Convert the arguments into a form suitable for the intrinsic.
6365 Report an error if this isn't possible. */
6366 for (opindex = 0; opindex < idata->n_operands; opindex++)
6368 a = cgen_insn->op_mapping[opindex];
6369 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6370 arg[a], unsigned_p[a]);
6371 if (op[opindex] == 0)
6373 mep_incompatible_arg (&idata->operand[opindex],
6374 arg[a], a + 1 - first_arg, fnname);
6375 return error_mark_node;
6379 /* Emit the instruction. */
6380 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6381 op[5], op[6], op[7], op[8], op[9]);
/* Conditional-branch patterns must be emitted as jump insns.  */
6383 if (GET_CODE (pat) == SET
6384 && GET_CODE (SET_DEST (pat)) == PC
6385 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6386 emit_jump_insn (pat);
6390 /* Copy lvalues back to their final locations. */
6391 for (opindex = 0; opindex < idata->n_operands; opindex++)
6392 if (idata->operand[opindex].constraint[0] == '=')
6394 a = cgen_insn->op_mapping[opindex];
/* Mode classes differ (e.g. int vs vector): copy via lowpart.  */
6397 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6398 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6399 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6403 /* First convert the operand to the right mode, then copy it
6404 into the destination. Doing the conversion as a separate
6405 step (rather than using convert_move) means that we can
6406 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6407 refer to the same register. */
6408 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6409 op[opindex], unsigned_p[a]);
6410 if (!rtx_equal_p (arg[a], op[opindex]))
6411 emit_move_insn (arg[a], op[opindex]);
6416 if (first_arg > 0 && target && target != op[0])
6418 emit_move_insn (target, op[0]);
6425 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6430 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6431 a global register. */
/* for_each_rtx callback; return values for the non-matching cases
   are on lines missing from this extract.  */
6434 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6442 switch (GET_CODE (x))
6445 if (REG_P (SUBREG_REG (x)))
6447 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6448 && global_regs[subreg_regno (x)])
6456 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6470 /* A non-constant call might use a global register. */
6480 /* Returns nonzero if X mentions a global register. */
6483 global_reg_mentioned_p (rtx x)
/* For non-const/pure calls, scan the function-usage list instead of
   the call pattern itself.  */
6489 if (! RTL_CONST_OR_PURE_CALL_P (x))
6491 x = CALL_INSN_FUNCTION_USAGE (x);
6499 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6501 /* Scheduling hooks for VLIW mode.
6503 Conceptually this is very simple: we have a two-pack architecture
6504 that takes one core insn and one coprocessor insn to make up either
6505 a 32- or 64-bit instruction word (depending on the option bit set in
6506 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6507 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6508 and one 48-bit cop insn or two 32-bit core/cop insns.
6510 In practice, instruction selection will be a bear. Consider in
6511 VL64 mode the following insns
6516 these cannot pack, since the add is a 16-bit core insn and cmov
6517 is a 32-bit cop insn. However,
6522 packs just fine. For good VLIW code generation in VL64 mode, we
6523 will have to have 32-bit alternatives for many of the common core
6524 insns. Not implemented. */
/* TARGET_SCHED_ADJUST_COST hook: tweak the scheduling cost of the
   dependence LINK between INSN and DEP_INSN.  */
6527 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6531 if (REG_NOTE_KIND (link) != 0)
6533 /* See whether INSN and DEP_INSN are intrinsics that set the same
6534 hard register. If so, it is more important to free up DEP_INSN
6535 than it is to free up INSN.
6537 Note that intrinsics like mep_mulr are handled differently from
6538 the equivalent mep.md patterns. In mep.md, if we don't care
6539 about the value of $lo and $hi, the pattern will just clobber
6540 the registers, not set them. Since clobbers don't count as
6541 output dependencies, it is often possible to reorder two mulrs,
6544 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6545 so any pair of mep_mulr()s will be inter-dependent. We should
6546 therefore give the first mep_mulr() a higher priority. */
6547 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6548 && global_reg_mentioned_p (PATTERN (insn))
6549 && global_reg_mentioned_p (PATTERN (dep_insn)))
6552 /* If the dependence is an anti or output dependence, assume it
/* ... has no cost (return is on a line missing from this extract).  */
6557 /* If we can't recognize the insns, we can't really do anything. */
6558 if (recog_memoized (dep_insn) < 0)
6561 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6562 attribute instead. */
6565 cost_specified = get_attr_latency (dep_insn);
6566 if (cost_specified != 0)
6567 return cost_specified;
6573 /* ??? We don't properly compute the length of a load/store insn,
6574 taking into account the addressing mode. */
/* TARGET_SCHED_ISSUE_RATE hook: IVC2 can issue 3 insns per cycle,
   other configurations 2.  */
6577 mep_issue_rate (void)
6579 return TARGET_IVC2 ? 3 : 2;
6582 /* Return true if function DECL was declared with the vliw attribute. */
6585 mep_vliw_function_p (tree decl)
6587 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
/* Scan the ready list (highest priority last) for an insn that fits
   slot SLOT with encoded LENGTH bytes; return it, or (on a line
   missing from this extract) a null/sentinel when none matches.  */
6591 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6595 for (i = nready - 1; i >= 0; --i)
6597 rtx insn = ready[i];
6598 if (recog_memoized (insn) >= 0
6599 && get_attr_slot (insn) == slot
6600 && get_attr_length (insn) == length)
/* Move INSN to the end (highest-priority position) of the ready
   list, shifting the intervening entries down by one.  */
6608 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6612 for (i = 0; i < nready; ++i)
6613 if (ready[i] == insn)
6615 for (; i < nready - 1; ++i)
6616 ready[i] = ready[i + 1];
/* Debug helper: print one scheduler-visible insn to DUMP with its
   slot assignment and pattern name.  */
6625 mep_print_sched_insn (FILE *dump, rtx insn)
6627 const char *slots = "none";
6628 const char *name = NULL;
6632 if (GET_CODE (PATTERN (insn)) == SET
6633 || GET_CODE (PATTERN (insn)) == PARALLEL)
6635 switch (get_attr_slots (insn))
6637 case SLOTS_CORE: slots = "core"; break;
6638 case SLOTS_C3: slots = "c3"; break;
6639 case SLOTS_P0: slots = "p0"; break;
6640 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6641 case SLOTS_P0_P1: slots = "p0,p1"; break;
6642 case SLOTS_P0S: slots = "p0s"; break;
6643 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6644 case SLOTS_P1: slots = "p1"; break;
/* Unknown slot attr: fall back to its numeric value.  */
6646 sprintf(buf, "%d", get_attr_slots (insn));
6651 if (GET_CODE (PATTERN (insn)) == USE)
6654 code = INSN_CODE (insn);
6656 name = get_insn_name (code);
6661 "insn %4d %4d %8s %s\n",
/* TARGET_SCHED_REORDER hook.  For VLIW (non-IVC2) functions, try to
   move a pairable core/cop insn combination to the head of the ready
   list so they can be bundled; otherwise leave the scheduler's order
   alone.  Returns the issue count (returns are on lines missing
   from this extract).  */
6669 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6670 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6671 int *pnready, int clock ATTRIBUTE_UNUSED)
6673 int nready = *pnready;
6674 rtx core_insn, cop_insn;
6677 if (dump && sched_verbose > 1)
6679 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6680 for (i=0; i<nready; i++)
6681 mep_print_sched_insn (dump, ready[i]);
6682 fprintf (dump, "\n");
6685 if (!mep_vliw_function_p (cfun->decl))
6690 /* IVC2 uses a DFA to determine what's ready and what's not. */
6694 /* We can issue either a core or coprocessor instruction.
6695 Look for a matched pair of insns to reorder. If we don't
6696 find any, don't second-guess the scheduler's priorities. */
/* Pairing rules: 16-bit core + (VL64 ? 48- : 16-)bit cop, or in
   VL64 mode two 32-bit insns.  */
6698 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6699 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6700 TARGET_OPT_VL64 ? 6 : 2)))
6702 else if (TARGET_OPT_VL64
6703 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6704 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6707 /* We didn't find a pair. Issue the single insn at the head
6708 of the ready list. */
6711 /* Reorder the two insns first. */
6712 mep_move_ready_insn (ready, nready, core_insn);
6713 mep_move_ready_insn (ready, nready - 1, cop_insn);
6717 /* A for_each_rtx callback. Return true if *X is a register that is
6718 set by insn PREV. */
6721 mep_store_find_set (rtx *x, void *prev)
6723 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6726 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6727 not the containing insn. */
6730 mep_store_data_bypass_1 (rtx prev, rtx pat)
6732 /* Cope with intrinsics like swcpa. */
6733 if (GET_CODE (pat) == PARALLEL)
6737 for (i = 0; i < XVECLEN (pat, 0); i++)
6738 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6744 /* Check for some sort of store. */
6745 if (GET_CODE (pat) != SET
6746 || GET_CODE (SET_DEST (pat)) != MEM)
6749 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6750 The first operand to the unspec is the store data and the other operands
6751 are used to calculate the address. */
6752 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
/* Bypass applies only if PREV sets none of the address operands
   (unspec operands 1..n); operand 0 is the data being stored.  */
6757 src = SET_SRC (pat);
6758 for (i = 1; i < XVECLEN (src, 0); i++)
6759 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6765 /* Otherwise just check that PREV doesn't modify any register mentioned
6766 in the memory destination. */
6767 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6770 /* Return true if INSN is a store instruction and if the store address
6771 has no true dependence on PREV. */
6774 mep_store_data_bypass_p (rtx prev, rtx insn)
6776 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6779 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6780 is a register other than LO or HI and if PREV sets *X. */
6783 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
/* (The REG_P test on *x is on a line missing from this extract.)  */
6786 && REGNO (*x) != LO_REGNO
6787 && REGNO (*x) != HI_REGNO
6788 && reg_set_p (*x, (const_rtx) prev));
6791 /* Return true if, apart from HI/LO, there are no true dependencies
6792 between multiplication instructions PREV and INSN. */
6795 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6799 pat = PATTERN (insn);
/* For PARALLELs, the first element carries the multiply SET.  */
6800 if (GET_CODE (pat) == PARALLEL)
6801 pat = XVECEXP (pat, 0, 0);
6802 return (GET_CODE (pat) == SET
6803 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6806 /* Return true if INSN is an ldc instruction that issues to the
6807 MeP-h1 integer pipeline. This is true for instructions that
6808 read from PSW, LP, SAR, HI and LO. */
6811 mep_ipipe_ldc_p (rtx insn)
6815 pat = PATTERN (insn);
6817 /* Cope with intrinsics that set both a hard register and its shadow.
6818 The set of the hard register comes first. */
6819 if (GET_CODE (pat) == PARALLEL)
6820 pat = XVECEXP (pat, 0, 0);
6822 if (GET_CODE (pat) == SET)
6824 src = SET_SRC (pat);
6826 /* Cope with intrinsics. The first operand to the unspec is
6827 the source register. */
6828 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6829 src = XVECEXP (src, 0, 0);
/* Dispatch on the source register number; the case labels
   (PSW/LP/SAR/HI/LO) are on lines missing from this extract.  */
6832 switch (REGNO (src))
6845 /* Create a VLIW bundle from core instruction CORE and coprocessor
6846 instruction COP. COP always satisfies INSN_P, but CORE can be
6847 either a new pattern or an existing instruction.
6849 Emit the bundle in place of COP and return it. */
6852 mep_make_bundle (rtx core, rtx cop)
6856 /* If CORE is an existing instruction, remove it, otherwise put
6857 the new pattern in an INSN harness. */
6861 core = make_insn_raw (core);
6863 /* Generate the bundle sequence and replace COP with it. */
6864 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6865 insn = emit_insn_after (insn, cop);
6868 /* Set up the links of the insns inside the SEQUENCE. */
6869 PREV_INSN (core) = PREV_INSN (insn);
6870 NEXT_INSN (core) = cop;
6871 PREV_INSN (cop) = core;
6872 NEXT_INSN (cop) = NEXT_INSN (insn);
6874 /* Set the VLIW flag for the coprocessor instruction. */
/* GET_MODE on an insn is repurposed here as a bundling flag:
   BImode marks the second (cop) half of a bundle.  */
6875 PUT_MODE (core, VOIDmode);
6876 PUT_MODE (cop, BImode);
6878 /* Derive a location for the bundle. Individual instructions cannot
6879 have their own location because there can be no assembler labels
6880 between CORE and COP. */
/* NOTE(review): INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop) --
   the inner call selects whichever insn has a nonzero locator.  */
6881 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6882 INSN_LOCATOR (core) = 0;
6883 INSN_LOCATOR (cop) = 0;
6888 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
/* Clears *PINSN (on a line missing from this extract) once a store
   destination X is found to be mentioned in the tracked insn.  */
6891 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6893 rtx * pinsn = (rtx *) data;
6895 if (*pinsn && reg_mentioned_p (x, *pinsn))
6899 /* Return true if anything in insn X is (anti,output,true) dependent on
6900 anything in insn Y. */
6903 mep_insn_dependent_p (rtx x, rtx y)
6907 gcc_assert (INSN_P (x));
6908 gcc_assert (INSN_P (y));
/* The callback nulls TMP when a dependence is found, so a null TMP
   after note_stores means "dependent" in that direction; check both
   directions (TMP setup lines are missing from this extract).  */
6911 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6912 if (tmp == NULL_RTX)
6916 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6917 if (tmp == NULL_RTX)
/* Return nonzero when INSN issues to the core slot (USEs excluded).  */
6924 core_insn_p (rtx insn)
6926 if (GET_CODE (PATTERN (insn)) == USE)
6928 if (get_attr_slot (insn) == SLOT_CORE)
6933 /* Mark coprocessor instructions that can be bundled together with
6934 the immediately preceeding core instruction. This is later used
6935 to emit the "+" that tells the assembler to create a VLIW insn.
6937 For unbundled insns, the assembler will automatically add coprocessor
6938 nops, and 16-bit core nops. Due to an apparent oversight in the
6939 spec, the assembler will _not_ automatically add 32-bit core nops,
6940 so we have to emit those here.
6942 Called from mep_insn_reorg. */
6945 mep_bundle_insns (rtx insns)
6947 rtx insn, last = NULL_RTX, first = NULL_RTX;
6948 int saw_scheduling = 0;
6950 /* Only do bundling if we're in vliw mode. */
6951 if (!mep_vliw_function_p (cfun->decl))
6954 /* The first insn in a bundle are TImode, the remainder are
6955 VOIDmode. After this function, the first has VOIDmode and the
6956 rest have BImode. */
6958 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6960 /* First, move any NOTEs that are within a bundle, to the beginning
6962 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6964 if (NOTE_P (insn) && first)
6965 /* Don't clear FIRST. */;
6967 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
6970 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
6974 /* INSN is part of a bundle; FIRST is the first insn in that
6975 bundle. Move all intervening notes out of the bundle.
6976 In addition, since the debug pass may insert a label
6977 whenever the current line changes, set the location info
6978 for INSN to match FIRST. */
6980 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
6982 note = PREV_INSN (insn);
6983 while (note && note != first)
6985 prev = PREV_INSN (note);
6989 /* Remove NOTE from here... */
6990 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
6991 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
6992 /* ...and put it in here. */
6993 NEXT_INSN (note) = first;
6994 PREV_INSN (note) = PREV_INSN (first);
6995 NEXT_INSN (PREV_INSN (note)) = note;
6996 PREV_INSN (NEXT_INSN (note)) = note;
7003 else if (!NONJUMP_INSN_P (insn))
7007 /* Now fix up the bundles. */
7008 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7013 if (!NONJUMP_INSN_P (insn))
7019 /* If we're not optimizing enough, there won't be scheduling
7020 info. We detect that here. */
7021 if (GET_MODE (insn) == TImode)
7023 if (!saw_scheduling)
7028 rtx core_insn = NULL_RTX;
7030 /* IVC2 slots are scheduled by DFA, so we just accept
7031 whatever the scheduler gives us. However, we must make
7032 sure the core insn (if any) is the first in the bundle.
7033 The IVC2 assembler can insert whatever NOPs are needed,
7034 and allows a COP insn to be first. */
7036 if (NONJUMP_INSN_P (insn)
7037 && GET_CODE (PATTERN (insn)) != USE
7038 && GET_MODE (insn) == TImode)
/* Scan forward through the remaining (VOIDmode) members of this
   bundle looking for the core insn, if any.  */
7042 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7043 && NONJUMP_INSN_P (NEXT_INSN (last));
7044 last = NEXT_INSN (last))
7046 if (core_insn_p (last))
7049 if (core_insn_p (last))
7052 if (core_insn && core_insn != insn)
7054 /* Swap core insn to first in the bundle. */
7056 /* Remove core insn. */
7057 if (PREV_INSN (core_insn))
7058 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7059 if (NEXT_INSN (core_insn))
7060 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7062 /* Re-insert core insn. */
7063 PREV_INSN (core_insn) = PREV_INSN (insn);
7064 NEXT_INSN (core_insn) = insn;
7066 if (PREV_INSN (core_insn))
7067 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7068 PREV_INSN (insn) = core_insn;
/* The moved core insn becomes the bundle head (TImode); the old
   head becomes an interior member (VOIDmode).  */
7070 PUT_MODE (core_insn, TImode);
7071 PUT_MODE (insn, VOIDmode);
7075 /* The first insn has TImode, the rest have VOIDmode */
7076 if (GET_MODE (insn) == TImode)
7077 PUT_MODE (insn, VOIDmode);
7079 PUT_MODE (insn, BImode);
7083 PUT_MODE (insn, VOIDmode);
/* A COP insn that cannot share a bundle with the preceding core
   insn (wrong slot, wrong combined length, or a data dependency)
   must get an explicit nop as its core-slot partner.  */
7084 if (recog_memoized (insn) >= 0
7085 && get_attr_slot (insn) == SLOT_COP)
7087 if (GET_CODE (insn) == JUMP_INSN
7089 || recog_memoized (last) < 0
7090 || get_attr_slot (last) != SLOT_CORE
7091 || (get_attr_length (insn)
7092 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7093 || mep_insn_dependent_p (insn, last))
/* Pick the nop size from the COP insn's length so the bundle
   fills the VLIW word (8 bytes in VL64 mode, 4 otherwise).  */
7095 switch (get_attr_length (insn))
7100 insn = mep_make_bundle (gen_nop (), insn);
7103 if (TARGET_OPT_VL64)
7104 insn = mep_make_bundle (gen_nop32 (), insn);
7107 if (TARGET_OPT_VL64)
7108 error ("2 byte cop instructions are"
7109 " not allowed in 64-bit VLIW mode");
7111 insn = mep_make_bundle (gen_nop (), insn);
7114 error ("unexpected %d byte cop instruction",
7115 get_attr_length (insn));
/* Otherwise the COP insn pairs cleanly with the preceding core
   insn; bundle the two together.  */
7120 insn = mep_make_bundle (last, insn);
7128 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7129 Return true on success. This function can fail if the intrinsic
7130 is unavailable or if the operands don't satisfy their predicates. */
7133 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7135 const struct cgen_insn *cgen_insn;
7136 const struct insn_data *idata;
/* Fail (return false) if this intrinsic has no insn available in the
   current configuration.  */
7140 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7143 idata = &insn_data[cgen_insn->icode];
/* Convert each operand to the mode the insn pattern expects, then
   check it against that operand's predicate; any rejection means the
   intrinsic cannot be emitted with these operands.  */
7144 for (i = 0; i < idata->n_operands; i++)
7146 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7147 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
/* Emit via the pattern's generator, passing the converted operands.
   NOTE(review): genfun is always called with 9 arguments here;
   presumably extra slots beyond n_operands are ignored -- confirm
   against the generated insn-emit code.  */
7151 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7152 newop[3], newop[4], newop[5],
7153 newop[6], newop[7], newop[8]));
7159 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7160 OPERANDS[0]. Report an error if the instruction could not
7161 be synthesized. OPERANDS[1] is a register_operand. For sign
7162 and zero extensions, it may be smaller than SImode. */
/* NOTE(review): all parameters are marked ATTRIBUTE_UNUSED in this
   signature, so the real work is presumably done via a table or macro
   expansion not visible here -- confirm in the full source.  */
7165 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7166 rtx * operands ATTRIBUTE_UNUSED)
7172 /* Likewise, but apply a binary operation to OPERANDS[1] and
7173 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7174 can be a general_operand.
7176 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7177 third operand. REG and REG3 take register operands only. */
/* NOTE(review): as with the unary variant, every parameter is
   ATTRIBUTE_UNUSED here; the selection between the immediate/register
   intrinsic variants is presumably handled elsewhere -- confirm in
   the full source.  */
7180 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7181 int ATTRIBUTE_UNUSED immediate3,
7182 int ATTRIBUTE_UNUSED reg,
7183 int ATTRIBUTE_UNUSED reg3,
7184 rtx * operands ATTRIBUTE_UNUSED)
/* Cost of rtx X for the TARGET_RTX_COSTS hook; result is stored
   through TOTAL.  */
7190 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
/* Constants are bucketed by the immediate width needed.
   NOTE(review): the upper bound '< 127' excludes 127 from the
   8-bit bucket while '>= -128' includes -128 -- possible off-by-one
   versus a signed 8-bit range; confirm against the insn patterns.  */
7195 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7197 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7204 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
/* Cheaper when the second operand is a constant immediate.  */
7208 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7210 : COSTS_N_INSNS (2));
/* Address cost hook; both arguments are ignored, so presumably a
   constant cost is returned for every address -- confirm in the full
   source.  */
7217 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
/* Handle MeP-specific -m command-line options (TARGET_HANDLE_OPTION
   hook), dispatching on CODE.  The case labels are not visible in
   this extract; the per-option notes below are inferred from the
   masks being set -- confirm against the full source and mep.opt.  */
7223 mep_handle_option (size_t code,
7224 const char *arg ATTRIBUTE_UNUSED,
7225 int value ATTRIBUTE_UNUSED)
/* Presumably -mall-opts: turn on every optional-hardware flag.  */
7232 target_flags |= MEP_ALL_OPTS;
/* Presumably -mno-opts: clear them all.  */
7236 target_flags &= ~ MEP_ALL_OPTS;
/* Coprocessor support implies 64-bit control registers.  */
7240 target_flags |= MASK_COP;
7241 target_flags |= MASK_64BIT_CR_REGS;
/* Record that -mtiny= was given explicitly.  */
7245 option_mtiny_specified = 1;
/* Presumably -mivc2: enable the IVC2 coprocessor, which implies
   COP, 64-bit CRs, VLIW, and the 64-bit VLIW word.  */
7248 target_flags |= MASK_COP;
7249 target_flags |= MASK_64BIT_CR_REGS;
7250 target_flags |= MASK_VLIW;
7251 target_flags |= MASK_OPT_VL64;
7252 target_flags |= MASK_IVC2;
/* Make the 32 coprocessor registers starting at hard reg 48
   allocatable and caller-saved.  */
7254 for (i=0; i<32; i++)
7255 fixed_regs[i+48] = 0;
7256 for (i=0; i<32; i++)
7257 call_used_regs[i+48] = 1;
7259 call_used_regs[i+48] = 0;
/* Shorthand for renaming IVC2 control registers below.  */
7261 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7298 mep_asm_init_sections (void)
7301 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7302 "\t.section .based,\"aw\"");
7305 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7306 "\t.section .sbss,\"aw\"");
7309 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7310 "\t.section .sdata,\"aw\",@progbits");
7313 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7314 "\t.section .far,\"aw\"");
7317 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7318 "\t.section .farbss,\"aw\"");
7321 = get_unnamed_section (0, output_section_asm_op,
7322 "\t.section .frodata,\"a\"");
7325 = get_unnamed_section (0, output_section_asm_op,
7326 "\t.section .srodata,\"a\"");