1 /* Definitions for Toshiba Media Processor
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
48 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
54 /* Structure of this file:
56 + Command Line Option Support
57 + Pattern support - constraints, predicates, expanders
60 + Functions to save and restore machine-specific function data.
61 + Frame/Epilog/Prolog Related
63 + Function args in registers
64 + Handle pipeline hazards
67 + Machine-dependent Reorg
72 Symbols are encoded as @ <char> . <name> where <char> is one of these:
80 c - cb (control bus) */
82 struct GTY(()) machine_function
84 int mep_frame_pointer_needed;
91 /* Records __builtin_return address. */
95 int reg_save_slot[FIRST_PSEUDO_REGISTER];
96 unsigned char reg_saved[FIRST_PSEUDO_REGISTER];
98 /* 2 if the current function has an interrupt attribute, 1 if not, 0
99 if unknown. This is here because resource.c uses EPILOGUE_USES
101 int interrupt_handler;
103 /* Likewise, for disinterrupt attribute. */
104 int disable_interrupts;
106 /* Number of doloop tags used so far. */
109 /* True if the last tag was allocated to a doloop_end. */
110 bool doloop_tag_from_end;
112 /* True if reload changes $TP. */
113 bool reload_changes_tp;
115 /* 2 if there are asm()s without operands, 1 if not, 0 if unknown.
116 We only set this if the function is an interrupt handler. */
117 int asms_without_operands;
120 #define MEP_CONTROL_REG(x) \
121 (GET_CODE (x) == REG && ANY_CONTROL_REGNO_P (REGNO (x)))
123 static const struct attribute_spec mep_attribute_table[11];
125 static GTY(()) section * based_section;
126 static GTY(()) section * tinybss_section;
127 static GTY(()) section * far_section;
128 static GTY(()) section * farbss_section;
129 static GTY(()) section * frodata_section;
130 static GTY(()) section * srodata_section;
132 static GTY(()) section * vtext_section;
133 static GTY(()) section * vftext_section;
134 static GTY(()) section * ftext_section;
136 static void mep_set_leaf_registers (int);
137 static bool symbol_p (rtx);
138 static bool symbolref_p (rtx);
139 static void encode_pattern_1 (rtx);
140 static void encode_pattern (rtx);
141 static bool const_in_range (rtx, int, int);
142 static void mep_rewrite_mult (rtx, rtx);
143 static void mep_rewrite_mulsi3 (rtx, rtx, rtx, rtx);
144 static void mep_rewrite_maddsi3 (rtx, rtx, rtx, rtx, rtx);
145 static bool mep_reuse_lo_p_1 (rtx, rtx, rtx, bool);
146 static bool move_needs_splitting (rtx, rtx, enum machine_mode);
147 static bool mep_expand_setcc_1 (enum rtx_code, rtx, rtx, rtx);
148 static bool mep_nongeneral_reg (rtx);
149 static bool mep_general_copro_reg (rtx);
150 static bool mep_nonregister (rtx);
151 static struct machine_function* mep_init_machine_status (void);
152 static rtx mep_tp_rtx (void);
153 static rtx mep_gp_rtx (void);
154 static bool mep_interrupt_p (void);
155 static bool mep_disinterrupt_p (void);
156 static bool mep_reg_set_p (rtx, rtx);
157 static bool mep_reg_set_in_function (int);
158 static bool mep_interrupt_saved_reg (int);
159 static bool mep_call_saves_register (int);
161 static void add_constant (int, int, int, int);
162 static bool mep_function_uses_sp (void);
163 static rtx maybe_dead_move (rtx, rtx, bool);
164 static void mep_reload_pointer (int, const char *);
165 static void mep_start_function (FILE *, HOST_WIDE_INT);
166 static bool mep_function_ok_for_sibcall (tree, tree);
167 static int unique_bit_in (HOST_WIDE_INT);
168 static int bit_size_for_clip (HOST_WIDE_INT);
169 static int bytesize (const_tree, enum machine_mode);
170 static tree mep_validate_based_tiny (tree *, tree, tree, int, bool *);
171 static tree mep_validate_near_far (tree *, tree, tree, int, bool *);
172 static tree mep_validate_disinterrupt (tree *, tree, tree, int, bool *);
173 static tree mep_validate_interrupt (tree *, tree, tree, int, bool *);
174 static tree mep_validate_io_cb (tree *, tree, tree, int, bool *);
175 static tree mep_validate_vliw (tree *, tree, tree, int, bool *);
176 static bool mep_function_attribute_inlinable_p (const_tree);
177 static bool mep_can_inline_p (tree, tree);
178 static bool mep_lookup_pragma_disinterrupt (const char *);
179 static int mep_multiple_address_regions (tree, bool);
180 static int mep_attrlist_to_encoding (tree, tree);
181 static void mep_insert_attributes (tree, tree *);
182 static void mep_encode_section_info (tree, rtx, int);
183 static section * mep_select_section (tree, int, unsigned HOST_WIDE_INT);
184 static void mep_unique_section (tree, int);
185 static unsigned int mep_section_type_flags (tree, const char *, int);
186 static void mep_asm_named_section (const char *, unsigned int, tree);
187 static bool mep_mentioned_p (rtx, rtx, int);
188 static void mep_reorg_regmove (rtx);
189 static rtx mep_insert_repeat_label_last (rtx, rtx, bool, bool);
190 static void mep_reorg_repeat (rtx);
191 static bool mep_invertable_branch_p (rtx);
192 static void mep_invert_branch (rtx, rtx);
193 static void mep_reorg_erepeat (rtx);
194 static void mep_jmp_return_reorg (rtx);
195 static void mep_reorg_addcombine (rtx);
196 static void mep_reorg (void);
197 static void mep_init_intrinsics (void);
198 static void mep_init_builtins (void);
199 static void mep_intrinsic_unavailable (int);
200 static bool mep_get_intrinsic_insn (int, const struct cgen_insn **);
201 static bool mep_get_move_insn (int, const struct cgen_insn **);
202 static rtx mep_convert_arg (enum machine_mode, rtx);
203 static rtx mep_convert_regnum (const struct cgen_regnum_operand *, rtx);
204 static rtx mep_legitimize_arg (const struct insn_operand_data *, rtx, int);
205 static void mep_incompatible_arg (const struct insn_operand_data *, rtx, int, tree);
206 static rtx mep_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
207 static int mep_adjust_cost (rtx, rtx, rtx, int);
208 static int mep_issue_rate (void);
209 static rtx mep_find_ready_insn (rtx *, int, enum attr_slot, int);
210 static void mep_move_ready_insn (rtx *, int, rtx);
211 static int mep_sched_reorder (FILE *, int, rtx *, int *, int);
212 static rtx mep_make_bundle (rtx, rtx);
213 static void mep_bundle_insns (rtx);
214 static bool mep_rtx_cost (rtx, int, int, int *, bool);
215 static int mep_address_cost (rtx, bool);
216 static void mep_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
218 static bool mep_pass_by_reference (CUMULATIVE_ARGS * cum, enum machine_mode,
220 static bool mep_vector_mode_supported_p (enum machine_mode);
221 static bool mep_handle_option (size_t, const char *, int);
222 static rtx mep_allocate_initial_value (rtx);
223 static void mep_asm_init_sections (void);
224 static int mep_comp_type_attributes (const_tree, const_tree);
225 static bool mep_narrow_volatile_bitfield (void);
226 static rtx mep_expand_builtin_saveregs (void);
227 static tree mep_build_builtin_va_list (void);
228 static void mep_expand_va_start (tree, rtx);
229 static tree mep_gimplify_va_arg_expr (tree, tree, tree *, tree *);
231 /* Initialize the GCC target structure. */
233 #undef TARGET_ASM_FUNCTION_PROLOGUE
234 #define TARGET_ASM_FUNCTION_PROLOGUE mep_start_function
235 #undef TARGET_ATTRIBUTE_TABLE
236 #define TARGET_ATTRIBUTE_TABLE mep_attribute_table
237 #undef TARGET_COMP_TYPE_ATTRIBUTES
238 #define TARGET_COMP_TYPE_ATTRIBUTES mep_comp_type_attributes
239 #undef TARGET_INSERT_ATTRIBUTES
240 #define TARGET_INSERT_ATTRIBUTES mep_insert_attributes
241 #undef TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P
242 #define TARGET_FUNCTION_ATTRIBUTE_INLINABLE_P mep_function_attribute_inlinable_p
243 #undef TARGET_CAN_INLINE_P
244 #define TARGET_CAN_INLINE_P mep_can_inline_p
245 #undef TARGET_SECTION_TYPE_FLAGS
246 #define TARGET_SECTION_TYPE_FLAGS mep_section_type_flags
247 #undef TARGET_ASM_NAMED_SECTION
248 #define TARGET_ASM_NAMED_SECTION mep_asm_named_section
249 #undef TARGET_INIT_BUILTINS
250 #define TARGET_INIT_BUILTINS mep_init_builtins
251 #undef TARGET_EXPAND_BUILTIN
252 #define TARGET_EXPAND_BUILTIN mep_expand_builtin
253 #undef TARGET_SCHED_ADJUST_COST
254 #define TARGET_SCHED_ADJUST_COST mep_adjust_cost
255 #undef TARGET_SCHED_ISSUE_RATE
256 #define TARGET_SCHED_ISSUE_RATE mep_issue_rate
257 #undef TARGET_SCHED_REORDER
258 #define TARGET_SCHED_REORDER mep_sched_reorder
259 #undef TARGET_STRIP_NAME_ENCODING
260 #define TARGET_STRIP_NAME_ENCODING mep_strip_name_encoding
261 #undef TARGET_ASM_SELECT_SECTION
262 #define TARGET_ASM_SELECT_SECTION mep_select_section
263 #undef TARGET_ASM_UNIQUE_SECTION
264 #define TARGET_ASM_UNIQUE_SECTION mep_unique_section
265 #undef TARGET_ENCODE_SECTION_INFO
266 #define TARGET_ENCODE_SECTION_INFO mep_encode_section_info
267 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
268 #define TARGET_FUNCTION_OK_FOR_SIBCALL mep_function_ok_for_sibcall
269 #undef TARGET_RTX_COSTS
270 #define TARGET_RTX_COSTS mep_rtx_cost
271 #undef TARGET_ADDRESS_COST
272 #define TARGET_ADDRESS_COST mep_address_cost
273 #undef TARGET_MACHINE_DEPENDENT_REORG
274 #define TARGET_MACHINE_DEPENDENT_REORG mep_reorg
275 #undef TARGET_SETUP_INCOMING_VARARGS
276 #define TARGET_SETUP_INCOMING_VARARGS mep_setup_incoming_varargs
277 #undef TARGET_PASS_BY_REFERENCE
278 #define TARGET_PASS_BY_REFERENCE mep_pass_by_reference
279 #undef TARGET_VECTOR_MODE_SUPPORTED_P
280 #define TARGET_VECTOR_MODE_SUPPORTED_P mep_vector_mode_supported_p
281 #undef TARGET_HANDLE_OPTION
282 #define TARGET_HANDLE_OPTION mep_handle_option
283 #undef TARGET_DEFAULT_TARGET_FLAGS
284 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
285 #undef TARGET_ALLOCATE_INITIAL_VALUE
286 #define TARGET_ALLOCATE_INITIAL_VALUE mep_allocate_initial_value
287 #undef TARGET_ASM_INIT_SECTIONS
288 #define TARGET_ASM_INIT_SECTIONS mep_asm_init_sections
289 #undef TARGET_RETURN_IN_MEMORY
290 #define TARGET_RETURN_IN_MEMORY mep_return_in_memory
291 #undef TARGET_NARROW_VOLATILE_BITFIELD
292 #define TARGET_NARROW_VOLATILE_BITFIELD mep_narrow_volatile_bitfield
293 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
294 #define TARGET_EXPAND_BUILTIN_SAVEREGS mep_expand_builtin_saveregs
295 #undef TARGET_BUILD_BUILTIN_VA_LIST
296 #define TARGET_BUILD_BUILTIN_VA_LIST mep_build_builtin_va_list
297 #undef TARGET_EXPAND_BUILTIN_VA_START
298 #define TARGET_EXPAND_BUILTIN_VA_START mep_expand_va_start
299 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
300 #define TARGET_GIMPLIFY_VA_ARG_EXPR mep_gimplify_va_arg_expr
302 struct gcc_target targetm = TARGET_INITIALIZER;
304 #define WANT_GCC_DEFINITIONS
305 #include "mep-intrin.h"
306 #undef WANT_GCC_DEFINITIONS
309 /* Command Line Option Support. */
311 char mep_leaf_registers [FIRST_PSEUDO_REGISTER];
313 /* True if we can use cmov instructions to move values back and forth
314 between core and coprocessor registers. */
315 bool mep_have_core_copro_moves_p;
317 /* True if we can use cmov instructions (or a work-alike) to move
318 values between coprocessor registers. */
319 bool mep_have_copro_copro_moves_p;
321 /* A table of all coprocessor instructions that can act like
322 a coprocessor-to-coprocessor cmov. */
323 static const int mep_cmov_insns[] = {
336 static int option_mtiny_specified = 0;
340 mep_set_leaf_registers (int enable)
344 if (mep_leaf_registers[0] != enable)
345 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
346 mep_leaf_registers[i] = enable;
350 mep_conditional_register_usage (char *fixed_regs, char *call_used_regs)
354 if (!TARGET_OPT_MULT && !TARGET_OPT_DIV)
356 fixed_regs[HI_REGNO] = 1;
357 fixed_regs[LO_REGNO] = 1;
358 call_used_regs[HI_REGNO] = 1;
359 call_used_regs[LO_REGNO] = 1;
362 for (i = FIRST_SHADOW_REGISTER; i <= LAST_SHADOW_REGISTER; i++)
367 mep_optimization_options (void)
369 /* The first scheduling pass often increases register pressure and tends
370 to result in more spill code. Only run it when specifically asked. */
371 flag_schedule_insns = 0;
373 /* Using $fp doesn't gain us much, even when debugging is important. */
374 flag_omit_frame_pointer = 1;
378 mep_override_options (void)
381 warning (OPT_fpic, "-fpic is not supported");
383 warning (OPT_fPIC, "-fPIC is not supported");
384 if (TARGET_S && TARGET_M)
385 error ("only one of -ms and -mm may be given");
386 if (TARGET_S && TARGET_L)
387 error ("only one of -ms and -ml may be given");
388 if (TARGET_M && TARGET_L)
389 error ("only one of -mm and -ml may be given");
390 if (TARGET_S && option_mtiny_specified)
391 error ("only one of -ms and -mtiny= may be given");
392 if (TARGET_M && option_mtiny_specified)
393 error ("only one of -mm and -mtiny= may be given");
394 if (TARGET_OPT_CLIP && ! TARGET_OPT_MINMAX)
395 warning (0, "-mclip currently has no effect without -mminmax");
397 if (mep_const_section)
399 if (strcmp (mep_const_section, "tiny") != 0
400 && strcmp (mep_const_section, "near") != 0
401 && strcmp (mep_const_section, "far") != 0)
402 error ("-mc= must be -mc=tiny, -mc=near, or -mc=far");
406 mep_tiny_cutoff = 65536;
409 if (TARGET_L && ! option_mtiny_specified)
412 if (TARGET_64BIT_CR_REGS)
413 flag_split_wide_types = 0;
415 init_machine_status = mep_init_machine_status;
416 mep_init_intrinsics ();
419 /* Pattern Support - constraints, predicates, expanders. */
421 /* MEP has very few instructions that can refer to the span of
422 addresses used by symbols, so it's common to check for them. */
427 int c = GET_CODE (x);
429 return (c == CONST_INT
439 if (GET_CODE (x) != MEM)
442 c = GET_CODE (XEXP (x, 0));
443 return (c == CONST_INT
448 /* static const char *reg_class_names[] = REG_CLASS_NAMES; */
450 #define GEN_REG(R, STRICT) \
453 && ((R) == ARG_POINTER_REGNUM \
454 || (R) >= FIRST_PSEUDO_REGISTER)))
456 static char pattern[12], *patternp;
457 static GTY(()) rtx patternr[12];
458 #define RTX_IS(x) (strcmp (pattern, x) == 0)
461 encode_pattern_1 (rtx x)
465 if (patternp == pattern + sizeof (pattern) - 2)
471 patternr[patternp-pattern] = x;
473 switch (GET_CODE (x))
481 encode_pattern_1 (XEXP(x, 0));
485 encode_pattern_1 (XEXP(x, 0));
486 encode_pattern_1 (XEXP(x, 1));
490 encode_pattern_1 (XEXP(x, 0));
491 encode_pattern_1 (XEXP(x, 1));
495 encode_pattern_1 (XEXP(x, 0));
509 *patternp++ = '0' + XCINT(x, 1, UNSPEC);
510 for (i=0; i<XVECLEN (x, 0); i++)
511 encode_pattern_1 (XVECEXP (x, 0, i));
519 fprintf (stderr, "can't encode pattern %s\n", GET_RTX_NAME(GET_CODE(x)));
528 encode_pattern (rtx x)
531 encode_pattern_1 (x);
536 mep_section_tag (rtx x)
542 switch (GET_CODE (x))
549 x = XVECEXP (x, 0, 0);
552 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
561 if (GET_CODE (x) != SYMBOL_REF)
564 if (name[0] == '@' && name[2] == '.')
566 if (name[1] == 'i' || name[1] == 'I')
569 return 'f'; /* near */
570 return 'n'; /* far */
578 mep_regno_reg_class (int regno)
582 case SP_REGNO: return SP_REGS;
583 case TP_REGNO: return TP_REGS;
584 case GP_REGNO: return GP_REGS;
585 case 0: return R0_REGS;
586 case HI_REGNO: return HI_REGS;
587 case LO_REGNO: return LO_REGS;
588 case ARG_POINTER_REGNUM: return GENERAL_REGS;
591 if (GR_REGNO_P (regno))
592 return regno < FIRST_GR_REGNO + 8 ? TPREL_REGS : GENERAL_REGS;
593 if (CONTROL_REGNO_P (regno))
596 if (CR_REGNO_P (regno))
600 /* Search for the register amongst user-defined subclasses of
601 the coprocessor registers. */
602 for (i = USER0_REGS; i <= USER3_REGS; ++i)
604 if (! TEST_HARD_REG_BIT (reg_class_contents[i], regno))
606 for (j = 0; j < N_REG_CLASSES; ++j)
608 enum reg_class sub = reg_class_subclasses[i][j];
610 if (sub == LIM_REG_CLASSES)
612 if (TEST_HARD_REG_BIT (reg_class_contents[sub], regno))
617 return LOADABLE_CR_REGNO_P (regno) ? LOADABLE_CR_REGS : CR_REGS;
620 if (CCR_REGNO_P (regno))
623 gcc_assert (regno >= FIRST_SHADOW_REGISTER && regno <= LAST_SHADOW_REGISTER);
629 mep_reg_class_from_constraint (int c, const char *str)
646 return LOADABLE_CR_REGS;
648 return mep_have_copro_copro_moves_p ? CR_REGS : NO_REGS;
650 return mep_have_core_copro_moves_p ? CR_REGS : NO_REGS;
677 enum reg_class which = c - 'A' + USER0_REGS;
678 return (reg_class_size[which] > 0 ? which : NO_REGS);
687 mep_const_ok_for_letter_p (HOST_WIDE_INT value, int c)
691 case 'I': return value >= -32768 && value < 32768;
692 case 'J': return value >= 0 && value < 65536;
693 case 'K': return value >= 0 && value < 0x01000000;
694 case 'L': return value >= -32 && value < 32;
695 case 'M': return value >= 0 && value < 32;
696 case 'N': return value >= 0 && value < 16;
700 return value >= -2147483647-1 && value <= 2147483647;
707 mep_extra_constraint (rtx value, int c)
709 encode_pattern (value);
714 /* For near symbols, like what call uses. */
715 if (GET_CODE (value) == REG)
717 return mep_call_address_operand (value, GET_MODE (value));
720 /* For signed 8-bit immediates. */
721 return (GET_CODE (value) == CONST_INT
722 && INTVAL (value) >= -128
723 && INTVAL (value) <= 127);
726 /* For tp/gp relative symbol values. */
727 return (RTX_IS ("u3s") || RTX_IS ("u2s")
728 || RTX_IS ("+u3si") || RTX_IS ("+u2si"));
731 /* Non-absolute memories. */
732 return GET_CODE (value) == MEM && ! CONSTANT_P (XEXP (value, 0));
736 return RTX_IS ("Hs");
739 /* Register indirect. */
740 return RTX_IS ("mr");
743 return mep_section_tag (value) == 'c' && RTX_IS ("ms");
754 const_in_range (rtx x, int minv, int maxv)
756 return (GET_CODE (x) == CONST_INT
757 && INTVAL (x) >= minv
758 && INTVAL (x) <= maxv);
761 /* Given three integer registers DEST, SRC1 and SRC2, return an rtx X
762 such that "mulr DEST,X" will calculate DEST = SRC1 * SRC2. If a move
763 is needed, emit it before INSN if INSN is nonnull, otherwise emit it
764 at the end of the insn stream. */
767 mep_mulr_source (rtx insn, rtx dest, rtx src1, rtx src2)
769 if (rtx_equal_p (dest, src1))
771 else if (rtx_equal_p (dest, src2))
776 emit_insn (gen_movsi (copy_rtx (dest), src1));
778 emit_insn_before (gen_movsi (copy_rtx (dest), src1), insn);
783 /* Replace INSN's pattern with PATTERN, a multiplication PARALLEL.
784 Change the last element of PATTERN from (clobber (scratch:SI))
785 to (clobber (reg:SI HI_REGNO)). */
788 mep_rewrite_mult (rtx insn, rtx pattern)
792 hi_clobber = XVECEXP (pattern, 0, XVECLEN (pattern, 0) - 1);
793 XEXP (hi_clobber, 0) = gen_rtx_REG (SImode, HI_REGNO);
794 PATTERN (insn) = pattern;
795 INSN_CODE (insn) = -1;
798 /* Subroutine of mep_reuse_lo_p. Rewrite instruction INSN so that it
799 calculates SRC1 * SRC2 and stores the result in $lo. Also make it
800 store the result in DEST if nonnull. */
803 mep_rewrite_mulsi3 (rtx insn, rtx dest, rtx src1, rtx src2)
807 lo = gen_rtx_REG (SImode, LO_REGNO);
809 pattern = gen_mulsi3r (lo, dest, copy_rtx (dest),
810 mep_mulr_source (insn, dest, src1, src2));
812 pattern = gen_mulsi3_lo (lo, src1, src2);
813 mep_rewrite_mult (insn, pattern);
816 /* Like mep_rewrite_mulsi3, but calculate SRC1 * SRC2 + SRC3. First copy
817 SRC3 into $lo, then use either madd or maddr. The move into $lo will
818 be deleted by a peephole2 if SRC3 is already in $lo. */
821 mep_rewrite_maddsi3 (rtx insn, rtx dest, rtx src1, rtx src2, rtx src3)
825 lo = gen_rtx_REG (SImode, LO_REGNO);
826 emit_insn_before (gen_movsi (copy_rtx (lo), src3), insn);
828 pattern = gen_maddsi3r (lo, dest, copy_rtx (dest),
829 mep_mulr_source (insn, dest, src1, src2),
832 pattern = gen_maddsi3_lo (lo, src1, src2, copy_rtx (lo));
833 mep_rewrite_mult (insn, pattern);
836 /* Return true if $lo has the same value as integer register GPR when
837 instruction INSN is reached. If necessary, rewrite the instruction
838 that sets $lo so that it uses a proper SET, not a CLOBBER. LO is an
839 rtx for (reg:SI LO_REGNO).
841 This function is intended to be used by the peephole2 pass. Since
842 that pass goes from the end of a basic block to the beginning, and
843 propagates liveness information on the way, there is no need to
844 update register notes here.
846 If GPR_DEAD_P is true on entry, and this function returns true,
847 then the caller will replace _every_ use of GPR in and after INSN
848 with LO. This means that if the instruction that sets $lo is a
849 mulr- or maddr-type instruction, we can rewrite it to use mul or
850 madd instead. In combination with the copy progagation pass,
851 this allows us to replace sequences like:
860 if GPR is no longer used. */
863 mep_reuse_lo_p_1 (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
867 insn = PREV_INSN (insn);
869 switch (recog_memoized (insn))
871 case CODE_FOR_mulsi3_1:
873 if (rtx_equal_p (recog_data.operand[0], gpr))
875 mep_rewrite_mulsi3 (insn,
876 gpr_dead_p ? NULL : recog_data.operand[0],
877 recog_data.operand[1],
878 recog_data.operand[2]);
883 case CODE_FOR_maddsi3:
885 if (rtx_equal_p (recog_data.operand[0], gpr))
887 mep_rewrite_maddsi3 (insn,
888 gpr_dead_p ? NULL : recog_data.operand[0],
889 recog_data.operand[1],
890 recog_data.operand[2],
891 recog_data.operand[3]);
896 case CODE_FOR_mulsi3r:
897 case CODE_FOR_maddsi3r:
899 return rtx_equal_p (recog_data.operand[1], gpr);
902 if (reg_set_p (lo, insn)
903 || reg_set_p (gpr, insn)
904 || volatile_insn_p (PATTERN (insn)))
907 if (gpr_dead_p && reg_referenced_p (gpr, PATTERN (insn)))
912 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
916 /* A wrapper around mep_reuse_lo_p_1 that preserves recog_data. */
919 mep_reuse_lo_p (rtx lo, rtx gpr, rtx insn, bool gpr_dead_p)
921 bool result = mep_reuse_lo_p_1 (lo, gpr, insn, gpr_dead_p);
926 /* Return true if SET can be turned into a post-modify load or store
927 that adds OFFSET to GPR. In other words, return true if SET can be
930 (parallel [SET (set GPR (plus:SI GPR OFFSET))]).
932 It's OK to change SET to an equivalent operation in order to
936 mep_use_post_modify_for_set_p (rtx set, rtx gpr, rtx offset)
939 unsigned int reg_bytes, mem_bytes;
940 enum machine_mode reg_mode, mem_mode;
942 /* Only simple SETs can be converted. */
943 if (GET_CODE (set) != SET)
946 /* Point REG to what we hope will be the register side of the set and
947 MEM to what we hope will be the memory side. */
948 if (GET_CODE (SET_DEST (set)) == MEM)
950 mem = &SET_DEST (set);
951 reg = &SET_SRC (set);
955 reg = &SET_DEST (set);
956 mem = &SET_SRC (set);
957 if (GET_CODE (*mem) == SIGN_EXTEND)
958 mem = &XEXP (*mem, 0);
961 /* Check that *REG is a suitable coprocessor register. */
962 if (GET_CODE (*reg) != REG || !LOADABLE_CR_REGNO_P (REGNO (*reg)))
965 /* Check that *MEM is a suitable memory reference. */
966 if (GET_CODE (*mem) != MEM || !rtx_equal_p (XEXP (*mem, 0), gpr))
969 /* Get the number of bytes in each operand. */
970 mem_bytes = GET_MODE_SIZE (GET_MODE (*mem));
971 reg_bytes = GET_MODE_SIZE (GET_MODE (*reg));
973 /* Check that OFFSET is suitably aligned. */
974 if (INTVAL (offset) & (mem_bytes - 1))
977 /* Convert *MEM to a normal integer mode. */
978 mem_mode = mode_for_size (mem_bytes * BITS_PER_UNIT, MODE_INT, 0);
979 *mem = change_address (*mem, mem_mode, NULL);
981 /* Adjust *REG as well. */
982 *reg = shallow_copy_rtx (*reg);
983 if (reg == &SET_DEST (set) && reg_bytes < UNITS_PER_WORD)
985 /* SET is a subword load. Convert it to an explicit extension. */
986 PUT_MODE (*reg, SImode);
987 *mem = gen_rtx_SIGN_EXTEND (SImode, *mem);
991 reg_mode = mode_for_size (reg_bytes * BITS_PER_UNIT, MODE_INT, 0);
992 PUT_MODE (*reg, reg_mode);
997 /* Return the effect of frame-related instruction INSN. */
1000 mep_frame_expr (rtx insn)
1004 note = find_reg_note (insn, REG_FRAME_RELATED_EXPR, 0);
1005 expr = (note != 0 ? XEXP (note, 0) : copy_rtx (PATTERN (insn)));
1006 RTX_FRAME_RELATED_P (expr) = 1;
1010 /* Merge instructions INSN1 and INSN2 using a PARALLEL. Store the
1011 new pattern in INSN1; INSN2 will be deleted by the caller. */
1014 mep_make_parallel (rtx insn1, rtx insn2)
1018 if (RTX_FRAME_RELATED_P (insn2))
1020 expr = mep_frame_expr (insn2);
1021 if (RTX_FRAME_RELATED_P (insn1))
1022 expr = gen_rtx_SEQUENCE (VOIDmode,
1023 gen_rtvec (2, mep_frame_expr (insn1), expr));
1024 set_unique_reg_note (insn1, REG_FRAME_RELATED_EXPR, expr);
1025 RTX_FRAME_RELATED_P (insn1) = 1;
1028 PATTERN (insn1) = gen_rtx_PARALLEL (VOIDmode,
1029 gen_rtvec (2, PATTERN (insn1),
1031 INSN_CODE (insn1) = -1;
1034 /* SET_INSN is an instruction that adds OFFSET to REG. Go back through
1035 the basic block to see if any previous load or store instruction can
1036 be persuaded to do SET_INSN as a side-effect. Return true if so. */
1039 mep_use_post_modify_p_1 (rtx set_insn, rtx reg, rtx offset)
1046 insn = PREV_INSN (insn);
1049 if (mep_use_post_modify_for_set_p (PATTERN (insn), reg, offset))
1051 mep_make_parallel (insn, set_insn);
1055 if (reg_set_p (reg, insn)
1056 || reg_referenced_p (reg, PATTERN (insn))
1057 || volatile_insn_p (PATTERN (insn)))
1061 while (!NOTE_INSN_BASIC_BLOCK_P (insn));
1065 /* A wrapper around mep_use_post_modify_p_1 that preserves recog_data. */
1068 mep_use_post_modify_p (rtx insn, rtx reg, rtx offset)
1070 bool result = mep_use_post_modify_p_1 (insn, reg, offset);
1071 extract_insn (insn);
1076 mep_allow_clip (rtx ux, rtx lx, int s)
1078 HOST_WIDE_INT u = INTVAL (ux);
1079 HOST_WIDE_INT l = INTVAL (lx);
1082 if (!TARGET_OPT_CLIP)
1087 for (i = 0; i < 30; i ++)
1088 if ((u == ((HOST_WIDE_INT) 1 << i) - 1)
1089 && (l == - ((HOST_WIDE_INT) 1 << i)))
1097 for (i = 0; i < 30; i ++)
1098 if ((u == ((HOST_WIDE_INT) 1 << i) - 1))
1105 mep_bit_position_p (rtx x, bool looking_for)
1107 if (GET_CODE (x) != CONST_INT)
1109 switch ((int) INTVAL(x) & 0xff)
1111 case 0x01: case 0x02: case 0x04: case 0x08:
1112 case 0x10: case 0x20: case 0x40: case 0x80:
1114 case 0xfe: case 0xfd: case 0xfb: case 0xf7:
1115 case 0xef: case 0xdf: case 0xbf: case 0x7f:
1116 return !looking_for;
1122 move_needs_splitting (rtx dest, rtx src,
1123 enum machine_mode mode ATTRIBUTE_UNUSED)
1125 int s = mep_section_tag (src);
1129 if (GET_CODE (src) == CONST
1130 || GET_CODE (src) == MEM)
1131 src = XEXP (src, 0);
1132 else if (GET_CODE (src) == SYMBOL_REF
1133 || GET_CODE (src) == LABEL_REF
1134 || GET_CODE (src) == PLUS)
1140 || (GET_CODE (src) == PLUS
1141 && GET_CODE (XEXP (src, 1)) == CONST_INT
1142 && (INTVAL (XEXP (src, 1)) < -65536
1143 || INTVAL (XEXP (src, 1)) > 0xffffff))
1144 || (GET_CODE (dest) == REG
1145 && REGNO (dest) > 7 && REGNO (dest) < FIRST_PSEUDO_REGISTER))
1151 mep_split_mov (rtx *operands, int symbolic)
1155 if (move_needs_splitting (operands[0], operands[1], SImode))
1160 if (GET_CODE (operands[1]) != CONST_INT)
1163 if (constraint_satisfied_p (operands[1], CONSTRAINT_I)
1164 || constraint_satisfied_p (operands[1], CONSTRAINT_J)
1165 || constraint_satisfied_p (operands[1], CONSTRAINT_O))
1168 if (((!reload_completed && !reload_in_progress)
1169 || (REG_P (operands[0]) && REGNO (operands[0]) < 8))
1170 && constraint_satisfied_p (operands[1], CONSTRAINT_K))
1176 /* Irritatingly, the "jsrv" insn *toggles* PSW.OM rather than set
1177 it to one specific value. So the insn chosen depends on whether
1178 the source and destination modes match. */
1181 mep_vliw_mode_match (rtx tgt)
1183 bool src_vliw = mep_vliw_function_p (cfun->decl);
1184 bool tgt_vliw = INTVAL (tgt);
1186 return src_vliw == tgt_vliw;
1189 /* Like the above, but also test for near/far mismatches. */
1192 mep_vliw_jmp_match (rtx tgt)
1194 bool src_vliw = mep_vliw_function_p (cfun->decl);
1195 bool tgt_vliw = INTVAL (tgt);
1197 if (mep_section_tag (DECL_RTL (cfun->decl)) == 'f')
1200 return src_vliw == tgt_vliw;
1204 mep_multi_slot (rtx x)
1206 return get_attr_slot (x) == SLOT_MULTI;
1210 /* Be careful not to use macros that need to be compiled one way for
1211 strict, and another way for not-strict, like REG_OK_FOR_BASE_P. */
1214 mep_legitimate_address (enum machine_mode mode, rtx x, int strict)
1218 #define DEBUG_LEGIT 0
1220 fprintf (stderr, "legit: mode %s strict %d ", mode_name[mode], strict);
1224 if (GET_CODE (x) == LO_SUM
1225 && GET_CODE (XEXP (x, 0)) == REG
1226 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1227 && CONSTANT_P (XEXP (x, 1)))
1229 if (GET_MODE_SIZE (mode) > 4)
1231 /* We will end up splitting this, and lo_sums are not
1232 offsettable for us. */
1234 fprintf(stderr, " - nope, %%lo(sym)[reg] not splittable\n");
1239 fprintf (stderr, " - yup, %%lo(sym)[reg]\n");
1244 if (GET_CODE (x) == REG
1245 && GEN_REG (REGNO (x), strict))
1248 fprintf (stderr, " - yup, [reg]\n");
1253 if (GET_CODE (x) == PLUS
1254 && GET_CODE (XEXP (x, 0)) == REG
1255 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1256 && const_in_range (XEXP (x, 1), -32768, 32767))
1259 fprintf (stderr, " - yup, [reg+const]\n");
1264 if (GET_CODE (x) == PLUS
1265 && GET_CODE (XEXP (x, 0)) == REG
1266 && GEN_REG (REGNO (XEXP (x, 0)), strict)
1267 && GET_CODE (XEXP (x, 1)) == CONST
1268 && (GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
1269 || (GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
1270 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == UNSPEC
1271 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)))
1274 fprintf (stderr, " - yup, [reg+unspec]\n");
1279 the_tag = mep_section_tag (x);
1284 fprintf (stderr, " - nope, [far]\n");
1289 if (mode == VOIDmode
1290 && GET_CODE (x) == SYMBOL_REF)
1293 fprintf (stderr, " - yup, call [symbol]\n");
1298 if ((mode == SImode || mode == SFmode)
1300 && LEGITIMATE_CONSTANT_P (x)
1301 && the_tag != 't' && the_tag != 'b')
1303 if (GET_CODE (x) != CONST_INT
1304 || (INTVAL (x) <= 0xfffff
1306 && (INTVAL (x) % 4) == 0))
1309 fprintf (stderr, " - yup, [const]\n");
1316 fprintf (stderr, " - nope.\n");
1322 mep_legitimize_reload_address (rtx *x, enum machine_mode mode, int opnum,
1323 enum reload_type type,
1324 int ind_levels ATTRIBUTE_UNUSED)
1326 if (GET_CODE (*x) == PLUS
1327 && GET_CODE (XEXP (*x, 0)) == MEM
1328 && GET_CODE (XEXP (*x, 1)) == REG)
1330 /* GCC will by default copy the MEM into a REG, which results in
1331 an invalid address. For us, the best thing to do is move the
1332 whole expression to a REG. */
1333 push_reload (*x, NULL_RTX, x, NULL,
1334 GENERAL_REGS, mode, VOIDmode,
1339 if (GET_CODE (*x) == PLUS
1340 && GET_CODE (XEXP (*x, 0)) == SYMBOL_REF
1341 && GET_CODE (XEXP (*x, 1)) == CONST_INT)
1343 char e = mep_section_tag (XEXP (*x, 0));
1345 if (e != 't' && e != 'b')
1347 /* GCC thinks that (sym+const) is a valid address. Well,
1348 sometimes it is, this time it isn't. The best thing to
1349 do is reload the symbol to a register, since reg+int
1350 tends to work, and we can't just add the symbol and
1352 push_reload (XEXP (*x, 0), NULL_RTX, &(XEXP(*x, 0)), NULL,
1353 GENERAL_REGS, mode, VOIDmode,
1362 mep_core_address_length (rtx insn, int opn)
1364 rtx set = single_set (insn);
1365 rtx mem = XEXP (set, opn);
1366 rtx other = XEXP (set, 1-opn);
1367 rtx addr = XEXP (mem, 0);
1369 if (register_operand (addr, Pmode))
1371 if (GET_CODE (addr) == PLUS)
1373 rtx addend = XEXP (addr, 1);
1375 gcc_assert (REG_P (XEXP (addr, 0)));
1377 switch (REGNO (XEXP (addr, 0)))
1379 case STACK_POINTER_REGNUM:
1380 if (GET_MODE_SIZE (GET_MODE (mem)) == 4
1381 && mep_imm7a4_operand (addend, VOIDmode))
1386 gcc_assert (REG_P (other));
1388 if (REGNO (other) >= 8)
1391 if (GET_CODE (addend) == CONST
1392 && GET_CODE (XEXP (addend, 0)) == UNSPEC
1393 && XINT (XEXP (addend, 0), 1) == UNS_TPREL)
1396 if (GET_CODE (addend) == CONST_INT
1397 && INTVAL (addend) >= 0
1398 && INTVAL (addend) <= 127
1399 && INTVAL (addend) % GET_MODE_SIZE (GET_MODE (mem)) == 0)
1409 mep_cop_address_length (rtx insn, int opn)
1411 rtx set = single_set (insn);
1412 rtx mem = XEXP (set, opn);
1413 rtx addr = XEXP (mem, 0);
1415 if (GET_CODE (mem) != MEM)
1417 if (register_operand (addr, Pmode))
1419 if (GET_CODE (addr) == POST_INC)
1425 #define DEBUG_EXPAND_MOV 0
1427 mep_expand_mov (rtx *operands, enum machine_mode mode)
1432 int post_reload = 0;
1434 tag[0] = mep_section_tag (operands[0]);
1435 tag[1] = mep_section_tag (operands[1]);
1437 if (!reload_in_progress
1438 && !reload_completed
1439 && GET_CODE (operands[0]) != REG
1440 && GET_CODE (operands[0]) != SUBREG
1441 && GET_CODE (operands[1]) != REG
1442 && GET_CODE (operands[1]) != SUBREG)
1443 operands[1] = copy_to_mode_reg (mode, operands[1]);
1445 #if DEBUG_EXPAND_MOV
1446 fprintf(stderr, "expand move %s %d\n", mode_name[mode],
1447 reload_in_progress || reload_completed);
1448 debug_rtx (operands[0]);
1449 debug_rtx (operands[1]);
1452 if (mode == DImode || mode == DFmode)
1455 if (reload_in_progress || reload_completed)
1459 if (GET_CODE (operands[0]) == REG && REGNO (operands[0]) == TP_REGNO)
1460 cfun->machine->reload_changes_tp = true;
1462 if (tag[0] == 't' || tag[1] == 't')
1464 r = has_hard_reg_initial_val (Pmode, GP_REGNO);
1465 if (!r || GET_CODE (r) != REG || REGNO (r) != GP_REGNO)
1468 if (tag[0] == 'b' || tag[1] == 'b')
1470 r = has_hard_reg_initial_val (Pmode, TP_REGNO);
1471 if (!r || GET_CODE (r) != REG || REGNO (r) != TP_REGNO)
1474 if (cfun->machine->reload_changes_tp == true)
1481 if (symbol_p (operands[1]))
1483 t = mep_section_tag (operands[1]);
1484 if (t == 'b' || t == 't')
1487 if (GET_CODE (operands[1]) == SYMBOL_REF)
1489 tpsym = operands[1];
1490 n = gen_rtx_UNSPEC (mode,
1491 gen_rtvec (1, operands[1]),
1492 t == 'b' ? UNS_TPREL : UNS_GPREL);
1493 n = gen_rtx_CONST (mode, n);
1495 else if (GET_CODE (operands[1]) == CONST
1496 && GET_CODE (XEXP (operands[1], 0)) == PLUS
1497 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF
1498 && GET_CODE (XEXP (XEXP (operands[1], 0), 1)) == CONST_INT)
1500 tpsym = XEXP (XEXP (operands[1], 0), 0);
1501 tpoffs = XEXP (XEXP (operands[1], 0), 1);
1502 n = gen_rtx_UNSPEC (mode,
1503 gen_rtvec (1, tpsym),
1504 t == 'b' ? UNS_TPREL : UNS_GPREL);
1505 n = gen_rtx_PLUS (mode, n, tpoffs);
1506 n = gen_rtx_CONST (mode, n);
1508 else if (GET_CODE (operands[1]) == CONST
1509 && GET_CODE (XEXP (operands[1], 0)) == UNSPEC)
1513 error ("unusual TP-relative address");
1517 n = gen_rtx_PLUS (mode, (t == 'b' ? mep_tp_rtx ()
1518 : mep_gp_rtx ()), n);
1519 n = emit_insn (gen_rtx_SET (mode, operands[0], n));
1520 #if DEBUG_EXPAND_MOV
1521 fprintf(stderr, "mep_expand_mov emitting ");
1528 for (i=0; i < 2; i++)
1530 t = mep_section_tag (operands[i]);
1531 if (GET_CODE (operands[i]) == MEM && (t == 'b' || t == 't'))
1536 sym = XEXP (operands[i], 0);
1537 if (GET_CODE (sym) == CONST
1538 && GET_CODE (XEXP (sym, 0)) == UNSPEC)
1539 sym = XVECEXP (XEXP (sym, 0), 0, 0);
1552 n = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, sym), u);
1553 n = gen_rtx_CONST (Pmode, n);
1554 n = gen_rtx_PLUS (Pmode, r, n);
1555 operands[i] = replace_equiv_address (operands[i], n);
1560 if ((GET_CODE (operands[1]) != REG
1561 && MEP_CONTROL_REG (operands[0]))
1562 || (GET_CODE (operands[0]) != REG
1563 && MEP_CONTROL_REG (operands[1])))
1566 #if DEBUG_EXPAND_MOV
1567 fprintf (stderr, "cr-mem, forcing op1 to reg\n");
1569 temp = gen_reg_rtx (mode);
1570 emit_move_insn (temp, operands[1]);
1574 if (symbolref_p (operands[0])
1575 && (mep_section_tag (XEXP (operands[0], 0)) == 'f'
1576 || (GET_MODE_SIZE (mode) != 4)))
1580 gcc_assert (!reload_in_progress && !reload_completed);
1582 temp = force_reg (Pmode, XEXP (operands[0], 0));
1583 operands[0] = replace_equiv_address (operands[0], temp);
1584 emit_move_insn (operands[0], operands[1]);
1588 if (!post_reload && (tag[1] == 't' || tag[1] == 'b'))
1591 if (symbol_p (operands[1])
1592 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1594 emit_insn (gen_movsi_topsym_s (operands[0], operands[1]));
1595 emit_insn (gen_movsi_botsym_s (operands[0], operands[0], operands[1]));
1599 if (symbolref_p (operands[1])
1600 && (tag[1] == 'f' || tag[1] == 't' || tag[1] == 'b'))
1604 if (reload_in_progress || reload_completed)
1607 temp = gen_reg_rtx (Pmode);
1609 emit_insn (gen_movsi_topsym_s (temp, operands[1]));
1610 emit_insn (gen_movsi_botsym_s (temp, temp, operands[1]));
1611 emit_move_insn (operands[0], replace_equiv_address (operands[1], temp));
1618 /* Cases where the pattern can't be made to use at all. */
1621 mep_mov_ok (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
1625 #define DEBUG_MOV_OK 0
1627 fprintf (stderr, "mep_mov_ok %s %c=%c\n", mode_name[mode], mep_section_tag (operands[0]),
1628 mep_section_tag (operands[1]));
1629 debug_rtx (operands[0]);
1630 debug_rtx (operands[1]);
1633 /* We want the movh patterns to get these. */
1634 if (GET_CODE (operands[1]) == HIGH)
1637 /* We can't store a register to a far variable without using a
1638 scratch register to hold the address. Using far variables should
1639 be split by mep_emit_mov anyway. */
1640 if (mep_section_tag (operands[0]) == 'f'
1641 || mep_section_tag (operands[1]) == 'f')
1644 fprintf (stderr, " - no, f\n");
1648 i = mep_section_tag (operands[1]);
1649 if ((i == 'b' || i == 't') && !reload_completed && !reload_in_progress)
1650 /* These are supposed to be generated with adds of the appropriate
1651 register. During and after reload, however, we allow them to
1652 be accessed as normal symbols because adding a dependency on
1653 the base register now might cause problems. */
1656 fprintf (stderr, " - no, bt\n");
1661 /* The only moves we can allow involve at least one general
1662 register, so require it. */
1663 for (i = 0; i < 2; i ++)
1665 /* Allow subregs too, before reload. */
1666 rtx x = operands[i];
1668 if (GET_CODE (x) == SUBREG)
1670 if (GET_CODE (x) == REG
1671 && ! MEP_CONTROL_REG (x))
1674 fprintf (stderr, " - ok\n");
1680 fprintf (stderr, " - no, no gen reg\n");
1685 #define DEBUG_SPLIT_WIDE_MOVE 0
1687 mep_split_wide_move (rtx *operands, enum machine_mode mode)
1691 #if DEBUG_SPLIT_WIDE_MOVE
1692 fprintf (stderr, "\n\033[34mmep_split_wide_move\033[0m mode %s\n", mode_name[mode]);
1693 debug_rtx (operands[0]);
1694 debug_rtx (operands[1]);
1697 for (i = 0; i <= 1; i++)
1699 rtx op = operands[i], hi, lo;
1701 switch (GET_CODE (op))
1705 unsigned int regno = REGNO (op);
1707 if (TARGET_64BIT_CR_REGS && CR_REGNO_P (regno))
1711 lo = gen_rtx_REG (SImode, regno);
1713 hi = gen_rtx_ZERO_EXTRACT (SImode,
1714 gen_rtx_REG (DImode, regno),
1719 hi = gen_rtx_REG (SImode, regno + TARGET_LITTLE_ENDIAN);
1720 lo = gen_rtx_REG (SImode, regno + TARGET_BIG_ENDIAN);
1728 hi = operand_subword (op, TARGET_LITTLE_ENDIAN, 0, mode);
1729 lo = operand_subword (op, TARGET_BIG_ENDIAN, 0, mode);
1736 /* The high part of CR <- GPR moves must be done after the low part. */
1737 operands [i + 4] = lo;
1738 operands [i + 2] = hi;
1741 if (reg_mentioned_p (operands[2], operands[5])
1742 || GET_CODE (operands[2]) == ZERO_EXTRACT
1743 || GET_CODE (operands[4]) == ZERO_EXTRACT)
1747 /* Overlapping register pairs -- make sure we don't
1748 early-clobber ourselves. */
1750 operands[2] = operands[4];
1753 operands[3] = operands[5];
1757 #if DEBUG_SPLIT_WIDE_MOVE
1758 fprintf(stderr, "\033[34m");
1759 debug_rtx (operands[2]);
1760 debug_rtx (operands[3]);
1761 debug_rtx (operands[4]);
1762 debug_rtx (operands[5]);
1763 fprintf(stderr, "\033[0m");
1767 /* Emit a setcc instruction in its entirity. */
1770 mep_expand_setcc_1 (enum rtx_code code, rtx dest, rtx op1, rtx op2)
1778 tmp = op1, op1 = op2, op2 = tmp;
1779 code = swap_condition (code);
1784 op1 = force_reg (SImode, op1);
1785 emit_insn (gen_rtx_SET (VOIDmode, dest,
1786 gen_rtx_fmt_ee (code, SImode, op1, op2)));
1790 if (op2 != const0_rtx)
1791 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1792 mep_expand_setcc_1 (LTU, dest, op1, const1_rtx);
1796 /* Branchful sequence:
1798 beq op1, op2, Lover 16-bit (op2 < 16), 32-bit otherwise
1801 Branchless sequence:
1802 add3 tmp, op1, -op2 32-bit (or mov + sub)
1803 sltu3 tmp, tmp, 1 16-bit
1804 xor3 dest, tmp, 1 32-bit
1806 if (optimize_size && op2 != const0_rtx)
1809 if (op2 != const0_rtx)
1810 op1 = expand_binop (SImode, sub_optab, op1, op2, NULL, 1, OPTAB_WIDEN);
1812 op2 = gen_reg_rtx (SImode);
1813 mep_expand_setcc_1 (LTU, op2, op1, const1_rtx);
1815 emit_insn (gen_rtx_SET (VOIDmode, dest,
1816 gen_rtx_XOR (SImode, op2, const1_rtx)));
1820 if (GET_CODE (op2) != CONST_INT
1821 || INTVAL (op2) == 0x7ffffff)
1823 op2 = GEN_INT (INTVAL (op2) + 1);
1824 return mep_expand_setcc_1 (LT, dest, op1, op2);
1827 if (GET_CODE (op2) != CONST_INT
1828 || INTVAL (op2) == -1)
1830 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) + 1, SImode));
1831 return mep_expand_setcc_1 (LTU, dest, op1, op2);
1834 if (GET_CODE (op2) != CONST_INT
1835 || INTVAL (op2) == trunc_int_for_mode (0x80000000, SImode))
1837 op2 = GEN_INT (INTVAL (op2) - 1);
1838 return mep_expand_setcc_1 (GT, dest, op1, op2);
1841 if (GET_CODE (op2) != CONST_INT
1842 || op2 == const0_rtx)
1844 op2 = GEN_INT (trunc_int_for_mode (INTVAL (op2) - 1, SImode));
1845 return mep_expand_setcc_1 (GTU, dest, op1, op2);
1853 mep_expand_setcc (rtx *operands)
1855 rtx dest = operands[0];
1856 enum rtx_code code = GET_CODE (operands[1]);
1857 rtx op0 = operands[2];
1858 rtx op1 = operands[3];
1860 return mep_expand_setcc_1 (code, dest, op0, op1);
1864 mep_expand_cbranch (rtx *operands)
1866 enum rtx_code code = GET_CODE (operands[0]);
1867 rtx op0 = operands[1];
1868 rtx op1 = operands[2];
1875 if (mep_imm4_operand (op1, SImode))
1878 tmp = gen_reg_rtx (SImode);
1879 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1886 if (mep_imm4_operand (op1, SImode))
1889 tmp = gen_reg_rtx (SImode);
1890 gcc_assert (mep_expand_setcc_1 (LT, tmp, op0, op1));
1899 if (! mep_reg_or_imm4_operand (op1, SImode))
1900 op1 = force_reg (SImode, op1);
1905 if (GET_CODE (op1) == CONST_INT
1906 && INTVAL (op1) != 0x7fffffff)
1908 op1 = GEN_INT (INTVAL (op1) + 1);
1909 code = (code == LE ? LT : GE);
1913 tmp = gen_reg_rtx (SImode);
1914 gcc_assert (mep_expand_setcc_1 (LT, tmp, op1, op0));
1916 code = (code == LE ? EQ : NE);
1922 if (op1 == const1_rtx)
1929 tmp = gen_reg_rtx (SImode);
1930 gcc_assert (mep_expand_setcc_1 (LTU, tmp, op0, op1));
1937 tmp = gen_reg_rtx (SImode);
1938 if (mep_expand_setcc_1 (LEU, tmp, op0, op1))
1940 else if (mep_expand_setcc_1 (LTU, tmp, op1, op0))
1949 tmp = gen_reg_rtx (SImode);
1950 gcc_assert (mep_expand_setcc_1 (GTU, tmp, op0, op1)
1951 || mep_expand_setcc_1 (LTU, tmp, op1, op0));
1958 tmp = gen_reg_rtx (SImode);
1959 if (mep_expand_setcc_1 (GEU, tmp, op0, op1))
1961 else if (mep_expand_setcc_1 (LTU, tmp, op0, op1))
1973 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
1977 mep_emit_cbranch (rtx *operands, int ne)
1979 if (GET_CODE (operands[1]) == REG)
1980 return ne ? "bne\t%0, %1, %l2" : "beq\t%0, %1, %l2";
1981 else if (INTVAL (operands[1]) == 0)
1982 return ne ? "bnez\t%0, %l2" : "beqz\t%0, %l2";
1984 return ne ? "bnei\t%0, %1, %l2" : "beqi\t%0, %1, %l2";
1988 mep_expand_call (rtx *operands, int returns_value)
1990 rtx addr = operands[returns_value];
1991 rtx tp = mep_tp_rtx ();
1992 rtx gp = mep_gp_rtx ();
1994 gcc_assert (GET_CODE (addr) == MEM);
1996 addr = XEXP (addr, 0);
1998 if (! mep_call_address_operand (addr, VOIDmode))
1999 addr = force_reg (SImode, addr);
2001 if (! operands[returns_value+2])
2002 operands[returns_value+2] = const0_rtx;
2005 emit_call_insn (gen_call_value_internal (operands[0], addr, operands[2],
2006 operands[3], tp, gp));
2008 emit_call_insn (gen_call_internal (addr, operands[1],
2009 operands[2], tp, gp));
2012 /* Aliasing Support. */
2014 /* If X is a machine specific address (i.e. a symbol or label being
2015 referenced as a displacement from the GOT implemented using an
2016 UNSPEC), then return the base term. Otherwise return X. */
2019 mep_find_base_term (rtx x)
2024 if (GET_CODE (x) != PLUS)
2029 if (has_hard_reg_initial_val(Pmode, TP_REGNO)
2030 && base == mep_tp_rtx ())
2032 else if (has_hard_reg_initial_val(Pmode, GP_REGNO)
2033 && base == mep_gp_rtx ())
2038 if (GET_CODE (term) != CONST)
2040 term = XEXP (term, 0);
2042 if (GET_CODE (term) != UNSPEC
2043 || XINT (term, 1) != unspec)
2046 return XVECEXP (term, 0, 0);
2049 /* Reload Support. */
2051 /* Return true if the registers in CLASS cannot represent the change from
2052 modes FROM to TO. */
2055 mep_cannot_change_mode_class (enum machine_mode from, enum machine_mode to,
2056 enum reg_class regclass)
2061 /* 64-bit COP regs must remain 64-bit COP regs. */
2062 if (TARGET_64BIT_CR_REGS
2063 && (regclass == CR_REGS
2064 || regclass == LOADABLE_CR_REGS)
2065 && (GET_MODE_SIZE (to) < 8
2066 || GET_MODE_SIZE (from) < 8))
2072 #define MEP_NONGENERAL_CLASS(C) (!reg_class_subset_p (C, GENERAL_REGS))
2075 mep_general_reg (rtx x)
2077 while (GET_CODE (x) == SUBREG)
2079 return GET_CODE (x) == REG && GR_REGNO_P (REGNO (x));
2083 mep_nongeneral_reg (rtx x)
2085 while (GET_CODE (x) == SUBREG)
2087 return (GET_CODE (x) == REG
2088 && !GR_REGNO_P (REGNO (x)) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2092 mep_general_copro_reg (rtx x)
2094 while (GET_CODE (x) == SUBREG)
2096 return (GET_CODE (x) == REG && CR_REGNO_P (REGNO (x)));
2100 mep_nonregister (rtx x)
2102 while (GET_CODE (x) == SUBREG)
2104 return (GET_CODE (x) != REG || REGNO (x) >= FIRST_PSEUDO_REGISTER);
2107 #define DEBUG_RELOAD 0
2109 /* Return the secondary reload class needed for moving value X to or
2110 from a register in coprocessor register class CLASS. */
2112 static enum reg_class
2113 mep_secondary_copro_reload_class (enum reg_class rclass, rtx x)
2115 if (mep_general_reg (x))
2116 /* We can do the move directly if mep_have_core_copro_moves_p,
2117 otherwise we need to go through memory. Either way, no secondary
2118 register is needed. */
2121 if (mep_general_copro_reg (x))
2123 /* We can do the move directly if mep_have_copro_copro_moves_p. */
2124 if (mep_have_copro_copro_moves_p)
2127 /* Otherwise we can use a temporary if mep_have_core_copro_moves_p. */
2128 if (mep_have_core_copro_moves_p)
2129 return GENERAL_REGS;
2131 /* Otherwise we need to do it through memory. No secondary
2132 register is needed. */
2136 if (reg_class_subset_p (rclass, LOADABLE_CR_REGS)
2137 && constraint_satisfied_p (x, CONSTRAINT_U))
2138 /* X is a memory value that we can access directly. */
2141 /* We have to move X into a GPR first and then copy it to
2142 the coprocessor register. The move from the GPR to the
2143 coprocessor might be done directly or through memory,
2144 depending on mep_have_core_copro_moves_p. */
2145 return GENERAL_REGS;
2148 /* Copying X to register in RCLASS. */
2151 mep_secondary_input_reload_class (enum reg_class rclass,
2152 enum machine_mode mode ATTRIBUTE_UNUSED,
2158 fprintf (stderr, "secondary input reload copy to %s %s from ", reg_class_names[rclass], mode_name[mode]);
2162 if (reg_class_subset_p (rclass, CR_REGS))
2163 rv = mep_secondary_copro_reload_class (rclass, x);
2164 else if (MEP_NONGENERAL_CLASS (rclass)
2165 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2169 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2174 /* Copying register in RCLASS to X. */
2177 mep_secondary_output_reload_class (enum reg_class rclass,
2178 enum machine_mode mode ATTRIBUTE_UNUSED,
2184 fprintf (stderr, "secondary output reload copy from %s %s to ", reg_class_names[rclass], mode_name[mode]);
2188 if (reg_class_subset_p (rclass, CR_REGS))
2189 rv = mep_secondary_copro_reload_class (rclass, x);
2190 else if (MEP_NONGENERAL_CLASS (rclass)
2191 && (mep_nonregister (x) || mep_nongeneral_reg (x)))
2195 fprintf (stderr, " - requires %s\n", reg_class_names[rv]);
2201 /* Implement SECONDARY_MEMORY_NEEDED. */
2204 mep_secondary_memory_needed (enum reg_class rclass1, enum reg_class rclass2,
2205 enum machine_mode mode ATTRIBUTE_UNUSED)
2207 if (!mep_have_core_copro_moves_p)
2209 if (reg_classes_intersect_p (rclass1, CR_REGS)
2210 && reg_classes_intersect_p (rclass2, GENERAL_REGS))
2212 if (reg_classes_intersect_p (rclass2, CR_REGS)
2213 && reg_classes_intersect_p (rclass1, GENERAL_REGS))
2215 if (!mep_have_copro_copro_moves_p
2216 && reg_classes_intersect_p (rclass1, CR_REGS)
2217 && reg_classes_intersect_p (rclass2, CR_REGS))
2224 mep_expand_reload (rtx *operands, enum machine_mode mode)
2226 /* There are three cases for each direction:
2231 int s0 = mep_section_tag (operands[0]) == 'f';
2232 int s1 = mep_section_tag (operands[1]) == 'f';
2233 int c0 = mep_nongeneral_reg (operands[0]);
2234 int c1 = mep_nongeneral_reg (operands[1]);
2235 int which = (s0 ? 20:0) + (c0 ? 10:0) + (s1 ? 2:0) + (c1 ? 1:0);
2238 fprintf (stderr, "expand_reload %s\n", mode_name[mode]);
2239 debug_rtx (operands[0]);
2240 debug_rtx (operands[1]);
2245 case 00: /* Don't know why this gets here. */
2246 case 02: /* general = far */
2247 emit_move_insn (operands[0], operands[1]);
2250 case 10: /* cr = mem */
2251 case 11: /* cr = cr */
2252 case 01: /* mem = cr */
2253 case 12: /* cr = far */
2254 emit_move_insn (operands[2], operands[1]);
2255 emit_move_insn (operands[0], operands[2]);
2258 case 20: /* far = general */
2259 emit_move_insn (operands[2], XEXP (operands[1], 0));
2260 emit_move_insn (operands[0], gen_rtx_MEM (mode, operands[2]));
2263 case 21: /* far = cr */
2264 case 22: /* far = far */
2266 fprintf (stderr, "unsupported expand reload case %02d for mode %s\n",
2267 which, mode_name[mode]);
2268 debug_rtx (operands[0]);
2269 debug_rtx (operands[1]);
2274 /* Implement PREFERRED_RELOAD_CLASS. See whether X is a constant that
2275 can be moved directly into registers 0 to 7, but not into the rest.
2276 If so, and if the required class includes registers 0 to 7, restrict
2277 it to those registers. */
2280 mep_preferred_reload_class (rtx x, enum reg_class rclass)
2282 switch (GET_CODE (x))
2285 if (INTVAL (x) >= 0x10000
2286 && INTVAL (x) < 0x01000000
2287 && (INTVAL (x) & 0xffff) != 0
2288 && reg_class_subset_p (TPREL_REGS, rclass))
2289 rclass = TPREL_REGS;
2295 if (mep_section_tag (x) != 'f'
2296 && reg_class_subset_p (TPREL_REGS, rclass))
2297 rclass = TPREL_REGS;
2306 /* Implement REGISTER_MOVE_COST. Return 2 for direct single-register
2307 moves, 4 for direct double-register moves, and 1000 for anything
2308 that requires a temporary register or temporary stack slot. */
2311 mep_register_move_cost (enum machine_mode mode, enum reg_class from, enum reg_class to)
2313 if (mep_have_copro_copro_moves_p
2314 && reg_class_subset_p (from, CR_REGS)
2315 && reg_class_subset_p (to, CR_REGS))
2317 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2321 if (reg_class_subset_p (from, CR_REGS)
2322 && reg_class_subset_p (to, CR_REGS))
2324 if (TARGET_32BIT_CR_REGS && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2328 if (reg_class_subset_p (from, CR_REGS)
2329 || reg_class_subset_p (to, CR_REGS))
2331 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2335 if (mep_secondary_memory_needed (from, to, mode))
2337 if (MEP_NONGENERAL_CLASS (from) && MEP_NONGENERAL_CLASS (to))
2340 if (GET_MODE_SIZE (mode) > 4)
2347 /* Functions to save and restore machine-specific function data. */
2349 static struct machine_function *
2350 mep_init_machine_status (void)
2352 struct machine_function *f;
2354 f = (struct machine_function *) ggc_alloc_cleared (sizeof (struct machine_function));
2360 mep_allocate_initial_value (rtx reg)
2364 if (GET_CODE (reg) != REG)
2367 if (REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2370 /* In interrupt functions, the "initial" values of $gp and $tp are
2371 provided by the prologue. They are not necessarily the same as
2372 the values that the caller was using. */
2373 if (REGNO (reg) == TP_REGNO || REGNO (reg) == GP_REGNO)
2374 if (mep_interrupt_p ())
2377 if (! cfun->machine->reg_save_slot[REGNO(reg)])
2379 cfun->machine->reg_save_size += 4;
2380 cfun->machine->reg_save_slot[REGNO(reg)] = cfun->machine->reg_save_size;
2383 rss = cfun->machine->reg_save_slot[REGNO(reg)];
2384 return gen_rtx_MEM (SImode, plus_constant (arg_pointer_rtx, -rss));
2388 mep_return_addr_rtx (int count)
2393 return get_hard_reg_initial_val (Pmode, LP_REGNO);
2399 return get_hard_reg_initial_val (Pmode, TP_REGNO);
2405 return get_hard_reg_initial_val (Pmode, GP_REGNO);
2409 mep_interrupt_p (void)
2411 if (cfun->machine->interrupt_handler == 0)
2413 int interrupt_handler
2414 = (lookup_attribute ("interrupt",
2415 DECL_ATTRIBUTES (current_function_decl))
2417 cfun->machine->interrupt_handler = interrupt_handler ? 2 : 1;
2419 return cfun->machine->interrupt_handler == 2;
2423 mep_disinterrupt_p (void)
2425 if (cfun->machine->disable_interrupts == 0)
2427 int disable_interrupts
2428 = (lookup_attribute ("disinterrupt",
2429 DECL_ATTRIBUTES (current_function_decl))
2431 cfun->machine->disable_interrupts = disable_interrupts ? 2 : 1;
2433 return cfun->machine->disable_interrupts == 2;
2437 /* Frame/Epilog/Prolog Related. */
2440 mep_reg_set_p (rtx reg, rtx insn)
2442 /* Similar to reg_set_p in rtlanal.c, but we ignore calls */
2445 if (FIND_REG_INC_NOTE (insn, reg))
2447 insn = PATTERN (insn);
2450 if (GET_CODE (insn) == SET
2451 && GET_CODE (XEXP (insn, 0)) == REG
2452 && GET_CODE (XEXP (insn, 1)) == REG
2453 && REGNO (XEXP (insn, 0)) == REGNO (XEXP (insn, 1)))
2456 return set_of (reg, insn) != NULL_RTX;
2460 #define MEP_SAVES_UNKNOWN 0
2461 #define MEP_SAVES_YES 1
2462 #define MEP_SAVES_MAYBE 2
2463 #define MEP_SAVES_NO 3
2466 mep_reg_set_in_function (int regno)
2470 if (mep_interrupt_p () && df_regs_ever_live_p(regno))
2473 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2476 push_topmost_sequence ();
2477 insn = get_insns ();
2478 pop_topmost_sequence ();
2483 reg = gen_rtx_REG (SImode, regno);
2485 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
2486 if (INSN_P (insn) && mep_reg_set_p (reg, insn))
2492 mep_asm_without_operands_p (void)
2494 if (cfun->machine->asms_without_operands == 0)
2498 push_topmost_sequence ();
2499 insn = get_insns ();
2500 pop_topmost_sequence ();
2502 cfun->machine->asms_without_operands = 1;
2506 && GET_CODE (PATTERN (insn)) == ASM_INPUT)
2508 cfun->machine->asms_without_operands = 2;
2511 insn = NEXT_INSN (insn);
2515 return cfun->machine->asms_without_operands == 2;
2518 /* Interrupt functions save/restore every call-preserved register, and
2519 any call-used register it uses (or all if it calls any function,
2520 since they may get clobbered there too). Here we check to see
2521 which call-used registers need saving. */
2523 #define IVC2_ISAVED_REG(r) (TARGET_IVC2 \
2524 && (r == FIRST_CCR_REGNO + 1 \
2525 || (r >= FIRST_CCR_REGNO + 8 && r <= FIRST_CCR_REGNO + 11) \
2526 || (r >= FIRST_CCR_REGNO + 16 && r <= FIRST_CCR_REGNO + 31)))
2529 mep_interrupt_saved_reg (int r)
2531 if (!mep_interrupt_p ())
2533 if (r == REGSAVE_CONTROL_TEMP
2534 || (TARGET_64BIT_CR_REGS && TARGET_COP && r == REGSAVE_CONTROL_TEMP+1))
2536 if (mep_asm_without_operands_p ()
2538 || (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO || r == LP_REGNO)
2539 || IVC2_ISAVED_REG (r)))
2541 if (!current_function_is_leaf)
2542 /* Function calls mean we need to save $lp. */
2543 if (r == LP_REGNO || IVC2_ISAVED_REG (r))
2545 if (!current_function_is_leaf || cfun->machine->doloop_tags > 0)
2546 /* The interrupt handler might use these registers for repeat blocks,
2547 or it might call a function that does so. */
2548 if (r == RPB_REGNO || r == RPE_REGNO || r == RPC_REGNO)
2550 if (current_function_is_leaf && call_used_regs[r] && !df_regs_ever_live_p(r))
2552 /* Functions we call might clobber these. */
2553 if (call_used_regs[r] && !fixed_regs[r])
2555 /* Additional registers that need to be saved for IVC2. */
2556 if (IVC2_ISAVED_REG (r))
2563 mep_call_saves_register (int r)
2565 /* if (cfun->machine->reg_saved[r] == MEP_SAVES_UNKNOWN)*/
2567 int rv = MEP_SAVES_NO;
2569 if (cfun->machine->reg_save_slot[r])
2571 else if (r == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2573 else if (r == FRAME_POINTER_REGNUM && frame_pointer_needed)
2575 else if ((!call_used_regs[r] || r == LP_REGNO) && df_regs_ever_live_p(r))
2577 else if (crtl->calls_eh_return && (r == 10 || r == 11))
2578 /* We need these to have stack slots so that they can be set during
2581 else if (mep_interrupt_saved_reg (r))
2583 cfun->machine->reg_saved[r] = rv;
2585 return cfun->machine->reg_saved[r] == MEP_SAVES_YES;
2588 /* Return true if epilogue uses register REGNO. */
2591 mep_epilogue_uses (int regno)
2593 /* Since $lp is a call-saved register, the generic code will normally
2594 mark it used in the epilogue if it needs to be saved and restored.
2595 However, when profiling is enabled, the profiling code will implicitly
2596 clobber $11. This case has to be handled specially both here and in
2597 mep_call_saves_register. */
2598 if (regno == LP_REGNO && (profile_arc_flag > 0 || profile_flag > 0))
2600 /* Interrupt functions save/restore pretty much everything. */
2601 return (reload_completed && mep_interrupt_saved_reg (regno));
2605 mep_reg_size (int regno)
2607 if (CR_REGNO_P (regno) && TARGET_64BIT_CR_REGS)
2613 mep_elimination_offset (int from, int to)
2617 int frame_size = get_frame_size () + crtl->outgoing_args_size;
2620 memset (cfun->machine->reg_saved, 0, sizeof (cfun->machine->reg_saved));
2622 /* We don't count arg_regs_to_save in the arg pointer offset, because
2623 gcc thinks the arg pointer has moved along with the saved regs.
2624 However, we do count it when we adjust $sp in the prologue. */
2626 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2627 if (mep_call_saves_register (i))
2628 reg_save_size += mep_reg_size (i);
2630 if (reg_save_size % 8)
2631 cfun->machine->regsave_filler = 8 - (reg_save_size % 8);
2633 cfun->machine->regsave_filler = 0;
2635 /* This is what our total stack adjustment looks like. */
2636 total_size = (reg_save_size + frame_size + cfun->machine->regsave_filler);
2639 cfun->machine->frame_filler = 8 - (total_size % 8);
2641 cfun->machine->frame_filler = 0;
2644 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
2645 return reg_save_size + cfun->machine->regsave_filler;
2647 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2648 return cfun->machine->frame_filler + frame_size;
2650 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
2651 return reg_save_size + cfun->machine->regsave_filler + cfun->machine->frame_filler + frame_size;
2659 RTX_FRAME_RELATED_P (x) = 1;
2663 /* Since the prologue/epilogue code is generated after optimization,
2664 we can't rely on gcc to split constants for us. So, this code
2665 captures all the ways to add a constant to a register in one logic
2666 chunk, including optimizing away insns we just don't need. This
2667 makes the prolog/epilog code easier to follow. */
2669 add_constant (int dest, int src, int value, int mark_frame)
2674 if (src == dest && value == 0)
2679 insn = emit_move_insn (gen_rtx_REG (SImode, dest),
2680 gen_rtx_REG (SImode, src));
2682 RTX_FRAME_RELATED_P(insn) = 1;
2686 if (value >= -32768 && value <= 32767)
2688 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2689 gen_rtx_REG (SImode, src),
2692 RTX_FRAME_RELATED_P(insn) = 1;
2696 /* Big constant, need to use a temp register. We use
2697 REGSAVE_CONTROL_TEMP because it's call clobberable (the reg save
2698 area is always small enough to directly add to). */
2700 hi = trunc_int_for_mode (value & 0xffff0000, SImode);
2701 lo = value & 0xffff;
2703 insn = emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2708 insn = emit_insn (gen_iorsi3 (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2709 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2713 insn = emit_insn (gen_addsi3 (gen_rtx_REG (SImode, dest),
2714 gen_rtx_REG (SImode, src),
2715 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP)));
2718 RTX_FRAME_RELATED_P(insn) = 1;
2719 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2720 gen_rtx_SET (SImode,
2721 gen_rtx_REG (SImode, dest),
2722 gen_rtx_PLUS (SImode,
2723 gen_rtx_REG (SImode, dest),
2729 mep_function_uses_sp (void)
2732 struct sequence_stack *seq;
2733 rtx sp = gen_rtx_REG (SImode, SP_REGNO);
2735 insn = get_insns ();
2736 for (seq = crtl->emit.sequence_stack;
2738 insn = seq->first, seq = seq->next);
2742 if (mep_mentioned_p (insn, sp, 0))
2744 insn = NEXT_INSN (insn);
2749 /* Move SRC to DEST. Mark the move as being potentially dead if
2753 maybe_dead_move (rtx dest, rtx src, bool ATTRIBUTE_UNUSED maybe_dead_p)
2755 rtx insn = emit_move_insn (dest, src);
2758 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD, const0_rtx, NULL);
2763 /* Used for interrupt functions, which can't assume that $tp and $gp
2764 contain the correct pointers. */
2767 mep_reload_pointer (int regno, const char *symbol)
2771 if (!df_regs_ever_live_p(regno) && current_function_is_leaf)
2774 reg = gen_rtx_REG (SImode, regno);
2775 sym = gen_rtx_SYMBOL_REF (SImode, symbol);
2776 emit_insn (gen_movsi_topsym_s (reg, sym));
2777 emit_insn (gen_movsi_botsym_s (reg, reg, sym));
2781 mep_expand_prologue (void)
2783 int i, rss, sp_offset = 0;
2786 int really_need_stack_frame = frame_size;
2789 /* We must not allow register renaming in interrupt functions,
2790 because that invalidates the correctness of the set of call-used
2791 registers we're going to save/restore. */
2792 mep_set_leaf_registers (mep_interrupt_p () ? 0 : 1);
2794 if (mep_disinterrupt_p ())
2795 emit_insn (gen_mep_disable_int ());
2797 cfun->machine->mep_frame_pointer_needed = frame_pointer_needed;
2799 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2800 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2802 /* Assign save slots for any register not already saved. DImode
2803 registers go at the end of the reg save area; the rest go at the
2804 beginning. This is for alignment purposes. */
2805 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2806 if (mep_call_saves_register(i))
2808 int regsize = mep_reg_size (i);
2810 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
2811 || mep_reg_set_in_function (i))
2812 really_need_stack_frame = 1;
2814 if (cfun->machine->reg_save_slot[i])
2819 cfun->machine->reg_save_size += regsize;
2820 cfun->machine->reg_save_slot[i] = cfun->machine->reg_save_size;
2824 cfun->machine->reg_save_slot[i] = reg_save_size - di_ofs;
2829 sp_offset = reg_save_size;
2830 if (sp_offset + frame_size < 128)
2831 sp_offset += frame_size ;
2833 add_constant (SP_REGNO, SP_REGNO, -sp_offset, 1);
2835 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
2836 if (mep_call_saves_register(i))
2840 enum machine_mode rmode;
2842 rss = cfun->machine->reg_save_slot[i];
2844 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
2845 && (!mep_reg_set_in_function (i)
2846 && !mep_interrupt_p ()))
2849 if (mep_reg_size (i) == 8)
2854 /* If there is a pseudo associated with this register's initial value,
2855 reload might have already spilt it to the stack slot suggested by
2856 ALLOCATE_INITIAL_VALUE. The moves emitted here can then be safely
2858 mem = gen_rtx_MEM (rmode,
2859 plus_constant (stack_pointer_rtx, sp_offset - rss));
2860 maybe_dead_p = rtx_equal_p (mem, has_hard_reg_initial_val (rmode, i));
2862 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
2863 F(maybe_dead_move (mem, gen_rtx_REG (rmode, i), maybe_dead_p));
2864 else if (rmode == DImode)
2867 int be = TARGET_BIG_ENDIAN ? 4 : 0;
2869 mem = gen_rtx_MEM (SImode,
2870 plus_constant (stack_pointer_rtx, sp_offset - rss + be));
2872 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2873 gen_rtx_REG (SImode, i),
2875 maybe_dead_move (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2876 gen_rtx_ZERO_EXTRACT (SImode,
2877 gen_rtx_REG (DImode, i),
2881 insn = maybe_dead_move (mem,
2882 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
2884 RTX_FRAME_RELATED_P (insn) = 1;
2886 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2887 gen_rtx_SET (VOIDmode,
2889 gen_rtx_REG (rmode, i)));
2890 mem = gen_rtx_MEM (SImode,
2891 plus_constant (stack_pointer_rtx, sp_offset - rss + (4-be)));
2892 insn = maybe_dead_move (mem,
2893 gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP+1),
2899 maybe_dead_move (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2900 gen_rtx_REG (rmode, i),
2902 insn = maybe_dead_move (mem,
2903 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
2905 RTX_FRAME_RELATED_P (insn) = 1;
2907 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2908 gen_rtx_SET (VOIDmode,
2910 gen_rtx_REG (rmode, i)));
2914 if (frame_pointer_needed)
2916 /* We've already adjusted down by sp_offset. Total $sp change
2917 is reg_save_size + frame_size. We want a net change here of
2918 just reg_save_size. */
2919 add_constant (FP_REGNO, SP_REGNO, sp_offset - reg_save_size, 1);
2922 add_constant (SP_REGNO, SP_REGNO, sp_offset-(reg_save_size+frame_size), 1);
2924 if (mep_interrupt_p ())
2926 mep_reload_pointer(GP_REGNO, "__sdabase");
2927 mep_reload_pointer(TP_REGNO, "__tpbase");
2932 mep_start_function (FILE *file, HOST_WIDE_INT hwi_local)
2934 int local = hwi_local;
2935 int frame_size = local + crtl->outgoing_args_size;
2940 int slot_map[FIRST_PSEUDO_REGISTER], si, sj;
2942 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
2943 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
2944 sp_offset = reg_save_size + frame_size;
2946 ffill = cfun->machine->frame_filler;
2948 if (cfun->machine->mep_frame_pointer_needed)
2949 reg_names[FP_REGNO] = "$fp";
2951 reg_names[FP_REGNO] = "$8";
2956 if (debug_info_level == DINFO_LEVEL_NONE)
2958 fprintf (file, "\t# frame: %d", sp_offset);
2960 fprintf (file, " %d regs", reg_save_size);
2962 fprintf (file, " %d locals", local);
2963 if (crtl->outgoing_args_size)
2964 fprintf (file, " %d args", crtl->outgoing_args_size);
2965 fprintf (file, "\n");
2969 fprintf (file, "\t#\n");
2970 fprintf (file, "\t# Initial Frame Information:\n");
2971 if (sp_offset || !frame_pointer_needed)
2972 fprintf (file, "\t# Entry ---------- 0\n");
2974 /* Sort registers by save slots, so they're printed in the order
2975 they appear in memory, not the order they're saved in. */
2976 for (si=0; si<FIRST_PSEUDO_REGISTER; si++)
2978 for (si=0; si<FIRST_PSEUDO_REGISTER-1; si++)
2979 for (sj=si+1; sj<FIRST_PSEUDO_REGISTER; sj++)
2980 if (cfun->machine->reg_save_slot[slot_map[si]]
2981 > cfun->machine->reg_save_slot[slot_map[sj]])
2983 int t = slot_map[si];
2984 slot_map[si] = slot_map[sj];
2989 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2992 int r = slot_map[i];
2993 int rss = cfun->machine->reg_save_slot[r];
2998 rsize = mep_reg_size(r);
2999 skip = rss - (sp+rsize);
3001 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3002 fprintf (file, "\t# %3d bytes for saved %-3s %3d($sp)\n",
3003 rsize, reg_names[r], sp_offset - rss);
3007 skip = reg_save_size - sp;
3009 fprintf (file, "\t# %3d bytes for alignment\n", skip);
3011 if (frame_pointer_needed)
3012 fprintf (file, "\t# FP ---> ---------- %d (sp-%d)\n", reg_save_size, sp_offset-reg_save_size);
3014 fprintf (file, "\t# %3d bytes for local vars\n", local);
3016 fprintf (file, "\t# %3d bytes for alignment\n", ffill);
3017 if (crtl->outgoing_args_size)
3018 fprintf (file, "\t# %3d bytes for outgoing args\n",
3019 crtl->outgoing_args_size);
3020 fprintf (file, "\t# SP ---> ---------- %d\n", sp_offset);
3021 fprintf (file, "\t#\n");
3025 static int mep_prevent_lp_restore = 0;
3026 static int mep_sibcall_epilogue = 0;
3029 mep_expand_epilogue (void)
3031 int i, sp_offset = 0;
3032 int reg_save_size = 0;
3034 int lp_temp = LP_REGNO, lp_slot = -1;
3035 int really_need_stack_frame = get_frame_size() + crtl->outgoing_args_size;
3036 int interrupt_handler = mep_interrupt_p ();
3038 if (profile_arc_flag == 2)
3039 emit_insn (gen_mep_bb_trace_ret ());
3041 reg_save_size = mep_elimination_offset (ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM);
3042 frame_size = mep_elimination_offset (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);
3044 /* All save slots are set by mep_expand_prologue. */
3045 for (i=0; i<FIRST_PSEUDO_REGISTER; i++)
3046 if (mep_call_saves_register(i))
3048 if ((i != TP_REGNO && i != GP_REGNO && i != LP_REGNO)
3049 || mep_reg_set_in_function (i))
3050 really_need_stack_frame = 1;
3053 if (frame_pointer_needed)
3055 /* If we have a frame pointer, we won't have a reliable stack
3056 pointer (alloca, you know), so rebase SP from FP */
3057 emit_move_insn (gen_rtx_REG (SImode, SP_REGNO),
3058 gen_rtx_REG (SImode, FP_REGNO));
3059 sp_offset = reg_save_size;
3063 /* SP is right under our local variable space. Adjust it if
3065 sp_offset = reg_save_size + frame_size;
3066 if (sp_offset >= 128)
3068 add_constant (SP_REGNO, SP_REGNO, frame_size, 0);
3069 sp_offset -= frame_size;
3073 /* This is backwards so that we restore the control and coprocessor
3074 registers before the temporary registers we use to restore
3076 for (i=FIRST_PSEUDO_REGISTER-1; i>=1; i--)
3077 if (mep_call_saves_register (i))
3079 enum machine_mode rmode;
3080 int rss = cfun->machine->reg_save_slot[i];
3082 if (mep_reg_size (i) == 8)
3087 if ((i == TP_REGNO || i == GP_REGNO || i == LP_REGNO)
3088 && !(mep_reg_set_in_function (i) || interrupt_handler))
3090 if (mep_prevent_lp_restore && i == LP_REGNO)
3092 if (!mep_prevent_lp_restore
3093 && !interrupt_handler
3094 && (i == 10 || i == 11))
3097 if (GR_REGNO_P (i) || LOADABLE_CR_REGNO_P (i))
3098 emit_move_insn (gen_rtx_REG (rmode, i),
3100 plus_constant (stack_pointer_rtx,
3104 if (i == LP_REGNO && !mep_sibcall_epilogue && !interrupt_handler)
3105 /* Defer this one so we can jump indirect rather than
3106 copying the RA to $lp and "ret". EH epilogues
3107 automatically skip this anyway. */
3108 lp_slot = sp_offset-rss;
3111 emit_move_insn (gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP),
3113 plus_constant (stack_pointer_rtx,
3115 emit_move_insn (gen_rtx_REG (rmode, i),
3116 gen_rtx_REG (rmode, REGSAVE_CONTROL_TEMP));
3122 /* Restore this one last so we know it will be in the temp
3123 register when we return by jumping indirectly via the temp. */
3124 emit_move_insn (gen_rtx_REG (SImode, REGSAVE_CONTROL_TEMP),
3125 gen_rtx_MEM (SImode,
3126 plus_constant (stack_pointer_rtx,
3128 lp_temp = REGSAVE_CONTROL_TEMP;
3132 add_constant (SP_REGNO, SP_REGNO, sp_offset, 0);
3134 if (crtl->calls_eh_return && mep_prevent_lp_restore)
3135 emit_insn (gen_addsi3 (gen_rtx_REG (SImode, SP_REGNO),
3136 gen_rtx_REG (SImode, SP_REGNO),
3137 cfun->machine->eh_stack_adjust));
3139 if (mep_sibcall_epilogue)
3142 if (mep_disinterrupt_p ())
3143 emit_insn (gen_mep_enable_int ());
3145 if (mep_prevent_lp_restore)
3147 emit_jump_insn (gen_eh_return_internal ());
3150 else if (interrupt_handler)
3151 emit_jump_insn (gen_mep_reti ());
3153 emit_jump_insn (gen_return_internal (gen_rtx_REG (SImode, lp_temp)));
3157 mep_expand_eh_return (rtx *operands)
3159 if (GET_CODE (operands[0]) != REG || REGNO (operands[0]) != LP_REGNO)
3161 rtx ra = gen_rtx_REG (Pmode, LP_REGNO);
3162 emit_move_insn (ra, operands[0]);
3166 emit_insn (gen_eh_epilogue (operands[0]));
3170 mep_emit_eh_epilogue (rtx *operands ATTRIBUTE_UNUSED)
3172 cfun->machine->eh_stack_adjust = gen_rtx_REG (Pmode, 0);
3173 mep_prevent_lp_restore = 1;
3174 mep_expand_epilogue ();
3175 mep_prevent_lp_restore = 0;
3179 mep_expand_sibcall_epilogue (void)
3181 mep_sibcall_epilogue = 1;
3182 mep_expand_epilogue ();
3183 mep_sibcall_epilogue = 0;
3187 mep_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
3192 if (mep_section_tag (DECL_RTL (decl)) == 'f')
3195 /* Can't call to a sibcall from an interrupt or disinterrupt function. */
3196 if (mep_interrupt_p () || mep_disinterrupt_p ())
3203 mep_return_stackadj_rtx (void)
3205 return gen_rtx_REG (SImode, 10);
3209 mep_return_handler_rtx (void)
3211 return gen_rtx_REG (SImode, LP_REGNO);
3215 mep_function_profiler (FILE *file)
3217 /* Always right at the beginning of the function. */
3218 fprintf (file, "\t# mep function profiler\n");
3219 fprintf (file, "\tadd\t$sp, -8\n");
3220 fprintf (file, "\tsw\t$0, ($sp)\n");
3221 fprintf (file, "\tldc\t$0, $lp\n");
3222 fprintf (file, "\tsw\t$0, 4($sp)\n");
3223 fprintf (file, "\tbsr\t__mep_mcount\n");
3224 fprintf (file, "\tlw\t$0, 4($sp)\n");
3225 fprintf (file, "\tstc\t$0, $lp\n");
3226 fprintf (file, "\tlw\t$0, ($sp)\n");
3227 fprintf (file, "\tadd\t$sp, 8\n\n");
3231 mep_emit_bb_trace_ret (void)
3233 fprintf (asm_out_file, "\t# end of block profiling\n");
3234 fprintf (asm_out_file, "\tadd\t$sp, -8\n");
3235 fprintf (asm_out_file, "\tsw\t$0, ($sp)\n");
3236 fprintf (asm_out_file, "\tldc\t$0, $lp\n");
3237 fprintf (asm_out_file, "\tsw\t$0, 4($sp)\n");
3238 fprintf (asm_out_file, "\tbsr\t__bb_trace_ret\n");
3239 fprintf (asm_out_file, "\tlw\t$0, 4($sp)\n");
3240 fprintf (asm_out_file, "\tstc\t$0, $lp\n");
3241 fprintf (asm_out_file, "\tlw\t$0, ($sp)\n");
3242 fprintf (asm_out_file, "\tadd\t$sp, 8\n\n");
3249 /* Operand Printing. */
3252 mep_print_operand_address (FILE *stream, rtx address)
3254 if (GET_CODE (address) == MEM)
3255 address = XEXP (address, 0);
3257 /* cf: gcc.dg/asm-4.c. */
3258 gcc_assert (GET_CODE (address) == REG);
3260 mep_print_operand (stream, address, 0);
3266 const char *pattern;
3269 const conversions[] =
3272 { 0, "m+ri", "3(2)" },
3275 { 0, "mLrs", "%lo(3)(2)" },
3276 { 0, "mLr+si", "%lo(4+5)(2)" },
3277 { 0, "m+ru2s", "%tpoff(5)(2)" },
3278 { 0, "m+ru3s", "%sdaoff(5)(2)" },
3279 { 0, "m+r+u2si", "%tpoff(6+7)(2)" },
3280 { 0, "m+ru2+si", "%tpoff(6+7)(2)" },
3281 { 0, "m+r+u3si", "%sdaoff(6+7)(2)" },
3282 { 0, "m+ru3+si", "%sdaoff(6+7)(2)" },
3284 { 0, "m+si", "(2+3)" },
3285 { 0, "m+li", "(2+3)" },
3288 { 0, "+si", "1+2" },
3289 { 0, "+u2si", "%tpoff(3+4)" },
3290 { 0, "+u3si", "%sdaoff(3+4)" },
3296 { 'h', "Hs", "%hi(1)" },
3298 { 'I', "u2s", "%tpoff(2)" },
3299 { 'I', "u3s", "%sdaoff(2)" },
3300 { 'I', "+u2si", "%tpoff(3+4)" },
3301 { 'I', "+u3si", "%sdaoff(3+4)" },
3303 { 'P', "mr", "(1\\+),\\0" },
3309 unique_bit_in (HOST_WIDE_INT i)
3313 case 0x01: case 0xfe: return 0;
3314 case 0x02: case 0xfd: return 1;
3315 case 0x04: case 0xfb: return 2;
3316 case 0x08: case 0xf7: return 3;
3317 case 0x10: case 0x7f: return 4;
3318 case 0x20: case 0xbf: return 5;
3319 case 0x40: case 0xdf: return 6;
3320 case 0x80: case 0xef: return 7;
3327 bit_size_for_clip (HOST_WIDE_INT i)
3331 for (rv = 0; rv < 31; rv ++)
3332 if (((HOST_WIDE_INT) 1 << rv) > i)
3337 /* Print an operand to a assembler instruction. */
3340 mep_print_operand (FILE *file, rtx x, int code)
3343 const char *real_name;
3347 /* Print a mnemonic to do CR <- CR moves. Find out which intrinsic
3348 we're using, then skip over the "mep_" part of its name. */
3349 const struct cgen_insn *insn;
3351 if (mep_get_move_insn (mep_cmov, &insn))
3352 fputs (cgen_intrinsics[insn->intrinsic] + 4, file);
3354 mep_intrinsic_unavailable (mep_cmov);
3359 switch (GET_CODE (x))
3362 fputs ("clr", file);
3365 fputs ("set", file);
3368 fputs ("not", file);
3371 output_operand_lossage ("invalid %%L code");
3376 /* Print the second operand of a CR <- CR move. If we're using
3377 a two-operand instruction (i.e., a real cmov), then just print
3378 the operand normally. If we're using a "reg, reg, immediate"
3379 instruction such as caddi3, print the operand followed by a
3380 zero field. If we're using a three-register instruction,
3381 print the operand twice. */
3382 const struct cgen_insn *insn;
3384 mep_print_operand (file, x, 0);
3385 if (mep_get_move_insn (mep_cmov, &insn)
3386 && insn_data[insn->icode].n_operands == 3)
3389 if (insn_data[insn->icode].operand[2].predicate (x, VOIDmode))
3390 mep_print_operand (file, x, 0);
3392 mep_print_operand (file, const0_rtx, 0);
3398 for (i = 0; conversions[i].pattern; i++)
3399 if (conversions[i].code == code
3400 && strcmp(conversions[i].pattern, pattern) == 0)
3402 for (j = 0; conversions[i].format[j]; j++)
3403 if (conversions[i].format[j] == '\\')
3405 fputc (conversions[i].format[j+1], file);
3408 else if (ISDIGIT(conversions[i].format[j]))
3410 rtx r = patternr[conversions[i].format[j] - '0'];
3411 switch (GET_CODE (r))
3414 fprintf (file, "%s", reg_names [REGNO (r)]);
3420 fprintf (file, "%d", unique_bit_in (INTVAL (r)));
3423 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)));
3426 fprintf (file, "0x%x", ((int) INTVAL (r) >> 16) & 0xffff);
3429 fprintf (file, "%d", bit_size_for_clip (INTVAL (r)) - 1);
3432 fprintf (file, "0x%x", (int) INTVAL (r) & 0xffff);
3435 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3436 && !(INTVAL (r) & 0xff))
3437 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL(r));
3439 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3442 if (INTVAL (r) & ~(HOST_WIDE_INT)0xff
3443 && conversions[i].format[j+1] == 0)
3445 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (r));
3446 fprintf (file, " # 0x%x", (int) INTVAL(r) & 0xffff);
3449 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3452 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL(r));
3457 fprintf(file, "[const_double 0x%lx]",
3458 (unsigned long) CONST_DOUBLE_HIGH(r));
3461 real_name = TARGET_STRIP_NAME_ENCODING (XSTR (r, 0));
3462 assemble_name (file, real_name);
3465 output_asm_label (r);
3468 fprintf (stderr, "don't know how to print this operand:");
3475 if (conversions[i].format[j] == '+'
3476 && (!code || code == 'I')
3477 && ISDIGIT (conversions[i].format[j+1])
3478 && GET_CODE (patternr[conversions[i].format[j+1] - '0']) == CONST_INT
3479 && INTVAL (patternr[conversions[i].format[j+1] - '0']) < 0)
3481 fputc(conversions[i].format[j], file);
3485 if (!conversions[i].pattern)
3487 error ("unconvertible operand %c %qs", code?code:'-', pattern);
3495 mep_final_prescan_insn (rtx insn, rtx *operands ATTRIBUTE_UNUSED,
3496 int noperands ATTRIBUTE_UNUSED)
3498 /* Despite the fact that MeP is perfectly capable of branching and
3499 doing something else in the same bundle, gcc does jump
3500 optimization *after* scheduling, so we cannot trust the bundling
3501 flags on jump instructions. */
3502 if (GET_MODE (insn) == BImode
3503 && get_attr_slots (insn) != SLOTS_CORE)
3504 fputc ('+', asm_out_file);
3507 /* Function args in registers. */
3510 mep_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
3511 enum machine_mode mode ATTRIBUTE_UNUSED,
3512 tree type ATTRIBUTE_UNUSED, int *pretend_size,
3513 int second_time ATTRIBUTE_UNUSED)
3515 int nsave = 4 - (cum->nregs + 1);
3518 cfun->machine->arg_regs_to_save = nsave;
3519 *pretend_size = nsave * 4;
3523 bytesize (const_tree type, enum machine_mode mode)
3525 if (mode == BLKmode)
3526 return int_size_in_bytes (type);
3527 return GET_MODE_SIZE (mode);
3531 mep_expand_builtin_saveregs (void)
3536 ns = cfun->machine->arg_regs_to_save;
3539 bufsize = 8 * ((ns + 1) / 2) + 8 * ns;
3540 regbuf = assign_stack_local (SImode, bufsize, 64);
3545 regbuf = assign_stack_local (SImode, bufsize, 32);
3548 move_block_from_reg (5-ns, regbuf, ns);
3552 rtx tmp = gen_rtx_MEM (DImode, XEXP (regbuf, 0));
3553 int ofs = 8 * ((ns+1)/2);
3555 for (i=0; i<ns; i++)
3557 int rn = (4-ns) + i + 49;
3560 ptr = offset_address (tmp, GEN_INT (ofs), 2);
3561 emit_move_insn (ptr, gen_rtx_REG (DImode, rn));
3565 return XEXP (regbuf, 0);
3568 #define VECTOR_TYPE_P(t) (TREE_CODE(t) == VECTOR_TYPE)
3571 mep_build_builtin_va_list (void)
3573 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3577 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
3579 f_next_gp = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3580 get_identifier ("__va_next_gp"), ptr_type_node);
3581 f_next_gp_limit = build_decl (BUILTINS_LOCATION, FIELD_DECL,
3582 get_identifier ("__va_next_gp_limit"),
3584 f_next_cop = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_cop"),
3586 f_next_stack = build_decl (BUILTINS_LOCATION, FIELD_DECL, get_identifier ("__va_next_stack"),
3589 DECL_FIELD_CONTEXT (f_next_gp) = record;
3590 DECL_FIELD_CONTEXT (f_next_gp_limit) = record;
3591 DECL_FIELD_CONTEXT (f_next_cop) = record;
3592 DECL_FIELD_CONTEXT (f_next_stack) = record;
3594 TYPE_FIELDS (record) = f_next_gp;
3595 TREE_CHAIN (f_next_gp) = f_next_gp_limit;
3596 TREE_CHAIN (f_next_gp_limit) = f_next_cop;
3597 TREE_CHAIN (f_next_cop) = f_next_stack;
3599 layout_type (record);
3605 mep_expand_va_start (tree valist, rtx nextarg)
3607 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3608 tree next_gp, next_gp_limit, next_cop, next_stack;
3612 ns = cfun->machine->arg_regs_to_save;
3614 f_next_gp = TYPE_FIELDS (va_list_type_node);
3615 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3616 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3617 f_next_stack = TREE_CHAIN (f_next_cop);
3619 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3621 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3622 valist, f_next_gp_limit, NULL_TREE);
3623 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3625 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3626 valist, f_next_stack, NULL_TREE);
3628 /* va_list.next_gp = expand_builtin_saveregs (); */
3629 u = make_tree (sizetype, expand_builtin_saveregs ());
3630 u = fold_convert (ptr_type_node, u);
3631 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp, u);
3632 TREE_SIDE_EFFECTS (t) = 1;
3633 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3635 /* va_list.next_gp_limit = va_list.next_gp + 4 * ns; */
3636 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3638 t = build2 (MODIFY_EXPR, ptr_type_node, next_gp_limit, u);
3639 TREE_SIDE_EFFECTS (t) = 1;
3640 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3642 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
3643 size_int (8 * ((ns+1)/2)));
3644 /* va_list.next_cop = ROUND_UP(va_list.next_gp_limit,8); */
3645 t = build2 (MODIFY_EXPR, ptr_type_node, next_cop, u);
3646 TREE_SIDE_EFFECTS (t) = 1;
3647 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3649 /* va_list.next_stack = nextarg; */
3650 u = make_tree (ptr_type_node, nextarg);
3651 t = build2 (MODIFY_EXPR, ptr_type_node, next_stack, u);
3652 TREE_SIDE_EFFECTS (t) = 1;
3653 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3657 mep_gimplify_va_arg_expr (tree valist, tree type,
3658 tree *pre_p, tree *post_p ATTRIBUTE_UNUSED)
3660 HOST_WIDE_INT size, rsize;
3661 bool by_reference, ivc2_vec;
3662 tree f_next_gp, f_next_gp_limit, f_next_cop, f_next_stack;
3663 tree next_gp, next_gp_limit, next_cop, next_stack;
3664 tree label_sover, label_selse;
3667 ivc2_vec = TARGET_IVC2 && VECTOR_TYPE_P (type);
3669 size = int_size_in_bytes (type);
3670 by_reference = (size > (ivc2_vec ? 8 : 4)) || (size <= 0);
3674 type = build_pointer_type (type);
3677 rsize = (size + UNITS_PER_WORD - 1) & -UNITS_PER_WORD;
3679 f_next_gp = TYPE_FIELDS (va_list_type_node);
3680 f_next_gp_limit = TREE_CHAIN (f_next_gp);
3681 f_next_cop = TREE_CHAIN (f_next_gp_limit);
3682 f_next_stack = TREE_CHAIN (f_next_cop);
3684 next_gp = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp), valist, f_next_gp,
3686 next_gp_limit = build3 (COMPONENT_REF, TREE_TYPE (f_next_gp_limit),
3687 valist, f_next_gp_limit, NULL_TREE);
3688 next_cop = build3 (COMPONENT_REF, TREE_TYPE (f_next_cop), valist, f_next_cop,
3690 next_stack = build3 (COMPONENT_REF, TREE_TYPE (f_next_stack),
3691 valist, f_next_stack, NULL_TREE);
3693 /* if f_next_gp < f_next_gp_limit
3694 IF (VECTOR_P && IVC2)
3702 val = *f_next_stack;
3703 f_next_stack += rsize;
3707 label_sover = create_artificial_label (UNKNOWN_LOCATION);
3708 label_selse = create_artificial_label (UNKNOWN_LOCATION);
3709 res_addr = create_tmp_var (ptr_type_node, NULL);
3711 tmp = build2 (GE_EXPR, boolean_type_node, next_gp,
3712 unshare_expr (next_gp_limit));
3713 tmp = build3 (COND_EXPR, void_type_node, tmp,
3714 build1 (GOTO_EXPR, void_type_node,
3715 unshare_expr (label_selse)),
3717 gimplify_and_add (tmp, pre_p);
3721 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_cop);
3722 gimplify_and_add (tmp, pre_p);
3726 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, next_gp);
3727 gimplify_and_add (tmp, pre_p);
3730 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3731 unshare_expr (next_gp), size_int (4));
3732 gimplify_assign (unshare_expr (next_gp), tmp, pre_p);
3734 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3735 unshare_expr (next_cop), size_int (8));
3736 gimplify_assign (unshare_expr (next_cop), tmp, pre_p);
3738 tmp = build1 (GOTO_EXPR, void_type_node, unshare_expr (label_sover));
3739 gimplify_and_add (tmp, pre_p);
3743 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_selse));
3744 gimplify_and_add (tmp, pre_p);
3746 tmp = build2 (MODIFY_EXPR, void_type_node, res_addr, unshare_expr (next_stack));
3747 gimplify_and_add (tmp, pre_p);
3749 tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
3750 unshare_expr (next_stack), size_int (rsize));
3751 gimplify_assign (unshare_expr (next_stack), tmp, pre_p);
3755 tmp = build1 (LABEL_EXPR, void_type_node, unshare_expr (label_sover));
3756 gimplify_and_add (tmp, pre_p);
3758 res_addr = fold_convert (build_pointer_type (type), res_addr);
3761 res_addr = build_va_arg_indirect_ref (res_addr);
3763 return build_va_arg_indirect_ref (res_addr);
3767 mep_init_cumulative_args (CUMULATIVE_ARGS *pcum, tree fntype,
3768 rtx libname ATTRIBUTE_UNUSED,
3769 tree fndecl ATTRIBUTE_UNUSED)
3773 if (fntype && lookup_attribute ("vliw", TYPE_ATTRIBUTES (fntype)))
3780 mep_function_arg (CUMULATIVE_ARGS cum, enum machine_mode mode,
3781 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3783 /* VOIDmode is a signal for the backend to pass data to the call
3784 expander via the second operand to the call pattern. We use
3785 this to determine whether to use "jsr" or "jsrv". */
3786 if (mode == VOIDmode)
3787 return GEN_INT (cum.vliw);
3789 /* If we havn't run out of argument registers, return the next. */
3792 if (type && TARGET_IVC2 && VECTOR_TYPE_P (type))
3793 return gen_rtx_REG (mode, cum.nregs + 49);
3795 return gen_rtx_REG (mode, cum.nregs + 1);
3798 /* Otherwise the argument goes on the stack. */
3803 mep_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
3804 enum machine_mode mode,
3806 bool named ATTRIBUTE_UNUSED)
3808 int size = bytesize (type, mode);
3809 if (type && TARGET_IVC2 && cum->nregs < 4 && VECTOR_TYPE_P (type))
3810 return size <= 0 || size > 8;
3811 return size <= 0 || size > 4;
3815 mep_arg_advance (CUMULATIVE_ARGS *pcum,
3816 enum machine_mode mode ATTRIBUTE_UNUSED,
3817 tree type ATTRIBUTE_UNUSED, int named ATTRIBUTE_UNUSED)
3823 mep_return_in_memory (const_tree type, const_tree decl ATTRIBUTE_UNUSED)
3825 int size = bytesize (type, BLKmode);
3826 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3827 return size >= 0 && size <= 8 ? 0 : 1;
3828 return size >= 0 && size <= 4 ? 0 : 1;
3832 mep_narrow_volatile_bitfield (void)
3838 /* Implement FUNCTION_VALUE. All values are returned in $0. */
3841 mep_function_value (tree type, tree func ATTRIBUTE_UNUSED)
3843 if (TARGET_IVC2 && VECTOR_TYPE_P (type))
3844 return gen_rtx_REG (TYPE_MODE (type), 48);
3845 return gen_rtx_REG (TYPE_MODE (type), RETURN_VALUE_REGNUM);
3848 /* Implement LIBCALL_VALUE, using the same rules as mep_function_value. */
3851 mep_libcall_value (enum machine_mode mode)
3853 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
3856 /* Handle pipeline hazards. */
3858 typedef enum { op_none, op_stc, op_fsft, op_ret } op_num;
3859 static const char *opnames[] = { "", "stc", "fsft", "ret" };
3861 static int prev_opcode = 0;
3863 /* This isn't as optimal as it could be, because we don't know what
3864 control register the STC opcode is storing in. We only need to add
3865 the nop if it's the relevent register, but we add it for irrelevent
3869 mep_asm_output_opcode (FILE *file, const char *ptr)
3871 int this_opcode = op_none;
3872 const char *hazard = 0;
3877 if (strncmp (ptr, "fsft", 4) == 0 && !ISGRAPH (ptr[4]))
3878 this_opcode = op_fsft;
3881 if (strncmp (ptr, "ret", 3) == 0 && !ISGRAPH (ptr[3]))
3882 this_opcode = op_ret;
3885 if (strncmp (ptr, "stc", 3) == 0 && !ISGRAPH (ptr[3]))
3886 this_opcode = op_stc;
3890 if (prev_opcode == op_stc && this_opcode == op_fsft)
3892 if (prev_opcode == op_stc && this_opcode == op_ret)
3896 fprintf(file, "%s\t# %s-%s hazard\n\t",
3897 hazard, opnames[prev_opcode], opnames[this_opcode]);
3899 prev_opcode = this_opcode;
3902 /* Handle attributes. */
3905 mep_validate_based_tiny (tree *node, tree name, tree args,
3906 int flags ATTRIBUTE_UNUSED, bool *no_add)
3908 if (TREE_CODE (*node) != VAR_DECL
3909 && TREE_CODE (*node) != POINTER_TYPE
3910 && TREE_CODE (*node) != TYPE_DECL)
3912 warning (0, "%qE attribute only applies to variables", name);
3915 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3917 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3919 warning (0, "address region attributes not allowed with auto storage class");
3922 /* Ignore storage attribute of pointed to variable: char __far * x; */
3923 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3925 warning (0, "address region attributes on pointed-to types ignored");
3934 mep_multiple_address_regions (tree list, bool check_section_attr)
3937 int count_sections = 0;
3938 int section_attr_count = 0;
3940 for (a = list; a; a = TREE_CHAIN (a))
3942 if (is_attribute_p ("based", TREE_PURPOSE (a))
3943 || is_attribute_p ("tiny", TREE_PURPOSE (a))
3944 || is_attribute_p ("near", TREE_PURPOSE (a))
3945 || is_attribute_p ("far", TREE_PURPOSE (a))
3946 || is_attribute_p ("io", TREE_PURPOSE (a)))
3948 if (check_section_attr)
3949 section_attr_count += is_attribute_p ("section", TREE_PURPOSE (a));
3952 if (check_section_attr)
3953 return section_attr_count;
3955 return count_sections;
3958 #define MEP_ATTRIBUTES(decl) \
3959 (TYPE_P (decl)) ? TYPE_ATTRIBUTES (decl) \
3960 : DECL_ATTRIBUTES (decl) \
3961 ? (DECL_ATTRIBUTES (decl)) \
3962 : TYPE_ATTRIBUTES (TREE_TYPE (decl))
3965 mep_validate_near_far (tree *node, tree name, tree args,
3966 int flags ATTRIBUTE_UNUSED, bool *no_add)
3968 if (TREE_CODE (*node) != VAR_DECL
3969 && TREE_CODE (*node) != FUNCTION_DECL
3970 && TREE_CODE (*node) != METHOD_TYPE
3971 && TREE_CODE (*node) != POINTER_TYPE
3972 && TREE_CODE (*node) != TYPE_DECL)
3974 warning (0, "%qE attribute only applies to variables and functions",
3978 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
3980 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
3982 warning (0, "address region attributes not allowed with auto storage class");
3985 /* Ignore storage attribute of pointed to variable: char __far * x; */
3986 if (TREE_TYPE (*node) && TREE_CODE (TREE_TYPE (*node)) == POINTER_TYPE)
3988 warning (0, "address region attributes on pointed-to types ignored");
3992 else if (mep_multiple_address_regions (MEP_ATTRIBUTES (*node), false) > 0)
3994 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
3995 name, DECL_NAME (*node), DECL_SOURCE_LINE (*node));
3996 DECL_ATTRIBUTES (*node) = NULL_TREE;
4002 mep_validate_disinterrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4003 int flags ATTRIBUTE_UNUSED, bool *no_add)
4005 if (TREE_CODE (*node) != FUNCTION_DECL
4006 && TREE_CODE (*node) != METHOD_TYPE)
4008 warning (0, "%qE attribute only applies to functions", name);
4015 mep_validate_interrupt (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4016 int flags ATTRIBUTE_UNUSED, bool *no_add)
4020 if (TREE_CODE (*node) != FUNCTION_DECL)
4022 warning (0, "%qE attribute only applies to functions", name);
4027 if (DECL_DECLARED_INLINE_P (*node))
4028 error ("cannot inline interrupt function %qE", DECL_NAME (*node));
4029 DECL_UNINLINABLE (*node) = 1;
4031 function_type = TREE_TYPE (*node);
4033 if (TREE_TYPE (function_type) != void_type_node)
4034 error ("interrupt function must have return type of void");
4036 if (TYPE_ARG_TYPES (function_type)
4037 && (TREE_VALUE (TYPE_ARG_TYPES (function_type)) != void_type_node
4038 || TREE_CHAIN (TYPE_ARG_TYPES (function_type)) != NULL_TREE))
4039 error ("interrupt function must have no arguments");
4045 mep_validate_io_cb (tree *node, tree name, tree args,
4046 int flags ATTRIBUTE_UNUSED, bool *no_add)
4048 if (TREE_CODE (*node) != VAR_DECL)
4050 warning (0, "%qE attribute only applies to variables", name);
4054 if (args != NULL_TREE)
4056 if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
4057 TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
4058 if (TREE_CODE (TREE_VALUE (args)) != INTEGER_CST)
4060 warning (0, "%qE attribute allows only an integer constant argument",
4066 if (*no_add == false && !TARGET_IO_NO_VOLATILE)
4067 TREE_THIS_VOLATILE (*node) = 1;
4073 mep_validate_vliw (tree *node, tree name, tree args ATTRIBUTE_UNUSED,
4074 int flags ATTRIBUTE_UNUSED, bool *no_add)
4076 if (TREE_CODE (*node) != FUNCTION_TYPE
4077 && TREE_CODE (*node) != FUNCTION_DECL
4078 && TREE_CODE (*node) != METHOD_TYPE
4079 && TREE_CODE (*node) != FIELD_DECL
4080 && TREE_CODE (*node) != TYPE_DECL)
4082 static int gave_pointer_note = 0;
4083 static int gave_array_note = 0;
4084 static const char * given_type = NULL;
4086 given_type = tree_code_name[TREE_CODE (*node)];
4087 if (TREE_CODE (*node) == POINTER_TYPE)
4088 given_type = "pointers";
4089 if (TREE_CODE (*node) == ARRAY_TYPE)
4090 given_type = "arrays";
4093 warning (0, "%qE attribute only applies to functions, not %s",
4096 warning (0, "%qE attribute only applies to functions",
4100 if (TREE_CODE (*node) == POINTER_TYPE
4101 && !gave_pointer_note)
4103 inform (input_location, "To describe a pointer to a VLIW function, use syntax like this:");
4104 inform (input_location, " typedef int (__vliw *vfuncptr) ();");
4105 gave_pointer_note = 1;
4108 if (TREE_CODE (*node) == ARRAY_TYPE
4109 && !gave_array_note)
4111 inform (input_location, "To describe an array of VLIW function pointers, use syntax like this:");
4112 inform (input_location, " typedef int (__vliw *vfuncptr[]) ();");
4113 gave_array_note = 1;
4117 error ("VLIW functions are not allowed without a VLIW configuration");
4121 static const struct attribute_spec mep_attribute_table[11] =
4123 /* name min max decl type func handler */
4124 { "based", 0, 0, false, false, false, mep_validate_based_tiny },
4125 { "tiny", 0, 0, false, false, false, mep_validate_based_tiny },
4126 { "near", 0, 0, false, false, false, mep_validate_near_far },
4127 { "far", 0, 0, false, false, false, mep_validate_near_far },
4128 { "disinterrupt", 0, 0, false, false, false, mep_validate_disinterrupt },
4129 { "interrupt", 0, 0, false, false, false, mep_validate_interrupt },
4130 { "io", 0, 1, false, false, false, mep_validate_io_cb },
4131 { "cb", 0, 1, false, false, false, mep_validate_io_cb },
4132 { "vliw", 0, 0, false, true, false, mep_validate_vliw },
4133 { NULL, 0, 0, false, false, false, NULL }
4137 mep_function_attribute_inlinable_p (const_tree callee)
4139 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee));
4140 if (!attrs) attrs = DECL_ATTRIBUTES (callee);
4141 return (lookup_attribute ("disinterrupt", attrs) == 0
4142 && lookup_attribute ("interrupt", attrs) == 0);
4146 mep_can_inline_p (tree caller, tree callee)
4148 if (TREE_CODE (callee) == ADDR_EXPR)
4149 callee = TREE_OPERAND (callee, 0);
4151 if (!mep_vliw_function_p (caller)
4152 && mep_vliw_function_p (callee))
4160 #define FUNC_DISINTERRUPT 2
4163 struct GTY(()) pragma_entry {
4166 const char *funcname;
4168 typedef struct pragma_entry pragma_entry;
4170 /* Hash table of farcall-tagged sections. */
4171 static GTY((param_is (pragma_entry))) htab_t pragma_htab;
4174 pragma_entry_eq (const void *p1, const void *p2)
4176 const pragma_entry *old = (const pragma_entry *) p1;
4177 const char *new_name = (const char *) p2;
4179 return strcmp (old->funcname, new_name) == 0;
4183 pragma_entry_hash (const void *p)
4185 const pragma_entry *old = (const pragma_entry *) p;
4186 return htab_hash_string (old->funcname);
4190 mep_note_pragma_flag (const char *funcname, int flag)
4192 pragma_entry **slot;
4195 pragma_htab = htab_create_ggc (31, pragma_entry_hash,
4196 pragma_entry_eq, NULL);
4198 slot = (pragma_entry **)
4199 htab_find_slot_with_hash (pragma_htab, funcname,
4200 htab_hash_string (funcname), INSERT);
4204 *slot = GGC_NEW (pragma_entry);
4207 (*slot)->funcname = ggc_strdup (funcname);
4209 (*slot)->flag |= flag;
4213 mep_lookup_pragma_flag (const char *funcname, int flag)
4215 pragma_entry **slot;
4220 if (funcname[0] == '@' && funcname[2] == '.')
4223 slot = (pragma_entry **)
4224 htab_find_slot_with_hash (pragma_htab, funcname,
4225 htab_hash_string (funcname), NO_INSERT);
4226 if (slot && *slot && ((*slot)->flag & flag))
4228 (*slot)->used |= flag;
4235 mep_lookup_pragma_call (const char *funcname)
4237 return mep_lookup_pragma_flag (funcname, FUNC_CALL);
4241 mep_note_pragma_call (const char *funcname)
4243 mep_note_pragma_flag (funcname, FUNC_CALL);
4247 mep_lookup_pragma_disinterrupt (const char *funcname)
4249 return mep_lookup_pragma_flag (funcname, FUNC_DISINTERRUPT);
4253 mep_note_pragma_disinterrupt (const char *funcname)
4255 mep_note_pragma_flag (funcname, FUNC_DISINTERRUPT);
4259 note_unused_pragma_disinterrupt (void **slot, void *data ATTRIBUTE_UNUSED)
4261 const pragma_entry *d = (const pragma_entry *)(*slot);
4263 if ((d->flag & FUNC_DISINTERRUPT)
4264 && !(d->used & FUNC_DISINTERRUPT))
4265 warning (0, "\"#pragma disinterrupt %s\" not used", d->funcname);
4270 mep_file_cleanups (void)
4273 htab_traverse (pragma_htab, note_unused_pragma_disinterrupt, NULL);
4278 mep_attrlist_to_encoding (tree list, tree decl)
4280 if (mep_multiple_address_regions (list, false) > 1)
4282 warning (0, "duplicate address region attribute %qE in declaration of %qE on line %d",
4283 TREE_PURPOSE (TREE_CHAIN (list)),
4285 DECL_SOURCE_LINE (decl));
4286 TREE_CHAIN (list) = NULL_TREE;
4291 if (is_attribute_p ("based", TREE_PURPOSE (list)))
4293 if (is_attribute_p ("tiny", TREE_PURPOSE (list)))
4295 if (is_attribute_p ("near", TREE_PURPOSE (list)))
4297 if (is_attribute_p ("far", TREE_PURPOSE (list)))
4299 if (is_attribute_p ("io", TREE_PURPOSE (list)))
4301 if (TREE_VALUE (list)
4302 && TREE_VALUE (TREE_VALUE (list))
4303 && TREE_CODE (TREE_VALUE (TREE_VALUE (list))) == INTEGER_CST)
4305 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(list)));
4307 && location <= 0x1000000)
4312 if (is_attribute_p ("cb", TREE_PURPOSE (list)))
4314 list = TREE_CHAIN (list);
4317 && TREE_CODE (decl) == FUNCTION_DECL
4318 && DECL_SECTION_NAME (decl) == 0)
4324 mep_comp_type_attributes (const_tree t1, const_tree t2)
4328 vliw1 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t1)) != 0);
4329 vliw2 = (lookup_attribute ("vliw", TYPE_ATTRIBUTES (t2)) != 0);
4338 mep_insert_attributes (tree decl, tree *attributes)
4341 const char *secname = 0;
4342 tree attrib, attrlist;
4345 if (TREE_CODE (decl) == FUNCTION_DECL)
4347 const char *funcname = IDENTIFIER_POINTER (DECL_NAME (decl));
4349 if (mep_lookup_pragma_disinterrupt (funcname))
4351 attrib = build_tree_list (get_identifier ("disinterrupt"), NULL_TREE);
4352 *attributes = chainon (*attributes, attrib);
4356 if (TREE_CODE (decl) != VAR_DECL
4357 || ! (TREE_PUBLIC (decl) || TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
4360 if (TREE_READONLY (decl) && TARGET_DC)
4361 /* -mdc means that const variables default to the near section,
4362 regardless of the size cutoff. */
4365 /* User specified an attribute, so override the default.
4366 Ignore storage attribute of pointed to variable. char __far * x; */
4367 if (! (TREE_TYPE (decl) && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE))
4369 if (TYPE_P (decl) && TYPE_ATTRIBUTES (decl) && *attributes)
4370 TYPE_ATTRIBUTES (decl) = NULL_TREE;
4371 else if (DECL_ATTRIBUTES (decl) && *attributes)
4372 DECL_ATTRIBUTES (decl) = NULL_TREE;
4375 attrlist = *attributes ? *attributes : DECL_ATTRIBUTES (decl);
4376 encoding = mep_attrlist_to_encoding (attrlist, decl);
4377 if (!encoding && TYPE_P (TREE_TYPE (decl)))
4379 attrlist = TYPE_ATTRIBUTES (TREE_TYPE (decl));
4380 encoding = mep_attrlist_to_encoding (attrlist, decl);
4384 /* This means that the declaration has a specific section
4385 attribute, so we should not apply the default rules. */
4387 if (encoding == 'i' || encoding == 'I')
4389 tree attr = lookup_attribute ("io", attrlist);
4391 && TREE_VALUE (attr)
4392 && TREE_VALUE (TREE_VALUE(attr)))
4394 int location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4395 static tree previous_value = 0;
4396 static int previous_location = 0;
4397 static tree previous_name = 0;
4399 /* We take advantage of the fact that gcc will reuse the
4400 same tree pointer when applying an attribute to a
4401 list of decls, but produce a new tree for attributes
4402 on separate source lines, even when they're textually
4403 identical. This is the behavior we want. */
4404 if (TREE_VALUE (attr) == previous_value
4405 && location == previous_location)
4407 warning(0, "__io address 0x%x is the same for %qE and %qE",
4408 location, previous_name, DECL_NAME (decl));
4410 previous_name = DECL_NAME (decl);
4411 previous_location = location;
4412 previous_value = TREE_VALUE (attr);
4419 /* Declarations of arrays can change size. Don't trust them. */
4420 if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
4423 size = int_size_in_bytes (TREE_TYPE (decl));
4425 if (TARGET_RAND_TPGP && size <= 4 && size > 0)
4427 if (TREE_PUBLIC (decl)
4428 || DECL_EXTERNAL (decl)
4429 || TREE_STATIC (decl))
4431 const char *name = IDENTIFIER_POINTER (DECL_NAME (decl));
4455 if (size <= mep_based_cutoff && size > 0)
4457 else if (size <= mep_tiny_cutoff && size > 0)
4463 if (mep_const_section && TREE_READONLY (decl))
4465 if (strcmp (mep_const_section, "tiny") == 0)
4467 else if (strcmp (mep_const_section, "near") == 0)
4469 else if (strcmp (mep_const_section, "far") == 0)
4476 if (!mep_multiple_address_regions (*attributes, true)
4477 && !mep_multiple_address_regions (DECL_ATTRIBUTES (decl), false))
4479 attrib = build_tree_list (get_identifier (secname), NULL_TREE);
4481 /* Chain the attribute directly onto the variable's DECL_ATTRIBUTES
4482 in order to avoid the POINTER_TYPE bypasses in mep_validate_near_far
4483 and mep_validate_based_tiny. */
4484 DECL_ATTRIBUTES (decl) = chainon (DECL_ATTRIBUTES (decl), attrib);
4489 mep_encode_section_info (tree decl, rtx rtl, int first)
4492 const char *oldname;
4493 const char *secname;
4499 tree mep_attributes;
4504 if (TREE_CODE (decl) != VAR_DECL
4505 && TREE_CODE (decl) != FUNCTION_DECL)
4508 rtlname = XEXP (rtl, 0);
4509 if (GET_CODE (rtlname) == SYMBOL_REF)
4510 oldname = XSTR (rtlname, 0);
4511 else if (GET_CODE (rtlname) == MEM
4512 && GET_CODE (XEXP (rtlname, 0)) == SYMBOL_REF)
4513 oldname = XSTR (XEXP (rtlname, 0), 0);
4517 type = TREE_TYPE (decl);
4518 if (type == error_mark_node)
4520 mep_attributes = MEP_ATTRIBUTES (decl);
4522 encoding = mep_attrlist_to_encoding (mep_attributes, decl);
4526 newname = (char *) alloca (strlen (oldname) + 4);
4527 sprintf (newname, "@%c.%s", encoding, oldname);
4528 idp = get_identifier (newname);
4530 gen_rtx_SYMBOL_REF (Pmode, IDENTIFIER_POINTER (idp));
4543 maxsize = 0x1000000;
4551 if (maxsize && int_size_in_bytes (TREE_TYPE (decl)) > maxsize)
4553 warning (0, "variable %s (%ld bytes) is too large for the %s section (%d bytes)",
4555 (long) int_size_in_bytes (TREE_TYPE (decl)),
4563 mep_strip_name_encoding (const char *sym)
4569 else if (*sym == '@' && sym[2] == '.')
4577 mep_select_section (tree decl, int reloc ATTRIBUTE_UNUSED,
4578 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
4583 switch (TREE_CODE (decl))
4586 if (!TREE_READONLY (decl)
4587 || TREE_SIDE_EFFECTS (decl)
4588 || !DECL_INITIAL (decl)
4589 || (DECL_INITIAL (decl) != error_mark_node
4590 && !TREE_CONSTANT (DECL_INITIAL (decl))))
4594 if (! TREE_CONSTANT (decl))
4602 if (TREE_CODE (decl) == FUNCTION_DECL)
4604 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4606 if (name[0] == '@' && name[2] == '.')
4611 if (flag_function_sections || DECL_ONE_ONLY (decl))
4612 mep_unique_section (decl, 0);
4613 else if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4615 if (encoding == 'f')
4616 return vftext_section;
4618 return vtext_section;
4620 else if (encoding == 'f')
4621 return ftext_section;
4623 return text_section;
4626 if (TREE_CODE (decl) == VAR_DECL)
4628 const char *name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4630 if (name[0] == '@' && name[2] == '.')
4634 return based_section;
4638 return srodata_section;
4639 if (DECL_INITIAL (decl))
4640 return sdata_section;
4641 return tinybss_section;
4645 return frodata_section;
4650 error_at (DECL_SOURCE_LOCATION (decl),
4651 "variable %D of type %<io%> must be uninitialized", decl);
4652 return data_section;
4655 error_at (DECL_SOURCE_LOCATION (decl),
4656 "variable %D of type %<cb%> must be uninitialized", decl);
4657 return data_section;
4662 return readonly_data_section;
4664 return data_section;
4668 mep_unique_section (tree decl, int reloc)
4670 static const char *prefixes[][2] =
4672 { ".text.", ".gnu.linkonce.t." },
4673 { ".rodata.", ".gnu.linkonce.r." },
4674 { ".data.", ".gnu.linkonce.d." },
4675 { ".based.", ".gnu.linkonce.based." },
4676 { ".sdata.", ".gnu.linkonce.s." },
4677 { ".far.", ".gnu.linkonce.far." },
4678 { ".ftext.", ".gnu.linkonce.ft." },
4679 { ".frodata.", ".gnu.linkonce.frd." },
4680 { ".srodata.", ".gnu.linkonce.srd." },
4681 { ".vtext.", ".gnu.linkonce.v." },
4682 { ".vftext.", ".gnu.linkonce.vf." }
4684 int sec = 2; /* .data */
4686 const char *name, *prefix;
4689 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4690 if (DECL_RTL (decl))
4691 name = XSTR (XEXP (DECL_RTL (decl), 0), 0);
4693 if (TREE_CODE (decl) == FUNCTION_DECL)
4695 if (lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4696 sec = 9; /* .vtext */
4698 sec = 0; /* .text */
4700 else if (decl_readonly_section (decl, reloc))
4701 sec = 1; /* .rodata */
4703 if (name[0] == '@' && name[2] == '.')
4708 sec = 3; /* .based */
4712 sec = 8; /* .srodata */
4714 sec = 4; /* .sdata */
4718 sec = 6; /* .ftext */
4720 sec = 10; /* .vftext */
4722 sec = 7; /* .frodata */
4724 sec = 5; /* .far. */
4730 prefix = prefixes[sec][DECL_ONE_ONLY(decl)];
4731 len = strlen (name) + strlen (prefix);
4732 string = (char *) alloca (len + 1);
4734 sprintf (string, "%s%s", prefix, name);
4736 DECL_SECTION_NAME (decl) = build_string (len, string);
4739 /* Given a decl, a section name, and whether the decl initializer
4740 has relocs, choose attributes for the section. */
4742 #define SECTION_MEP_VLIW SECTION_MACH_DEP
4745 mep_section_type_flags (tree decl, const char *name, int reloc)
4747 unsigned int flags = default_section_type_flags (decl, name, reloc);
4749 if (decl && TREE_CODE (decl) == FUNCTION_DECL
4750 && lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))))
4751 flags |= SECTION_MEP_VLIW;
4756 /* Switch to an arbitrary section NAME with attributes as specified
4757 by FLAGS. ALIGN specifies any known alignment requirements for
4758 the section; 0 if the default should be used.
4760 Differs from the standard ELF version only in support of VLIW mode. */
4763 mep_asm_named_section (const char *name, unsigned int flags, tree decl ATTRIBUTE_UNUSED)
4765 char flagchars[8], *f = flagchars;
4768 if (!(flags & SECTION_DEBUG))
4770 if (flags & SECTION_WRITE)
4772 if (flags & SECTION_CODE)
4774 if (flags & SECTION_SMALL)
4776 if (flags & SECTION_MEP_VLIW)
4780 if (flags & SECTION_BSS)
4785 fprintf (asm_out_file, "\t.section\t%s,\"%s\",@%s\n",
4786 name, flagchars, type);
4788 if (flags & SECTION_CODE)
4789 fputs ((flags & SECTION_MEP_VLIW ? "\t.vliw\n" : "\t.core\n"),
4794 mep_output_aligned_common (FILE *stream, tree decl, const char *name,
4795 int size, int align, int global)
4797 /* We intentionally don't use mep_section_tag() here. */
4799 && (name[1] == 'i' || name[1] == 'I' || name[1] == 'c')
4803 tree attr = lookup_attribute ((name[1] == 'c' ? "cb" : "io"),
4804 DECL_ATTRIBUTES (decl));
4806 && TREE_VALUE (attr)
4807 && TREE_VALUE (TREE_VALUE(attr)))
4808 location = TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE(attr)));
4813 fprintf (stream, "\t.globl\t");
4814 assemble_name (stream, name);
4815 fprintf (stream, "\n");
4817 assemble_name (stream, name);
4818 fprintf (stream, " = %d\n", location);
4821 if (name[0] == '@' && name[2] == '.')
4823 const char *sec = 0;
4827 switch_to_section (based_section);
4831 switch_to_section (tinybss_section);
4835 switch_to_section (farbss_section);
4844 while (align > BITS_PER_UNIT)
4849 name2 = TARGET_STRIP_NAME_ENCODING (name);
4851 fprintf (stream, "\t.globl\t%s\n", name2);
4852 fprintf (stream, "\t.p2align %d\n", p2align);
4853 fprintf (stream, "\t.type\t%s,@object\n", name2);
4854 fprintf (stream, "\t.size\t%s,%d\n", name2, size);
4855 fprintf (stream, "%s:\n\t.zero\t%d\n", name2, size);
4862 fprintf (stream, "\t.local\t");
4863 assemble_name (stream, name);
4864 fprintf (stream, "\n");
4866 fprintf (stream, "\t.comm\t");
4867 assemble_name (stream, name);
4868 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
4874 mep_init_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
4876 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__mep_trampoline_helper"),
4877 LCT_NORMAL, VOIDmode, 3,
4880 static_chain, Pmode);
4883 /* Experimental Reorg. */
4886 mep_mentioned_p (rtx in,
4887 rtx reg, /* NULL for mem */
4888 int modes_too) /* if nonzero, modes must match also. */
4896 if (reg && GET_CODE (reg) != REG)
4899 if (GET_CODE (in) == LABEL_REF)
4902 code = GET_CODE (in);
4908 return mep_mentioned_p (XEXP (in, 0), reg, modes_too);
4914 if (modes_too && (GET_MODE (in) != GET_MODE (reg)))
4916 return (REGNO (in) == REGNO (reg));
4929 /* Set's source should be read-only. */
4930 if (code == SET && !reg)
4931 return mep_mentioned_p (SET_DEST (in), reg, modes_too);
4933 fmt = GET_RTX_FORMAT (code);
4935 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4940 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
4941 if (mep_mentioned_p (XVECEXP (in, i, j), reg, modes_too))
4944 else if (fmt[i] == 'e'
4945 && mep_mentioned_p (XEXP (in, i), reg, modes_too))
4951 #define EXPERIMENTAL_REGMOVE_REORG 1
4953 #if EXPERIMENTAL_REGMOVE_REORG
4956 mep_compatible_reg_class (int r1, int r2)
4958 if (GR_REGNO_P (r1) && GR_REGNO_P (r2))
4960 if (CR_REGNO_P (r1) && CR_REGNO_P (r2))
4966 mep_reorg_regmove (rtx insns)
4968 rtx insn, next, pat, follow, *where;
4969 int count = 0, done = 0, replace, before = 0;
4972 for (insn = insns; insn; insn = NEXT_INSN (insn))
4973 if (GET_CODE (insn) == INSN)
4976 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
4977 set that uses the r2 and r2 dies there. We replace r2 with r1
4978 and see if it's still a valid insn. If so, delete the first set.
4979 Copied from reorg.c. */
4984 for (insn = insns; insn; insn = next)
4986 next = NEXT_INSN (insn);
4987 if (GET_CODE (insn) != INSN)
4989 pat = PATTERN (insn);
4993 if (GET_CODE (pat) == SET
4994 && GET_CODE (SET_SRC (pat)) == REG
4995 && GET_CODE (SET_DEST (pat)) == REG
4996 && find_regno_note (insn, REG_DEAD, REGNO (SET_SRC (pat)))
4997 && mep_compatible_reg_class (REGNO (SET_SRC (pat)), REGNO (SET_DEST (pat))))
4999 follow = next_nonnote_insn (insn);
5001 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
5003 while (follow && GET_CODE (follow) == INSN
5004 && GET_CODE (PATTERN (follow)) == SET
5005 && !dead_or_set_p (follow, SET_SRC (pat))
5006 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
5007 && !mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 0))
5010 fprintf (dump_file, "\tskipping %d\n", INSN_UID (follow));
5011 follow = next_nonnote_insn (follow);
5015 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
5016 if (follow && GET_CODE (follow) == INSN
5017 && GET_CODE (PATTERN (follow)) == SET
5018 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
5020 if (GET_CODE (SET_DEST (PATTERN (follow))) == REG)
5022 if (mep_mentioned_p (SET_SRC (PATTERN (follow)), SET_DEST (pat), 1))
5025 where = & SET_SRC (PATTERN (follow));
5028 else if (GET_CODE (SET_DEST (PATTERN (follow))) == MEM)
5030 if (mep_mentioned_p (PATTERN (follow), SET_DEST (pat), 1))
5033 where = & PATTERN (follow);
5039 /* If so, follow is the corresponding insn */
5046 fprintf (dump_file, "----- Candidate for superfluous move deletion:\n\n");
5047 for (x = insn; x ;x = NEXT_INSN (x))
5049 print_rtl_single (dump_file, x);
5052 fprintf (dump_file, "\n");
5056 if (validate_replace_rtx_subexp (SET_DEST (pat), SET_SRC (pat),
5060 next = delete_insn (insn);
5063 fprintf (dump_file, "\n----- Success! new insn:\n\n");
5064 print_rtl_single (dump_file, follow);
5074 fprintf (dump_file, "\n%d insn%s deleted out of %d.\n\n", count, count == 1 ? "" : "s", before);
5075 fprintf (dump_file, "=====\n");
5081 /* Figure out where to put LABEL, which is the label for a repeat loop.
5082 If INCLUDING, LAST_INSN is the last instruction in the loop, otherwise
5083 the loop ends just before LAST_INSN. If SHARED, insns other than the
5084 "repeat" might use LABEL to jump to the loop's continuation point.
5086 Return the last instruction in the adjusted loop. */
5089 mep_insert_repeat_label_last (rtx last_insn, rtx label, bool including,
5093 int count = 0, code, icode;
5096 fprintf (dump_file, "considering end of repeat loop at insn %d\n",
5097 INSN_UID (last_insn));
5099 /* Set PREV to the last insn in the loop. */
5102 prev = PREV_INSN (prev);
5104 /* Set NEXT to the next insn after the repeat label. */
5109 code = GET_CODE (prev);
5110 if (code == CALL_INSN || code == CODE_LABEL || code == BARRIER)
5115 if (GET_CODE (PATTERN (prev)) == SEQUENCE)
5116 prev = XVECEXP (PATTERN (prev), 0, 1);
5118 /* Other insns that should not be in the last two opcodes. */
5119 icode = recog_memoized (prev);
5121 || icode == CODE_FOR_repeat
5122 || icode == CODE_FOR_erepeat
5123 || get_attr_may_trap (prev) == MAY_TRAP_YES)
5126 /* That leaves JUMP_INSN and INSN. It will have BImode if it
5127 is the second instruction in a VLIW bundle. In that case,
5128 loop again: if the first instruction also satisfies the
5129 conditions above then we will reach here again and put
5130 both of them into the repeat epilogue. Otherwise both
5131 should remain outside. */
5132 if (GET_MODE (prev) != BImode)
5137 print_rtl_single (dump_file, next);
5142 prev = PREV_INSN (prev);
5145 /* See if we're adding the label immediately after the repeat insn.
5146 If so, we need to separate them with a nop. */
5147 prev = prev_real_insn (next);
5149 switch (recog_memoized (prev))
5151 case CODE_FOR_repeat:
5152 case CODE_FOR_erepeat:
5154 fprintf (dump_file, "Adding nop inside loop\n");
5155 emit_insn_before (gen_nop (), next);
5162 /* Insert the label. */
5163 emit_label_before (label, next);
5165 /* Insert the nops. */
5166 if (dump_file && count < 2)
5167 fprintf (dump_file, "Adding %d nop%s\n\n",
5168 2 - count, count == 1 ? "" : "s");
5170 for (; count < 2; count++)
5172 last_insn = emit_insn_after (gen_nop (), last_insn);
5174 emit_insn_before (gen_nop (), last_insn);
5181 mep_emit_doloop (rtx *operands, int is_end)
5185 if (cfun->machine->doloop_tags == 0
5186 || cfun->machine->doloop_tag_from_end == is_end)
5188 cfun->machine->doloop_tags++;
5189 cfun->machine->doloop_tag_from_end = is_end;
5192 tag = GEN_INT (cfun->machine->doloop_tags - 1);
5194 emit_jump_insn (gen_doloop_end_internal (operands[0], operands[4], tag));
5196 emit_insn (gen_doloop_begin_internal (operands[0], operands[0], tag));
5200 /* Code for converting doloop_begins and doloop_ends into valid
5201 MeP instructions. A doloop_begin is just a placeholder:
5203 $count = unspec ($count)
5205 where $count is initially the number of iterations - 1.
5206 doloop_end has the form:
5208 if ($count-- == 0) goto label
5210 The counter variable is private to the doloop insns, nothing else
5211 relies on its value.
5213 There are three cases, in decreasing order of preference:
5215 1. A loop has exactly one doloop_begin and one doloop_end.
5216 The doloop_end branches to the first instruction after
5219 In this case we can replace the doloop_begin with a repeat
5220 instruction and remove the doloop_end. I.e.:
5222 $count1 = unspec ($count1)
5227 if ($count2-- == 0) goto label
5231 repeat $count1,repeat_label
5239 2. As for (1), except there are several doloop_ends. One of them
5240 (call it X) falls through to a label L. All the others fall
5241 through to branches to L.
5243 In this case, we remove X and replace the other doloop_ends
5244 with branches to the repeat label. For example:
5246 $count1 = unspec ($count1)
5249 if ($count2-- == 0) goto label
5252 if ($count3-- == 0) goto label
5257 repeat $count1,repeat_label
5268 3. The fallback case. Replace doloop_begins with:
5272 Replace doloop_ends with the equivalent of:
5275 if ($count == 0) goto label
5277 Note that this might need a scratch register if $count
5278 is stored in memory. */
5280 /* A structure describing one doloop_begin. */
5281 struct mep_doloop_begin {
5282 /* The next doloop_begin with the same tag. */
5283 struct mep_doloop_begin *next;
5285 /* The instruction itself. */
5288 /* The initial counter value. This is known to be a general register. */
5292 /* A structure describing a doloop_end. */
5293 struct mep_doloop_end {
5294 /* The next doloop_end with the same loop tag. */
5295 struct mep_doloop_end *next;
5297 /* The instruction itself. */
5300 /* The first instruction after INSN when the branch isn't taken. */
5303 /* The location of the counter value. Since doloop_end_internal is a
5304 jump instruction, it has to allow the counter to be stored anywhere
5305 (any non-fixed register or memory location). */
5308 /* The target label (the place where the insn branches when the counter
5312 /* A scratch register. Only available when COUNTER isn't stored
5313 in a general register. */
5318 /* One do-while loop. */
5320 /* All the doloop_begins for this loop (in no particular order). */
5321 struct mep_doloop_begin *begin;
5323 /* All the doloop_ends. When there is more than one, arrange things
5324 so that the first one is the most likely to be X in case (2) above. */
5325 struct mep_doloop_end *end;
5329 /* Return true if LOOP can be converted into repeat/repeat_end form
5330 (that is, if it matches cases (1) or (2) above). */
5333 mep_repeat_loop_p (struct mep_doloop *loop)
5335 struct mep_doloop_end *end;
5338 /* There must be exactly one doloop_begin and at least one doloop_end. */
5339 if (loop->begin == 0 || loop->end == 0 || loop->begin->next != 0)
5342 /* The first doloop_end (X) must branch back to the insn after
5343 the doloop_begin. */
5344 if (prev_real_insn (loop->end->label) != loop->begin->insn)
5347 /* All the other doloop_ends must branch to the same place as X.
5348 When the branch isn't taken, they must jump to the instruction
5350 fallthrough = loop->end->fallthrough;
5351 for (end = loop->end->next; end != 0; end = end->next)
5352 if (end->label != loop->end->label
5353 || !simplejump_p (end->fallthrough)
5354 || next_real_insn (JUMP_LABEL (end->fallthrough)) != fallthrough)
5361 /* The main repeat reorg function. See comment above for details. */
5364 mep_reorg_repeat (rtx insns)
5367 struct mep_doloop *loops, *loop;
5368 struct mep_doloop_begin *begin;
5369 struct mep_doloop_end *end;
5371 /* Quick exit if we haven't created any loops. */
5372 if (cfun->machine->doloop_tags == 0)
5375 /* Create an array of mep_doloop structures. */
5376 loops = (struct mep_doloop *) alloca (sizeof (loops[0]) * cfun->machine->doloop_tags);
5377 memset (loops, 0, sizeof (loops[0]) * cfun->machine->doloop_tags);
5379 /* Search the function for do-while insns and group them by loop tag. */
5380 for (insn = insns; insn; insn = NEXT_INSN (insn))
5382 switch (recog_memoized (insn))
5384 case CODE_FOR_doloop_begin_internal:
5385 insn_extract (insn);
5386 loop = &loops[INTVAL (recog_data.operand[2])];
5388 begin = (struct mep_doloop_begin *) alloca (sizeof (struct mep_doloop_begin));
5389 begin->next = loop->begin;
5391 begin->counter = recog_data.operand[0];
5393 loop->begin = begin;
5396 case CODE_FOR_doloop_end_internal:
5397 insn_extract (insn);
5398 loop = &loops[INTVAL (recog_data.operand[2])];
5400 end = (struct mep_doloop_end *) alloca (sizeof (struct mep_doloop_end));
5402 end->fallthrough = next_real_insn (insn);
5403 end->counter = recog_data.operand[0];
5404 end->label = recog_data.operand[1];
5405 end->scratch = recog_data.operand[3];
5407 /* If this insn falls through to an unconditional jump,
5408 give it a lower priority than the others. */
5409 if (loop->end != 0 && simplejump_p (end->fallthrough))
5411 end->next = loop->end->next;
5412 loop->end->next = end;
5416 end->next = loop->end;
5422 /* Convert the insns for each loop in turn. */
5423 for (loop = loops; loop < loops + cfun->machine->doloop_tags; loop++)
5424 if (mep_repeat_loop_p (loop))
5426 /* Case (1) or (2). */
5427 rtx repeat_label, label_ref;
5429 /* Create a new label for the repeat insn. */
5430 repeat_label = gen_label_rtx ();
5432 /* Replace the doloop_begin with a repeat. */
5433 label_ref = gen_rtx_LABEL_REF (VOIDmode, repeat_label);
5434 emit_insn_before (gen_repeat (loop->begin->counter, label_ref),
5436 delete_insn (loop->begin->insn);
5438 /* Insert the repeat label before the first doloop_end.
5439 Fill the gap with nops if there are other doloop_ends. */
5440 mep_insert_repeat_label_last (loop->end->insn, repeat_label,
5441 false, loop->end->next != 0);
5443 /* Emit a repeat_end (to improve the readability of the output). */
5444 emit_insn_before (gen_repeat_end (), loop->end->insn);
5446 /* Delete the first doloop_end. */
5447 delete_insn (loop->end->insn);
5449 /* Replace the others with branches to REPEAT_LABEL. */
5450 for (end = loop->end->next; end != 0; end = end->next)
5452 emit_jump_insn_before (gen_jump (repeat_label), end->insn);
5453 delete_insn (end->insn);
5454 delete_insn (end->fallthrough);
5459 /* Case (3). First replace all the doloop_begins with increment
5461 for (begin = loop->begin; begin != 0; begin = begin->next)
5463 emit_insn_before (gen_add3_insn (copy_rtx (begin->counter),
5464 begin->counter, const1_rtx),
5466 delete_insn (begin->insn);
5469 /* Replace all the doloop_ends with decrement-and-branch sequences. */
5470 for (end = loop->end; end != 0; end = end->next)
5476 /* Load the counter value into a general register. */
5478 if (!REG_P (reg) || REGNO (reg) > 15)
5481 emit_move_insn (copy_rtx (reg), copy_rtx (end->counter));
5484 /* Decrement the counter. */
5485 emit_insn (gen_add3_insn (copy_rtx (reg), copy_rtx (reg),
5488 /* Copy it back to its original location. */
5489 if (reg != end->counter)
5490 emit_move_insn (copy_rtx (end->counter), copy_rtx (reg));
5492 /* Jump back to the start label. */
5493 insn = emit_jump_insn (gen_mep_bne_true (reg, const0_rtx,
5495 JUMP_LABEL (insn) = end->label;
5496 LABEL_NUSES (end->label)++;
5498 /* Emit the whole sequence before the doloop_end. */
5499 insn = get_insns ();
5501 emit_insn_before (insn, end->insn);
5503 /* Delete the doloop_end. */
5504 delete_insn (end->insn);
5511 mep_invertable_branch_p (rtx insn)
5514 enum rtx_code old_code;
5517 set = PATTERN (insn);
5518 if (GET_CODE (set) != SET)
5520 if (GET_CODE (XEXP (set, 1)) != IF_THEN_ELSE)
5522 cond = XEXP (XEXP (set, 1), 0);
5523 old_code = GET_CODE (cond);
5527 PUT_CODE (cond, NE);
5530 PUT_CODE (cond, EQ);
5533 PUT_CODE (cond, GE);
5536 PUT_CODE (cond, LT);
5541 INSN_CODE (insn) = -1;
5542 i = recog_memoized (insn);
5543 PUT_CODE (cond, old_code);
5544 INSN_CODE (insn) = -1;
5549 mep_invert_branch (rtx insn, rtx after)
5551 rtx cond, set, label;
5554 set = PATTERN (insn);
5556 gcc_assert (GET_CODE (set) == SET);
5557 gcc_assert (GET_CODE (XEXP (set, 1)) == IF_THEN_ELSE);
5559 cond = XEXP (XEXP (set, 1), 0);
5560 switch (GET_CODE (cond))
5563 PUT_CODE (cond, NE);
5566 PUT_CODE (cond, EQ);
5569 PUT_CODE (cond, GE);
5572 PUT_CODE (cond, LT);
5577 label = gen_label_rtx ();
5578 emit_label_after (label, after);
5579 for (i=1; i<=2; i++)
5580 if (GET_CODE (XEXP (XEXP (set, 1), i)) == LABEL_REF)
5582 rtx ref = XEXP (XEXP (set, 1), i);
5583 if (LABEL_NUSES (XEXP (ref, 0)) == 1)
5584 delete_insn (XEXP (ref, 0));
5585 XEXP (ref, 0) = label;
5586 LABEL_NUSES (label) ++;
5587 JUMP_LABEL (insn) = label;
5589 INSN_CODE (insn) = -1;
5590 i = recog_memoized (insn);
5591 gcc_assert (i >= 0);
5595 mep_reorg_erepeat (rtx insns)
5597 rtx insn, prev, label_before, l, x;
5600 for (insn = insns; insn; insn = NEXT_INSN (insn))
5602 && ! JUMP_TABLE_DATA_P (insn)
5603 && mep_invertable_branch_p (insn))
5607 fprintf (dump_file, "\n------------------------------\n");
5608 fprintf (dump_file, "erepeat: considering this jump:\n");
5609 print_rtl_single (dump_file, insn);
5611 count = simplejump_p (insn) ? 0 : 1;
5613 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
5615 if (GET_CODE (prev) == CALL_INSN
5616 || BARRIER_P (prev))
5619 if (prev == JUMP_LABEL (insn))
5623 fprintf (dump_file, "found loop top, %d insns\n", count);
5625 if (LABEL_NUSES (prev) == 1)
5626 /* We're the only user, always safe */ ;
5627 else if (LABEL_NUSES (prev) == 2)
5629 /* See if there's a barrier before this label. If
5630 so, we know nobody inside the loop uses it.
5631 But we must be careful to put the erepeat
5632 *after* the label. */
5634 for (barrier = PREV_INSN (prev);
5635 barrier && GET_CODE (barrier) == NOTE;
5636 barrier = PREV_INSN (barrier))
5638 if (barrier && GET_CODE (barrier) != BARRIER)
5643 /* We don't know who else, within or without our loop, uses this */
5645 fprintf (dump_file, "... but there are multiple users, too risky.\n");
5649 /* Generate a label to be used by the erepat insn. */
5650 l = gen_label_rtx ();
5652 /* Insert the erepeat after INSN's target label. */
5653 x = gen_erepeat (gen_rtx_LABEL_REF (VOIDmode, l));
5655 emit_insn_after (x, prev);
5657 /* Insert the erepeat label. */
5658 newlast = (mep_insert_repeat_label_last
5659 (insn, l, !simplejump_p (insn), false));
5660 if (simplejump_p (insn))
5662 emit_insn_before (gen_erepeat_end (), insn);
5667 mep_invert_branch (insn, newlast);
5668 emit_insn_after (gen_erepeat_end (), newlast);
5675 /* A label is OK if there is exactly one user, and we
5676 can find that user before the next label. */
5679 if (LABEL_NUSES (prev) == 1)
5681 for (user = PREV_INSN (prev);
5682 user && (INSN_P (user) || GET_CODE (user) == NOTE);
5683 user = PREV_INSN (user))
5684 if (GET_CODE (user) == JUMP_INSN
5685 && JUMP_LABEL (user) == prev)
5687 safe = INSN_UID (user);
5694 fprintf (dump_file, "... ignoring jump from insn %d to %d\n",
5695 safe, INSN_UID (prev));
5702 label_before = prev;
5707 fprintf (dump_file, "\n==============================\n");
5710 /* Replace a jump to a return, with a copy of the return. GCC doesn't
5711 always do this on its own. */
5714 mep_jmp_return_reorg (rtx insns)
5716 rtx insn, label, ret;
5719 for (insn = insns; insn; insn = NEXT_INSN (insn))
5720 if (simplejump_p (insn))
5722 /* Find the fist real insn the jump jumps to. */
5723 label = ret = JUMP_LABEL (insn);
5725 && (GET_CODE (ret) == NOTE
5726 || GET_CODE (ret) == CODE_LABEL
5727 || GET_CODE (PATTERN (ret)) == USE))
5728 ret = NEXT_INSN (ret);
5732 /* Is it a return? */
5733 ret_code = recog_memoized (ret);
5734 if (ret_code == CODE_FOR_return_internal
5735 || ret_code == CODE_FOR_eh_return_internal)
5737 /* It is. Replace the jump with a return. */
5738 LABEL_NUSES (label) --;
5739 if (LABEL_NUSES (label) == 0)
5740 delete_insn (label);
5741 PATTERN (insn) = copy_rtx (PATTERN (ret));
5742 INSN_CODE (insn) = -1;
5750 mep_reorg_addcombine (rtx insns)
5754 for (i = insns; i; i = NEXT_INSN (i))
5756 && INSN_CODE (i) == CODE_FOR_addsi3
5757 && GET_CODE (SET_DEST (PATTERN (i))) == REG
5758 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 0)) == REG
5759 && REGNO (SET_DEST (PATTERN (i))) == REGNO (XEXP (SET_SRC (PATTERN (i)), 0))
5760 && GET_CODE (XEXP (SET_SRC (PATTERN (i)), 1)) == CONST_INT)
5764 && INSN_CODE (n) == CODE_FOR_addsi3
5765 && GET_CODE (SET_DEST (PATTERN (n))) == REG
5766 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 0)) == REG
5767 && REGNO (SET_DEST (PATTERN (n))) == REGNO (XEXP (SET_SRC (PATTERN (n)), 0))
5768 && GET_CODE (XEXP (SET_SRC (PATTERN (n)), 1)) == CONST_INT)
5770 int ic = INTVAL (XEXP (SET_SRC (PATTERN (i)), 1));
5771 int nc = INTVAL (XEXP (SET_SRC (PATTERN (n)), 1));
5772 if (REGNO (SET_DEST (PATTERN (i))) == REGNO (SET_DEST (PATTERN (n)))
5774 && ic + nc > -32768)
5776 XEXP (SET_SRC (PATTERN (i)), 1) = GEN_INT (ic + nc);
5777 NEXT_INSN (i) = NEXT_INSN (n);
5779 PREV_INSN (NEXT_INSN (i)) = i;
5785 /* If this insn adjusts the stack, return the adjustment, else return
5788 add_sp_insn_p (rtx insn)
5792 if (! single_set (insn))
5794 pat = PATTERN (insn);
5795 if (GET_CODE (SET_DEST (pat)) != REG)
5797 if (REGNO (SET_DEST (pat)) != SP_REGNO)
5799 if (GET_CODE (SET_SRC (pat)) != PLUS)
5801 if (GET_CODE (XEXP (SET_SRC (pat), 0)) != REG)
5803 if (REGNO (XEXP (SET_SRC (pat), 0)) != SP_REGNO)
5805 if (GET_CODE (XEXP (SET_SRC (pat), 1)) != CONST_INT)
5807 return INTVAL (XEXP (SET_SRC (pat), 1));
5810 /* Check for trivial functions that set up an unneeded stack
5813 mep_reorg_noframe (rtx insns)
5815 rtx start_frame_insn;
5816 rtx end_frame_insn = 0;
5820 /* The first insn should be $sp = $sp + N */
5821 while (insns && ! INSN_P (insns))
5822 insns = NEXT_INSN (insns);
5826 sp_adjust = add_sp_insn_p (insns);
5830 start_frame_insn = insns;
5831 sp = SET_DEST (PATTERN (start_frame_insn));
5833 insns = next_real_insn (insns);
5837 rtx next = next_real_insn (insns);
5841 sp2 = add_sp_insn_p (insns);
5846 end_frame_insn = insns;
5847 if (sp2 != -sp_adjust)
5850 else if (mep_mentioned_p (insns, sp, 0))
5852 else if (CALL_P (insns))
5860 delete_insn (start_frame_insn);
5861 delete_insn (end_frame_insn);
5868 rtx insns = get_insns ();
5869 mep_reorg_addcombine (insns);
5870 #if EXPERIMENTAL_REGMOVE_REORG
5871 /* VLIW packing has been done already, so we can't just delete things. */
5872 if (!mep_vliw_function_p (cfun->decl))
5873 mep_reorg_regmove (insns);
5875 mep_jmp_return_reorg (insns);
5876 mep_bundle_insns (insns);
5877 mep_reorg_repeat (insns);
5880 && !profile_arc_flag
5881 && TARGET_OPT_REPEAT
5882 && (!mep_interrupt_p () || mep_interrupt_saved_reg (RPB_REGNO)))
5883 mep_reorg_erepeat (insns);
5885 /* This may delete *insns so make sure it's last. */
5886 mep_reorg_noframe (insns);
5891 /*----------------------------------------------------------------------*/
5893 /*----------------------------------------------------------------------*/
5895 /* Element X gives the index into cgen_insns[] of the most general
5896 implementation of intrinsic X. Unimplemented intrinsics are
5898 int mep_intrinsic_insn[ARRAY_SIZE (cgen_intrinsics)];
5900 /* Element X gives the index of another instruction that is mapped to
5901 the same intrinsic as cgen_insns[X]. It is -1 when there is no other
5904 Things are set up so that mep_intrinsic_chain[X] < X. */
5905 static int mep_intrinsic_chain[ARRAY_SIZE (cgen_insns)];
5907 /* The bitmask for the current ISA. The ISA masks are declared
5909 unsigned int mep_selected_isa;
5912 const char *config_name;
5916 static struct mep_config mep_configs[] = {
5917 #ifdef COPROC_SELECTION_TABLE
5918 COPROC_SELECTION_TABLE,
5923 /* Initialize the global intrinsics variables above. */
5926 mep_init_intrinsics (void)
5930 /* Set MEP_SELECTED_ISA to the ISA flag for this configuration. */
5931 mep_selected_isa = mep_configs[0].isa;
5932 if (mep_config_string != 0)
5933 for (i = 0; mep_configs[i].config_name; i++)
5934 if (strcmp (mep_config_string, mep_configs[i].config_name) == 0)
5936 mep_selected_isa = mep_configs[i].isa;
5940 /* Assume all intrinsics are unavailable. */
5941 for (i = 0; i < ARRAY_SIZE (mep_intrinsic_insn); i++)
5942 mep_intrinsic_insn[i] = -1;
5944 /* Build up the global intrinsic tables. */
5945 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
5946 if ((cgen_insns[i].isas & mep_selected_isa) != 0)
5948 mep_intrinsic_chain[i] = mep_intrinsic_insn[cgen_insns[i].intrinsic];
5949 mep_intrinsic_insn[cgen_insns[i].intrinsic] = i;
5951 /* See whether we can directly move values between one coprocessor
5952 register and another. */
5953 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
5954 if (MEP_INTRINSIC_AVAILABLE_P (mep_cmov_insns[i]))
5955 mep_have_copro_copro_moves_p = true;
5957 /* See whether we can directly move values between core and
5958 coprocessor registers. */
5959 mep_have_core_copro_moves_p = (MEP_INTRINSIC_AVAILABLE_P (mep_cmov1)
5960 && MEP_INTRINSIC_AVAILABLE_P (mep_cmov2));
5962 mep_have_core_copro_moves_p = 1;
5965 /* Declare all available intrinsic functions. Called once only. */
5967 static tree cp_data_bus_int_type_node;
5968 static tree opaque_vector_type_node;
5969 static tree v8qi_type_node;
5970 static tree v4hi_type_node;
5971 static tree v2si_type_node;
5972 static tree v8uqi_type_node;
5973 static tree v4uhi_type_node;
5974 static tree v2usi_type_node;
5977 mep_cgen_regnum_to_type (enum cgen_regnum_operand_type cr)
5981 case cgen_regnum_operand_type_POINTER: return ptr_type_node;
5982 case cgen_regnum_operand_type_LONG: return long_integer_type_node;
5983 case cgen_regnum_operand_type_ULONG: return long_unsigned_type_node;
5984 case cgen_regnum_operand_type_SHORT: return short_integer_type_node;
5985 case cgen_regnum_operand_type_USHORT: return short_unsigned_type_node;
5986 case cgen_regnum_operand_type_CHAR: return char_type_node;
5987 case cgen_regnum_operand_type_UCHAR: return unsigned_char_type_node;
5988 case cgen_regnum_operand_type_SI: return intSI_type_node;
5989 case cgen_regnum_operand_type_DI: return intDI_type_node;
5990 case cgen_regnum_operand_type_VECTOR: return opaque_vector_type_node;
5991 case cgen_regnum_operand_type_V8QI: return v8qi_type_node;
5992 case cgen_regnum_operand_type_V4HI: return v4hi_type_node;
5993 case cgen_regnum_operand_type_V2SI: return v2si_type_node;
5994 case cgen_regnum_operand_type_V8UQI: return v8uqi_type_node;
5995 case cgen_regnum_operand_type_V4UHI: return v4uhi_type_node;
5996 case cgen_regnum_operand_type_V2USI: return v2usi_type_node;
5997 case cgen_regnum_operand_type_CP_DATA_BUS_INT: return cp_data_bus_int_type_node;
5999 return void_type_node;
6004 mep_init_builtins (void)
6008 if (TARGET_64BIT_CR_REGS)
6009 cp_data_bus_int_type_node = long_long_integer_type_node;
6011 cp_data_bus_int_type_node = long_integer_type_node;
6013 opaque_vector_type_node = build_opaque_vector_type (intQI_type_node, 8);
6014 v8qi_type_node = build_vector_type (intQI_type_node, 8);
6015 v4hi_type_node = build_vector_type (intHI_type_node, 4);
6016 v2si_type_node = build_vector_type (intSI_type_node, 2);
6017 v8uqi_type_node = build_vector_type (unsigned_intQI_type_node, 8);
6018 v4uhi_type_node = build_vector_type (unsigned_intHI_type_node, 4);
6019 v2usi_type_node = build_vector_type (unsigned_intSI_type_node, 2);
6021 (*lang_hooks.decls.pushdecl)
6022 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_data_bus_int"),
6023 cp_data_bus_int_type_node));
6025 (*lang_hooks.decls.pushdecl)
6026 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_vector"),
6027 opaque_vector_type_node));
6029 (*lang_hooks.decls.pushdecl)
6030 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8qi"),
6032 (*lang_hooks.decls.pushdecl)
6033 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4hi"),
6035 (*lang_hooks.decls.pushdecl)
6036 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2si"),
6039 (*lang_hooks.decls.pushdecl)
6040 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v8uqi"),
6042 (*lang_hooks.decls.pushdecl)
6043 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v4uhi"),
6045 (*lang_hooks.decls.pushdecl)
6046 (build_decl (BUILTINS_LOCATION, TYPE_DECL, get_identifier ("cp_v2usi"),
6049 /* Intrinsics like mep_cadd3 are implemented with two groups of
6050 instructions, one which uses UNSPECs and one which uses a specific
6051 rtl code such as PLUS. Instructions in the latter group belong
6052 to GROUP_KNOWN_CODE.
6054 In such cases, the intrinsic will have two entries in the global
6055 tables above. The unspec form is accessed using builtin functions
6056 while the specific form is accessed using the mep_* enum in
6059 The idea is that __cop arithmetic and builtin functions have
6060 different optimization requirements. If mep_cadd3() appears in
6061 the source code, the user will surely except gcc to use cadd3
6062 rather than a work-alike such as add3. However, if the user
6063 just writes "a + b", where a or b are __cop variables, it is
6064 reasonable for gcc to choose a core instruction rather than
6065 cadd3 if it believes that is more optimal. */
6066 for (i = 0; i < ARRAY_SIZE (cgen_insns); i++)
6067 if ((cgen_insns[i].groups & GROUP_KNOWN_CODE) == 0
6068 && mep_intrinsic_insn[cgen_insns[i].intrinsic] >= 0)
6070 tree ret_type = void_type_node;
6073 if (i > 0 && cgen_insns[i].intrinsic == cgen_insns[i-1].intrinsic)
6076 if (cgen_insns[i].cret_p)
6077 ret_type = mep_cgen_regnum_to_type (cgen_insns[i].regnums[0].type);
6079 bi_type = build_function_type (ret_type, 0);
6080 add_builtin_function (cgen_intrinsics[cgen_insns[i].intrinsic],
6082 cgen_insns[i].intrinsic, BUILT_IN_MD, NULL, NULL);
6086 /* Report the unavailablity of the given intrinsic. */
6090 mep_intrinsic_unavailable (int intrinsic)
6092 static int already_reported_p[ARRAY_SIZE (cgen_intrinsics)];
6094 if (already_reported_p[intrinsic])
6097 if (mep_intrinsic_insn[intrinsic] < 0)
6098 error ("coprocessor intrinsic %qs is not available in this configuration",
6099 cgen_intrinsics[intrinsic]);
6100 else if (CGEN_CURRENT_GROUP == GROUP_VLIW)
6101 error ("%qs is not available in VLIW functions",
6102 cgen_intrinsics[intrinsic]);
6104 error ("%qs is not available in non-VLIW functions",
6105 cgen_intrinsics[intrinsic]);
6107 already_reported_p[intrinsic] = 1;
6112 /* See if any implementation of INTRINSIC is available to the
6113 current function. If so, store the most general implementation
6114 in *INSN_PTR and return true. Return false otherwise. */
6117 mep_get_intrinsic_insn (int intrinsic ATTRIBUTE_UNUSED, const struct cgen_insn **insn_ptr ATTRIBUTE_UNUSED)
6121 i = mep_intrinsic_insn[intrinsic];
6122 while (i >= 0 && !CGEN_ENABLE_INSN_P (i))
6123 i = mep_intrinsic_chain[i];
6127 *insn_ptr = &cgen_insns[i];
6134 /* Like mep_get_intrinsic_insn, but with extra handling for moves.
6135 If INTRINSIC is mep_cmov, but there is no pure CR <- CR move insn,
6136 try using a work-alike instead. In this case, the returned insn
6137 may have three operands rather than two. */
6140 mep_get_move_insn (int intrinsic, const struct cgen_insn **cgen_insn)
6144 if (intrinsic == mep_cmov)
6146 for (i = 0; i < ARRAY_SIZE (mep_cmov_insns); i++)
6147 if (mep_get_intrinsic_insn (mep_cmov_insns[i], cgen_insn))
6151 return mep_get_intrinsic_insn (intrinsic, cgen_insn);
6155 /* If ARG is a register operand that is the same size as MODE, convert it
6156 to MODE using a subreg. Otherwise return ARG as-is. */
6159 mep_convert_arg (enum machine_mode mode, rtx arg)
6161 if (GET_MODE (arg) != mode
6162 && register_operand (arg, VOIDmode)
6163 && GET_MODE_SIZE (GET_MODE (arg)) == GET_MODE_SIZE (mode))
6164 return simplify_gen_subreg (mode, arg, GET_MODE (arg), 0);
6169 /* Apply regnum conversions to ARG using the description given by REGNUM.
6170 Return the new argument on success and null on failure. */
6173 mep_convert_regnum (const struct cgen_regnum_operand *regnum, rtx arg)
6175 if (regnum->count == 0)
6178 if (GET_CODE (arg) != CONST_INT
6180 || INTVAL (arg) >= regnum->count)
6183 return gen_rtx_REG (SImode, INTVAL (arg) + regnum->base);
6187 /* Try to make intrinsic argument ARG match the given operand.
6188 UNSIGNED_P is true if the argument has an unsigned type. */
6191 mep_legitimize_arg (const struct insn_operand_data *operand, rtx arg,
6194 if (GET_CODE (arg) == CONST_INT)
6196 /* CONST_INTs can only be bound to integer operands. */
6197 if (GET_MODE_CLASS (operand->mode) != MODE_INT)
6200 else if (GET_CODE (arg) == CONST_DOUBLE)
6201 /* These hold vector constants. */;
6202 else if (GET_MODE_SIZE (GET_MODE (arg)) != GET_MODE_SIZE (operand->mode))
6204 /* If the argument is a different size from what's expected, we must
6205 have a value in the right mode class in order to convert it. */
6206 if (GET_MODE_CLASS (operand->mode) != GET_MODE_CLASS (GET_MODE (arg)))
6209 /* If the operand is an rvalue, promote or demote it to match the
6210 operand's size. This might not need extra instructions when
6211 ARG is a register value. */
6212 if (operand->constraint[0] != '=')
6213 arg = convert_to_mode (operand->mode, arg, unsigned_p);
6216 /* If the operand is an lvalue, bind the operand to a new register.
6217 The caller will copy this value into ARG after the main
6218 instruction. By doing this always, we produce slightly more
6220 /* But not for control registers. */
6221 if (operand->constraint[0] == '='
6223 || ! (CONTROL_REGNO_P (REGNO (arg))
6224 || CCR_REGNO_P (REGNO (arg))
6225 || CR_REGNO_P (REGNO (arg)))
6227 return gen_reg_rtx (operand->mode);
6229 /* Try simple mode punning. */
6230 arg = mep_convert_arg (operand->mode, arg);
6231 if (operand->predicate (arg, operand->mode))
6234 /* See if forcing the argument into a register will make it match. */
6235 if (GET_CODE (arg) == CONST_INT || GET_CODE (arg) == CONST_DOUBLE)
6236 arg = force_reg (operand->mode, arg);
6238 arg = mep_convert_arg (operand->mode, force_reg (GET_MODE (arg), arg));
6239 if (operand->predicate (arg, operand->mode))
6246 /* Report that ARG cannot be passed to argument ARGNUM of intrinsic
6247 function FNNAME. OPERAND describes the operand to which ARGNUM
6251 mep_incompatible_arg (const struct insn_operand_data *operand, rtx arg,
6252 int argnum, tree fnname)
6256 if (GET_CODE (arg) == CONST_INT)
6257 for (i = 0; i < ARRAY_SIZE (cgen_immediate_predicates); i++)
6258 if (operand->predicate == cgen_immediate_predicates[i].predicate)
6260 const struct cgen_immediate_predicate *predicate;
6261 HOST_WIDE_INT argval;
6263 predicate = &cgen_immediate_predicates[i];
6264 argval = INTVAL (arg);
6265 if (argval < predicate->lower || argval >= predicate->upper)
6266 error ("argument %d of %qE must be in the range %d...%d",
6267 argnum, fnname, predicate->lower, predicate->upper - 1);
6269 error ("argument %d of %qE must be a multiple of %d",
6270 argnum, fnname, predicate->align);
6274 error ("incompatible type for argument %d of %qE", argnum, fnname);
6278 mep_expand_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
6279 rtx subtarget ATTRIBUTE_UNUSED,
6280 enum machine_mode mode ATTRIBUTE_UNUSED,
6281 int ignore ATTRIBUTE_UNUSED)
6283 rtx pat, op[10], arg[10];
6285 int opindex, unsigned_p[10];
6287 unsigned int n_args;
6289 const struct cgen_insn *cgen_insn;
6290 const struct insn_data *idata;
6292 int return_type = void_type_node;
6295 fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6296 fnname = DECL_NAME (fndecl);
6298 /* Find out which instruction we should emit. Note that some coprocessor
6299 intrinsics may only be available in VLIW mode, or only in normal mode. */
6300 if (!mep_get_intrinsic_insn (DECL_FUNCTION_CODE (fndecl), &cgen_insn))
6302 mep_intrinsic_unavailable (DECL_FUNCTION_CODE (fndecl));
6303 return error_mark_node;
6305 idata = &insn_data[cgen_insn->icode];
6307 builtin_n_args = cgen_insn->num_args;
6309 if (cgen_insn->cret_p)
6311 if (cgen_insn->cret_p > 1)
6314 return_type = mep_cgen_regnum_to_type (cgen_insn->regnums[0].type);
6318 /* Evaluate each argument. */
6319 n_args = call_expr_nargs (exp);
6321 if (n_args < builtin_n_args)
6323 error ("too few arguments to %qE", fnname);
6324 return error_mark_node;
6326 if (n_args > builtin_n_args)
6328 error ("too many arguments to %qE", fnname);
6329 return error_mark_node;
6332 for (a = first_arg; a < builtin_n_args+first_arg; a++)
6336 args = CALL_EXPR_ARG (exp, a-first_arg);
6341 if (cgen_insn->regnums[a].reference_p)
6343 if (TREE_CODE (value) != ADDR_EXPR)
6346 error ("argument %d of %qE must be an address", a+1, fnname);
6347 return error_mark_node;
6349 value = TREE_OPERAND (value, 0);
6353 /* If the argument has been promoted to int, get the unpromoted
6354 value. This is necessary when sub-int memory values are bound
6355 to reference parameters. */
6356 if (TREE_CODE (value) == NOP_EXPR
6357 && TREE_TYPE (value) == integer_type_node
6358 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6359 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (value, 0)))
6360 < TYPE_PRECISION (TREE_TYPE (value))))
6361 value = TREE_OPERAND (value, 0);
6363 /* If the argument has been promoted to double, get the unpromoted
6364 SFmode value. This is necessary for FMAX support, for example. */
6365 if (TREE_CODE (value) == NOP_EXPR
6366 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (value))
6367 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (value, 0)))
6368 && TYPE_MODE (TREE_TYPE (value)) == DFmode
6369 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (value, 0))) == SFmode)
6370 value = TREE_OPERAND (value, 0);
6372 unsigned_p[a] = TYPE_UNSIGNED (TREE_TYPE (value));
6373 arg[a] = expand_expr (value, NULL, VOIDmode, EXPAND_NORMAL);
6374 arg[a] = mep_convert_regnum (&cgen_insn->regnums[a], arg[a]);
6375 if (cgen_insn->regnums[a].reference_p)
6377 tree pointed_to = TREE_TYPE (TREE_TYPE (value));
6378 enum machine_mode pointed_mode = TYPE_MODE (pointed_to);
6380 arg[a] = gen_rtx_MEM (pointed_mode, arg[a]);
6384 error ("argument %d of %qE must be in the range %d...%d",
6385 a + 1, fnname, 0, cgen_insn->regnums[a].count - 1);
6386 return error_mark_node;
6390 for (a=0; a<first_arg; a++)
6392 if (a == 0 && target && GET_MODE (target) == idata->operand[0].mode)
6395 arg[a] = gen_reg_rtx (idata->operand[0].mode);
6398 /* Convert the arguments into a form suitable for the intrinsic.
6399 Report an error if this isn't possible. */
6400 for (opindex = 0; opindex < idata->n_operands; opindex++)
6402 a = cgen_insn->op_mapping[opindex];
6403 op[opindex] = mep_legitimize_arg (&idata->operand[opindex],
6404 arg[a], unsigned_p[a]);
6405 if (op[opindex] == 0)
6407 mep_incompatible_arg (&idata->operand[opindex],
6408 arg[a], a + 1 - first_arg, fnname);
6409 return error_mark_node;
6413 /* Emit the instruction. */
6414 pat = idata->genfun (op[0], op[1], op[2], op[3], op[4],
6415 op[5], op[6], op[7], op[8], op[9]);
6417 if (GET_CODE (pat) == SET
6418 && GET_CODE (SET_DEST (pat)) == PC
6419 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
6420 emit_jump_insn (pat);
6424 /* Copy lvalues back to their final locations. */
6425 for (opindex = 0; opindex < idata->n_operands; opindex++)
6426 if (idata->operand[opindex].constraint[0] == '=')
6428 a = cgen_insn->op_mapping[opindex];
6431 if (GET_MODE_CLASS (GET_MODE (arg[a]))
6432 != GET_MODE_CLASS (GET_MODE (op[opindex])))
6433 emit_move_insn (arg[a], gen_lowpart (GET_MODE (arg[a]),
6437 /* First convert the operand to the right mode, then copy it
6438 into the destination. Doing the conversion as a separate
6439 step (rather than using convert_move) means that we can
6440 avoid creating no-op moves when ARG[A] and OP[OPINDEX]
6441 refer to the same register. */
6442 op[opindex] = convert_to_mode (GET_MODE (arg[a]),
6443 op[opindex], unsigned_p[a]);
6444 if (!rtx_equal_p (arg[a], op[opindex]))
6445 emit_move_insn (arg[a], op[opindex]);
6450 if (first_arg > 0 && target && target != op[0])
6452 emit_move_insn (target, op[0]);
6459 mep_vector_mode_supported_p (enum machine_mode mode ATTRIBUTE_UNUSED)
6464 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
6465 a global register. */
6468 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
6476 switch (GET_CODE (x))
6479 if (REG_P (SUBREG_REG (x)))
6481 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
6482 && global_regs[subreg_regno (x)])
6490 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
6504 /* A non-constant call might use a global register. */
6514 /* Returns nonzero if X mentions a global register. */
6517 global_reg_mentioned_p (rtx x)
6523 if (! RTL_CONST_OR_PURE_CALL_P (x))
6525 x = CALL_INSN_FUNCTION_USAGE (x);
6533 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
6535 /* Scheduling hooks for VLIW mode.
6537 Conceptually this is very simple: we have a two-pack architecture
6538 that takes one core insn and one coprocessor insn to make up either
6539 a 32- or 64-bit instruction word (depending on the option bit set in
6540 the chip). I.e. in VL32 mode, we can pack one 16-bit core insn and
6541 one 16-bit cop insn; in VL64 mode we can pack one 16-bit core insn
6542 and one 48-bit cop insn or two 32-bit core/cop insns.
6544 In practice, instruction selection will be a bear. Consider in
6545 VL64 mode the following insns
6550 these cannot pack, since the add is a 16-bit core insn and cmov
6551 is a 32-bit cop insn. However,
6556 packs just fine. For good VLIW code generation in VL64 mode, we
6557 will have to have 32-bit alternatives for many of the common core
6558 insns. Not implemented. */
6561 mep_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
6565 if (REG_NOTE_KIND (link) != 0)
6567 /* See whether INSN and DEP_INSN are intrinsics that set the same
6568 hard register. If so, it is more important to free up DEP_INSN
6569 than it is to free up INSN.
6571 Note that intrinsics like mep_mulr are handled differently from
6572 the equivalent mep.md patterns. In mep.md, if we don't care
6573 about the value of $lo and $hi, the pattern will just clobber
6574 the registers, not set them. Since clobbers don't count as
6575 output dependencies, it is often possible to reorder two mulrs,
6578 In contrast, mep_mulr() sets both $lo and $hi to specific values,
6579 so any pair of mep_mulr()s will be inter-dependent. We should
6580 therefore give the first mep_mulr() a higher priority. */
6581 if (REG_NOTE_KIND (link) == REG_DEP_OUTPUT
6582 && global_reg_mentioned_p (PATTERN (insn))
6583 && global_reg_mentioned_p (PATTERN (dep_insn)))
6586 /* If the dependence is an anti or output dependence, assume it
6591 /* If we can't recognize the insns, we can't really do anything. */
6592 if (recog_memoized (dep_insn) < 0)
6595 /* The latency attribute doesn't apply to MeP-h1: we use the stall
6596 attribute instead. */
6599 cost_specified = get_attr_latency (dep_insn);
6600 if (cost_specified != 0)
6601 return cost_specified;
6607 /* ??? We don't properly compute the length of a load/store insn,
6608 taking into account the addressing mode. */
6611 mep_issue_rate (void)
6613 return TARGET_IVC2 ? 3 : 2;
6616 /* Return true if function DECL was declared with the vliw attribute. */
6619 mep_vliw_function_p (tree decl)
6621 return lookup_attribute ("vliw", TYPE_ATTRIBUTES (TREE_TYPE (decl))) != 0;
6625 mep_find_ready_insn (rtx *ready, int nready, enum attr_slot slot, int length)
6629 for (i = nready - 1; i >= 0; --i)
6631 rtx insn = ready[i];
6632 if (recog_memoized (insn) >= 0
6633 && get_attr_slot (insn) == slot
6634 && get_attr_length (insn) == length)
6642 mep_move_ready_insn (rtx *ready, int nready, rtx insn)
6646 for (i = 0; i < nready; ++i)
6647 if (ready[i] == insn)
6649 for (; i < nready - 1; ++i)
6650 ready[i] = ready[i + 1];
6659 mep_print_sched_insn (FILE *dump, rtx insn)
6661 const char *slots = "none";
6662 const char *name = NULL;
6666 if (GET_CODE (PATTERN (insn)) == SET
6667 || GET_CODE (PATTERN (insn)) == PARALLEL)
6669 switch (get_attr_slots (insn))
6671 case SLOTS_CORE: slots = "core"; break;
6672 case SLOTS_C3: slots = "c3"; break;
6673 case SLOTS_P0: slots = "p0"; break;
6674 case SLOTS_P0_P0S: slots = "p0,p0s"; break;
6675 case SLOTS_P0_P1: slots = "p0,p1"; break;
6676 case SLOTS_P0S: slots = "p0s"; break;
6677 case SLOTS_P0S_P1: slots = "p0s,p1"; break;
6678 case SLOTS_P1: slots = "p1"; break;
6680 sprintf(buf, "%d", get_attr_slots (insn));
6685 if (GET_CODE (PATTERN (insn)) == USE)
6688 code = INSN_CODE (insn);
6690 name = get_insn_name (code);
6695 "insn %4d %4d %8s %s\n",
6703 mep_sched_reorder (FILE *dump ATTRIBUTE_UNUSED,
6704 int sched_verbose ATTRIBUTE_UNUSED, rtx *ready,
6705 int *pnready, int clock ATTRIBUTE_UNUSED)
6707 int nready = *pnready;
6708 rtx core_insn, cop_insn;
6711 if (dump && sched_verbose > 1)
6713 fprintf (dump, "\nsched_reorder: clock %d nready %d\n", clock, nready);
6714 for (i=0; i<nready; i++)
6715 mep_print_sched_insn (dump, ready[i]);
6716 fprintf (dump, "\n");
6719 if (!mep_vliw_function_p (cfun->decl))
6724 /* IVC2 uses a DFA to determine what's ready and what's not. */
6728 /* We can issue either a core or coprocessor instruction.
6729 Look for a matched pair of insns to reorder. If we don't
6730 find any, don't second-guess the scheduler's priorities. */
6732 if ((core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 2))
6733 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP,
6734 TARGET_OPT_VL64 ? 6 : 2)))
6736 else if (TARGET_OPT_VL64
6737 && (core_insn = mep_find_ready_insn (ready, nready, SLOT_CORE, 4))
6738 && (cop_insn = mep_find_ready_insn (ready, nready, SLOT_COP, 4)))
6741 /* We didn't find a pair. Issue the single insn at the head
6742 of the ready list. */
6745 /* Reorder the two insns first. */
6746 mep_move_ready_insn (ready, nready, core_insn);
6747 mep_move_ready_insn (ready, nready - 1, cop_insn);
6751 /* A for_each_rtx callback. Return true if *X is a register that is
6752 set by insn PREV. */
6755 mep_store_find_set (rtx *x, void *prev)
6757 return REG_P (*x) && reg_set_p (*x, (const_rtx) prev);
6760 /* Like mep_store_bypass_p, but takes a pattern as the second argument,
6761 not the containing insn. */
6764 mep_store_data_bypass_1 (rtx prev, rtx pat)
6766 /* Cope with intrinsics like swcpa. */
6767 if (GET_CODE (pat) == PARALLEL)
6771 for (i = 0; i < XVECLEN (pat, 0); i++)
6772 if (mep_store_data_bypass_p (prev, XVECEXP (pat, 0, i)))
6778 /* Check for some sort of store. */
6779 if (GET_CODE (pat) != SET
6780 || GET_CODE (SET_DEST (pat)) != MEM)
6783 /* Intrinsics use patterns of the form (set (mem (scratch)) (unspec ...)).
6784 The first operand to the unspec is the store data and the other operands
6785 are used to calculate the address. */
6786 if (GET_CODE (SET_SRC (pat)) == UNSPEC)
6791 src = SET_SRC (pat);
6792 for (i = 1; i < XVECLEN (src, 0); i++)
6793 if (for_each_rtx (&XVECEXP (src, 0, i), mep_store_find_set, prev))
6799 /* Otherwise just check that PREV doesn't modify any register mentioned
6800 in the memory destination. */
6801 return !for_each_rtx (&SET_DEST (pat), mep_store_find_set, prev);
6804 /* Return true if INSN is a store instruction and if the store address
6805 has no true dependence on PREV. */
6808 mep_store_data_bypass_p (rtx prev, rtx insn)
6810 return INSN_P (insn) ? mep_store_data_bypass_1 (prev, PATTERN (insn)) : false;
6813 /* A for_each_rtx subroutine of mep_mul_hilo_bypass_p. Return 1 if *X
6814 is a register other than LO or HI and if PREV sets *X. */
6817 mep_mul_hilo_bypass_1 (rtx *x, void *prev)
6820 && REGNO (*x) != LO_REGNO
6821 && REGNO (*x) != HI_REGNO
6822 && reg_set_p (*x, (const_rtx) prev));
6825 /* Return true if, apart from HI/LO, there are no true dependencies
6826 between multiplication instructions PREV and INSN. */
6829 mep_mul_hilo_bypass_p (rtx prev, rtx insn)
6833 pat = PATTERN (insn);
6834 if (GET_CODE (pat) == PARALLEL)
6835 pat = XVECEXP (pat, 0, 0);
6836 return (GET_CODE (pat) == SET
6837 && !for_each_rtx (&SET_SRC (pat), mep_mul_hilo_bypass_1, prev));
6840 /* Return true if INSN is an ldc instruction that issues to the
6841 MeP-h1 integer pipeline. This is true for instructions that
6842 read from PSW, LP, SAR, HI and LO. */
6845 mep_ipipe_ldc_p (rtx insn)
6849 pat = PATTERN (insn);
6851 /* Cope with instrinsics that set both a hard register and its shadow.
6852 The set of the hard register comes first. */
6853 if (GET_CODE (pat) == PARALLEL)
6854 pat = XVECEXP (pat, 0, 0);
6856 if (GET_CODE (pat) == SET)
6858 src = SET_SRC (pat);
6860 /* Cope with intrinsics. The first operand to the unspec is
6861 the source register. */
6862 if (GET_CODE (src) == UNSPEC || GET_CODE (src) == UNSPEC_VOLATILE)
6863 src = XVECEXP (src, 0, 0);
6866 switch (REGNO (src))
6879 /* Create a VLIW bundle from core instruction CORE and coprocessor
6880 instruction COP. COP always satisfies INSN_P, but CORE can be
6881 either a new pattern or an existing instruction.
6883 Emit the bundle in place of COP and return it. */
6886 mep_make_bundle (rtx core, rtx cop)
6890 /* If CORE is an existing instruction, remove it, otherwise put
6891 the new pattern in an INSN harness. */
6895 core = make_insn_raw (core);
6897 /* Generate the bundle sequence and replace COP with it. */
6898 insn = gen_rtx_SEQUENCE (VOIDmode, gen_rtvec (2, core, cop));
6899 insn = emit_insn_after (insn, cop);
6902 /* Set up the links of the insns inside the SEQUENCE. */
6903 PREV_INSN (core) = PREV_INSN (insn);
6904 NEXT_INSN (core) = cop;
6905 PREV_INSN (cop) = core;
6906 NEXT_INSN (cop) = NEXT_INSN (insn);
6908 /* Set the VLIW flag for the coprocessor instruction. */
6909 PUT_MODE (core, VOIDmode);
6910 PUT_MODE (cop, BImode);
6912 /* Derive a location for the bundle. Individual instructions cannot
6913 have their own location because there can be no assembler labels
6914 between CORE and COP. */
6915 INSN_LOCATOR (insn) = INSN_LOCATOR (INSN_LOCATOR (core) ? core : cop);
6916 INSN_LOCATOR (core) = 0;
6917 INSN_LOCATOR (cop) = 0;
6922 /* A helper routine for ms1_insn_dependent_p called through note_stores. */
6925 mep_insn_dependent_p_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
6927 rtx * pinsn = (rtx *) data;
6929 if (*pinsn && reg_mentioned_p (x, *pinsn))
6933 /* Return true if anything in insn X is (anti,output,true) dependent on
6934 anything in insn Y. */
6937 mep_insn_dependent_p (rtx x, rtx y)
6941 gcc_assert (INSN_P (x));
6942 gcc_assert (INSN_P (y));
6945 note_stores (PATTERN (x), mep_insn_dependent_p_1, &tmp);
6946 if (tmp == NULL_RTX)
6950 note_stores (PATTERN (y), mep_insn_dependent_p_1, &tmp);
6951 if (tmp == NULL_RTX)
6958 core_insn_p (rtx insn)
6960 if (GET_CODE (PATTERN (insn)) == USE)
6962 if (get_attr_slot (insn) == SLOT_CORE)
6967 /* Mark coprocessor instructions that can be bundled together with
6968 the immediately preceeding core instruction. This is later used
6969 to emit the "+" that tells the assembler to create a VLIW insn.
6971 For unbundled insns, the assembler will automatically add coprocessor
6972 nops, and 16-bit core nops. Due to an apparent oversight in the
6973 spec, the assembler will _not_ automatically add 32-bit core nops,
6974 so we have to emit those here.
6976 Called from mep_insn_reorg. */
6979 mep_bundle_insns (rtx insns)
6981 rtx insn, last = NULL_RTX, first = NULL_RTX;
6982 int saw_scheduling = 0;
6984 /* Only do bundling if we're in vliw mode. */
6985 if (!mep_vliw_function_p (cfun->decl))
6988 /* The first insn in a bundle are TImode, the remainder are
6989 VOIDmode. After this function, the first has VOIDmode and the
6990 rest have BImode. */
6992 /* Note: this doesn't appear to be true for JUMP_INSNs. */
6994 /* First, move any NOTEs that are within a bundle, to the beginning
6996 for (insn = insns; insn ; insn = NEXT_INSN (insn))
6998 if (NOTE_P (insn) && first)
6999 /* Don't clear FIRST. */;
7001 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == TImode)
7004 else if (NONJUMP_INSN_P (insn) && GET_MODE (insn) == VOIDmode && first)
7008 /* INSN is part of a bundle; FIRST is the first insn in that
7009 bundle. Move all intervening notes out of the bundle.
7010 In addition, since the debug pass may insert a label
7011 whenever the current line changes, set the location info
7012 for INSN to match FIRST. */
7014 INSN_LOCATOR (insn) = INSN_LOCATOR (first);
7016 note = PREV_INSN (insn);
7017 while (note && note != first)
7019 prev = PREV_INSN (note);
7023 /* Remove NOTE from here... */
7024 PREV_INSN (NEXT_INSN (note)) = PREV_INSN (note);
7025 NEXT_INSN (PREV_INSN (note)) = NEXT_INSN (note);
7026 /* ...and put it in here. */
7027 NEXT_INSN (note) = first;
7028 PREV_INSN (note) = PREV_INSN (first);
7029 NEXT_INSN (PREV_INSN (note)) = note;
7030 PREV_INSN (NEXT_INSN (note)) = note;
7037 else if (!NONJUMP_INSN_P (insn))
7041 /* Now fix up the bundles. */
7042 for (insn = insns; insn ; insn = NEXT_INSN (insn))
7047 if (!NONJUMP_INSN_P (insn))
7053 /* If we're not optimizing enough, there won't be scheduling
7054 info. We detect that here. */
7055 if (GET_MODE (insn) == TImode)
7057 if (!saw_scheduling)
7062 rtx core_insn = NULL_RTX;
7064 /* IVC2 slots are scheduled by DFA, so we just accept
7065 whatever the scheduler gives us. However, we must make
7066 sure the core insn (if any) is the first in the bundle.
7067 The IVC2 assembler can insert whatever NOPs are needed,
7068 and allows a COP insn to be first. */
7070 if (NONJUMP_INSN_P (insn)
7071 && GET_CODE (PATTERN (insn)) != USE
7072 && GET_MODE (insn) == TImode)
7076 && GET_MODE (NEXT_INSN (last)) == VOIDmode
7077 && NONJUMP_INSN_P (NEXT_INSN (last));
7078 last = NEXT_INSN (last))
7080 if (core_insn_p (last))
7083 if (core_insn_p (last))
7086 if (core_insn && core_insn != insn)
7088 /* Swap core insn to first in the bundle. */
7090 /* Remove core insn. */
7091 if (PREV_INSN (core_insn))
7092 NEXT_INSN (PREV_INSN (core_insn)) = NEXT_INSN (core_insn);
7093 if (NEXT_INSN (core_insn))
7094 PREV_INSN (NEXT_INSN (core_insn)) = PREV_INSN (core_insn);
7096 /* Re-insert core insn. */
7097 PREV_INSN (core_insn) = PREV_INSN (insn);
7098 NEXT_INSN (core_insn) = insn;
7100 if (PREV_INSN (core_insn))
7101 NEXT_INSN (PREV_INSN (core_insn)) = core_insn;
7102 PREV_INSN (insn) = core_insn;
7104 PUT_MODE (core_insn, TImode);
7105 PUT_MODE (insn, VOIDmode);
7109 /* The first insn has TImode, the rest have VOIDmode */
7110 if (GET_MODE (insn) == TImode)
7111 PUT_MODE (insn, VOIDmode);
7113 PUT_MODE (insn, BImode);
7117 PUT_MODE (insn, VOIDmode);
7118 if (recog_memoized (insn) >= 0
7119 && get_attr_slot (insn) == SLOT_COP)
7121 if (GET_CODE (insn) == JUMP_INSN
7123 || recog_memoized (last) < 0
7124 || get_attr_slot (last) != SLOT_CORE
7125 || (get_attr_length (insn)
7126 != (TARGET_OPT_VL64 ? 8 : 4) - get_attr_length (last))
7127 || mep_insn_dependent_p (insn, last))
7129 switch (get_attr_length (insn))
7134 insn = mep_make_bundle (gen_nop (), insn);
7137 if (TARGET_OPT_VL64)
7138 insn = mep_make_bundle (gen_nop32 (), insn);
7141 if (TARGET_OPT_VL64)
7142 error ("2 byte cop instructions are"
7143 " not allowed in 64-bit VLIW mode");
7145 insn = mep_make_bundle (gen_nop (), insn);
7148 error ("unexpected %d byte cop instruction",
7149 get_attr_length (insn));
7154 insn = mep_make_bundle (last, insn);
7162 /* Try to instantiate INTRINSIC with the operands given in OPERANDS.
7163 Return true on success. This function can fail if the intrinsic
7164 is unavailable or if the operands don't satisfy their predicates. */
7167 mep_emit_intrinsic (int intrinsic, const rtx *operands)
7169 const struct cgen_insn *cgen_insn;
7170 const struct insn_data *idata;
7174 if (!mep_get_intrinsic_insn (intrinsic, &cgen_insn))
7177 idata = &insn_data[cgen_insn->icode];
7178 for (i = 0; i < idata->n_operands; i++)
7180 newop[i] = mep_convert_arg (idata->operand[i].mode, operands[i]);
7181 if (!idata->operand[i].predicate (newop[i], idata->operand[i].mode))
7185 emit_insn (idata->genfun (newop[0], newop[1], newop[2],
7186 newop[3], newop[4], newop[5],
7187 newop[6], newop[7], newop[8]));
7193 /* Apply the given unary intrinsic to OPERANDS[1] and store it on
7194 OPERANDS[0]. Report an error if the instruction could not
7195 be synthesized. OPERANDS[1] is a register_operand. For sign
7196 and zero extensions, it may be smaller than SImode. */
7199 mep_expand_unary_intrinsic (int ATTRIBUTE_UNUSED intrinsic,
7200 rtx * operands ATTRIBUTE_UNUSED)
7206 /* Likewise, but apply a binary operation to OPERANDS[1] and
7207 OPERANDS[2]. OPERANDS[1] is a register_operand, OPERANDS[2]
7208 can be a general_operand.
7210 IMMEDIATE and IMMEDIATE3 are intrinsics that take an immediate
7211 third operand. REG and REG3 take register operands only. */
7214 mep_expand_binary_intrinsic (int ATTRIBUTE_UNUSED immediate,
7215 int ATTRIBUTE_UNUSED immediate3,
7216 int ATTRIBUTE_UNUSED reg,
7217 int ATTRIBUTE_UNUSED reg3,
7218 rtx * operands ATTRIBUTE_UNUSED)
7224 mep_rtx_cost (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total, bool ATTRIBUTE_UNUSED speed_t)
7229 if (INTVAL (x) >= -128 && INTVAL (x) < 127)
7231 else if (INTVAL (x) >= -32768 && INTVAL (x) < 65536)
7238 *total = optimize_size ? COSTS_N_INSNS (0) : COSTS_N_INSNS (1);
7242 *total = (GET_CODE (XEXP (x, 1)) == CONST_INT
7244 : COSTS_N_INSNS (2));
7251 mep_address_cost (rtx addr ATTRIBUTE_UNUSED, bool ATTRIBUTE_UNUSED speed_p)
7257 mep_handle_option (size_t code,
7258 const char *arg ATTRIBUTE_UNUSED,
7259 int value ATTRIBUTE_UNUSED)
7266 target_flags |= MEP_ALL_OPTS;
7270 target_flags &= ~ MEP_ALL_OPTS;
7274 target_flags |= MASK_COP;
7275 target_flags |= MASK_64BIT_CR_REGS;
7279 option_mtiny_specified = 1;
7282 target_flags |= MASK_COP;
7283 target_flags |= MASK_64BIT_CR_REGS;
7284 target_flags |= MASK_VLIW;
7285 target_flags |= MASK_OPT_VL64;
7286 target_flags |= MASK_IVC2;
7288 for (i=0; i<32; i++)
7289 fixed_regs[i+48] = 0;
7290 for (i=0; i<32; i++)
7291 call_used_regs[i+48] = 1;
7293 call_used_regs[i+48] = 0;
7295 #define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
7332 mep_asm_init_sections (void)
7335 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7336 "\t.section .based,\"aw\"");
7339 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7340 "\t.section .sbss,\"aw\"");
7343 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7344 "\t.section .sdata,\"aw\",@progbits");
7347 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
7348 "\t.section .far,\"aw\"");
7351 = get_unnamed_section (SECTION_WRITE | SECTION_BSS, output_section_asm_op,
7352 "\t.section .farbss,\"aw\"");
7355 = get_unnamed_section (0, output_section_asm_op,
7356 "\t.section .frodata,\"a\"");
7359 = get_unnamed_section (0, output_section_asm_op,
7360 "\t.section .srodata,\"a\"");
7363 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7364 "\t.section .vtext,\"axv\"\n\t.vliw");
7367 = get_unnamed_section (SECTION_CODE | SECTION_MEP_VLIW, output_section_asm_op,
7368 "\t.section .vftext,\"axv\"\t.vliw");
7371 = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7372 "\t.section .ftext,\"ax\"\t.core");