1 /* Xstormy16 target functions.
2 Copyright (C) 1997-2013 Free Software Foundation, Inc.
3 Contributed by Red Hat, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "conditions.h"
30 #include "insn-flags.h"
32 #include "insn-attr.h"
35 #include "diagnostic-core.h"
43 #include "target-def.h"
45 #include "langhooks.h"
/* Forward declarations for the static helpers and target-hook workers
   defined later in this file.  */
51 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
52 static void xstormy16_asm_out_constructor (rtx, int);
53 static void xstormy16_asm_out_destructor (rtx, int);
54 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
57 static void xstormy16_init_builtins (void);
58 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
59 static bool xstormy16_rtx_costs (rtx, int, int, int, int *, bool);
60 static int xstormy16_address_cost (rtx, enum machine_mode, addr_space_t, bool);
61 static bool xstormy16_return_in_memory (const_tree, const_tree);
/* Output section used for "below-100" data (see xstormy16_below100_symbol
   below for the address ranges involved).  GTY(()) so the garbage
   collector / PCH machinery keeps it alive.  */
63 static GTY(()) section *bss100_section;
65 /* Compute a (partial) cost for rtx X. Return true if the complete
66 cost has been computed, and false if subexpressions should be
67 scanned. In either case, *TOTAL contains the cost result. */
70 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
71 int opno ATTRIBUTE_UNUSED, int *total,
72 bool speed ATTRIBUTE_UNUSED)
/* Constant costs are tiered by magnitude: 0..15 fit the short immediate
   form (half an insn), 0..255 a single insn, anything else two.  */
77 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
78 *total = COSTS_N_INSNS (1) / 2;
79 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
80 *total = COSTS_N_INSNS (1);
82 *total = COSTS_N_INSNS (2);
89 *total = COSTS_N_INSNS (2);
/* NOTE(review): the 35+6 / 51-6 figures presumably model the cycle
   counts of the hardware multiply/divide sequences -- confirm against
   the chip manual before changing them.  */
93 *total = COSTS_N_INSNS (35 + 6);
96 *total = COSTS_N_INSNS (51 - 6);
/* Worker for TARGET_ADDRESS_COST.  Absolute (CONST_INT) addresses are
   cheapest; base-plus-offset (PLUS) addresses cost more.  MODE, AS and
   SPEED do not affect the result on this target.  */
105 xstormy16_address_cost (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED,
106 addr_space_t as ATTRIBUTE_UNUSED,
107 bool speed ATTRIBUTE_UNUSED)
109 return (CONST_INT_P (x) ? 2
110 : GET_CODE (x) == PLUS ? 7
114 /* Worker function for TARGET_MEMORY_MOVE_COST. */
117 xstormy16_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
/* A flat base cost of 5, plus whatever secondary-reload cost the
   generic helper computes for moving MODE in/out of RCLASS.  */
120 return (5 + memory_move_secondary_cost (mode, rclass, in));
123 /* Branches are handled as follows:
125 1. HImode compare-and-branches. The machine supports these
126 natively, so the appropriate pattern is emitted directly.
128 2. SImode EQ and NE. These are emitted as pairs of HImode
129 compare-and-branches.
131 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
132 of a SImode subtract followed by a branch (not a compare-and-branch),
138 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
145 /* Emit a branch of kind CODE to location LOC. */
148 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
150 rtx condition_rtx, loc_ref, branch, cy_clobber;
152 enum machine_mode mode;
154 mode = GET_MODE (op0);
155 gcc_assert (mode == HImode || mode == SImode);
/* SImode GT/LE/GTU/LEU: decompose into an ordered-comparison branch
   followed by an equality branch (strategy 4 in the comment above).  */
158 && (code == GT || code == LE || code == GTU || code == LEU))
160 int unsigned_p = (code == GTU || code == LEU);
161 int gt_p = (code == GT || code == GTU);
165 lab = gen_label_rtx ();
166 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
167 /* This should be generated as a comparison against the temporary
168 created by the previous insn, but reload can't handle that. */
169 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
/* SImode EQ/NE against a nonzero operand: compare word by word with
   HImode compare-and-branches (strategy 2 above).  */
174 else if (mode == SImode
175 && (code == NE || code == EQ)
176 && op1 != const0_rtx)
178 rtx op0_word, op1_word;
180 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
184 lab = gen_label_rtx ();
/* All words but the last branch on inequality; the final word uses
   the caller's code so the overall EQ/NE semantics come out right.  */
186 for (i = 0; i < num_words - 1; i++)
188 op0_word = simplify_gen_subreg (word_mode, op0, mode,
190 op1_word = simplify_gen_subreg (word_mode, op1, mode,
192 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
194 op0_word = simplify_gen_subreg (word_mode, op0, mode,
196 op1_word = simplify_gen_subreg (word_mode, op1, mode,
198 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
205 /* We can't allow reload to try to generate any reload after a branch,
206 so when some register must match we must make the temporary ourselves. */
210 tmp = gen_reg_rtx (mode);
211 emit_move_insn (tmp, op0);
/* Build the conditional-jump PARALLEL by hand: the jump itself, plus
   whatever this branch flavor clobbers (carry flag and/or op0).  */
215 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
216 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
217 branch = gen_rtx_SET (VOIDmode, pc_rtx,
218 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
221 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
224 vec = gen_rtvec (2, branch, cy_clobber);
225 else if (code == NE || code == EQ)
226 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
/* SImode ordered compare: an explicit subtract (or clobber) of op0
   accompanies the branch (strategy 3 above).  */
231 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
233 sub = gen_rtx_CLOBBER (SImode, op0);
235 vec = gen_rtvec (3, branch, sub, cy_clobber);
238 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
241 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
242 the arithmetic operation. Most of the work is done by
243 xstormy16_expand_arith. */
246 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
249 rtx op0 = XEXP (comparison, 0);
250 rtx op1 = XEXP (comparison, 1);
/* Emit the compare as an arithmetic sequence, then retarget the last
   insn of that sequence into the conditional branch we actually want.  */
255 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
259 gcc_assert (INSN_P (seq));
/* Walk to the final insn of the emitted sequence.  */
262 while (NEXT_INSN (last_insn) != NULL_RTX)
263 last_insn = NEXT_INSN (last_insn);
/* Patch its comparison code and branch target in place.  */
265 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
266 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
267 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
272 /* Return the string to output a conditional branch to LABEL, which is
273 the operand number of the label.
275 OP is the conditional expression, or NULL for branch-always.
277 REVERSED is nonzero if we should reverse the sense of the comparison.
   The returned pointer refers to a static buffer, so the result must be
   consumed before the next call.  */
282 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
284 static char string[64];
/* A long branch is emitted as a short branch around a jmpf, so its
   length attribute differs; detect that from INSN's length.  */
285 int need_longbranch = (op != NULL_RTX
286 ? get_attr_length (insn) == 8
287 : get_attr_length (insn) == 4);
/* Long branches invert the condition (branch around the far jump).  */
288 int really_reversed = reversed ^ need_longbranch;
291 const char *operands;
300 sprintf (string, "%s %s", ccode, label);
304 code = GET_CODE (op);
/* The hardware compares register against operand; if the first operand
   is not a register, swap so it is.  */
306 if (! REG_P (XEXP (op, 0)))
308 code = swap_condition (code);
314 /* Work out which way this really branches. */
316 code = reverse_condition (code);
/* Map the RTL comparison code to the machine's condition suffix.  */
320 case EQ: ccode = "z"; break;
321 case NE: ccode = "nz"; break;
322 case GE: ccode = "ge"; break;
323 case LT: ccode = "lt"; break;
324 case GT: ccode = "gt"; break;
325 case LE: ccode = "le"; break;
326 case GEU: ccode = "nc"; break;
327 case LTU: ccode = "c"; break;
328 case GTU: ccode = "hi"; break;
329 case LEU: ccode = "ls"; break;
/* Long form: conditionally skip over a far jump to the label.  */
336 templ = "b%s %s,.+8 | jmpf %s";
339 sprintf (string, templ, ccode, operands, label);
344 /* Return the string to output a conditional branch to LABEL, which is
345 the operand number of the label, but suitable for the tail of a
348 OP is the conditional expression (OP is never NULL_RTX).
350 REVERSED is nonzero if we should reverse the sense of the comparison.
   As with the HImode variant, the result lives in a static buffer.  */
355 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
357 static char string[64];
358 int need_longbranch = get_attr_length (insn) >= 8;
/* Long branches invert the condition and jump around a far jump.  */
359 int really_reversed = reversed ^ need_longbranch;
365 code = GET_CODE (op);
367 /* Work out which way this really branches. */
369 code = reverse_condition (code);
/* Only the codes reachable for SImode tails are handled here.  */
373 case EQ: ccode = "z"; break;
374 case NE: ccode = "nz"; break;
375 case GE: ccode = "ge"; break;
376 case LT: ccode = "lt"; break;
377 case GEU: ccode = "nc"; break;
378 case LTU: ccode = "c"; break;
380 /* The missing codes above should never be generated. */
/* EQ/NE tail: OR the two halves of the register pair together to set
   the zero flag for the whole 32-bit value.  */
391 gcc_assert (REG_P (XEXP (op, 0)));
393 regnum = REGNO (XEXP (op, 0));
394 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordered tail: finish the 32-bit subtract with a subtract-with-carry
   of the high words, leaving the flags describing the full result.  */
398 case GE: case LT: case GEU: case LTU:
399 strcpy (prevop, "sbc %2,%3");
407 templ = "%s | b%s .+6 | jmpf %s";
409 templ = "%s | b%s %s";
410 sprintf (string, templ, prevop, ccode, label);
415 /* Many machines have some registers that cannot be copied directly to or from
416 memory or even from other types of registers. An example is the `MQ'
417 register, which on most machines, can only be copied to or from general
418 registers, but not memory. Some machines allow copying all registers to and
419 from memory, but require a scratch register for stores to some memory
420 locations (e.g., those with symbolic address on the RT, and those with
421 certain symbolic address on the SPARC when compiling PIC). In some cases,
422 both an intermediate and a scratch register are required.
424 You should define these macros to indicate to the reload phase that it may
425 need to allocate at least one register for a reload in addition to the
426 register to contain the data. Specifically, if copying X to a register
427 RCLASS in MODE requires an intermediate register, you should define
428 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
429 whose registers can be used as intermediate registers or scratch registers.
431 If copying a register RCLASS in MODE to X requires an intermediate or scratch
432 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
433 largest register class required. If the requirements for input and output
434 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
435 instead of defining both macros identically.
437 The values returned by these macros are often `GENERAL_REGS'. Return
438 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
439 to or from a register of RCLASS in MODE without requiring a scratch register.
440 Do not define this macro if it would always return `NO_REGS'.
442 If a scratch register is required (either with or without an intermediate
443 register), you should define patterns for `reload_inM' or `reload_outM', as
444 required.. These patterns, which will normally be implemented with a
445 `define_expand', should be similar to the `movM' patterns, except that
446 operand 2 is the scratch register.
448 Define constraints for the reload register and scratch register that contain
449 a single register class. If the original reload register (whose class is
450 RCLASS) can meet the constraint given in the pattern, the value returned by
451 these macros is used for the class of the scratch register. Otherwise, two
452 additional reload registers are required. Their classes are obtained from
453 the constraints in the insn pattern.
455 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
456 either be in a hard register or in memory. Use `true_regnum' to find out;
457 it will return -1 if the pseudo is in memory and the hard register number if
460 These macros should not be used in the case where a particular class of
461 registers can only be copied to memory and not to another class of
462 registers. In that case, secondary reload registers are not needed and
463 would not be helpful. Instead, a stack location must be used to perform the
464 copy and the `movM' pattern should use memory as an intermediate storage.
465 This case often occurs between floating-point and general registers. */
/* Worker for the secondary-reload machinery described in the long
   comment above.  Returns the class of scratch register (if any) needed
   to move X into a register of RCLASS.  */
468 xstormy16_secondary_reload_class (enum reg_class rclass,
469 enum machine_mode mode ATTRIBUTE_UNUSED,
472 /* This chip has the interesting property that only the first eight
473 registers can be moved to/from memory. */
/* X counts as "memory" here if it is a MEM, or a (subreg of a) pseudo
   that may end up in memory (true_regnum is -1 or still a pseudo).  */
475 || ((GET_CODE (x) == SUBREG || REG_P (x))
476 && (true_regnum (x) == -1
477 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
478 && ! reg_class_subset_p (rclass, EIGHT_REGS))
484 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
485 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS.
   Narrows GENERAL_REGS for memory operands, since only the first eight
   registers can be moved to/from memory on this chip (see
   xstormy16_secondary_reload_class above).  */
488 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
490 if (rclass == GENERAL_REGS && MEM_P (x))
496 /* Predicate for symbols and addresses that reflect special 8-bit
   addressing: symbols explicitly marked below-100, and absolute
   addresses in the two short-addressable windows.  */
500 xstormy16_below100_symbol (rtx x,
501 enum machine_mode mode ATTRIBUTE_UNUSED)
/* Strip a CONST wrapper and an added constant offset, if present.  */
503 if (GET_CODE (x) == CONST)
505 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
508 if (GET_CODE (x) == SYMBOL_REF)
509 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
/* Absolute addresses qualify when they fall in 0x0000-0x00ff or
   0x7f00-0x7fff.  */
513 HOST_WIDE_INT i = INTVAL (x);
515 if ((i >= 0x0000 && i <= 0x00ff)
516 || (i >= 0x7f00 && i <= 0x7fff))
522 /* Likewise, but only for non-volatile MEMs, for patterns where the
523 MEM will get split into smaller sized accesses.
   Splitting a volatile access would change the number/width of the
   memory operations, so volatile MEMs are rejected outright.  */
526 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
528 if (MEM_P (x) && MEM_VOLATILE_P (x))
530 return xstormy16_below100_operand (x, mode);
533 /* Expand an 8-bit IOR. This either detects the one case we can
534 actually do, or uses a 16-bit IOR.
   OPERANDS is the movqi-style operand array: operands[0] is the
   destination; the source input and mask are read out below.  */
537 xstormy16_expand_iorqi3 (rtx *operands)
539 rtx in, out, outsub, val;
/* Fast path: setting a single bit can be done directly in QImode,
   provided both operands are below-100 memory or registers.  */
545 if (xstormy16_onebit_set_operand (val, QImode))
547 if (!xstormy16_below100_or_register (in, QImode))
548 in = copy_to_mode_reg (QImode, in);
549 if (!xstormy16_below100_or_register (out, QImode))
550 out = gen_reg_rtx (QImode);
551 emit_insn (gen_iorqi3_internal (out, in, val));
552 if (out != operands[0])
553 emit_move_insn (operands[0], out);
/* General case: force operands into registers and do the IOR in
   HImode via paradoxical subregs of the QImode values.  */
558 in = copy_to_mode_reg (QImode, in);
560 if (! REG_P (val) && ! CONST_INT_P (val))
561 val = copy_to_mode_reg (QImode, val);
564 out = gen_reg_rtx (QImode);
566 in = simplify_gen_subreg (HImode, in, QImode, 0);
567 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
569 if (! CONST_INT_P (val))
570 val = simplify_gen_subreg (HImode, val, QImode, 0);
572 emit_insn (gen_iorhi3 (outsub, in, val));
/* Copy back if we had to operate on a fresh register.  */
574 if (out != operands[0])
575 emit_move_insn (operands[0], out);
578 /* Expand an 8-bit AND. This either detects the one case we can
579 actually do, or uses a 16-bit AND.
   Mirror image of xstormy16_expand_iorqi3: the directly-supported case
   is clearing a single bit rather than setting one.  */
582 xstormy16_expand_andqi3 (rtx *operands)
584 rtx in, out, outsub, val;
/* Fast path: clearing a single bit can be done directly in QImode.  */
590 if (xstormy16_onebit_clr_operand (val, QImode))
592 if (!xstormy16_below100_or_register (in, QImode))
593 in = copy_to_mode_reg (QImode, in);
594 if (!xstormy16_below100_or_register (out, QImode))
595 out = gen_reg_rtx (QImode);
596 emit_insn (gen_andqi3_internal (out, in, val));
597 if (out != operands[0])
598 emit_move_insn (operands[0], out);
/* General case: widen to HImode and use the 16-bit AND.  */
603 in = copy_to_mode_reg (QImode, in);
605 if (! REG_P (val) && ! CONST_INT_P (val))
606 val = copy_to_mode_reg (QImode, val);
609 out = gen_reg_rtx (QImode);
611 in = simplify_gen_subreg (HImode, in, QImode, 0);
612 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
614 if (! CONST_INT_P (val))
615 val = simplify_gen_subreg (HImode, val, QImode, 0);
617 emit_insn (gen_andhi3 (outsub, in, val));
/* Copy back if we had to operate on a fresh register.  */
619 if (out != operands[0])
620 emit_move_insn (operands[0], out);
/* True if X (a CONST_INT) plus OFFSET is a valid signed 12-bit
   base-register displacement, i.e. lies in [-2048, 2047].  */
623 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
625 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* True if X (a CONST_INT) plus OFFSET is a valid absolute address:
   non-negative, below 0x8000, and within one of the two directly
   addressable windows (< 0x100 or >= 0x7F00).  */
627 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
629 && INTVAL (X) + (OFFSET) >= 0 \
630 && INTVAL (X) + (OFFSET) < 0x8000 \
631 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Worker for TARGET_LEGITIMATE_ADDRESS_P.  Accepts absolute constants
   in the legal windows, base+small-offset, certain auto-modify forms,
   plain base registers, and below-100 symbols.  STRICT restricts base
   registers to hard registers.  */
634 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
637 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
640 if (GET_CODE (x) == PLUS
641 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
644 /* PR 31232: Do not allow INT+INT as an address. */
/* Auto-modify forms: PRE_MODIFY only with a constant adjustment.  */
649 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
650 || GET_CODE (x) == POST_INC
651 || GET_CODE (x) == PRE_DEC)
/* A bare base register; under STRICT it must be a hard register.  */
655 && REGNO_OK_FOR_BASE_P (REGNO (x))
656 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
659 if (xstormy16_below100_symbol (x, mode))
665 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
667 On this chip, this is true if the address is valid with an offset
668 of 0 but not of 6, because in that case it cannot be used as an
669 address for DImode or DFmode, or if the address is a post-increment
670 or pre-decrement address. */
673 xstormy16_mode_dependent_address_p (const_rtx x,
674 addr_space_t as ATTRIBUTE_UNUSED)
/* Absolute address: valid at offset 0 but not at offset 6 means the
   last word of an 8-byte access would fall outside the legal window.  */
676 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
677 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
/* Likewise for base+displacement addresses.  */
680 if (GET_CODE (x) == PLUS
681 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
682 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
685 /* Auto-increment addresses are now treated generically in recog.c. */
/* Predicate: a memory operand whose address is NOT of the form
   base+offset (i.e. can be encoded in the short addressing forms).  */
690 short_memory_operand (rtx x, enum machine_mode mode)
692 if (! memory_operand (x, mode))
694 return (GET_CODE (XEXP (x, 0)) != PLUS);
697 /* Splitter for the 'move' patterns, for modes not directly implemented
698 by hardware. Emit insns to copy a value of mode MODE from SRC to
701 This function is only called when reload_completed. */
704 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
706 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
707 int direction, end, i;
708 int src_modifies = 0;
709 int dest_modifies = 0;
710 int src_volatile = 0;
711 int dest_volatile = 0;
713 rtx auto_inc_reg_rtx = NULL_RTX;
715 /* Check initial conditions. */
716 gcc_assert (reload_completed
717 && mode != QImode && mode != HImode
718 && nonimmediate_operand (dest, mode)
719 && general_operand (src, mode));
721 /* This case is not supported below, and shouldn't be generated. */
722 gcc_assert (! MEM_P (dest) || ! MEM_P (src));
724 /* This case is very very bad after reload, so trap it now. */
725 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
727 /* The general idea is to copy by words, offsetting the source and
728 destination. Normally the least-significant word will be copied
729 first, but for pre-dec operations it's better to copy the
730 most-significant word first. Only one operand can be a pre-dec
733 It's also possible that the copy overlaps so that the direction
/* Record side effects / volatility of a MEM destination, remembering
   the auto-incremented register so a REG_INC note can be added later;
   the volatile flag is cleared on a copy so the per-word accesses can
   re-assert it explicitly below.  */
739 mem_operand = XEXP (dest, 0);
740 dest_modifies = side_effects_p (mem_operand);
741 if (auto_inc_p (mem_operand))
742 auto_inc_reg_rtx = XEXP (mem_operand, 0);
743 dest_volatile = MEM_VOLATILE_P (dest);
746 dest = copy_rtx (dest);
747 MEM_VOLATILE_P (dest) = 0;
/* Same bookkeeping for a MEM source.  */
750 else if (MEM_P (src))
752 mem_operand = XEXP (src, 0);
753 src_modifies = side_effects_p (mem_operand);
754 if (auto_inc_p (mem_operand))
755 auto_inc_reg_rtx = XEXP (mem_operand, 0);
756 src_volatile = MEM_VOLATILE_P (src);
759 src = copy_rtx (src);
760 MEM_VOLATILE_P (src) = 0;
764 mem_operand = NULL_RTX;
/* Choose the copy direction.  Register-to-register copies go
   high-to-low when the destination overlaps above the source ...  */
766 if (mem_operand == NULL_RTX)
770 && reg_overlap_mentioned_p (dest, src)
771 && REGNO (dest) > REGNO (src))
/* ... pre-dec addressing forces most-significant-word-first ...  */
774 else if (GET_CODE (mem_operand) == PRE_DEC
775 || (GET_CODE (mem_operand) == PLUS
776 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
/* ... and a load whose destination overlaps its own address register
   must be ordered so the address is consumed before it is clobbered.  */
778 else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
782 gcc_assert (REG_P (dest));
783 regno = REGNO (dest);
785 gcc_assert (refers_to_regno_p (regno, regno + num_words,
788 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
790 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
794 /* This means something like
795 (set (reg:DI r0) (mem:DI (reg:HI r1)))
796 which we'd need to support by doing the set of the second word
/* Emit one word-mode move per word, in the chosen direction.  */
801 end = direction < 0 ? -1 : num_words;
802 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
804 rtx w_src, w_dest, insn;
807 w_src = gen_rtx_MEM (word_mode, mem_operand);
809 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
811 MEM_VOLATILE_P (w_src) = 1;
813 w_dest = gen_rtx_MEM (word_mode, mem_operand);
815 w_dest = simplify_gen_subreg (word_mode, dest, mode,
818 MEM_VOLATILE_P (w_dest) = 1;
820 /* The simplify_subreg calls must always be able to simplify. */
821 gcc_assert (GET_CODE (w_src) != SUBREG
822 && GET_CODE (w_dest) != SUBREG);
824 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
/* Preserve the auto-increment information for later passes.  */
825 if (auto_inc_reg_rtx)
826 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
832 /* Expander for the 'move' patterns. Emit insns to copy a value of
833 mode MODE from SRC to DEST. */
836 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
/* A PRE_MODIFY destination address is lowered here: perform the
   address arithmetic explicitly (clobbering carry), then use the plain
   register as the address.  */
838 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
840 rtx pmv = XEXP (dest, 0);
841 rtx dest_reg = XEXP (pmv, 0);
842 rtx dest_mod = XEXP (pmv, 1);
843 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
844 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
846 dest = gen_rtx_MEM (mode, dest_reg);
847 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
/* Likewise for a PRE_MODIFY source address.  */
849 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
851 rtx pmv = XEXP (src, 0);
852 rtx src_reg = XEXP (pmv, 0);
853 rtx src_mod = XEXP (pmv, 1);
854 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
855 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
857 src = gen_rtx_MEM (mode, src_reg);
858 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
861 /* There are only limited immediate-to-memory move instructions. */
862 if (! reload_in_progress
863 && ! reload_completed
865 && (! CONST_INT_P (XEXP (dest, 0))
866 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
867 && ! xstormy16_below100_operand (dest, mode)
869 && GET_CODE (src) != SUBREG)
870 src = copy_to_mode_reg (mode, src);
872 /* Don't emit something we would immediately split. */
874 && mode != HImode && mode != QImode)
876 xstormy16_split_move (mode, dest, src);
/* Simple case: a single SET suffices.  */
880 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
885 The stack is laid out as follows:
889 Register save area (up to 4 words)
890 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
892 AP-> Return address (two words)
893 9th procedure parameter word
894 10th procedure parameter word
896 last procedure parameter word
898 The frame pointer location is tuned to make it most likely that all
899 parameters and local variables can be accessed using a load-indexed
902 /* A structure to describe the layout. */
903 struct xstormy16_stack_layout
905 /* Size of the topmost three items on the stack. */
907 int register_save_size;
908 int stdarg_save_size;
909 /* Sum of the above items. */
911 /* Various offsets. */
912 int first_local_minus_ap;
917 /* Does REGNO need to be saved?
   A register needs saving if it is a live call-saved register, or --
   in an interrupt function (IFUN) -- a call-used, non-fixed register
   other than the carry flag that is live or may be clobbered by a
   callee (non-leaf function).  */
918 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
919 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
920 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
921 && (REGNUM != CARRY_REGNUM) \
922 && (df_regs_ever_live_p (REGNUM) || ! crtl->is_leaf)))
924 /* Compute the stack layout.
   Fills in a struct xstormy16_stack_layout from the current function's
   frame size, register-save needs and stdarg requirements; see the
   stack diagram in the comment above.  */
926 struct xstormy16_stack_layout
927 xstormy16_compute_stack_layout (void)
929 struct xstormy16_stack_layout layout;
931 const int ifun = xstormy16_interrupt_function_p ();
933 layout.locals_size = get_frame_size ();
/* One word of save area per register that needs saving.  */
935 layout.register_save_size = 0;
936 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
937 if (REG_NEEDS_SAVE (regno, ifun))
938 layout.register_save_size += UNITS_PER_WORD;
/* stdarg functions also dump all argument registers to the stack.  */
941 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
943 layout.stdarg_save_size = 0;
945 layout.frame_size = (layout.locals_size
946 + layout.register_save_size
947 + layout.stdarg_save_size);
/* Place the frame pointer so that, if possible, all locals and
   parameters are reachable within the addressing range (2048).  */
949 if (crtl->args.size <= 2048 && crtl->args.size != -1)
951 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
952 + crtl->args.size <= 2048)
953 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
955 layout.fp_minus_ap = 2048 - crtl->args.size;
958 layout.fp_minus_ap = (layout.stdarg_save_size
959 + layout.register_save_size
960 - INCOMING_FRAME_SP_OFFSET);
961 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
962 - layout.fp_minus_ap);
963 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
967 /* Worker function for TARGET_CAN_ELIMINATE.
   Eliminating the arg pointer directly to the stack pointer is only
   possible when no frame pointer is needed.  */
970 xstormy16_can_eliminate (const int from, const int to)
972 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
973 ? ! frame_pointer_needed
977 /* Determine how all the special registers get eliminated.
   Returns the offset between FROM and TO, derived from the computed
   stack layout.  */
980 xstormy16_initial_elimination_offset (int from, int to)
982 struct xstormy16_stack_layout layout;
985 layout = xstormy16_compute_stack_layout ();
987 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
988 result = layout.sp_minus_fp - layout.locals_size;
989 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
990 result = - layout.locals_size;
991 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
992 result = - layout.fp_minus_ap;
993 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
994 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
/* Emit DEST = SRC0 + SRC1 as the post-reload addhi3 pattern, which
   explicitly clobbers the carry flag.  Returns the emitted insn.  */
1002 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
1004 rtx set, clobber, insn;
1006 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
1007 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1008 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
1012 /* Called after register allocation to add any instructions needed for
1013 the prologue. Using a prologue insn is favored compared to putting
1014 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1015 since it allows the scheduler to intermix instructions with the
1016 saves of the caller saved registers. In some cases, it might be
1017 necessary to emit a barrier instruction as the last insn to prevent
1020 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1021 so that the debug info generation code can handle them properly. */
1024 xstormy16_expand_prologue (void)
1026 struct xstormy16_stack_layout layout;
1030 const int ifun = xstormy16_interrupt_function_p ();
/* Pushes on this target are post-increment stores through SP.  */
1032 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1033 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1035 layout = xstormy16_compute_stack_layout ();
1037 if (layout.locals_size >= 32768)
1038 error ("local variable memory requirements exceed capacity");
1040 if (flag_stack_usage_info)
1041 current_function_static_stack_size = layout.frame_size;
1043 /* Save the argument registers if necessary. */
1044 if (layout.stdarg_save_size)
1045 for (regno = FIRST_ARGUMENT_REGISTER;
1046 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1050 rtx reg = gen_rtx_REG (HImode, regno);
1052 insn = emit_move_insn (mem_push_rtx, reg);
1053 RTX_FRAME_RELATED_P (insn) = 1;
/* Attach a REG_FRAME_RELATED_EXPR note spelling out the store and
   the SP adjustment separately, so the DWARF CFI output describes
   the push correctly.  */
1055 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1057 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1058 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1060 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1061 plus_constant (Pmode,
1063 GET_MODE_SIZE (Pmode)));
1064 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf)
1065 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1066 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1069 /* Push each of the registers to save. */
1070 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1071 if (REG_NEEDS_SAVE (regno, ifun))
1074 rtx reg = gen_rtx_REG (HImode, regno);
1076 insn = emit_move_insn (mem_push_rtx, reg);
1077 RTX_FRAME_RELATED_P (insn) = 1;
/* Same DWARF bookkeeping as for the argument-register pushes.  */
1079 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1081 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1082 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1084 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1085 plus_constant (Pmode, \
1087 GET_MODE_SIZE (Pmode)));
1088 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1089 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1090 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1093 /* It's just possible that the SP here might be what we need for
1095 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1097 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1098 RTX_FRAME_RELATED_P (insn) = 1;
1101 /* Allocate space for local variables. */
1102 if (layout.locals_size)
1104 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1105 GEN_INT (layout.locals_size));
1106 RTX_FRAME_RELATED_P (insn) = 1;
1109 /* Set up the frame pointer, if required. */
1110 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1112 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1113 RTX_FRAME_RELATED_P (insn) = 1;
1115 if (layout.sp_minus_fp)
1117 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1118 hard_frame_pointer_rtx,
1119 GEN_INT (- layout.sp_minus_fp));
1120 RTX_FRAME_RELATED_P (insn) = 1;
1125 /* Do we need an epilogue at all?
   True only after reload, when the frame is empty and this is not an
   interrupt function -- i.e. a bare return insn suffices.  */
1128 direct_return (void)
1130 return (reload_completed
1131 && xstormy16_compute_stack_layout ().frame_size == 0
1132 && ! xstormy16_interrupt_function_p ());
1135 /* Called after register allocation to add any instructions needed for
1136 the epilogue. Using an epilogue insn is favored compared to putting
1137 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1138 since it allows the scheduler to intermix instructions with the
1139 saves of the caller saved registers. In some cases, it might be
1140 necessary to emit a barrier instruction as the last insn to prevent
1144 xstormy16_expand_epilogue (void)
1146 struct xstormy16_stack_layout layout;
1149 const int ifun = xstormy16_interrupt_function_p ();
/* Pops are pre-decrement loads through SP (mirror of the prologue's
   post-increment pushes).  */
1151 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1152 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1154 layout = xstormy16_compute_stack_layout ();
1156 /* Pop the stack for the locals. */
1157 if (layout.locals_size)
/* When FP points at the bottom of the locals, restoring SP from FP
   is cheaper than the explicit subtraction.  */
1159 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1160 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1162 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1163 GEN_INT (- layout.locals_size));
1166 /* Restore any call-saved registers.
   Iterate in reverse order of the prologue's saves, as the values are
   popped off the stack.  */
1167 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1168 if (REG_NEEDS_SAVE (regno, ifun))
1169 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1171 /* Pop the stack for the stdarg save area. */
1172 if (layout.stdarg_save_size)
1173 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1174 GEN_INT (- layout.stdarg_save_size));
/* Interrupt functions return with a dedicated pattern.  */
1178 emit_jump_insn (gen_return_internal_interrupt ());
1180 emit_jump_insn (gen_return_internal ());
/* Worker for EPILOGUE_USES: after reload, call-used registers that the
   epilogue restores (interrupt functions save them too) must be
   considered live at function exit.  */
1184 xstormy16_epilogue_uses (int regno)
1186 if (reload_completed && call_used_regs[regno])
1188 const int ifun = xstormy16_interrupt_function_p ();
1189 return REG_NEEDS_SAVE (regno, ifun);
/* Profiling (-p/-pg) is not implemented for this target; report it as
   unsupported rather than silently miscompiling.  */
1195 xstormy16_function_profiler (void)
1197 sorry ("function_profiler support");
1200 /* Update CUM to advance past an argument in the argument list. The
1201 values MODE, TYPE and NAMED describe that argument. Once this is
1202 done, the variable CUM is suitable for analyzing the *following*
1203 argument with `TARGET_FUNCTION_ARG', etc.
1205 This function need not do anything if the argument in question was
1206 passed on the stack. The compiler knows how to track the amount of
1207 stack space used for arguments without any special help. However,
1208 it makes life easier for xstormy16_build_va_list if it does update
1212 xstormy16_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1213 const_tree type, bool named ATTRIBUTE_UNUSED)
1215 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1217 /* If an argument would otherwise be passed partially in registers,
1218 and partially on the stack, the whole of it is passed on the
1220 if (*cum < NUM_ARGUMENT_REGISTERS
1221 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1222 *cum = NUM_ARGUMENT_REGISTERS;
/* Advance by the argument's size in words.  */
1224 *cum += XSTORMY16_WORD_SIZE (type, mode);
/* Worker for TARGET_FUNCTION_ARG.  Returns the register in which to
   pass this argument, or a non-register value when it goes on the
   stack (must-pass-in-stack types, or no room left in the argument
   registers).  */
1228 xstormy16_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1229 const_tree type, bool named ATTRIBUTE_UNUSED)
1231 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
/* VOIDmode marks the end-of-arguments sentinel.  */
1233 if (mode == VOIDmode)
1235 if (targetm.calls.must_pass_in_stack (mode, type)
1236 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1238 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1241 /* Build the va_list type.
1243 For this chip, va_list is a record containing a counter and a pointer.
1244 The counter is of type 'int' and indicates how many bytes
1245 have been used to date. The pointer indicates the stack position
1246 for arguments that have not been passed in registers.
1247 To keep the layout nice, the pointer is first in the structure. */
1250 xstormy16_build_builtin_va_list (void)
1252 tree f_1, f_2, record, type_decl;
/* Create the RECORD_TYPE and its debug-visible name __va_list_tag.  */
1254 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1255 type_decl = build_decl (BUILTINS_LOCATION,
1256 TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* First field: 'base', the stack pointer for unnamed arguments.
   NOTE(review): its field type is on an elided line — presumably a
   pointer type; confirm against the full source.  */
1258 f_1 = build_decl (BUILTINS_LOCATION,
1259 FIELD_DECL, get_identifier ("base"),
/* Second field: 'count', bytes of arguments consumed so far.  */
1261 f_2 = build_decl (BUILTINS_LOCATION,
1262 FIELD_DECL, get_identifier ("count"),
1263 unsigned_type_node);
1265 DECL_FIELD_CONTEXT (f_1) = record;
1266 DECL_FIELD_CONTEXT (f_2) = record;
/* Wire up the record: name, field chain, then compute its layout.  */
1268 TYPE_STUB_DECL (record) = type_decl;
1269 TYPE_NAME (record) = type_decl;
1270 TYPE_FIELDS (record) = f_1;
1271 DECL_CHAIN (f_1) = f_2;
1273 layout_type (record);
1278 /* Implement the stdarg/varargs va_start macro. STDARG_P is nonzero if this
1279 is stdarg.h instead of varargs.h. VALIST is the tree of the va_list
1280 variable to initialize. NEXTARG is the machine independent notion of the
1281 'next' argument after the variable arguments. */
1284 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1286 tree f_base, f_count;
/* Interrupt handlers have a different frame layout, so varargs is
   unsupported there; diagnose rather than emit wrong code.  */
1290 if (xstormy16_interrupt_function_p ())
1291 error ("cannot use va_start in interrupt function");
/* Locate the 'base' and 'count' fields of the va_list record (in the
   order xstormy16_build_builtin_va_list chained them).  */
1293 f_base = TYPE_FIELDS (va_list_type_node);
1294 f_count = DECL_CHAIN (f_base);
1296 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1297 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* base = virtual incoming-args pointer, backed off by the incoming
   frame/SP offset.  */
1300 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1301 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1302 u = fold_convert (TREE_TYPE (count), u);
1303 t = fold_build_pointer_plus (t, u);
1304 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1305 TREE_SIDE_EFFECTS (t) = 1;
1306 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = bytes already consumed by the named register arguments.  */
1308 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1309 build_int_cst (NULL_TREE,
1310 crtl->args.info * UNITS_PER_WORD));
1311 TREE_SIDE_EFFECTS (t) = 1;
1312 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1315 /* Implement the stdarg/varargs va_arg macro. VALIST is the variable
1316 of type va_list as a tree, TYPE is the type passed to va_arg.
1317 Note: This algorithm is documented in stormy-abi. */
1320 xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1321 gimple_seq *post_p ATTRIBUTE_UNUSED)
1323 tree f_base, f_count;
1325 tree count_tmp, addr, t;
1326 tree lab_gotaddr, lab_fromstack;
1327 int size, size_of_reg_args, must_stack;
/* Locate the va_list fields (base pointer first, byte count second).  */
1330 f_base = TYPE_FIELDS (va_list_type_node);
1331 f_count = DECL_CHAIN (f_base);
1333 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1334 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
1337 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
/* Word-round the argument's size, as the caller did when pushing it.  */
1338 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1339 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1341 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1343 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1344 lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
1345 lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
1346 addr = create_tmp_var (ptr_type_node, NULL);
/* If count + size would overflow the register-save area, the argument
   lives on the stack: branch to lab_fromstack.  */
1352 t = fold_convert (TREE_TYPE (count), size_tree);
1353 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1354 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1355 t = build2 (GT_EXPR, boolean_type_node, t, r);
1356 t = build3 (COND_EXPR, void_type_node, t,
1357 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1359 gimplify_and_add (t, pre_p);
/* Register case: the argument sits at base + count.  */
1361 t = fold_build_pointer_plus (base, count_tmp);
1362 gimplify_assign (addr, t, pre_p);
1364 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1365 gimplify_and_add (t, pre_p);
1367 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1368 gimplify_and_add (t, pre_p);
1371 /* Arguments larger than a word might need to skip over some
1372 registers, since arguments are either passed entirely in
1373 registers or entirely on the stack. */
1374 size = PUSH_ROUNDING (int_size_in_bytes (type));
1375 if (size > 2 || size < 0 || must_stack)
/* Force count_tmp past the whole register-save area (if it is not
   already past it) so the stack address below is computed correctly.  */
1379 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1380 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
1382 t = fold_convert (TREE_TYPE (count), r);
1383 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1384 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1385 gimplify_and_add (t, pre_p);
/* Stack case: addr = base - (count_tmp - reg area - SP offset + size),
   i.e. walk backwards from 'base' over previously consumed stack args.  */
1388 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1389 + INCOMING_FRAME_SP_OFFSET);
1390 t = fold_convert (TREE_TYPE (count), t);
1391 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1392 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1393 fold_convert (TREE_TYPE (count), size_tree));
1394 t = fold_convert (TREE_TYPE (t), fold (t));
1395 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1396 t = fold_build_pointer_plus (base, t);
1397 gimplify_assign (addr, t, pre_p);
1399 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1400 gimplify_and_add (t, pre_p);
/* Commit the advanced byte count back into the va_list.  */
1402 t = fold_convert (TREE_TYPE (count), size_tree);
1403 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1404 gimplify_assign (count, t, pre_p);
1406 addr = fold_convert (build_pointer_type (type), addr);
1407 return build_va_arg_indirect_ref (addr);
1410 /* Worker function for TARGET_TRAMPOLINE_INIT. */
1413 xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
1415 rtx temp = gen_reg_rtx (HImode);
1416 rtx reg_fnaddr = gen_reg_rtx (HImode);
1417 rtx reg_addr, reg_addr_mem;
/* Walk a register pointer through the trampoline block, storing one
   HImode word at a time and bumping the pointer by 2 after each.  */
1419 reg_addr = copy_to_reg (XEXP (m_tramp, 0));
1420 reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);
/* Word 0: instruction that loads the static chain register.
   NOTE(review): 0x3130 is presumably the opcode encoding with the
   register number OR'd into the low bits — confirm against the ISA.  */
1422 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1423 emit_move_insn (reg_addr_mem, temp);
1424 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1425 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
/* Word 1: the static chain value itself (immediate of the load above).  */
1427 emit_move_insn (temp, static_chain);
1428 emit_move_insn (reg_addr_mem, temp);
1429 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1430 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
/* Word 2: jump opcode (0x0200) combined with the low byte of the
   target function's address.  */
1432 emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
1433 emit_move_insn (temp, reg_fnaddr);
1434 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1435 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1436 emit_move_insn (reg_addr_mem, temp);
1437 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1438 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
/* Word 3: the remaining high bits of the function address.  */
1440 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1441 emit_move_insn (reg_addr_mem, reg_fnaddr);
1444 /* Worker function for TARGET_FUNCTION_VALUE. */
1447 xstormy16_function_value (const_tree valtype,
1448 const_tree func ATTRIBUTE_UNUSED,
1449 bool outgoing ATTRIBUTE_UNUSED)
1451 enum machine_mode mode;
1452 mode = TYPE_MODE (valtype);
/* Promote small integer return values the same way arguments are
   promoted, then return them in the fixed return-value register.  */
1453 PROMOTE_MODE (mode, 0, valtype);
1454 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1457 /* Worker function for TARGET_LIBCALL_VALUE. */
1460 xstormy16_libcall_value (enum machine_mode mode,
1461 const_rtx fun ATTRIBUTE_UNUSED)
/* Libcalls return in the same register as ordinary functions.  */
1463 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1466 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
1469 xstormy16_function_value_regno_p (const unsigned int regno)
/* Only the single return-value register ever carries a result.  */
1471 return (regno == RETURN_VALUE_REGNUM);
1474 /* A C compound statement that outputs the assembler code for a thunk function,
1475 used to implement C++ virtual function calls with multiple inheritance. The
1476 thunk acts as a wrapper around a virtual function, adjusting the implicit
1477 object parameter before handing control off to the real function.
1479 First, emit code to add the integer DELTA to the location that contains the
1480 incoming first argument. Assume that this argument contains a pointer, and
1481 is the one used to pass the `this' pointer in C++. This is the incoming
1482 argument *before* the function prologue, e.g. `%o0' on a sparc. The
1483 addition must preserve the values of all other incoming arguments.
1485 After the addition, emit code to jump to FUNCTION, which is a
1486 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does not touch
1487 the return address. Hence returning from FUNCTION will return to whoever
1488 called the current `thunk'.
1490 The effect must be as if @var{function} had been called directly
1491 with the adjusted first argument. This macro is responsible for
1492 emitting all of the code for a thunk function;
1493 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1496 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already been
1497 extracted from it.) It might possibly be useful on some targets, but
1501 xstormy16_asm_output_mi_thunk (FILE *file,
1502 tree thunk_fndecl ATTRIBUTE_UNUSED,
1503 HOST_WIDE_INT delta,
1504 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1507 int regnum = FIRST_ARGUMENT_REGISTER;
1509 /* There might be a hidden first argument for a returned structure. */
1510 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
/* Add DELTA (masked to 16 bits) to the `this' register, then tail-jump
   to the real function with a far jump.  */
1513 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1514 fputs ("\tjmpf ", file);
1515 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1519 /* The purpose of this function is to override the default behavior of
1520 BSS objects. Normally, they go into .bss or .sbss via ".common"
1521 directives, but we need to override that and put them in
1522 .bss_below100. We can't just use a section override (like we do
1523 for .data_below100), because that makes them initialized rather
1524 than uninitialized. */
1527 xstormy16_asm_output_aligned_common (FILE *stream,
/* DECL may be null (e.g. for compiler-generated commons); only look at
   its RTL when present.  */
1534 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
/* below100 objects get placed explicitly in the .bss_below100 section
   rather than emitted via .comm/.local.  */
1539 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1540 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100
1545 switch_to_section (bss100_section);
1553 name2 = default_strip_name_encoding (name);
1555 fprintf (stream, "\t.globl\t%s\n", name2);
1557 fprintf (stream, "\t.p2align %d\n", p2align);
1558 fprintf (stream, "\t.type\t%s, @object\n", name2);
1559 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1560 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
/* Non-below100 path: fall back to the conventional .local/.comm pair.  */
1566 fprintf (stream, "\t.local\t");
1567 assemble_name (stream, name);
1568 fprintf (stream, "\n");
1570 fprintf (stream, "\t.comm\t");
1571 assemble_name (stream, name);
1572 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1575 /* Implement TARGET_ASM_INIT_SECTIONS. */
1578 xstormy16_asm_init_sections (void)
/* Create the dedicated below-100 BSS section used by
   xstormy16_asm_output_aligned_common.  */
1581 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1582 output_section_asm_op,
1583 "\t.section \".bss_below100\",\"aw\",@nobits");
1586 /* Mark symbols with the "below100" attribute so that we can use the
1587 special addressing modes for them. */
1590 xstormy16_encode_section_info (tree decl, rtx r, int first)
1592 default_encode_section_info (decl, r, first);
/* Either spelling of the attribute sets the BELOW100 symbol flag.  */
1594 if (TREE_CODE (decl) == VAR_DECL
1595 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1596 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1598 rtx symbol = XEXP (r, 0);
1600 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1601 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1605 #undef TARGET_ASM_CONSTRUCTOR
1606 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1607 #undef TARGET_ASM_DESTRUCTOR
1608 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1610 /* Output constructors and destructors. Just like
1611 default_named_section_asm_out_* but don't set the sections writable. */
1614 xstormy16_asm_out_destructor (rtx symbol, int priority)
1616 const char *section = ".dtors";
1619 /* ??? This only works reliably with the GNU linker. */
1620 if (priority != DEFAULT_INIT_PRIORITY)
1622 sprintf (buf, ".dtors.%.5u",
1623 /* Invert the numbering so the linker puts us in the proper
1624 order; constructors are run from right to left, and the
1625 linker sorts in increasing order. */
1626 MAX_INIT_PRIORITY - priority);
/* Note: section flags are deliberately 0 (not SECTION_WRITE).  */
1630 switch_to_section (get_section (section, 0, NULL));
1631 assemble_align (POINTER_SIZE);
1632 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Constructor counterpart of xstormy16_asm_out_destructor: emit the
   symbol into .ctors (or a priority-suffixed .ctors.NNNNN section).  */
1636 xstormy16_asm_out_constructor (rtx symbol, int priority)
1638 const char *section = ".ctors";
1641 /* ??? This only works reliably with the GNU linker. */
1642 if (priority != DEFAULT_INIT_PRIORITY)
1644 sprintf (buf, ".ctors.%.5u",
1645 /* Invert the numbering so the linker puts us in the proper
1646 order; constructors are run from right to left, and the
1647 linker sorts in increasing order. */
1648 MAX_INIT_PRIORITY - priority);
1652 switch_to_section (get_section (section, 0, NULL));
1653 assemble_align (POINTER_SIZE);
1654 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1657 /* Worker function for TARGET_PRINT_OPERAND_ADDRESS.
1659 Print a memory address as an operand to reference that memory location. */
1662 xstormy16_print_operand_address (FILE *file, rtx address)
1664 HOST_WIDE_INT offset;
1665 int pre_dec, post_inc;
1667 /* There are a few easy cases. */
/* Absolute integer address: print it truncated to 16 bits.  */
1668 if (CONST_INT_P (address))
1670 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
1674 if (CONSTANT_P (address) || LABEL_P (address))
1676 output_addr_const (file, address);
1680 /* Otherwise, it's hopefully something of the form
1681 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)). */
/* Peel off a constant displacement, if any.  */
1682 if (GET_CODE (address) == PLUS)
1684 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1685 offset = INTVAL (XEXP (address, 1));
1686 address = XEXP (address, 0);
/* Then peel off pre-decrement / post-increment, leaving a bare REG.  */
1691 pre_dec = (GET_CODE (address) == PRE_DEC);
1692 post_inc = (GET_CODE (address) == POST_INC);
1693 if (pre_dec || post_inc)
1694 address = XEXP (address, 0);
1696 gcc_assert (REG_P (address));
1701 fputs (reg_names [REGNO (address)], file);
1705 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1709 /* Worker function for TARGET_PRINT_OPERAND.
1711 Print an operand to an assembler instruction. */
1714 xstormy16_print_operand (FILE *file, rtx x, int code)
1719 /* There is either one bit set, or one bit clear, in X.
1720 Print it preceded by '#'. */
/* bits_set[n] = population count of the 3-bit value n; used to decide
   quickly whether the low bits suggest a one-bit-set or one-bit-clear
   mask.  */
1722 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1723 HOST_WIDE_INT xx = 1;
1726 if (CONST_INT_P (x))
1729 output_operand_lossage ("'B' operand is not constant");
1731 /* GCC sign-extends masks with the MSB set, so we have to
1732 detect all the cases that differ only in sign extension
1733 beyond the bits we care about. Normally, the predicates
1734 and constraints ensure that we have the right values. This
1735 works correctly for valid masks. */
1736 if (bits_set[xx & 7] <= 1)
1738 /* Remove sign extension bits. */
1739 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1741 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1743 l = exact_log2 (xx);
1747 /* Add sign extension bits. */
1748 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1749 xx |= ~(HOST_WIDE_INT)0xff;
1750 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1751 xx |= ~(HOST_WIDE_INT)0xffff;
/* One-bit-clear case: the set bit of the complement is the answer.  */
1752 l = exact_log2 (~xx);
1756 output_operand_lossage ("'B' operand has multiple bits set");
1758 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
1763 /* Print the symbol without a surrounding @fptr(). */
1764 if (GET_CODE (x) == SYMBOL_REF)
1765 assemble_name (file, XSTR (x, 0));
1766 else if (LABEL_P (x))
1767 output_asm_label (x);
1769 xstormy16_print_operand_address (file, x);
1774 /* Print the immediate operand less one, preceded by '#'.
1775 For 'O', negate it first. */
1777 HOST_WIDE_INT xx = 0;
1779 if (CONST_INT_P (x))
1782 output_operand_lossage ("'o' operand is not constant");
1787 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
1792 /* Print the shift mask for bp/bn. */
1794 HOST_WIDE_INT xx = 1;
1797 if (CONST_INT_P (x))
/* NOTE(review): this lossage message mentions 'B' but appears inside
   the bp/bn shift-mask case — possibly copy-pasted; the case letter is
   on an elided line, so confirm against the full source.  */
1800 output_operand_lossage ("'B' operand is not constant");
1804 fputs (IMMEDIATE_PREFIX, file);
1805 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1810 /* Handled below. */
1814 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* No operand-code letter (or a pass-through one): print X itself.  */
1818 switch (GET_CODE (x))
1821 fputs (reg_names [REGNO (x)], file);
1825 xstormy16_print_operand_address (file, XEXP (x, 0));
1829 /* Some kind of constant or label; an immediate operand,
1830 so prefix it with '#' for the assembler. */
1831 fputs (IMMEDIATE_PREFIX, file);
1832 output_addr_const (file, x);
1839 /* Expander for the `casesi' pattern.
1840 INDEX is the index of the switch statement.
1841 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1842 to the first table entry.
1843 RANGE is the number of table entries.
1844 TABLE is an ADDR_VEC that is the jump table.
1845 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1846 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
1849 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1850 rtx table, rtx default_label)
1852 HOST_WIDE_INT range_i = INTVAL (range);
1855 /* This code uses 'br', so it can deal only with tables of size up to
1857 if (range_i >= 8192)
1858 sorry ("switch statement of size %lu entries too large",
1859 (unsigned long) range_i);
/* Rebase the index to zero, bounds-check it against RANGE (branching
   to the default label on overflow), then scale by 4 (each table entry
   is a 4-byte jmpf) and dispatch through the PC-relative table jump.  */
1861 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1863 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1865 int_index = gen_lowpart_common (HImode, index);
1866 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1867 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1870 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1871 instructions, without label or alignment or any other special
1872 constructs. We know that the previous instruction will be the
1873 `tablejump_pcrel' output above.
1875 TODO: it might be nice to output 'br' instructions if they could
1879 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1883 switch_to_section (current_function_section ());
/* Emit one far jump per table entry, in order.  */
1885 vlen = XVECLEN (table, 0);
1886 for (idx = 0; idx < vlen; idx++)
1888 fputs ("\tjmpf ", file);
1889 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1894 /* Expander for the `call' patterns.
1895 RETVAL is the RTL for the return register or NULL for void functions.
1896 DEST is the function to call, expressed as a MEM.
1897 COUNTER is ignored. */
1900 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1903 enum machine_mode mode;
1905 gcc_assert (MEM_P (dest));
1906 dest = XEXP (dest, 0);
/* Non-constant call targets must live in a register.  */
1908 if (! CONSTANT_P (dest) && ! REG_P (dest))
1909 dest = force_reg (Pmode, dest);
1914 mode = GET_MODE (retval);
1916 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
/* When a value is returned, wrap the CALL in a SET of the result reg.  */
1919 call = gen_rtx_SET (VOIDmode, retval, call);
/* For indirect calls, attach a USE of a zeroed scratch register.
   NOTE(review): the purpose of this zero register is on elided lines —
   presumably required by the indirect-call pattern; confirm in the md.  */
1921 if (! CONSTANT_P (dest))
1923 temp = gen_reg_rtx (HImode);
1924 emit_move_insn (temp, const0_rtx);
1929 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1930 gen_rtx_USE (VOIDmode, temp)));
1931 emit_call_insn (call);
1934 /* Expanders for multiword computational operations. */
1936 /* Expander for arithmetic operations; emit insns to compute
1938 (set DEST (CODE:MODE SRC0 SRC1))
1940 When CODE is COMPARE, a branch template is generated
1941 (this saves duplicating code in xstormy16_split_cbranch). */
1944 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1945 rtx dest, rtx src0, rtx src1)
/* Work one 16-bit word at a time, from least to most significant.  */
1947 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1952 emit_move_insn (src0, const0_rtx);
1954 for (i = 0; i < num_words; i++)
1956 rtx w_src0, w_src1, w_dest;
/* Extract the i-th word of each operand.  */
1959 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1960 i * UNITS_PER_WORD);
1961 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1962 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
/* Addition: skip words where the addend is known zero (carry chain
   permitting); otherwise use the carry-in/carry-out add patterns.  */
1968 && CONST_INT_P (w_src1)
1969 && INTVAL (w_src1) == 0)
1973 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1975 insn = gen_addchi5 (w_dest, w_src0, w_src1);
/* COMPARE, final word: build a PARALLEL of branch + borrow-aware
   subtract + carry clobber, as expected by xstormy16_split_cbranch.  */
1981 if (code == COMPARE && i == num_words - 1)
1983 rtx branch, sub, clobber, sub_1;
1985 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1986 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
1987 sub = gen_rtx_SET (VOIDmode, w_dest,
1988 gen_rtx_MINUS (HImode, sub_1, w_src1));
1989 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1990 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1991 gen_rtx_IF_THEN_ELSE (VOIDmode,
1997 insn = gen_rtx_PARALLEL (VOIDmode,
1998 gen_rtvec (3, branch, sub, clobber));
/* Subtraction words use the borrow-chain patterns, again skipping
   known-zero subtrahends.  */
2002 && CONST_INT_P (w_src1)
2003 && INTVAL (w_src1) == 0)
2006 insn = gen_subchi4 (w_dest, w_src0, w_src1);
2008 insn = gen_subchi5 (w_dest, w_src0, w_src1);
/* Logical ops: the identity constant (-1 for AND, 0 for IOR/XOR) makes
   the word a no-op or a plain NOT/move.  */
2014 if (CONST_INT_P (w_src1)
2015 && INTVAL (w_src1) == -(code == AND))
2018 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
2023 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2034 /* If we emit nothing, try_split() will think we failed. So emit
2035 something that does nothing and can be optimized away. */
2040 /* The shift operations are split at output time for constant values;
2041 variable-width shifts get handed off to a library routine.
2043 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2044 SIZE_R will be a CONST_INT, X will be a hard register. */
2047 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2048 rtx x, rtx size_r, rtx temp)
2051 const char *r0, *r1, *rt;
2054 gcc_assert (CONST_INT_P (size_r)
/* Reduce the shift count modulo the operand width.  */
2058 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
/* r0 = low word of the register pair, r1 = high word.  */
2063 r0 = reg_names [REGNO (x)];
2064 r1 = reg_names [REGNO (x) + 1];
2066 /* For shifts of size 1, we can use the rotate instructions. */
2072 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2075 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2078 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2086 /* For large shifts, there are easy special cases. */
/* Shift by exactly 16: a word move plus a fill of the vacated word
   (zero, or sign-replication via asr #15 for arithmetic right).  */
2092 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2095 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2098 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* Shift by more than 16: word move, fill, then shift the remainder.  */
2110 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2111 r1, r0, r0, r1, (int) size - 16);
2114 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2115 r0, r1, r1, r0, (int) size - 16);
2118 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2119 r0, r1, r1, r0, (int) size - 16);
2127 /* For the rest, we have to do more work. In particular, we
2128 need a temporary. */
/* General case (1 < size < 16): save the bits crossing the word
   boundary in TEMP, shift both words, then OR the saved bits back in.  */
2129 rt = reg_names [REGNO (temp)];
2134 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2135 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2140 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2141 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2146 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2147 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2156 /* Attribute handling. */
2158 /* Return nonzero if the function is an interrupt function. */
2161 xstormy16_interrupt_function_p (void)
2165 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2166 any functions are declared, which is demonstrably wrong, but
2167 it is worked around here. FIXME. */
/* Check the current function's type for the "interrupt" attribute.  */
2171 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2172 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2175 #undef TARGET_ATTRIBUTE_TABLE
2176 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
/* Forward declarations for the attribute handlers referenced by the
   table below.  */
2178 static tree xstormy16_handle_interrupt_attribute
2179 (tree *, tree, tree, int, bool *);
2180 static tree xstormy16_handle_below100_attribute
2181 (tree *, tree, tree, int, bool *);
/* Target attribute table: "interrupt" marks interrupt handlers
   (applies to function types); "below100"/"BELOW100" request the
   below-address-100 addressing modes for variables.  */
2183 static const struct attribute_spec xstormy16_attribute_table[] =
2185 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2186 affects_type_identity. */
2187 { "interrupt", 0, 0, false, true, true,
2188 xstormy16_handle_interrupt_attribute , false },
2189 { "BELOW100", 0, 0, false, false, false,
2190 xstormy16_handle_below100_attribute, false },
2191 { "below100", 0, 0, false, false, false,
2192 xstormy16_handle_below100_attribute, false },
2193 { NULL, 0, 0, false, false, false, NULL, false }
2196 /* Handle an "interrupt" attribute;
2197 arguments as in struct attribute_spec.handler. */
2200 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2201 tree args ATTRIBUTE_UNUSED,
2202 int flags ATTRIBUTE_UNUSED,
/* Only function types may carry "interrupt"; warn and drop otherwise.  */
2205 if (TREE_CODE (*node) != FUNCTION_TYPE)
2207 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2209 *no_add_attrs = true;
2215 /* Handle a "below100" attribute;
2216 arguments as in struct attribute_spec.handler. */
2219 xstormy16_handle_below100_attribute (tree *node,
2220 tree name ATTRIBUTE_UNUSED,
2221 tree args ATTRIBUTE_UNUSED,
2222 int flags ATTRIBUTE_UNUSED,
/* Valid on variables, pointer types and type declarations only.  */
2225 if (TREE_CODE (*node) != VAR_DECL
2226 && TREE_CODE (*node) != POINTER_TYPE
2227 && TREE_CODE (*node) != TYPE_DECL)
2229 warning (OPT_Wattributes,
2230 "%<__BELOW100__%> attribute only applies to variables");
2231 *no_add_attrs = true;
2233 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
/* Automatic (stack) variables cannot live below address 100; require
   static storage duration.  */
2235 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2237 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2238 "with auto storage class")
2239 *no_add_attrs = true;
2246 #undef TARGET_INIT_BUILTINS
2247 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2248 #undef TARGET_EXPAND_BUILTIN
2249 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2255 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2256 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
/* Machine-specific builtins: 32/16 signed and unsigned divide/modulo,
   all mapping onto the two divlh insn patterns.  The div/mod pairs
   share a pattern and differ only in which output operand ('r' vs 't')
   is the user-visible result.  */
2260 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2261 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2262 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2263 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2264 { NULL, 0, NULL, NULL }
/* Register each entry of s16builtins with the front end.  The return
   type and argument types are decoded from the arg_types string
   (element 0 is the return type, the rest are arguments).  */
2268 xstormy16_init_builtins (void)
2270 tree args[2], ret_type, arg = NULL_TREE, ftype;
2273 ret_type = void_type_node;
2275 for (i = 0; s16builtins[i].name; i++)
/* Number of arguments = length of the type string minus the return.  */
2277 n_args = strlen (s16builtins[i].arg_types) - 1;
2279 gcc_assert (n_args <= (int) ARRAY_SIZE (args));
2281 for (a = n_args - 1; a >= 0; a--)
2282 args[a] = NULL_TREE;
/* Decode from the last character backwards; index 0 (the return type)
   is handled on an elided line after this loop.  */
2284 for (a = n_args; a >= 0; a--)
2286 switch (s16builtins[i].arg_types[a])
2288 case 's': arg = short_integer_type_node; break;
2289 case 'S': arg = short_unsigned_type_node; break;
2290 case 'l': arg = long_integer_type_node; break;
2291 case 'L': arg = long_unsigned_type_node; break;
2292 default: gcc_unreachable ();
2299 ftype = build_function_type_list (ret_type, args[0], args[1], NULL_TREE);
2300 add_builtin_function (s16builtins[i].name, ftype,
2301 i, BUILT_IN_MD, NULL, NULL_TREE);
/* Expand a call to one of the s16builtins: map the call's arguments
   onto the insn pattern's operands as described by the builtin's
   arg_ops string, emit the insn, and return the result register.  */
2306 xstormy16_expand_builtin (tree exp, rtx target,
2307 rtx subtarget ATTRIBUTE_UNUSED,
2308 enum machine_mode mode ATTRIBUTE_UNUSED,
2309 int ignore ATTRIBUTE_UNUSED)
2311 rtx op[10], args[10], pat, copyto[10], retval = 0;
2312 tree fndecl, argtree;
/* The builtin's index (set in xstormy16_init_builtins) selects its
   s16builtins entry and hence the md pattern.  */
2315 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2316 argtree = TREE_OPERAND (exp, 1);
2317 i = DECL_FUNCTION_CODE (fndecl);
2318 code = s16builtins[i].md_code;
/* Expand the actual call arguments into rtx form.  */
2320 for (a = 0; a < 10 && argtree; a++)
2322 args[a] = expand_normal (TREE_VALUE (argtree));
2323 argtree = TREE_CHAIN (argtree);
/* Build each pattern operand: 'r' is the user-visible result (reuse
   TARGET when available), 't' a scratch, digits index into args[].  */
2326 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2328 char ao = s16builtins[i].arg_ops[o];
2329 char c = insn_data[code].operand[o].constraint[0];
2330 enum machine_mode omode;
2334 omode = (enum machine_mode) insn_data[code].operand[o].mode;
2336 op[o] = target ? target : gen_reg_rtx (omode);
2338 op[o] = gen_reg_rtx (omode);
2340 op[o] = args[(int) hex_value (ao)];
/* If the operand fails its predicate, substitute a register: outputs
   get a fresh reg (copied back afterwards), inputs are copied in.  */
2342 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2344 if (c == '+' || c == '=')
2347 op[o] = gen_reg_rtx (omode);
2350 op[o] = copy_to_mode_reg (omode, op[o]);
2357 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2358 op[5], op[6], op[7], op[8], op[9]);
/* Copy substituted output operands back to their real destinations.  */
2361 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2364 emit_move_insn (copyto[o], op[o]);
2365 if (op[o] == retval)
2372 /* Look for combinations of insns that can be converted to BN or BP
2373 opcodes. This is, unfortunately, too complex to do with MD
2377 combine_bnp (rtx insn)
2379 int insn_code, regno, need_extend;
2381 rtx cond, reg, and_insn, load, qireg, mem;
2382 enum machine_mode load_mode = QImode;
2383 enum machine_mode and_mode = QImode;
2384 rtx shift = NULL_RTX;
/* Only conditional branches on HImode comparisons are candidates.  */
2386 insn_code = recog_memoized (insn);
2387 if (insn_code != CODE_FOR_cbranchhi
2388 && insn_code != CODE_FOR_cbranchhi_neg)
/* Dig the comparison out of the branch pattern.  */
2391 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2392 cond = XEXP (cond, 1); /* if */
2393 cond = XEXP (cond, 0); /* cond */
2394 switch (GET_CODE (cond))
/* The compared register must die here and be compared against zero,
   otherwise the transformation is unsafe.  */
2408 reg = XEXP (cond, 0);
2411 regno = REGNO (reg);
2412 if (XEXP (cond, 1) != const0_rtx)
2414 if (! find_regno_note (insn, REG_DEAD, regno))
2416 qireg = gen_rtx_REG (QImode, regno);
2420 /* LT and GE conditionals should have a sign extend before
/* Scan backwards for the sign-extend (or the #0x8000-style move that
   tests bit 15) feeding the comparison, stopping if REG is touched or
   we leave straight-line code.  */
2422 for (and_insn = prev_real_insn (insn);
2423 and_insn != NULL_RTX;
2424 and_insn = prev_real_insn (and_insn))
2426 int and_code = recog_memoized (and_insn);
2428 if (and_code == CODE_FOR_extendqihi2
2429 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2430 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
2433 if (and_code == CODE_FOR_movhi_internal
2434 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
2436 /* This is for testing bit 15. */
2441 if (reg_mentioned_p (reg, and_insn))
2444 if (GET_CODE (and_insn) != NOTE
2445 && GET_CODE (and_insn) != INSN)
2451 /* EQ and NE conditionals have an AND before them. */
/* Same backwards scan, but looking for an AND of REG with itself as
   destination (the single-bit mask test).  */
2452 for (and_insn = prev_real_insn (insn);
2453 and_insn != NULL_RTX;
2454 and_insn = prev_real_insn (and_insn))
2456 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2457 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2458 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
2461 if (reg_mentioned_p (reg, and_insn))
2464 if (GET_CODE (and_insn) != NOTE
2465 && GET_CODE (and_insn) != INSN)
2471 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2472 followed by an AND like this:
2474 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2475 (clobber (reg:BI carry))]
2477 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2479 Attempt to detect this here. */
2480 for (shift = prev_real_insn (and_insn); shift;
2481 shift = prev_real_insn (shift))
2483 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2484 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2485 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2488 if (reg_mentioned_p (reg, shift)
2489 || (GET_CODE (shift) != NOTE
2490 && GET_CODE (shift) != INSN))
2499 if (and_insn == NULL_RTX)
/* Finally find the load (a below-100 HImode/QImode move or a
   zero-extend from a below-100 byte) that produced REG's value.  */
2502 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
2504 load = prev_real_insn (load))
2506 int load_code = recog_memoized (load);
2508 if (load_code == CODE_FOR_movhi_internal
2509 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2510 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2511 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2517 if (load_code == CODE_FOR_movqi_internal
2518 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2519 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2525 if (load_code == CODE_FOR_zero_extendqihi2
2526 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2527 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2534 if (reg_mentioned_p (reg, load))
2537 if (GET_CODE (load) != NOTE
2538 && GET_CODE (load) != INSN)
2544 mem = SET_SRC (PATTERN (load));
/* Sign-test case: the interesting bit is the MSB of the load.  */
2548 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2550 /* If the mem includes a zero-extend operation and we are
2551 going to generate a sign-extend operation then move the
2552 mem inside the zero-extend. */
2553 if (GET_CODE (mem) == ZERO_EXTEND)
2554 mem = XEXP (mem, 0);
/* Mask-test case: take the single-bit mask from the AND (shifted back
   up if a right-shift preceded it).  */
2558 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2562 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
2565 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
/* Narrow a HImode access to the single byte containing the tested bit;
   bits 8-15 live at address+1 (little-endian byte order here).  */
2568 if (load_mode == HImode)
2570 rtx addr = XEXP (mem, 0);
2572 if (! (mask & 0xff))
2574 addr = plus_constant (Pmode, addr, 1);
2577 mem = gen_rtx_MEM (QImode, addr);
/* Rewrite the branch condition to test the memory bit directly, then
   force re-recognition and delete the now-dead feeder insns.  */
2581 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2583 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2585 INSN_CODE (insn) = -1;
2588 if (and_insn != insn)
2589 delete_insn (and_insn);
2591 if (shift != NULL_RTX)
2592 delete_insn (shift);
/* Implement TARGET_MACHINE_DEPENDENT_REORG (registered below as
   xstormy16_reorg).  Scans the whole insn stream and skips anything
   that is not a jump; the per-jump transformation lives on lines
   elided from this excerpt (presumably the bit-test/branch combiner
   defined above -- TODO confirm against the full file).  */
2596 xstormy16_reorg (void)
2600 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2602 if (! JUMP_P (insn))
2608 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2611 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
/* Return true if a value of TYPE must be returned in memory rather
   than in registers: either its size is not known at compile time
   (int_size_in_bytes returned -1) or it is too large to fit in the
   block of argument registers.  FNTYPE is unused on this target.  */
2613 const HOST_WIDE_INT size = int_size_in_bytes (type);
2614 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
/* Target hook overrides.  Each #undef/#define pair replaces the
   default hook value before TARGET_INITIALIZER is expanded into
   `targetm' at the bottom of the file.  */

/* Assembler pseudo-ops for aligned 16- and 32-bit data, and section
   attribute encoding.  */
2617 #undef TARGET_ASM_ALIGNED_HI_OP
2618 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2619 #undef TARGET_ASM_ALIGNED_SI_OP
2620 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2621 #undef TARGET_ENCODE_SECTION_INFO
2622 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

2624 /* Select_section doesn't handle .bss_below100. */
2625 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2626 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

/* C++ "this"-adjusting thunks; no vcall offsets are supported.  */
2628 #undef TARGET_ASM_OUTPUT_MI_THUNK
2629 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2630 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2631 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* Operand and address printing for assembly output.  */
2633 #undef TARGET_PRINT_OPERAND
2634 #define TARGET_PRINT_OPERAND xstormy16_print_operand
2635 #undef TARGET_PRINT_OPERAND_ADDRESS
2636 #define TARGET_PRINT_OPERAND_ADDRESS xstormy16_print_operand_address

/* RTX, memory-move and address cost estimates used by the optimizers
   (xstormy16_rtx_costs is defined near the top of this file).  */
2638 #undef TARGET_MEMORY_MOVE_COST
2639 #define TARGET_MEMORY_MOVE_COST xstormy16_memory_move_cost
2640 #undef TARGET_RTX_COSTS
2641 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2642 #undef TARGET_ADDRESS_COST
2643 #define TARGET_ADDRESS_COST xstormy16_address_cost

/* Variadic-argument (va_list / va_start / va_arg) handling.  */
2645 #undef TARGET_BUILD_BUILTIN_VA_LIST
2646 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2647 #undef TARGET_EXPAND_BUILTIN_VA_START
2648 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2649 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2650 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

/* Argument promotion: always promote small modes for both function
   arguments and return values, and in prototypes.  */
2652 #undef TARGET_PROMOTE_FUNCTION_MODE
2653 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2654 #undef TARGET_PROMOTE_PROTOTYPES
2655 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

/* Calling convention: argument passing and value return.  */
2657 #undef TARGET_FUNCTION_ARG
2658 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2659 #undef TARGET_FUNCTION_ARG_ADVANCE
2660 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

2662 #undef TARGET_RETURN_IN_MEMORY
2663 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2664 #undef TARGET_FUNCTION_VALUE
2665 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2666 #undef TARGET_LIBCALL_VALUE
2667 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2668 #undef TARGET_FUNCTION_VALUE_REGNO_P
2669 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

/* Late machine-specific pass (xstormy16_reorg, defined above).  */
2671 #undef TARGET_MACHINE_DEPENDENT_REORG
2672 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Reload: the same class-narrowing routine serves both input and
   output reloads.  */
2674 #undef TARGET_PREFERRED_RELOAD_CLASS
2675 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2676 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2677 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

/* Address legitimacy and mode dependence.  */
2679 #undef TARGET_LEGITIMATE_ADDRESS_P
2680 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2681 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2682 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

/* Frame-pointer elimination and nested-function trampolines.  */
2684 #undef TARGET_CAN_ELIMINATE
2685 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

2687 #undef TARGET_TRAMPOLINE_INIT
2688 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

/* The target hook vector, assembled from the overrides above plus
   defaults for every hook not redefined here.  */
2690 struct gcc_target targetm = TARGET_INITIALIZER;

/* Generated roots/marker tables for the GTY(()) variables in this
   file (e.g. bss100_section above), produced by gengtype.  */
2692 #include "gt-stormy16.h"