1 /* Xstormy16 target functions.
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Red Hat, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-flags.h"
33 #include "insn-attr.h"
36 #include "diagnostic-core.h"
44 #include "target-def.h"
46 #include "langhooks.h"
/* Forward declarations of file-local helpers and target-hook
   implementations defined later in this file.  NOTE(review): the
   declaration list is visibly truncated (original line numbers jump). */
51 static rtx emit_addhi3_postreload (rtx, rtx, rtx);
52 static void xstormy16_asm_out_constructor (rtx, int);
53 static void xstormy16_asm_out_destructor (rtx, int);
54 static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
57 static void xstormy16_init_builtins (void);
58 static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
59 static bool xstormy16_rtx_costs (rtx, int, int, int *, bool);
60 static int xstormy16_address_cost (rtx, bool);
61 static bool xstormy16_return_in_memory (const_tree, const_tree);
/* Section used for objects placed in the "below 100" (first 0x100 bytes)
   address region; GTY-marked so it survives garbage collection.  */
63 static GTY(()) section *bss100_section;
65 /* Compute a (partial) cost for rtx X. Return true if the complete
66 cost has been computed, and false if subexpressions should be
67 scanned. In either case, *TOTAL contains the cost result. */
/* Implements TARGET_RTX_COSTS.  NOTE(review): this fragment is missing
   its return type, braces and the switch over GET_CODE (x); only the
   CONST_INT, MULT and DIV arms' cost assignments are visible.  */
70 xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
71 int *total, bool speed ATTRIBUTE_UNUSED)
/* Small non-negative constants are cheapest: 0..15 costs half an insn,
   0..255 one insn, anything else two.  */
76 if (INTVAL (x) < 16 && INTVAL (x) >= 0)
77 *total = COSTS_N_INSNS (1) / 2;
78 else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
79 *total = COSTS_N_INSNS (1);
81 *total = COSTS_N_INSNS (2);
88 *total = COSTS_N_INSNS (2);
/* Multiply and divide are many cycles on this chip — presumably these
   constants model the hardware latency; TODO confirm against the manual.  */
92 *total = COSTS_N_INSNS (35 + 6);
95 *total = COSTS_N_INSNS (51 - 6);
/* Implements TARGET_ADDRESS_COST: constant addresses cost 2, reg+offset
   costs 7.  NOTE(review): the final arm of the conditional (the default
   cost) is missing from this fragment.  */
104 xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
106 return (CONST_INT_P (x) ? 2
107 : GET_CODE (x) == PLUS ? 7
111 /* Branches are handled as follows:
113 1. HImode compare-and-branches. The machine supports these
114 natively, so the appropriate pattern is emitted directly.
116 2. SImode EQ and NE. These are emitted as pairs of HImode
117 compare-and-branches.
119 3. SImode LT, GE, LTU and GEU. These are emitted as a sequence
120 of a SImode subtract followed by a branch (not a compare-and-branch),
126 4. SImode GT, LE, GTU, LEU. These are emitted as a sequence like:
133 /* Emit a branch of kind CODE to location LOC. */
/* Emits the RTL for a conditional branch comparing OP0 with OP1.
   HImode comparisons map directly onto the machine's native
   compare-and-branch; SImode comparisons are decomposed per the
   strategy comment above.  NOTE(review): several statements are
   missing from this fragment (label emission, the PARALLEL tail).  */
136 xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
138 rtx condition_rtx, loc_ref, branch, cy_clobber;
140 enum machine_mode mode;
142 mode = GET_MODE (op0);
143 gcc_assert (mode == HImode || mode == SImode);
/* SImode GT/LE/GTU/LEU: synthesized from an LT/LTU branch plus an
   EQ/NE branch (strategy 4 in the comment above).  */
146 && (code == GT || code == LE || code == GTU || code == LEU))
148 int unsigned_p = (code == GTU || code == LEU);
149 int gt_p = (code == GT || code == GTU);
153 lab = gen_label_rtx ();
154 xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
155 /* This should be generated as a comparison against the temporary
156 created by the previous insn, but reload can't handle that. */
157 xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
/* SImode EQ/NE against a nonzero operand: compare word by word
   (strategy 2 in the comment above).  */
162 else if (mode == SImode
163 && (code == NE || code == EQ)
164 && op1 != const0_rtx)
166 rtx op0_word, op1_word;
168 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
172 lab = gen_label_rtx ();
/* All words but the last: branch away (EQ) or to the target (NE)
   as soon as a word differs.  */
174 for (i = 0; i < num_words - 1; i++)
176 op0_word = simplify_gen_subreg (word_mode, op0, mode,
178 op1_word = simplify_gen_subreg (word_mode, op1, mode,
180 xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
/* Final word decides the branch with the original condition.  */
182 op0_word = simplify_gen_subreg (word_mode, op0, mode,
184 op1_word = simplify_gen_subreg (word_mode, op1, mode,
186 xstormy16_emit_cbranch (code, op0_word, op1_word, loc);
193 /* We can't allow reload to try to generate any reload after a branch,
194 so when some register must match we must make the temporary ourselves. */
198 tmp = gen_reg_rtx (mode);
199 emit_move_insn (tmp, op0);
/* Build the branch insn: (set pc (if_then_else cond ...)) plus a
   clobber of the carry flag, wrapped in a PARALLEL.  */
203 condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
204 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
205 branch = gen_rtx_SET (VOIDmode, pc_rtx,
206 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
209 cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
212 vec = gen_rtvec (2, branch, cy_clobber);
213 else if (code == NE || code == EQ)
214 vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
/* SImode ordered comparisons also perform the subtract (strategy 3),
   destroying op0 in the process.  */
219 sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
221 sub = gen_rtx_CLOBBER (SImode, op0);
223 vec = gen_rtvec (3, branch, sub, cy_clobber);
226 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
229 /* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
230 the arithmetic operation. Most of the work is done by
231 xstormy16_expand_arith. */
/* NOTE(review): fragment — return type, sequence start/end calls and
   the final emit are not visible here.  */
234 xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
237 rtx op0 = XEXP (comparison, 0);
238 rtx op1 = XEXP (comparison, 1);
/* Expand the compare as an arithmetic op, then patch the last insn of
   the generated sequence so its branch uses the original condition
   and targets LABEL.  */
243 xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
247 gcc_assert (INSN_P (seq));
/* Walk to the last insn of the emitted sequence.  */
250 while (NEXT_INSN (last_insn) != NULL_RTX)
251 last_insn = NEXT_INSN (last_insn);
253 compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
254 PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
255 XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
260 /* Return the string to output a conditional branch to LABEL, which is
261 the operand number of the label.
263 OP is the conditional expression, or NULL for branch-always.
265 REVERSED is nonzero if we should reverse the sense of the comparison.
/* Returns assembler text for an HImode conditional branch.  The result
   lives in a static buffer, so it is only valid until the next call.
   NOTE(review): fragment — the unconditional-branch path, operand
   formatting and short-branch template are partly missing.  */
270 xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
272 static char string[64];
/* A long branch is needed when the insn's length attribute says the
   target is out of short-branch range.  */
273 int need_longbranch = (op != NULL_RTX
274 ? get_attr_length (insn) == 8
275 : get_attr_length (insn) == 4);
276 int really_reversed = reversed ^ need_longbranch;
279 const char *operands;
288 sprintf (string, "%s %s", ccode, label);
292 code = GET_CODE (op);
/* The hardware compares want a register first; swap the condition if
   operand 0 is not a register.  */
294 if (! REG_P (XEXP (op, 0)))
296 code = swap_condition (code);
302 /* Work out which way this really branches. */
304 code = reverse_condition (code);
/* Map the rtx condition to the chip's condition-code suffix.  */
308 case EQ: ccode = "z"; break;
309 case NE: ccode = "nz"; break;
310 case GE: ccode = "ge"; break;
311 case LT: ccode = "lt"; break;
312 case GT: ccode = "gt"; break;
313 case LE: ccode = "le"; break;
314 case GEU: ccode = "nc"; break;
315 case LTU: ccode = "c"; break;
316 case GTU: ccode = "hi"; break;
317 case LEU: ccode = "ls"; break;
/* Long branch: short branch over a far jump to the real target.  */
324 templ = "b%s %s,.+8 | jmpf %s";
327 sprintf (string, templ, ccode, operands, label);
332 /* Return the string to output a conditional branch to LABEL, which is
333 the operand number of the label, but suitable for the tail of a
336 OP is the conditional expression (OP is never NULL_RTX).
338 REVERSED is nonzero if we should reverse the sense of the comparison.
/* SImode counterpart of xstormy16_output_cbranch_hi: emits a setup
   instruction (OR of the two halves, or a subtract-with-carry) followed
   by the branch.  Result is in a static buffer.  NOTE(review): fragment —
   gcc_unreachable/default handling and parts of the switch are missing.  */
343 xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
345 static char string[64];
346 int need_longbranch = get_attr_length (insn) >= 8;
347 int really_reversed = reversed ^ need_longbranch;
353 code = GET_CODE (op);
355 /* Work out which way this really branches. */
357 code = reverse_condition (code);
361 case EQ: ccode = "z"; break;
362 case NE: ccode = "nz"; break;
363 case GE: ccode = "ge"; break;
364 case LT: ccode = "lt"; break;
365 case GEU: ccode = "nc"; break;
366 case LTU: ccode = "c"; break;
368 /* The missing codes above should never be generated. */
/* EQ/NE: OR the two halves of the 32-bit value so the Z flag reflects
   the whole word.  */
379 gcc_assert (REG_P (XEXP (op, 0)));
381 regnum = REGNO (XEXP (op, 0));
382 sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
/* Ordered comparisons: finish the subtract of the high halves so the
   carry flag is meaningful.  */
386 case GE: case LT: case GEU: case LTU:
387 strcpy (prevop, "sbc %2,%3");
395 templ = "%s | b%s .+6 | jmpf %s";
397 templ = "%s | b%s %s";
398 sprintf (string, templ, prevop, ccode, label);
403 /* Many machines have some registers that cannot be copied directly to or from
404 memory or even from other types of registers. An example is the `MQ'
405 register, which on most machines, can only be copied to or from general
406 registers, but not memory. Some machines allow copying all registers to and
407 from memory, but require a scratch register for stores to some memory
408 locations (e.g., those with symbolic address on the RT, and those with
409 certain symbolic address on the SPARC when compiling PIC). In some cases,
410 both an intermediate and a scratch register are required.
412 You should define these macros to indicate to the reload phase that it may
413 need to allocate at least one register for a reload in addition to the
414 register to contain the data. Specifically, if copying X to a register
415 RCLASS in MODE requires an intermediate register, you should define
416 `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
417 whose registers can be used as intermediate registers or scratch registers.
419 If copying a register RCLASS in MODE to X requires an intermediate or scratch
420 register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
421 largest register class required. If the requirements for input and output
422 reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
423 instead of defining both macros identically.
425 The values returned by these macros are often `GENERAL_REGS'. Return
426 `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
427 to or from a register of RCLASS in MODE without requiring a scratch register.
428 Do not define this macro if it would always return `NO_REGS'.
430 If a scratch register is required (either with or without an intermediate
431 register), you should define patterns for `reload_inM' or `reload_outM', as
432 required.. These patterns, which will normally be implemented with a
433 `define_expand', should be similar to the `movM' patterns, except that
434 operand 2 is the scratch register.
436 Define constraints for the reload register and scratch register that contain
437 a single register class. If the original reload register (whose class is
438 RCLASS) can meet the constraint given in the pattern, the value returned by
439 these macros is used for the class of the scratch register. Otherwise, two
440 additional reload registers are required. Their classes are obtained from
441 the constraints in the insn pattern.
443 X might be a pseudo-register or a `subreg' of a pseudo-register, which could
444 either be in a hard register or in memory. Use `true_regnum' to find out;
445 it will return -1 if the pseudo is in memory and the hard register number if
448 These macros should not be used in the case where a particular class of
449 registers can only be copied to memory and not to another class of
450 registers. In that case, secondary reload registers are not needed and
451 would not be helpful. Instead, a stack location must be used to perform the
452 copy and the `movM' pattern should use memory as an intermediate storage.
453 This case often occurs between floating-point and general registers. */
/* Worker for the secondary-reload hook (see the long comment above).
   NOTE(review): fragment — the return type, the MEM_P test and the
   returned register class are not visible.  */
456 xstormy16_secondary_reload_class (enum reg_class rclass,
457 enum machine_mode mode ATTRIBUTE_UNUSED,
460 /* This chip has the interesting property that only the first eight
461 registers can be moved to/from memory. */
/* X is memory-like if it is (or may end up as) a memory reference:
   a MEM, or a (subreg of a) pseudo not yet assigned a hard register.  */
463 || ((GET_CODE (x) == SUBREG || REG_P (x))
464 && (true_regnum (x) == -1
465 || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
466 && ! reg_class_subset_p (rclass, EIGHT_REGS))
472 /* Worker function for TARGET_PREFERRED_RELOAD_CLASS
473 and TARGET_PREFERRED_OUTPUT_RELOAD_CLASS. */
/* NOTE(review): fragment — the return statements (presumably narrowing
   GENERAL_REGS to EIGHT_REGS for memory operands) are missing.  */
476 xstormy16_preferred_reload_class (rtx x, reg_class_t rclass)
478 if (rclass == GENERAL_REGS && MEM_P (x))
484 /* Predicate for symbols and addresses that reflect special 8-bit
/* True when X refers to the "below 100" address region: a symbol marked
   with SYMBOL_FLAG_XSTORMY16_BELOW100, or an absolute address in
   0x0000..0x00ff or 0x7f00..0x7fff.  NOTE(review): fragment — the
   CONST/PLUS unwrapping bodies and final return are missing.  */
488 xstormy16_below100_symbol (rtx x,
489 enum machine_mode mode ATTRIBUTE_UNUSED)
491 if (GET_CODE (x) == CONST)
493 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
496 if (GET_CODE (x) == SYMBOL_REF)
497 return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;
501 HOST_WIDE_INT i = INTVAL (x);
503 if ((i >= 0x0000 && i <= 0x00ff)
504 || (i >= 0x7f00 && i <= 0x7fff))
510 /* Likewise, but only for non-volatile MEMs, for patterns where the
511 MEM will get split into smaller sized accesses. */
/* Volatile MEMs must not be split, so reject them up front.  */
514 xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
516 if (MEM_P (x) && MEM_VOLATILE_P (x))
518 return xstormy16_below100_operand (x, mode);
521 /* Expand an 8-bit IOR. This either detects the one case we can
522 actually do, or uses a 16-bit IOR. */
/* NOTE(review): fragment — the unpacking of OPERANDS into in/out/val
   and the early return after the one-bit case are missing.  */
525 xstormy16_expand_iorqi3 (rtx *operands)
527 rtx in, out, outsub, val;
/* Fast path: setting a single bit can be done directly in QImode,
   provided both operands are below-100 memory or registers.  */
533 if (xstormy16_onebit_set_operand (val, QImode))
535 if (!xstormy16_below100_or_register (in, QImode))
536 in = copy_to_mode_reg (QImode, in);
537 if (!xstormy16_below100_or_register (out, QImode))
538 out = gen_reg_rtx (QImode);
539 emit_insn (gen_iorqi3_internal (out, in, val));
540 if (out != operands[0])
541 emit_move_insn (operands[0], out);
/* General path: widen everything to HImode and use the 16-bit IOR.  */
546 in = copy_to_mode_reg (QImode, in);
548 if (! REG_P (val) && ! CONST_INT_P (val))
549 val = copy_to_mode_reg (QImode, val);
552 out = gen_reg_rtx (QImode);
554 in = simplify_gen_subreg (HImode, in, QImode, 0);
555 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
557 if (! CONST_INT_P (val))
558 val = simplify_gen_subreg (HImode, val, QImode, 0);
560 emit_insn (gen_iorhi3 (outsub, in, val));
562 if (out != operands[0])
563 emit_move_insn (operands[0], out);
566 /* Expand an 8-bit AND. This either detects the one case we can
567 actually do, or uses a 16-bit AND. */
/* Mirror of xstormy16_expand_iorqi3 for AND.  NOTE(review): fragment —
   operand unpacking and the early return are missing.  */
570 xstormy16_expand_andqi3 (rtx *operands)
572 rtx in, out, outsub, val;
/* Fast path: clearing a single bit can be done directly in QImode.  */
578 if (xstormy16_onebit_clr_operand (val, QImode))
580 if (!xstormy16_below100_or_register (in, QImode))
581 in = copy_to_mode_reg (QImode, in);
582 if (!xstormy16_below100_or_register (out, QImode))
583 out = gen_reg_rtx (QImode);
584 emit_insn (gen_andqi3_internal (out, in, val));
585 if (out != operands[0])
586 emit_move_insn (operands[0], out);
/* General path: widen to HImode and use the 16-bit AND.  */
591 in = copy_to_mode_reg (QImode, in);
593 if (! REG_P (val) && ! CONST_INT_P (val))
594 val = copy_to_mode_reg (QImode, val);
597 out = gen_reg_rtx (QImode);
599 in = simplify_gen_subreg (HImode, in, QImode, 0);
600 outsub = simplify_gen_subreg (HImode, out, QImode, 0);
602 if (! CONST_INT_P (val))
603 val = simplify_gen_subreg (HImode, val, QImode, 0);
605 emit_insn (gen_andhi3 (outsub, in, val));
607 if (out != operands[0])
608 emit_move_insn (operands[0], out);
/* True if X is a CONST_INT usable as a signed 12-bit displacement once
   OFFSET is added.  NOTE(review): the CONST_INT_P guard line of each
   macro is missing from this fragment.  */
611 #define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET) \
613 && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)
/* True if X+OFFSET is an absolute address the chip can reach directly:
   within 0..0x7fff and inside the low (0x100) or high (0x7f00..) window.  */
615 #define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET) \
617 && INTVAL (X) + (OFFSET) >= 0 \
618 && INTVAL (X) + (OFFSET) < 0x8000 \
619 && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))
/* Implements TARGET_LEGITIMATE_ADDRESS_P.  Accepts absolute constants,
   reg+offset, certain auto-modify forms, base registers, and below-100
   symbols.  NOTE(review): fragment — the returns, the base-register
   checks inside PLUS, and part of the auto-modify handling are missing.  */
622 xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
625 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
628 if (GET_CODE (x) == PLUS
629 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
632 /* PR 31232: Do not allow INT+INT as an address. */
637 if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
638 || GET_CODE (x) == POST_INC
639 || GET_CODE (x) == PRE_DEC)
/* Plain base register; in strict mode only hard registers qualify.  */
643 && REGNO_OK_FOR_BASE_P (REGNO (x))
644 && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
647 if (xstormy16_below100_symbol (x, mode))
653 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P.
655 On this chip, this is true if the address is valid with an offset
656 of 0 but not of 6, because in that case it cannot be used as an
657 address for DImode or DFmode, or if the address is a post-increment
658 or pre-decrement address. */
661 xstormy16_mode_dependent_address_p (const_rtx x)
/* Absolute address valid at offset 0 but not at offset 6.  */
663 if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
664 && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
/* Reg+offset address valid at offset 0 but not at offset 6.  */
667 if (GET_CODE (x) == PLUS
668 && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
669 && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
672 /* Auto-increment addresses are now treated generically in recog.c. */
/* Predicate: a memory operand whose address has no displacement
   (i.e. not of the reg+offset form).  */
677 short_memory_operand (rtx x, enum machine_mode mode)
679 if (! memory_operand (x, mode))
681 return (GET_CODE (XEXP (x, 0)) != PLUS);
684 /* Splitter for the 'move' patterns, for modes not directly implemented
685 by hardware. Emit insns to copy a value of mode MODE from SRC to
688 This function is only called when reload_completed. */
/* NOTE(review): fragment — several control-flow lines (if/else heads,
   direction assignments, gcc_unreachable) are missing; the surviving
   lines still show the overall word-by-word copy strategy.  */
691 xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
693 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
694 int direction, end, i;
695 int src_modifies = 0;
696 int dest_modifies = 0;
697 int src_volatile = 0;
698 int dest_volatile = 0;
700 rtx auto_inc_reg_rtx = NULL_RTX;
702 /* Check initial conditions. */
703 gcc_assert (reload_completed
704 && mode != QImode && mode != HImode
705 && nonimmediate_operand (dest, mode)
706 && general_operand (src, mode));
708 /* This case is not supported below, and shouldn't be generated. */
709 gcc_assert (! MEM_P (dest) || ! MEM_P (src));
711 /* This case is very very bad after reload, so trap it now. */
712 gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);
714 /* The general idea is to copy by words, offsetting the source and
715 destination. Normally the least-significant word will be copied
716 first, but for pre-dec operations it's better to copy the
717 most-significant word first. Only one operand can be a pre-dec
720 It's also possible that the copy overlaps so that the direction
/* Record whether the memory side auto-modifies its address register,
   and strip volatility so the per-word MEMs can be marked explicitly.  */
726 mem_operand = XEXP (dest, 0);
727 dest_modifies = side_effects_p (mem_operand);
728 if (auto_inc_p (mem_operand))
729 auto_inc_reg_rtx = XEXP (mem_operand, 0);
730 dest_volatile = MEM_VOLATILE_P (dest);
733 dest = copy_rtx (dest);
734 MEM_VOLATILE_P (dest) = 0;
737 else if (MEM_P (src))
739 mem_operand = XEXP (src, 0);
740 src_modifies = side_effects_p (mem_operand);
741 if (auto_inc_p (mem_operand))
742 auto_inc_reg_rtx = XEXP (mem_operand, 0);
743 src_volatile = MEM_VOLATILE_P (src);
746 src = copy_rtx (src);
747 MEM_VOLATILE_P (src) = 0;
751 mem_operand = NULL_RTX;
/* Choose copy direction: reg-to-reg overlap, pre-dec addressing, and
   dest-overlapping-src-memory each force a particular order.  */
753 if (mem_operand == NULL_RTX)
757 && reg_overlap_mentioned_p (dest, src)
758 && REGNO (dest) > REGNO (src))
761 else if (GET_CODE (mem_operand) == PRE_DEC
762 || (GET_CODE (mem_operand) == PLUS
763 && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
765 else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
769 gcc_assert (REG_P (dest));
770 regno = REGNO (dest);
772 gcc_assert (refers_to_regno_p (regno, regno + num_words,
775 if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
777 else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
781 /* This means something like
782 (set (reg:DI r0) (mem:DI (reg:HI r1)))
783 which we'd need to support by doing the set of the second word
/* Emit one word-mode move per word, in the chosen direction.  */
788 end = direction < 0 ? -1 : num_words;
789 for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
791 rtx w_src, w_dest, insn;
794 w_src = gen_rtx_MEM (word_mode, mem_operand);
796 w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
798 MEM_VOLATILE_P (w_src) = 1;
800 w_dest = gen_rtx_MEM (word_mode, mem_operand);
802 w_dest = simplify_gen_subreg (word_mode, dest, mode,
805 MEM_VOLATILE_P (w_dest) = 1;
807 /* The simplify_subreg calls must always be able to simplify. */
808 gcc_assert (GET_CODE (w_src) != SUBREG
809 && GET_CODE (w_dest) != SUBREG);
811 insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
/* Preserve the REG_INC note so flow analysis sees the auto-inc.  */
812 if (auto_inc_reg_rtx)
813 REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
819 /* Expander for the 'move' patterns. Emit insns to copy a value of
820 mode MODE from SRC to DEST. */
823 xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
/* Lower a PRE_MODIFY destination address into an explicit address
   update (which clobbers carry) followed by a plain MEM.  */
825 if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
827 rtx pmv = XEXP (dest, 0);
828 rtx dest_reg = XEXP (pmv, 0);
829 rtx dest_mod = XEXP (pmv, 1);
830 rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
831 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
833 dest = gen_rtx_MEM (mode, dest_reg);
834 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
/* Same lowering for a PRE_MODIFY source address.  */
836 else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
838 rtx pmv = XEXP (src, 0);
839 rtx src_reg = XEXP (pmv, 0);
840 rtx src_mod = XEXP (pmv, 1);
841 rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
842 rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
844 src = gen_rtx_MEM (mode, src_reg);
845 emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
848 /* There are only limited immediate-to-memory move instructions. */
/* NOTE(review): fragment — the MEM_P (dest) and immediate-source guards
   of this condition are missing.  */
849 if (! reload_in_progress
850 && ! reload_completed
852 && (! CONST_INT_P (XEXP (dest, 0))
853 || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
854 && ! xstormy16_below100_operand (dest, mode)
856 && GET_CODE (src) != SUBREG)
857 src = copy_to_mode_reg (mode, src);
859 /* Don't emit something we would immediately split. */
861 && mode != HImode && mode != QImode)
863 xstormy16_split_move (mode, dest, src);
867 emit_insn (gen_rtx_SET (VOIDmode, dest, src));
872 The stack is laid out as follows:
876 Register save area (up to 4 words)
877 Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)
879 AP-> Return address (two words)
880 9th procedure parameter word
881 10th procedure parameter word
883 last procedure parameter word
885 The frame pointer location is tuned to make it most likely that all
886 parameters and local variables can be accessed using a load-indexed
889 /* A structure to describe the layout. */
890 struct xstormy16_stack_layout
892 /* Size of the topmost three items on the stack. */
894 int register_save_size;
895 int stdarg_save_size;
896 /* Sum of the above items. */
898 /* Various offsets. */
899 int first_local_minus_ap;
/* Does REGNO need to be saved in the prologue?  A register is saved
   when it is live and call-saved, or — in an interrupt function
   (IFUN) — when it is a non-fixed call-used register other than the
   carry flag.  */
904 /* Does REGNO need to be saved? */
905 #define REG_NEEDS_SAVE(REGNUM, IFUN) \
906 ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM]) \
907 || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM] \
908 && (REGNUM != CARRY_REGNUM) \
909 && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))
911 /* Compute the stack layout. */
/* Builds and returns an xstormy16_stack_layout describing the current
   function's frame.  NOTE(review): fragment — the stdarg condition, the
   final return and the else of the small-args case are missing.  */
913 struct xstormy16_stack_layout
914 xstormy16_compute_stack_layout (void)
916 struct xstormy16_stack_layout layout;
918 const int ifun = xstormy16_interrupt_function_p ();
920 layout.locals_size = get_frame_size ();
/* One word of save area per register that REG_NEEDS_SAVE selects.  */
922 layout.register_save_size = 0;
923 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
924 if (REG_NEEDS_SAVE (regno, ifun))
925 layout.register_save_size += UNITS_PER_WORD;
928 layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
930 layout.stdarg_save_size = 0;
932 layout.frame_size = (layout.locals_size
933 + layout.register_save_size
934 + layout.stdarg_save_size);
/* Place the frame pointer so that indexed addressing (range limit
   2048) can reach as much of the frame and arguments as possible.  */
936 if (crtl->args.size <= 2048 && crtl->args.size != -1)
938 if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
939 + crtl->args.size <= 2048)
940 layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
942 layout.fp_minus_ap = 2048 - crtl->args.size;
945 layout.fp_minus_ap = (layout.stdarg_save_size
946 + layout.register_save_size
947 - INCOMING_FRAME_SP_OFFSET);
948 layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
949 - layout.fp_minus_ap);
950 layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
954 /* Worker function for TARGET_CAN_ELIMINATE. */
/* AP->SP elimination is only possible when no frame pointer is needed;
   all other eliminations are always allowed (fall-through arm not
   visible in this fragment).  */
957 xstormy16_can_eliminate (const int from, const int to)
959 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
960 ? ! frame_pointer_needed
964 /* Determine how all the special registers get eliminated. */
/* Returns the offset between register FROM and register TO, derived
   from the computed stack layout.  NOTE(review): fragment — the default
   (gcc_unreachable) arm and final return are missing.  */
967 xstormy16_initial_elimination_offset (int from, int to)
969 struct xstormy16_stack_layout layout;
972 layout = xstormy16_compute_stack_layout ();
974 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
975 result = layout.sp_minus_fp - layout.locals_size;
976 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
977 result = - layout.locals_size;
978 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
979 result = - layout.fp_minus_ap;
980 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
981 result = - (layout.sp_minus_fp + layout.fp_minus_ap);
/* Emit DEST = SRC0 + SRC1 as an HImode add with an explicit carry
   clobber — the form required after reload.  Returns the emitted insn
   (return statement not visible in this fragment).  */
989 emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
991 rtx set, clobber, insn;
993 set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
994 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
995 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
999 /* Called after register allocation to add any instructions needed for
1000 the prologue. Using a prologue insn is favored compared to putting
1001 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1002 since it allows the scheduler to intermix instructions with the
1003 saves of the caller saved registers. In some cases, it might be
1004 necessary to emit a barrier instruction as the last insn to prevent
1007 Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
1008 so that the debug info generation code can handle them properly. */
1011 xstormy16_expand_prologue (void)
1013 struct xstormy16_stack_layout layout;
1017 const int ifun = xstormy16_interrupt_function_p ();
/* (mem:HI (post_inc sp)) — a push on this (upward-growing) stack.  */
1019 mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
1020 mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);
1022 layout = xstormy16_compute_stack_layout ();
1024 if (layout.locals_size >= 32768)
1025 error ("local variable memory requirements exceed capacity");
1027 /* Save the argument registers if necessary. */
1028 if (layout.stdarg_save_size)
1029 for (regno = FIRST_ARGUMENT_REGISTER;
1030 regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
1034 rtx reg = gen_rtx_REG (HImode, regno);
1036 insn = emit_move_insn (mem_push_rtx, reg);
1037 RTX_FRAME_RELATED_P (insn) = 1;
/* Describe the push to the DWARF machinery as a store plus an
   explicit SP adjustment, since the post_inc form confuses it.  */
1039 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1041 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1042 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1044 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1045 plus_constant (stack_pointer_rtx,
1046 GET_MODE_SIZE (Pmode)));
1047 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1048 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1049 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1052 /* Push each of the registers to save. */
1053 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1054 if (REG_NEEDS_SAVE (regno, ifun))
1057 rtx reg = gen_rtx_REG (HImode, regno);
1059 insn = emit_move_insn (mem_push_rtx, reg);
1060 RTX_FRAME_RELATED_P (insn) = 1;
/* Same DWARF annotation as for the stdarg pushes above.  */
1062 dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
1064 XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
1065 gen_rtx_MEM (Pmode, stack_pointer_rtx),
1067 XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
1068 plus_constant (stack_pointer_rtx,
1069 GET_MODE_SIZE (Pmode)));
1070 add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
1071 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
1072 RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
1075 /* It's just possible that the SP here might be what we need for
/* If SP already equals the desired FP value, copy it now before the
   locals allocation changes SP.  */
1077 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1079 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1080 RTX_FRAME_RELATED_P (insn) = 1;
1083 /* Allocate space for local variables. */
1084 if (layout.locals_size)
1086 insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1087 GEN_INT (layout.locals_size));
1088 RTX_FRAME_RELATED_P (insn) = 1;
1091 /* Set up the frame pointer, if required. */
1092 if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
1094 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1095 RTX_FRAME_RELATED_P (insn) = 1;
1097 if (layout.sp_minus_fp)
1099 insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
1100 hard_frame_pointer_rtx,
1101 GEN_INT (- layout.sp_minus_fp));
1102 RTX_FRAME_RELATED_P (insn) = 1;
1107 /* Do we need an epilogue at all? */
/* True when the function can return without any epilogue code: after
   reload, with an empty frame, and not an interrupt handler.  */
1110 direct_return (void)
1112 return (reload_completed
1113 && xstormy16_compute_stack_layout ().frame_size == 0
1114 && ! xstormy16_interrupt_function_p ());
1117 /* Called after register allocation to add any instructions needed for
1118 the epilogue. Using an epilogue insn is favored compared to putting
1119 all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
1120 since it allows the scheduler to intermix instructions with the
1121 saves of the caller saved registers. In some cases, it might be
1122 necessary to emit a barrier instruction as the last insn to prevent
1126 xstormy16_expand_epilogue (void)
1128 struct xstormy16_stack_layout layout;
1131 const int ifun = xstormy16_interrupt_function_p ();
/* (mem:HI (pre_dec sp)) — a pop on this (upward-growing) stack.  */
1133 mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
1134 mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);
1136 layout = xstormy16_compute_stack_layout ();
1138 /* Pop the stack for the locals. */
1139 if (layout.locals_size)
1141 if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
1142 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx)
1144 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1145 GEN_INT (- layout.locals_size));
1148 /* Restore any call-saved registers. */
/* Pop in reverse register order, mirroring the prologue's pushes.  */
1149 for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
1150 if (REG_NEEDS_SAVE (regno, ifun))
1151 emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);
1153 /* Pop the stack for the stdarg save area. */
1154 if (layout.stdarg_save_size)
1155 emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
1156 GEN_INT (- layout.stdarg_save_size));
/* Interrupt handlers use a different return instruction.  */
1160 emit_jump_insn (gen_return_internal_interrupt ());
1162 emit_jump_insn (gen_return_internal ());
/* Implements EPILOGUE_USES: after reload, a call-used register that the
   epilogue restores counts as used at function exit.  */
1166 xstormy16_epilogue_uses (int regno)
1168 if (reload_completed && call_used_regs[regno])
1170 const int ifun = xstormy16_interrupt_function_p ();
1171 return REG_NEEDS_SAVE (regno, ifun);
/* Profiling (-p/-pg) is not implemented for this target.  */
1177 xstormy16_function_profiler (void)
1179 sorry ("function_profiler support");
1182 /* Update CUM to advance past an argument in the argument list. The
1183 values MODE, TYPE and NAMED describe that argument. Once this is
1184 done, the variable CUM is suitable for analyzing the *following*
1185 argument with `TARGET_FUNCTION_ARG', etc.
1187 This function need not do anything if the argument in question was
1188 passed on the stack. The compiler knows how to track the amount of
1189 stack space used for arguments without any special help. However,
1190 it makes life easier for xstormy16_build_va_list if it does update
1194 xstormy16_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1195 const_tree type, bool named ATTRIBUTE_UNUSED)
1197 /* If an argument would otherwise be passed partially in registers,
1198 and partially on the stack, the whole of it is passed on the
/* ... stack instead, so skip the remaining argument registers.  */
1200 if (*cum < NUM_ARGUMENT_REGISTERS
1201 && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1202 *cum = NUM_ARGUMENT_REGISTERS;
1204 *cum += XSTORMY16_WORD_SIZE (type, mode);
/* Implements TARGET_FUNCTION_ARG: return the register for this argument,
   or indicate stack passing.  NOTE(review): fragment — the NULL_RTX
   returns for the VOIDmode and stack cases are not visible.  */
1208 xstormy16_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1209 const_tree type, bool named ATTRIBUTE_UNUSED)
1211 if (mode == VOIDmode)
/* Arguments that must live in memory, or that would not fit entirely
   in the remaining argument registers, go on the stack.  */
1213 if (targetm.calls.must_pass_in_stack (mode, type)
1214 || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
1216 return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
1219 /* Build the va_list type.
1221 For this chip, va_list is a record containing a counter and a pointer.
1222 The counter is of type 'int' and indicates how many bytes
1223 have been used to date. The pointer indicates the stack position
1224 for arguments that have not been passed in registers.
1225 To keep the layout nice, the pointer is first in the structure. */
1228 xstormy16_build_builtin_va_list (void)
1230 tree f_1, f_2, record, type_decl;
1232 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
1233 type_decl = build_decl (BUILTINS_LOCATION,
1234 TYPE_DECL, get_identifier ("__va_list_tag"), record);
/* Field 1: the stack pointer for overflow arguments ("base").  */
1236 f_1 = build_decl (BUILTINS_LOCATION,
1237 FIELD_DECL, get_identifier ("base"),
/* Field 2: bytes consumed so far ("count").  */
1239 f_2 = build_decl (BUILTINS_LOCATION,
1240 FIELD_DECL, get_identifier ("count"),
1241 unsigned_type_node);
1243 DECL_FIELD_CONTEXT (f_1) = record;
1244 DECL_FIELD_CONTEXT (f_2) = record;
1246 TYPE_STUB_DECL (record) = type_decl;
1247 TYPE_NAME (record) = type_decl;
/* Chain the fields and lay out the record; the record is returned
   (return statement not visible in this fragment).  */
1248 TYPE_FIELDS (record) = f_1;
1249 DECL_CHAIN (f_1) = f_2;
1251 layout_type (record);
1256 /* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
1257 is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
1258 variable to initialize.  NEXTARG is the machine independent notion of the
1259 'next' argument after the variable arguments.  */
1262 xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
1264 tree f_base, f_count;
/* va_start is meaningless inside an interrupt handler.  */
1268 if (xstormy16_interrupt_function_p ())
1269 error ("cannot use va_start in interrupt function");
/* Locate the "base" and "count" fields of the va_list record
   (laid out in that order by xstormy16_build_builtin_va_list).  */
1271 f_base = TYPE_FIELDS (va_list_type_node);
1272 f_count = DECL_CHAIN (f_base);
1274 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1275 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* base = virtual_incoming_args - INCOMING_FRAME_SP_OFFSET.  */
1278 t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
1279 u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
1280 u = fold_convert (TREE_TYPE (count), u);
1281 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
1282 t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
1283 TREE_SIDE_EFFECTS (t) = 1;
1284 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* count = number of bytes of named arguments already consumed.  */
1286 t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
1287 build_int_cst (NULL_TREE,
1288 crtl->args.info * UNITS_PER_WORD));
1289 TREE_SIDE_EFFECTS (t) = 1;
1290 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
1293 /* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
1294 of type va_list as a tree, TYPE is the type passed to va_arg.
1295 Note: This algorithm is documented in stormy-abi.  */
1298 xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
1299 gimple_seq *post_p ATTRIBUTE_UNUSED)
1301 tree f_base, f_count;
1303 tree count_tmp, addr, t;
1304 tree lab_gotaddr, lab_fromstack;
1305 int size, size_of_reg_args, must_stack;
/* Fetch the base/count fields of the va_list record.  */
1308 f_base = TYPE_FIELDS (va_list_type_node);
1309 f_count = DECL_CHAIN (f_base);
1311 base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
1312 count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
/* Round the argument size up to a whole number of words.  */
1315 must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
1316 size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
1317 gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);
1319 size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
1321 count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
1322 lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
1323 lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
1324 addr = create_tmp_var (ptr_type_node, NULL);
/* If count + size would overflow the register-save area, the
   argument lives on the stack: branch to lab_fromstack.  */
1330 t = fold_convert (TREE_TYPE (count), size_tree);
1331 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1332 r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
1333 t = build2 (GT_EXPR, boolean_type_node, t, r);
1334 t = build3 (COND_EXPR, void_type_node, t,
1335 build1 (GOTO_EXPR, void_type_node, lab_fromstack),
1337 gimplify_and_add (t, pre_p);
/* Register case: addr = base + count.  */
1339 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
1340 gimplify_assign (addr, t, pre_p);
1342 t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
1343 gimplify_and_add (t, pre_p);
1345 t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
1346 gimplify_and_add (t, pre_p);
/* Stack case.  */
1349 /* Arguments larger than a word might need to skip over some
1350 registers, since arguments are either passed entirely in
1351 registers or entirely on the stack.  */
1352 size = PUSH_ROUNDING (int_size_in_bytes (type));
1353 if (size > 2 || size < 0 || must_stack)
/* If count_tmp is still inside the register area, bump it past
   the whole register-save area first.  */
1357 r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
1358 u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);
1360 t = fold_convert (TREE_TYPE (count), r);
1361 t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
1362 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
1363 gimplify_and_add (t, pre_p);
/* addr = base - (count_tmp - reg_area - SP_OFFSET + size);
   i.e. stack arguments are addressed downward from base.  */
1366 t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
1367 + INCOMING_FRAME_SP_OFFSET);
1368 t = fold_convert (TREE_TYPE (count), t);
1369 t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
1370 t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
1371 fold_convert (TREE_TYPE (count), size_tree));
1372 t = fold_convert (TREE_TYPE (t), fold (t));
1373 t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1374 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
1375 gimplify_assign (addr, t, pre_p);
1377 t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
1378 gimplify_and_add (t, pre_p);
/* Advance count past the consumed argument.  */
1380 t = fold_convert (TREE_TYPE (count), size_tree);
1381 t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
1382 gimplify_assign (count, t, pre_p);
1384 addr = fold_convert (build_pointer_type (type), addr);
1385 return build_va_arg_indirect_ref (addr);
1388 /* Worker function for TARGET_TRAMPOLINE_INIT.  Fill in M_TRAMP with
     four 16-bit words: an instruction loading STATIC_CHAIN_REGNUM, the
     static-chain value, and a far jump to FNDECL's address.  */
1391 xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
1393 rtx temp = gen_reg_rtx (HImode);
1394 rtx reg_fnaddr = gen_reg_rtx (HImode);
1395 rtx reg_addr, reg_addr_mem;
1397 reg_addr = copy_to_reg (XEXP (m_tramp, 0));
1398 reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);
/* Word 0: 0x3130 | STATIC_CHAIN_REGNUM — presumably the encoding of a
   "mov Rchain,#imm" opcode; confirm against the xstormy16 ISA.  */
1400 emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
1401 emit_move_insn (reg_addr_mem, temp);
1402 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1403 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
/* Word 1: the static chain value itself (the mov's immediate).  */
1405 emit_move_insn (temp, static_chain);
1406 emit_move_insn (reg_addr_mem, temp);
1407 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1408 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
/* Word 2: low byte of the function address merged with 0x0200 —
   presumably a "jmpf" opcode byte; confirm against the ISA.  */
1410 emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
1411 emit_move_insn (temp, reg_fnaddr);
1412 emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
1413 emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
1414 emit_move_insn (reg_addr_mem, temp);
1415 emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
1416 reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);
/* Word 3: high byte of the function address.  */
1418 emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
1419 emit_move_insn (reg_addr_mem, reg_fnaddr);
1422 /* Worker function for TARGET_FUNCTION_VALUE.  Return values live in
     RETURN_VALUE_REGNUM, in VALTYPE's mode after PROMOTE_MODE.  */
1425 xstormy16_function_value (const_tree valtype,
1426 const_tree func ATTRIBUTE_UNUSED,
1427 bool outgoing ATTRIBUTE_UNUSED)
1429 enum machine_mode mode;
1430 mode = TYPE_MODE (valtype);
/* Promote small integer modes the same way arguments are promoted.  */
1431 PROMOTE_MODE (mode, 0, valtype);
1432 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1435 /* Worker function for TARGET_LIBCALL_VALUE.  Library-call results use
     the same register as ordinary function values, without promotion.  */
1438 xstormy16_libcall_value (enum machine_mode mode,
1439 const_rtx fun ATTRIBUTE_UNUSED)
1441 return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
1444 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  Only
     RETURN_VALUE_REGNUM ever carries a return value.  */
1447 xstormy16_function_value_regno_p (const unsigned int regno)
1449 return (regno == RETURN_VALUE_REGNUM);
1452 /* A C compound statement that outputs the assembler code for a thunk function,
1453 used to implement C++ virtual function calls with multiple inheritance.  The
1454 thunk acts as a wrapper around a virtual function, adjusting the implicit
1455 object parameter before handing control off to the real function.
1457 First, emit code to add the integer DELTA to the location that contains the
1458 incoming first argument.  Assume that this argument contains a pointer, and
1459 is the one used to pass the `this' pointer in C++.  This is the incoming
1460 argument *before* the function prologue, e.g. `%o0' on a sparc.  The
1461 addition must preserve the values of all other incoming arguments.
1463 After the addition, emit code to jump to FUNCTION, which is a
1464 `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
1465 the return address.  Hence returning from FUNCTION will return to whoever
1466 called the current `thunk'.
1468 The effect must be as if @var{function} had been called directly
1469 with the adjusted first argument.  This macro is responsible for
1470 emitting all of the code for a thunk function;
1471 TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
1474 The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
1475 extracted from it.)  It might possibly be useful on some targets, but
1479 xstormy16_asm_output_mi_thunk (FILE *file,
1480 tree thunk_fndecl ATTRIBUTE_UNUSED,
1481 HOST_WIDE_INT delta,
1482 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
1485 int regnum = FIRST_ARGUMENT_REGISTER;
1487 /* There might be a hidden first argument for a returned structure.  */
1488 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
/* Add DELTA (truncated to 16 bits) to the `this' register, then
   far-jump to the real function.  */
1491 fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
1492 fputs ("\tjmpf ", file);
1493 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
1497 /* The purpose of this function is to override the default behavior of
1498 BSS objects.  Normally, they go into .bss or .sbss via ".common"
1499 directives, but we need to override that and put them in
1500 .bss_below100.  We can't just use a section override (like we do
1501 for .data_below100), because that makes them initialized rather
1502 than uninitialized.  */
1505 xstormy16_asm_output_aligned_common (FILE *stream,
1512 rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
/* Below-100 objects get emitted explicitly into bss100_section
   rather than via .comm/.local.  */
1517 && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
1518 && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
1523 switch_to_section (bss100_section);
1531 name2 = default_strip_name_encoding (name);
1533 fprintf (stream, "\t.globl\t%s\n", name2);
1535 fprintf (stream, "\t.p2align %d\n", p2align);
1536 fprintf (stream, "\t.type\t%s, @object\n", name2);
1537 fprintf (stream, "\t.size\t%s, %d\n", name2, size);
1538 fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
/* Ordinary common objects: emit .local (when not global) and .comm.  */
1544 fprintf (stream, "\t.local\t");
1545 assemble_name (stream, name);
1546 fprintf (stream, "\n");
1548 fprintf (stream, "\t.comm\t");
1549 assemble_name (stream, name);
1550 fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
1553 /* Implement TARGET_ASM_INIT_SECTIONS.  Create the .bss_below100
     section used for uninitialized below-100 objects.  */
1556 xstormy16_asm_init_sections (void)
1559 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
1560 output_section_asm_op,
1561 "\t.section \".bss_below100\",\"aw\",@nobits");
1564 /* Mark symbols with the "below100" attribute so that we can use the
1565 special addressing modes for them.  */
1568 xstormy16_encode_section_info (tree decl, rtx r, int first)
1570 default_encode_section_info (decl, r, first);
/* Either spelling of the attribute marks the symbol.  */
1572 if (TREE_CODE (decl) == VAR_DECL
1573 && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
1574 || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
1576 rtx symbol = XEXP (r, 0);
1578 gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
1579 SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
1583 #undef TARGET_ASM_CONSTRUCTOR
1584 #define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
1585 #undef TARGET_ASM_DESTRUCTOR
1586 #define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor
1588 /* Output constructors and destructors.  Just like
1589 default_named_section_asm_out_* but don't set the sections writable.  */
1592 xstormy16_asm_out_destructor (rtx symbol, int priority)
1594 const char *section = ".dtors";
1597 /* ??? This only works reliably with the GNU linker.  */
1598 if (priority != DEFAULT_INIT_PRIORITY)
1600 sprintf (buf, ".dtors.%.5u",
1601 /* Invert the numbering so the linker puts us in the proper
1602 order; constructors are run from right to left, and the
1603 linker sorts in increasing order.  */
1604 MAX_INIT_PRIORITY - priority);
/* Emit the pointer-sized entry into the (read-only) section.  */
1608 switch_to_section (get_section (section, 0, NULL));
1609 assemble_align (POINTER_SIZE);
1610 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
/* Mirror of xstormy16_asm_out_destructor for .ctors entries.  */
1614 xstormy16_asm_out_constructor (rtx symbol, int priority)
1616 const char *section = ".ctors";
1619 /* ??? This only works reliably with the GNU linker.  */
1620 if (priority != DEFAULT_INIT_PRIORITY)
1622 sprintf (buf, ".ctors.%.5u",
1623 /* Invert the numbering so the linker puts us in the proper
1624 order; constructors are run from right to left, and the
1625 linker sorts in increasing order.  */
1626 MAX_INIT_PRIORITY - priority);
/* Emit the pointer-sized entry into the (read-only) section.  */
1630 switch_to_section (get_section (section, 0, NULL));
1631 assemble_align (POINTER_SIZE);
1632 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
1635 /* Print a memory address as an operand to reference that memory location.  */
1638 xstormy16_print_operand_address (FILE *file, rtx address)
1640 HOST_WIDE_INT offset;
1641 int pre_dec, post_inc;
1643 /* There are a few easy cases.  */
/* Absolute constant address: print it masked to 16 bits.  */
1644 if (CONST_INT_P (address))
1646 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
/* Symbolic constant or label.  */
1650 if (CONSTANT_P (address) || LABEL_P (address))
1652 output_addr_const (file, address);
1656 /* Otherwise, it's hopefully something of the form
1657 (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)).  */
1658 if (GET_CODE (address) == PLUS)
1660 gcc_assert (CONST_INT_P (XEXP (address, 1)));
1661 offset = INTVAL (XEXP (address, 1));
1662 address = XEXP (address, 0);
/* Strip any pre-decrement / post-increment wrapper.  */
1667 pre_dec = (GET_CODE (address) == PRE_DEC);
1668 post_inc = (GET_CODE (address) == POST_INC);
1669 if (pre_dec || post_inc)
1670 address = XEXP (address, 0);
1672 gcc_assert (REG_P (address));
1677 fputs (reg_names [REGNO (address)], file);
1681 fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
1685 /* Print an operand to an assembler instruction.  */
1688 xstormy16_print_operand (FILE *file, rtx x, int code)
/* Case 'B' (elided switch label):  */
1693 /* There is either one bit set, or one bit clear, in X.
1694 Print it preceded by '#'.  */
/* bits_set[v] = popcount of the 3-bit value v; used to decide
   whether the mask is "one bit set" or "one bit clear".  */
1696 static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
1697 HOST_WIDE_INT xx = 1;
1700 if (CONST_INT_P (x))
1703 output_operand_lossage ("'B' operand is not constant");
1705 /* GCC sign-extends masks with the MSB set, so we have to
1706 detect all the cases that differ only in sign extension
1707 beyond the bits we care about.  Normally, the predicates
1708 and constraints ensure that we have the right values.  This
1709 works correctly for valid masks.  */
1710 if (bits_set[xx & 7] <= 1)
1712 /* Remove sign extension bits.  */
1713 if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
1715 else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
1717 l = exact_log2 (xx);
1721 /* Add sign extension bits.  */
1722 if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
1723 xx |= ~(HOST_WIDE_INT)0xff;
1724 else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
1725 xx |= ~(HOST_WIDE_INT)0xffff;
/* One bit clear: print the position of that bit.  */
1726 l = exact_log2 (~xx);
1730 output_operand_lossage ("'B' operand has multiple bits set");
1732 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
/* Case (elided): bare symbol/label operand.  */
1737 /* Print the symbol without a surrounding @fptr().  */
1738 if (GET_CODE (x) == SYMBOL_REF)
1739 assemble_name (file, XSTR (x, 0));
1740 else if (LABEL_P (x))
1741 output_asm_label (x);
1743 xstormy16_print_operand_address (file, x);
/* Cases 'o' / 'O' (elided labels):  */
1748 /* Print the immediate operand less one, preceded by '#'.
1749 For 'O', negate it first.  */
1751 HOST_WIDE_INT xx = 0;
1753 if (CONST_INT_P (x))
1756 output_operand_lossage ("'o' operand is not constant");
1761 fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
/* Case 'b' (elided label):  */
1766 /* Print the shift mask for bp/bn.  */
1768 HOST_WIDE_INT xx = 1;
1771 if (CONST_INT_P (x))
1774 output_operand_lossage ("'B' operand is not constant");
1778 fputs (IMMEDIATE_PREFIX, file);
1779 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1784 /* Handled below.  */
1788 output_operand_lossage ("xstormy16_print_operand: unknown code");
/* Default operand printing by RTX code.  */
1792 switch (GET_CODE (x))
1795 fputs (reg_names [REGNO (x)], file);
1799 xstormy16_print_operand_address (file, XEXP (x, 0));
1803 /* Some kind of constant or label; an immediate operand,
1804 so prefix it with '#' for the assembler.  */
1805 fputs (IMMEDIATE_PREFIX, file);
1806 output_addr_const (file, x);
1813 /* Expander for the `casesi' pattern.
1814 INDEX is the index of the switch statement.
1815 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1816 to the first table entry.
1817 RANGE is the number of table entries.
1818 TABLE is an ADDR_VEC that is the jump table.
1819 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1820 range LOWER_BOUND to LOWER_BOUND + RANGE - 1.  */
1823 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1824 rtx table, rtx default_label)
1826 HOST_WIDE_INT range_i = INTVAL (range);
1829 /* This code uses 'br', so it can deal only with tables of size up to
/* 'br' reach limits the table: 8192 entries * 4 bytes each.  */
1831 if (range_i >= 8192)
1832 sorry ("switch statement of size %lu entries too large",
1833 (unsigned long) range_i);
/* index -= lower_bound; branch to default if out of range.  */
1835 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1837 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
/* Scale the HImode index by 4 (each table entry is one jmpf) and
   dispatch through the pc-relative table jump.  */
1839 int_index = gen_lowpart_common (HImode, index);
1840 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1841 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1844 /* Output an ADDR_VEC.  It is output as a sequence of 'jmpf'
1845 instructions, without label or alignment or any other special
1846 constructs.  We know that the previous instruction will be the
1847 `tablejump_pcrel' output above.
1849 TODO: it might be nice to output 'br' instructions if they could
1853 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
/* Emit one "jmpf <case-label>" per table entry, in order.  */
1857 switch_to_section (current_function_section ());
1859 vlen = XVECLEN (table, 0);
1860 for (idx = 0; idx < vlen; idx++)
1862 fputs ("\tjmpf ", file);
1863 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1868 /* Expander for the `call' patterns.
1869 RETVAL is the RTL for the return register or NULL for void functions.
1870 DEST is the function to call, expressed as a MEM.
1871 COUNTER is ignored.  */
1874 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1877 enum machine_mode mode;
1879 gcc_assert (MEM_P (dest));
1880 dest = XEXP (dest, 0);
/* Force a non-constant, non-register target into a register.  */
1882 if (! CONSTANT_P (dest) && ! REG_P (dest))
1883 dest = force_reg (Pmode, dest);
1888 mode = GET_MODE (retval);
/* Build (call ...), wrapped in (set retval ...) when a value is
   returned.  */
1890 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1893 call = gen_rtx_SET (VOIDmode, retval, call);
/* Indirect calls carry an extra USE of a zeroed temp register —
   presumably required by the call-pattern's constraints; see the
   machine description to confirm.  */
1895 if (! CONSTANT_P (dest))
1897 temp = gen_reg_rtx (HImode);
1898 emit_move_insn (temp, const0_rtx);
1903 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1904 gen_rtx_USE (VOIDmode, temp)));
1905 emit_call_insn (call);
1908 /* Expanders for multiword computational operations.  */
1910 /* Expander for arithmetic operations; emit insns to compute
1912 (set DEST (CODE:MODE SRC0 SRC1))
1914 When CODE is COMPARE, a branch template is generated
1915 (this saves duplicating code in xstormy16_split_cbranch).  */
1918 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1919 rtx dest, rtx src0, rtx src1)
1921 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1926 emit_move_insn (src0, const0_rtx);
/* Process the multiword operation one HImode word at a time,
   least-significant word first, threading the carry through.  */
1928 for (i = 0; i < num_words; i++)
1930 rtx w_src0, w_src1, w_dest;
1933 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1934 i * UNITS_PER_WORD);
1935 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1936 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
/* Addition: use the carry-only form when adding a zero word.  */
1942 && CONST_INT_P (w_src1)
1943 && INTVAL (w_src1) == 0)
1947 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1949 insn = gen_addchi5 (w_dest, w_src0, w_src1);
/* For COMPARE, the final word becomes a conditional branch combining
   the subtract-with-borrow, a clobber of carry, and the jump.  */
1955 if (code == COMPARE && i == num_words - 1)
1957 rtx branch, sub, clobber, sub_1;
1959 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1960 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
1961 sub = gen_rtx_SET (VOIDmode, w_dest,
1962 gen_rtx_MINUS (HImode, sub_1, w_src1));
1963 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1964 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1965 gen_rtx_IF_THEN_ELSE (VOIDmode,
1971 insn = gen_rtx_PARALLEL (VOIDmode,
1972 gen_rtvec (3, branch, sub, clobber));
/* Subtraction: carry-only form when subtracting a zero word.  */
1976 && CONST_INT_P (w_src1)
1977 && INTVAL (w_src1) == 0)
1980 insn = gen_subchi4 (w_dest, w_src0, w_src1);
1982 insn = gen_subchi5 (w_dest, w_src0, w_src1);
/* Logical ops: skip identity words (0 for IOR/XOR, -1 for AND).  */
1988 if (CONST_INT_P (w_src1)
1989 && INTVAL (w_src1) == -(code == AND))
1992 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
1997 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
2008 /* If we emit nothing, try_split() will think we failed.  So emit
2009 something that does nothing and can be optimized away.  */
2014 /* The shift operations are split at output time for constant values;
2015 variable-width shifts get handed off to a library routine.
2017 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2018 SIZE_R will be a CONST_INT, X will be a hard register.  */
2021 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2022 rtx x, rtx size_r, rtx temp)
2025 const char *r0, *r1, *rt;
2028 gcc_assert (CONST_INT_P (size_r)
/* Reduce the count modulo the mode width.  */
2032 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
/* r0 = low word of the register pair, r1 = high word.  */
2037 r0 = reg_names [REGNO (x)];
2038 r1 = reg_names [REGNO (x) + 1];
2040 /* For shifts of size 1, we can use the rotate instructions.  */
2046 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2049 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2052 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2060 /* For large shifts, there are easy special cases.  */
/* Exactly 16: a plain word move plus zero/sign fill.  */
2066 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2069 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2072 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
/* More than 16: word move, fill, then shift the remainder.  */
2084 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2085 r1, r0, r0, r1, (int) size - 16);
2088 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2089 r0, r1, r1, r0, (int) size - 16);
2092 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2093 r0, r1, r1, r0, (int) size - 16);
2101 /* For the rest, we have to do more work.  In particular, we
2102 need a temporary.  */
/* General case (1 < size < 16): save the bits crossing the word
   boundary in TEMP, shift both words, then OR the saved bits in.  */
2103 rt = reg_names [REGNO (temp)];
2108 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2109 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2114 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2115 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2120 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2121 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2130 /* Attribute handling.  */
2132 /* Return nonzero if the function is an interrupt function.  */
2135 xstormy16_interrupt_function_p (void)
2139 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2140 any functions are declared, which is demonstrably wrong, but
2141 it is worked around here.  FIXME.  */
/* Look for the "interrupt" attribute on the current function's type.  */
2145 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2146 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2149 #undef TARGET_ATTRIBUTE_TABLE
2150 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2152 static tree xstormy16_handle_interrupt_attribute
2153 (tree *, tree, tree, int, bool *);
2154 static tree xstormy16_handle_below100_attribute
2155 (tree *, tree, tree, int, bool *);
/* Machine-specific attributes: "interrupt" applies to function types;
   both spellings of "below100" apply to variables/types.  */
2157 static const struct attribute_spec xstormy16_attribute_table[] =
2159 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler.  */
2160 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
2161 { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2162 { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2163 { NULL, 0, 0, false, false, false, NULL }
2166 /* Handle an "interrupt" attribute;
2167 arguments as in struct attribute_spec.handler.  */
2170 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2171 tree args ATTRIBUTE_UNUSED,
2172 int flags ATTRIBUTE_UNUSED,
/* Only function types may carry "interrupt"; warn and drop otherwise.  */
2175 if (TREE_CODE (*node) != FUNCTION_TYPE)
2177 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2179 *no_add_attrs = true;
2185 /* Handle a "below100" attribute;
2186 arguments as in struct attribute_spec.handler.  */
2189 xstormy16_handle_below100_attribute (tree *node,
2190 tree name ATTRIBUTE_UNUSED,
2191 tree args ATTRIBUTE_UNUSED,
2192 int flags ATTRIBUTE_UNUSED,
/* Only variables, pointer types and type declarations qualify.  */
2195 if (TREE_CODE (*node) != VAR_DECL
2196 && TREE_CODE (*node) != POINTER_TYPE
2197 && TREE_CODE (*node) != TYPE_DECL)
2199 warning (OPT_Wattributes,
2200 "%<__BELOW100__%> attribute only applies to variables");
2201 *no_add_attrs = true;
2203 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
/* Automatic (stack) variables cannot live below address 100.  */
2205 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2207 warning (OPT_Wattributes, "__BELOW100__ attribute not allowed "
2208 "with auto storage class");
2209 *no_add_attrs = true;
2216 #undef TARGET_INIT_BUILTINS
2217 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2218 #undef TARGET_EXPAND_BUILTIN
2219 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2225 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2226 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
2230 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2231 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2232 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2233 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2234 { NULL, 0, NULL, NULL }
/* Register the machine-specific builtins listed in s16builtins,
   constructing each one's prototype from its arg_types string
   (s/S = short, l/L = long; upper case = unsigned).  */
2238 xstormy16_init_builtins (void)
2240 tree args, ret_type, arg;
2243 ret_type = void_type_node;
2245 for (i = 0; s16builtins[i].name; i++)
/* Build the argument list right-to-left, ending in void_list_node.  */
2247 args = void_list_node;
2248 for (a = strlen (s16builtins[i].arg_types) - 1; a >= 0; a--)
2250 switch (s16builtins[i].arg_types[a])
2252 case 's': arg = short_integer_type_node; break;
2253 case 'S': arg = short_unsigned_type_node; break;
2254 case 'l': arg = long_integer_type_node; break;
2255 case 'L': arg = long_unsigned_type_node; break;
2256 default: gcc_unreachable ();
2261 args = tree_cons (NULL_TREE, arg, args);
/* The builtin's function code is its index into s16builtins.  */
2263 add_builtin_function (s16builtins[i].name,
2264 build_function_type (ret_type, args),
2265 i, BUILT_IN_MD, NULL, NULL);
/* Expand a call to one of the s16builtins.  The arg_ops string maps
   insn operand slots to builtin arguments ('0'..'9'), the temp
   register ('t'), or the return value ('r').  */
2270 xstormy16_expand_builtin (tree exp, rtx target,
2271 rtx subtarget ATTRIBUTE_UNUSED,
2272 enum machine_mode mode ATTRIBUTE_UNUSED,
2273 int ignore ATTRIBUTE_UNUSED)
2275 rtx op[10], args[10], pat, copyto[10], retval = 0;
2276 tree fndecl, argtree;
2279 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2280 argtree = TREE_OPERAND (exp, 1);
/* DECL_FUNCTION_CODE is the index assigned in xstormy16_init_builtins.  */
2281 i = DECL_FUNCTION_CODE (fndecl);
2282 code = s16builtins[i].md_code;
/* Expand up to 10 actual arguments into RTL.  */
2284 for (a = 0; a < 10 && argtree; a++)
2286 args[a] = expand_normal (TREE_VALUE (argtree));
2287 argtree = TREE_CHAIN (argtree);
/* Assign an rtx to each insn operand according to arg_ops.  */
2290 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2292 char ao = s16builtins[i].arg_ops[o];
2293 char c = insn_data[code].operand[o].constraint[0];
2294 enum machine_mode omode;
2298 omode = (enum machine_mode) insn_data[code].operand[o].mode;
/* 'r': the result — reuse TARGET if the caller supplied one.  */
2300 op[o] = target ? target : gen_reg_rtx (omode);
/* 't': a scratch register.  */
2302 op[o] = gen_reg_rtx (omode);
/* Digit: the corresponding expanded argument.  */
2304 op[o] = args[(int) hex_value (ao)];
/* If the operand fails its predicate, copy through a register;
   output operands ('+'/'=' constraints) get a fresh register and a
   copy-back recorded in copyto[].  */
2306 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2308 if (c == '+' || c == '=')
2311 op[o] = gen_reg_rtx (omode);
2314 op[o] = copy_to_mode_reg (omode, op[o]);
2321 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2322 op[5], op[6], op[7], op[8], op[9]);
/* Copy fixed-up output operands back to their real destinations.  */
2325 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2328 emit_move_insn (copyto[o], op[o]);
2329 if (op[o] == retval)
2336 /* Look for combinations of insns that can be converted to BN or BP
2337 opcodes.  This is, unfortunately, too complex to do with MD
/* INSN is a conditional branch (cbranchhi/cbranchhi_neg).  Walk
   backwards looking for the sign/zero-extend or AND that feeds the
   compared register and, when everything matches a single-bit test of
   a below-100 memory location, rewrite the branch condition so it can
   be emitted as a bn/bp instruction, deleting the feeder insns.  */
2341 combine_bnp (rtx insn)
2343 int insn_code, regno, need_extend;
2345 rtx cond, reg, and_insn, load, qireg, mem;
2346 enum machine_mode load_mode = QImode;
2347 enum machine_mode and_mode = QImode;
2348 rtx shift = NULL_RTX;
2350 insn_code = recog_memoized (insn);
2351 if (insn_code != CODE_FOR_cbranchhi
2352 && insn_code != CODE_FOR_cbranchhi_neg)
/* Dig the comparison out of the branch pattern.  */
2355 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2356 cond = XEXP (cond, 1);		   /* if */
2357 cond = XEXP (cond, 0);		   /* cond */
2358 switch (GET_CODE (cond))
/* The compared register must be tested against zero and must die
   here, or the transformation is not safe.  */
2372 reg = XEXP (cond, 0);
2375 regno = REGNO (reg);
2376 if (XEXP (cond, 1) != const0_rtx)
2378 if (! find_regno_note (insn, REG_DEAD, regno))
2380 qireg = gen_rtx_REG (QImode, regno);
2384 /* LT and GE conditionals should have a sign extend before
/* Scan backwards for the sign-extend (or the movhi used to test
   bit 15), giving up if REG is touched in between.  */
2386 for (and_insn = prev_real_insn (insn); and_insn;
2387 and_insn = prev_real_insn (and_insn))
2389 int and_code = recog_memoized (and_insn);
2391 if (and_code == CODE_FOR_extendqihi2
2392 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2393 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
2396 if (and_code == CODE_FOR_movhi_internal
2397 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
2399 /* This is for testing bit 15.  */
2404 if (reg_mentioned_p (reg, and_insn))
2407 if (GET_CODE (and_insn) != NOTE
2408 && GET_CODE (and_insn) != INSN)
2414 /* EQ and NE conditionals have an AND before them.  */
2415 for (and_insn = prev_real_insn (insn); and_insn;
2416 and_insn = prev_real_insn (and_insn))
2418 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2419 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2420 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
2423 if (reg_mentioned_p (reg, and_insn))
2426 if (GET_CODE (and_insn) != NOTE
2427 && GET_CODE (and_insn) != INSN)
2433 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2434 followed by an AND like this:
2436 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2437 (clobber (reg:BI carry))]
2439 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2441 Attempt to detect this here.  */
2442 for (shift = prev_real_insn (and_insn); shift;
2443 shift = prev_real_insn (shift))
2445 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2446 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2447 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2450 if (reg_mentioned_p (reg, shift)
2451 || (GET_CODE (shift) != NOTE
2452 && GET_CODE (shift) != INSN))
/* Now find the load of the below-100 memory operand that feeds
   the test, again giving up if REG is disturbed in between.  */
2463 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
2465 load = prev_real_insn (load))
2467 int load_code = recog_memoized (load);
2469 if (load_code == CODE_FOR_movhi_internal
2470 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2471 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2472 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2478 if (load_code == CODE_FOR_movqi_internal
2479 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2480 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2486 if (load_code == CODE_FOR_zero_extendqihi2
2487 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2488 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2495 if (reg_mentioned_p (reg, load))
2498 if (GET_CODE (load) != NOTE
2499 && GET_CODE (load) != INSN)
2505 mem = SET_SRC (PATTERN (load));
/* Sign tests examine the MSB of the loaded mode.  */
2509 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2511 /* If the mem includes a zero-extend operation and we are
2512 going to generate a sign-extend operation then move the
2513 mem inside the zero-extend.  */
2514 if (GET_CODE (mem) == ZERO_EXTEND)
2515 mem = XEXP (mem, 0);
/* For EQ/NE the AND's constant must be a single-bit mask; shift it
   back up if a right-shift preceded the AND.  */
2519 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2523 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
2526 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
/* Narrow an HImode access to the QImode byte containing the bit.  */
2529 if (load_mode == HImode)
2531 rtx addr = XEXP (mem, 0);
2533 if (! (mask & 0xff))
2535 addr = plus_constant (addr, 1);
2538 mem = gen_rtx_MEM (QImode, addr);
/* Rewrite the branch condition to test the memory bit directly.  */
2542 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2544 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
/* Force re-recognition of the modified branch.  */
2546 INSN_CODE (insn) = -1;
/* The feeder insns are now dead.  */
2549 if (and_insn != insn)
2550 delete_insn (and_insn);
2552 if (shift != NULL_RTX)
2553 delete_insn (shift);
/* Machine-dependent reorg pass: scan every insn and try to fold
   single-bit tests of below-100 memory into bn/bp branches
   (see combine_bnp above).  */
2557 xstormy16_reorg (void)
2561 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2563 if (! JUMP_P (insn))
2569 /* Worker function for TARGET_RETURN_IN_MEMORY.  Return true if a value
   of TYPE must be returned in memory rather than in registers: either
   its size is not a compile-time constant (int_size_in_bytes returned
   -1, e.g. a variable-sized type) or it is larger than the block of
   argument registers (UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS bytes).
   FNTYPE is unused.
   NOTE(review): the enclosing braces and the `static bool` specifier
   are not visible in this excerpt.  */
2572 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2574   const HOST_WIDE_INT size = int_size_in_bytes (type);
2575   return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2578 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE.  Turn on
   -fomit-frame-pointer at -O1 and above; the all-zeros entry
   terminates the table.
   NOTE(review): "xstorym16" looks like a typo for "xstormy16", but the
   same spelling is used at the TARGET_OPTION_OPTIMIZATION_TABLE macro
   definition, so a rename would have to change both sites together.  */
2579 static const struct default_options xstorym16_option_optimization_table[] =
2581     { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
2582     { OPT_LEVELS_NONE, 0, NULL, 0 }
/* Initialize the GCC target structure: override the default target
   hooks with the xstormy16-specific implementations defined earlier
   in this file, then instantiate `targetm`.  */

/* Assembler output: directives for aligned 16/32-bit data, and
   section-info encoding (needed for the below-100 address space).  */
2585 #undef TARGET_ASM_ALIGNED_HI_OP
2586 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2587 #undef TARGET_ASM_ALIGNED_SI_OP
2588 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2589 #undef TARGET_ENCODE_SECTION_INFO
2590 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info

2592 /* Select_section doesn't handle .bss_below100.  */
2593 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2594 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

/* Thunk emission for C++ virtual dispatch (non-vcall offsets only).  */
2596 #undef TARGET_ASM_OUTPUT_MI_THUNK
2597 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2598 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2599 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

/* RTX and address cost estimation for the optimizers.  */
2601 #undef TARGET_RTX_COSTS
2602 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2603 #undef TARGET_ADDRESS_COST
2604 #define TARGET_ADDRESS_COST xstormy16_address_cost

/* Varargs (va_list) support.  */
2606 #undef TARGET_BUILD_BUILTIN_VA_LIST
2607 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2608 #undef TARGET_EXPAND_BUILTIN_VA_START
2609 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2610 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2611 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr

/* Calling conventions: argument passing, promotion and return values.  */
2613 #undef TARGET_PROMOTE_FUNCTION_MODE
2614 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2615 #undef TARGET_PROMOTE_PROTOTYPES
2616 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

2618 #undef TARGET_FUNCTION_ARG
2619 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2620 #undef TARGET_FUNCTION_ARG_ADVANCE
2621 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance

2623 #undef TARGET_RETURN_IN_MEMORY
2624 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2625 #undef TARGET_FUNCTION_VALUE
2626 #define TARGET_FUNCTION_VALUE xstormy16_function_value
2627 #undef TARGET_LIBCALL_VALUE
2628 #define TARGET_LIBCALL_VALUE xstormy16_libcall_value
2629 #undef TARGET_FUNCTION_VALUE_REGNO_P
2630 #define TARGET_FUNCTION_VALUE_REGNO_P xstormy16_function_value_regno_p

/* Late machine-dependent pass (the bit-test branch combination).  */
2632 #undef TARGET_MACHINE_DEPENDENT_REORG
2633 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg

/* Reload class preferences (same hook serves input and output).  */
2635 #undef TARGET_PREFERRED_RELOAD_CLASS
2636 #define TARGET_PREFERRED_RELOAD_CLASS xstormy16_preferred_reload_class
2637 #undef TARGET_PREFERRED_OUTPUT_RELOAD_CLASS
2638 #define TARGET_PREFERRED_OUTPUT_RELOAD_CLASS xstormy16_preferred_reload_class

/* Address legitimacy and mode dependence.  */
2640 #undef TARGET_LEGITIMATE_ADDRESS_P
2641 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2642 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
2643 #define TARGET_MODE_DEPENDENT_ADDRESS_P xstormy16_mode_dependent_address_p

/* Frame-pointer elimination and trampolines.  */
2645 #undef TARGET_CAN_ELIMINATE
2646 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate

2648 #undef TARGET_TRAMPOLINE_INIT
2649 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init

2651 #undef TARGET_OPTION_OPTIMIZATION_TABLE
2652 #define TARGET_OPTION_OPTIMIZATION_TABLE xstorym16_option_optimization_table

/* The target hook vector itself, built from the macros above.  */
2654 struct gcc_target targetm = TARGET_INITIALIZER;

/* Generated garbage-collection roots for this file's GTY(()) data.  */
2656 #include "gt-stormy16.h"