1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "c-family/c-common.h"
39 #include "diagnostic-core.h"
45 #include "langhooks.h"
48 #include "target-def.h"
52 /* Maximal allowed offset for an address in the LD command */
53 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
55 /* Return true if STR starts with PREFIX and false, otherwise. */
56 #define STR_PREFIX_P(STR,PREFIX) (0 == strncmp (STR, PREFIX, strlen (PREFIX)))
58 /* The 4 bits starting at SECTION_MACH_DEP are reserved to store the
59 address space where data is to be located.
60 As the only non-generic address spaces are all located in Flash,
61 this can be used to test if data shall go into some .progmem* section.
62 This must be the rightmost field of machine dependent section flags. */
63 #define AVR_SECTION_PROGMEM (0xf * SECTION_MACH_DEP)
65 /* Similar 4-bit region for SYMBOL_REF_FLAGS.  Multiplying the 4-bit
   address-space value by SYMBOL_FLAG_MACH_DEP shifts it into the
   machine-dependent bit-field of the symbol flags. */
66 #define AVR_SYMBOL_FLAG_PROGMEM (0xf * SYMBOL_FLAG_MACH_DEP)
/* Similar 4-bit region in SYMBOL_REF_FLAGS:
   Set address-space AS in SYMBOL_REF_FLAGS of SYM.
   Fix: the macro body must reference the parameter SYM, not the free
   identifier `sym' -- the latter only compiles when every call site
   happens to name its variable `sym'.  Wrapped in do { } while (0)
   so the macro behaves as a single statement. */
#define AVR_SYMBOL_SET_ADDR_SPACE(SYM,AS)                       \
  do {                                                          \
    SYMBOL_REF_FLAGS (SYM) &= ~AVR_SYMBOL_FLAG_PROGMEM;         \
    SYMBOL_REF_FLAGS (SYM) |= (AS) * SYMBOL_FLAG_MACH_DEP;      \
  } while (0)

/* Read address-space from SYMBOL_REF_FLAGS of SYM.
   Inverse of AVR_SYMBOL_SET_ADDR_SPACE: extracts the 4-bit field
   and shifts it back down by dividing by SYMBOL_FLAG_MACH_DEP. */
#define AVR_SYMBOL_GET_ADDR_SPACE(SYM)                          \
  ((SYMBOL_REF_FLAGS (SYM) & AVR_SYMBOL_FLAG_PROGMEM)           \
   / SYMBOL_FLAG_MACH_DEP)
81 /* Known address spaces. The order must be the same as in the respective
82 enum from avr.h (or designated initialized must be used).
   Fields (per avr_addrspace_t): address-space id, in-flash flag,
   pointer size in bytes, keyword name, 64 KiB flash segment number.
   NOTE(review): the listing is missing lines here (e.g. the opening
   brace of the initializer) -- confirm against the full source. */
83 const avr_addrspace_t avr_addrspace[] =
85 { ADDR_SPACE_RAM, 0, 2, "" , 0 },
86 { ADDR_SPACE_FLASH, 1, 2, "__flash", 0 },
87 { ADDR_SPACE_FLASH1, 1, 2, "__flash1", 1 },
88 { ADDR_SPACE_FLASH2, 1, 2, "__flash2", 2 },
89 { ADDR_SPACE_FLASH3, 1, 2, "__flash3", 3 },
90 { ADDR_SPACE_FLASH4, 1, 2, "__flash4", 4 },
91 { ADDR_SPACE_FLASH5, 1, 2, "__flash5", 5 },
92 { ADDR_SPACE_MEMX, 1, 3, "__memx", 0 },
96 /* Map 64-k Flash segment to section prefix. */
97 static const char* const progmem_section_prefix[6] =
107 /* Holding RAM addresses of some SFRs used by the compiler and that
108 are unique over all devices in an architecture like 'avr4'. */
112 /* SREG: The processor status */
115 /* RAMPX, RAMPY, RAMPD and CCP of XMEGA */
121 /* RAMPZ: The high byte of 24-bit address used with ELPM */
124 /* SP: The stack pointer and its low and high byte */
   /* NOTE(review): the struct member declarations between these comments
      are missing from this listing; see the full source for the
      avr_addr_t definition. */
129 static avr_addr_t avr_addr;
132 /* Prototypes for local helper functions. */
   /* Output helpers for the QI/HI/SI move patterns (register <-> memory). */
134 static const char* out_movqi_r_mr (rtx, rtx[], int*);
135 static const char* out_movhi_r_mr (rtx, rtx[], int*);
136 static const char* out_movsi_r_mr (rtx, rtx[], int*);
137 static const char* out_movqi_mr_r (rtx, rtx[], int*);
138 static const char* out_movhi_mr_r (rtx, rtx[], int*);
139 static const char* out_movsi_mr_r (rtx, rtx[], int*);
   /* Function-attribute predicates and prologue/epilogue helpers. */
141 static int avr_naked_function_p (tree);
142 static int interrupt_function_p (tree);
143 static int signal_function_p (tree);
144 static int avr_OS_task_function_p (tree);
145 static int avr_OS_main_function_p (tree);
146 static int avr_regs_to_save (HARD_REG_SET *);
147 static int get_sequence_length (rtx insns);
148 static int sequent_regs_live (void);
149 static const char *ptrreg_to_str (int);
150 static const char *cond_string (enum rtx_code);
151 static int avr_num_arg_regs (enum machine_mode, const_tree);
152 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code,
154 static void output_reload_in_const (rtx*, rtx, int*, bool);
155 static struct machine_function * avr_init_machine_status (void);
158 /* Prototypes for hook implementors if needed before their implementation. */
160 static bool avr_rtx_costs (rtx, int, int, int, int *, bool);
163 /* Allocate registers from r25 to r8 for parameters for function calls. */
164 #define FIRST_CUM_REG 26
166 /* Implicit target register of LPM instruction (R0) */
167 extern GTY(()) rtx lpm_reg_rtx;
170 /* (Implicit) address register of LPM instruction (R31:R30 = Z) */
171 extern GTY(()) rtx lpm_addr_reg_rtx;
172 rtx lpm_addr_reg_rtx;
174 /* Temporary register RTX (reg:QI TMP_REGNO) */
175 extern GTY(()) rtx tmp_reg_rtx;
178 /* Zeroed register RTX (reg:QI ZERO_REGNO) */
179 extern GTY(()) rtx zero_reg_rtx;
182 /* RTXs for all general purpose registers as QImode */
183 extern GTY(()) rtx all_regs_rtx[32];
184 rtx all_regs_rtx[32];
186 /* SREG, the processor status */
187 extern GTY(()) rtx sreg_rtx;
190 /* RAMP* special function registers */
191 extern GTY(()) rtx rampd_rtx;
192 extern GTY(()) rtx rampx_rtx;
193 extern GTY(()) rtx rampy_rtx;
194 extern GTY(()) rtx rampz_rtx;
200 /* RTX containing the strings "" and "e", respectively */
201 static GTY(()) rtx xstring_empty;
202 static GTY(()) rtx xstring_e;
204 /* Preprocessor macros to define depending on MCU type. */
205 const char *avr_extra_arch_macro;
207 /* Current architecture. */
208 const struct base_arch_s *avr_current_arch;
210 /* Current device. */
211 const struct mcu_type_s *avr_current_device;
213 /* Section to put switch tables in. */
214 static GTY(()) section *progmem_swtable_section;
216 /* Unnamed sections associated to __attribute__((progmem)) aka. PROGMEM
217 or to address space __flash*. */
218 static GTY(()) section *progmem_section[6];
220 /* Condition for insns/expanders from avr-dimode.md. */
221 bool avr_have_dimode = true;
223 /* To track if code will use .bss and/or .data.  Set by the assembly
   output machinery so the startup code copy/clear loops can be pruned. */
224 bool avr_need_clear_bss_p = false;
225 bool avr_need_copy_data_p = false;
229 /* Custom function to count number of set bits in VAL.
   NOTE(review): the return-type line and the function body are missing
   from this listing; per the comment it returns the population count. */
232 avr_popcount (unsigned int val)
246 /* Constraint helper function. XVAL is a CONST_INT or a CONST_DOUBLE.
247 Return true if the least significant N_BYTES bytes of XVAL all have a
248 popcount in POP_MASK and false, otherwise. POP_MASK represents a subset
249 of integers which contains an integer N iff bit N of POP_MASK is set. */
252 avr_popcount_each_byte (rtx xval, int n_bytes, int pop_mask)
256 enum machine_mode mode = GET_MODE (xval);
   /* CONST_INTs carry VOIDmode; presumably the missing body substitutes a
      concrete mode here -- confirm against the full source. */
258 if (VOIDmode == mode)
261 for (i = 0; i < n_bytes; i++)
   /* Extract byte I of XVAL and test its popcount against POP_MASK. */
263 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
264 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
266 if (0 == (pop_mask & (1 << avr_popcount (val8))))
   /* Implement `TARGET_OPTION_OVERRIDE': adjust global flags for AVR and
      cache the selected device/architecture and its SFR RAM addresses. */
274 avr_option_override (void)
   /* On AVR address 0 is a valid (SFR) address, so NULL dereference
      cannot be assumed to trap. */
276 flag_delete_null_pointer_checks = 0;
278 /* caller-save.c looks for call-clobbered hard registers that are assigned
279 to pseudos that cross calls and tries to save-restore them around calls
280 in order to reduce the number of stack slots needed.
282 This might lead to situations where reload is no more able to cope
283 with the challenge of AVR's very few address registers and fails to
284 perform the requested spills. */
287 flag_caller_saves = 0;
289 /* Unwind tables currently require a frame pointer for correctness,
290 see toplev.c:process_options(). */
292 if ((flag_unwind_tables
293 || flag_non_call_exceptions
294 || flag_asynchronous_unwind_tables)
295 && !ACCUMULATE_OUTGOING_ARGS)
297 flag_omit_frame_pointer = 0;
300 avr_current_device = &avr_mcu_types[avr_mcu_index];
301 avr_current_arch = &avr_arch_types[avr_current_device->arch];
302 avr_extra_arch_macro = avr_current_device->macro;
304 /* RAM addresses of some SFRs common to all Devices in respective Arch. */
306 /* SREG: Status Register containing flags like I (global IRQ) */
307 avr_addr.sreg = 0x3F + avr_current_arch->sfr_offset;
309 /* RAMPZ: Address' high part when loading via ELPM */
310 avr_addr.rampz = 0x3B + avr_current_arch->sfr_offset;
312 avr_addr.rampy = 0x3A + avr_current_arch->sfr_offset;
313 avr_addr.rampx = 0x39 + avr_current_arch->sfr_offset;
314 avr_addr.rampd = 0x38 + avr_current_arch->sfr_offset;
315 avr_addr.ccp = 0x34 + avr_current_arch->sfr_offset;
317 /* SP: Stack Pointer (SP_H:SP_L) */
318 avr_addr.sp_l = 0x3D + avr_current_arch->sfr_offset;
319 avr_addr.sp_h = avr_addr.sp_l + 1;
321 init_machine_status = avr_init_machine_status;
   /* Set up dump/verbose logging as requested by -mlog=.  */
323 avr_log_set_avr_log();
326 /* Function to set up the backend function structure.
   Allocates a zero-initialized, GC-managed machine_function record;
   installed as init_machine_status in avr_option_override. */
328 static struct machine_function *
329 avr_init_machine_status (void)
331 return ggc_alloc_cleared_machine_function ();
335 /* Implement `INIT_EXPANDERS'. */
336 /* The function works like a singleton.
   Builds the per-backend RTX objects: one QImode REG for every GPR,
   the LPM/tmp/zero register aliases, the Z address register, MEM rtxes
   for the SFRs whose RAM addresses were computed in avr_option_override,
   and the "" / "e" string rtxes used by output templates. */
339 avr_init_expanders (void)
343 for (regno = 0; regno < 32; regno ++)
344 all_regs_rtx[regno] = gen_rtx_REG (QImode, regno);
346 lpm_reg_rtx = all_regs_rtx[LPM_REGNO];
347 tmp_reg_rtx = all_regs_rtx[TMP_REGNO];
348 zero_reg_rtx = all_regs_rtx[ZERO_REGNO];
350 lpm_addr_reg_rtx = gen_rtx_REG (HImode, REG_Z);
352 sreg_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.sreg));
353 rampd_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampd));
354 rampx_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampx));
355 rampy_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampy));
356 rampz_rtx = gen_rtx_MEM (QImode, GEN_INT (avr_addr.rampz));
358 xstring_empty = gen_rtx_CONST_STRING (VOIDmode, "");
359 xstring_e = gen_rtx_CONST_STRING (VOIDmode, "e");
363 /* Return register class for register R.
   Table-driven: r0-r15 NO_LD_REGS, r16-r23 SIMPLE_LD_REGS, r24/r25
   ADDW_REGS, r26/r27 X, r28/r29 Y, r30/r31 Z.  The fall-through for
   r >= 32 (stack pointer etc.) is in lines missing from this listing. */
366 avr_regno_reg_class (int r)
368 static const enum reg_class reg_class_tab[] =
372 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
373 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
374 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
375 NO_LD_REGS, NO_LD_REGS, NO_LD_REGS, NO_LD_REGS,
377 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
378 SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS, SIMPLE_LD_REGS,
380 ADDW_REGS, ADDW_REGS,
382 POINTER_X_REGS, POINTER_X_REGS,
384 POINTER_Y_REGS, POINTER_Y_REGS,
386 POINTER_Z_REGS, POINTER_Z_REGS,
392 return reg_class_tab[r];
   /* Implement `TARGET_SCALAR_MODE_SUPPORTED_P'.  The special-cased
      modes handled before this fallback are missing from this listing;
      everything else defers to the default hook. */
399 avr_scalar_mode_supported_p (enum machine_mode mode)
404 return default_scalar_mode_supported_p (mode);
408 /* Return TRUE if DECL is a VAR_DECL located in Flash and FALSE, otherwise.
   A variable lives in flash iff its type carries a non-generic address
   space (all non-generic AVR address spaces are flash, see avr_addrspace). */
411 avr_decl_flash_p (tree decl)
413 if (TREE_CODE (decl) != VAR_DECL
414 || TREE_TYPE (decl) == error_mark_node)
419 return !ADDR_SPACE_GENERIC_P (TYPE_ADDR_SPACE (TREE_TYPE (decl)));
423 /* Return TRUE if DECL is a VAR_DECL located in the 24-bit Flash
424 address space and FALSE, otherwise.
   Unlike avr_decl_flash_p this matches only the __memx space. */
427 avr_decl_memx_p (tree decl)
429 if (TREE_CODE (decl) != VAR_DECL
430 || TREE_TYPE (decl) == error_mark_node)
435 return (ADDR_SPACE_MEMX == TYPE_ADDR_SPACE (TREE_TYPE (decl)));
439 /* Return TRUE if X is a MEM rtx located in Flash and FALSE, otherwise.
   RTL analogue of avr_decl_flash_p: any non-generic address space
   on the MEM means flash. */
442 avr_mem_flash_p (rtx x)
445 && !ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x)));
449 /* Return TRUE if X is a MEM rtx located in the 24-bit Flash
450 address space and FALSE, otherwise.
   RTL analogue of avr_decl_memx_p. */
453 avr_mem_memx_p (rtx x)
456 && ADDR_SPACE_MEMX == MEM_ADDR_SPACE (x));
460 /* A helper for the subsequent function attribute used to dig for
461 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE.
   Checks DECL_ATTRIBUTES on a FUNCTION_DECL first, then falls through
   to TYPE_ATTRIBUTES of the function's type.  Returns a truth value
   (nonzero iff the attribute is present). */
464 avr_lookup_function_attribute1 (const_tree func, const char *name)
466 if (FUNCTION_DECL == TREE_CODE (func))
468 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
473 func = TREE_TYPE (func);
476 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
477 || TREE_CODE (func) == METHOD_TYPE);
479 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
482 /* Return nonzero if FUNC is a naked function,
   i.e. carries the "naked" attribute (no prologue/epilogue emitted). */
485 avr_naked_function_p (tree func)
487 return avr_lookup_function_attribute1 (func, "naked");
490 /* Return nonzero if FUNC is an interrupt function as specified
491 by the "interrupt" attribute (ISR entered with interrupts re-enabled). */
494 interrupt_function_p (tree func)
496 return avr_lookup_function_attribute1 (func, "interrupt");
499 /* Return nonzero if FUNC is a signal function as specified
500 by the "signal" attribute (ISR entered with interrupts disabled). */
503 signal_function_p (tree func)
505 return avr_lookup_function_attribute1 (func, "signal");
508 /* Return nonzero if FUNC is an OS_task function (attribute "OS_task":
   no call-saved register save/restore is required). */
511 avr_OS_task_function_p (tree func)
513 return avr_lookup_function_attribute1 (func, "OS_task");
516 /* Return nonzero if FUNC is an OS_main function (attribute "OS_main"). */
519 avr_OS_main_function_p (tree func)
521 return avr_lookup_function_attribute1 (func, "OS_main");
525 /* Implement `ACCUMULATE_OUTGOING_ARGS'.
   Returns whether outgoing arguments are accumulated in the frame
   rather than pushed; disabled for setjmp / nonlocal-label functions
   because the correct frame offset is not known there (see FIXME). */
528 avr_accumulate_outgoing_args (void)
   /* Early exit (condition on the missing line presumably tests !cfun --
      confirm against the full source). */
531 return TARGET_ACCUMULATE_OUTGOING_ARGS;
533 /* FIXME: For setjmp and in avr_builtin_setjmp_frame_value we don't know
534 what offset is correct. In some cases it is relative to
535 virtual_outgoing_args_rtx and in others it is relative to
536 virtual_stack_vars_rtx. For example code see
537 gcc.c-torture/execute/built-in-setjmp.c
538 gcc.c-torture/execute/builtins/sprintf-chk.c */
540 return (TARGET_ACCUMULATE_OUTGOING_ARGS
541 && !(cfun->calls_setjmp
542 || cfun->has_nonlocal_label));
546 /* Report contribution of accumulated outgoing arguments to stack size,
   i.e. crtl->outgoing_args_size when ACCUMULATE_OUTGOING_ARGS, else 0. */
549 avr_outgoing_args_size (void)
551 return ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : 0;
555 /* Implement `STARTING_FRAME_OFFSET'. */
556 /* This is the offset from the frame pointer register to the first stack slot
557 that contains a variable living in the frame.
   The +1 accounts for the post-decrement push convention: FP points one
   byte below the last pushed byte. */
560 avr_starting_frame_offset (void)
562 return 1 + avr_outgoing_args_size ();
566 /* Return the number of hard registers to push/pop in the prologue/epilogue
567 of the current function, and optionally store these registers in SET.
   SET may be NULL when only the count is wanted.  ISRs additionally save
   all call-used registers of non-leaf functions. */
570 avr_regs_to_save (HARD_REG_SET *set)
573 int int_or_sig_p = (interrupt_function_p (current_function_decl)
574 || signal_function_p (current_function_decl));
577 CLEAR_HARD_REG_SET (*set);
580 /* No need to save any registers if the function never returns or
581 has the "OS_task" or "OS_main" attribute. */
582 if (TREE_THIS_VOLATILE (current_function_decl)
583 || cfun->machine->is_OS_task
584 || cfun->machine->is_OS_main)
587 for (reg = 0; reg < 32; reg++)
589 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
590 any global register variables. */
594 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
595 || (df_regs_ever_live_p (reg)
596 && (int_or_sig_p || !call_used_regs[reg])
597 /* Don't record frame pointer registers here. They are treated
598 individually in prologue. */
599 && !(frame_pointer_needed
600 && (reg == REG_Y || reg == (REG_Y+1)))))
603 SET_HARD_REG_BIT (*set, reg);
610 /* Return true if register FROM can be eliminated via register TO.
   Implements `TARGET_CAN_ELIMINATE': the arg pointer always folds into
   the frame pointer; the frame pointer folds into the stack pointer
   only when no frame pointer is needed. */
613 avr_can_eliminate (const int from, const int to)
615 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
616 || (frame_pointer_needed && to == FRAME_POINTER_REGNUM)
617 || ((from == FRAME_POINTER_REGNUM
618 || from == FRAME_POINTER_REGNUM + 1)
619 && !frame_pointer_needed));
622 /* Compute offset between arg_pointer and frame_pointer.
   Accounts for saved registers, the pushed frame pointer (2 bytes when
   needed), the return address (2 or 3 bytes depending on EIJMP/EICALL
   support), frame size and accumulated outgoing args. */
625 avr_initial_elimination_offset (int from, int to)
627 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
631 int offset = frame_pointer_needed ? 2 : 0;
632 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
634 offset += avr_regs_to_save (NULL);
635 return (get_frame_size () + avr_outgoing_args_size()
636 + avr_pc_size + 1 + offset);
640 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
641 frame pointer by +STARTING_FRAME_OFFSET.
642 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
643 avoids creating add/sub of offset in nonlocal goto and setjmp.
   Implements `TARGET_BUILTIN_SETJMP_FRAME_VALUE'. */
646 avr_builtin_setjmp_frame_value (void)
648 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
649 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
652 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
653 This is return address of function.
   COUNT must be 0 (only the current frame is supported); TEM is the
   frame base to offset from.  The ROTATE fixes up the big-endian byte
   order in which the return address is stored on the stack. */
655 avr_return_addr_rtx (int count, rtx tem)
659 /* Can only return this function's return address. Others not supported. */
   /* 3-byte-PC devices: only the low 2 bytes of the address are returned. */
665 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
666 warning (0, "'builtin_return_address' contains only 2 bytes of address");
669 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
671 r = gen_rtx_PLUS (Pmode, tem, r);
672 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
673 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
677 /* Return 1 if the function epilogue is just a single "ret",
   i.e. no frame, no saved registers, and none of the attributes that
   require extra epilogue work. */
680 avr_simple_epilogue (void)
682 return (! frame_pointer_needed
683 && get_frame_size () == 0
684 && avr_outgoing_args_size() == 0
685 && avr_regs_to_save (NULL) == 0
686 && ! interrupt_function_p (current_function_decl)
687 && ! signal_function_p (current_function_decl)
688 && ! avr_naked_function_p (current_function_decl)
689 && ! TREE_THIS_VOLATILE (current_function_decl));
692 /* This function checks sequence of live registers,
   as required by the __prologue_saves__/__epilogue_restores__ library
   helpers (-mcall-prologues).  Returns the length of the live sequence,
   or 0 if the live registers do not form one contiguous run. */
695 sequent_regs_live (void)
701 for (reg = 0; reg < 18; ++reg)
705 /* Don't recognize sequences that contain global register
   variables -- the rest of this comment is missing from this listing. */
714 if (!call_used_regs[reg])
716 if (df_regs_ever_live_p (reg))
726 if (!frame_pointer_needed)
   /* Y (r28/r29) joins the sequence when it is live but not the FP. */
728 if (df_regs_ever_live_p (REG_Y))
736 if (df_regs_ever_live_p (REG_Y+1))
749 return (cur_seq == live_seq) ? live_seq : 0;
752 /* Obtain the length sequence of insns: sum of get_attr_length over the
   insn list INSNS, used to pick the shorter of two emitted sequences. */
755 get_sequence_length (rtx insns)
760 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
761 length += get_attr_length (insn);
766 /* Implement INCOMING_RETURN_ADDR_RTX. */
769 avr_incoming_return_addr_rtx (void)
771 /* The return address is at the top of the stack. Note that the push
772 was via post-decrement, which means the actual address is off by one. */
773 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
776 /* Helper for expand_prologue. Emit a push of a byte register.
   REGNO is the hard register to push; FRAME_RELATED_P marks the insn
   for dwarf2 CFI.  Also bumps the per-function stack usage counter. */
779 emit_push_byte (unsigned regno, bool frame_related_p)
783 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
784 mem = gen_frame_mem (QImode, mem);
785 reg = gen_rtx_REG (QImode, regno);
787 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
789 RTX_FRAME_RELATED_P (insn) = 1;
791 cfun->machine->stack_usage++;
795 /* Helper for expand_prologue. Emit a push of a SFR via tmp_reg.
796 SFR is a MEM representing the memory location of the SFR.
797 If CLR_P then clear the SFR after the push using zero_reg. */
800 emit_push_sfr (rtx sfr, bool frame_related_p, bool clr_p)
804 gcc_assert (MEM_P (sfr));
806 /* IN __tmp_reg__, IO(SFR) */
807 insn = emit_move_insn (tmp_reg_rtx, sfr);
809 RTX_FRAME_RELATED_P (insn) = 1;
811 /* PUSH __tmp_reg__ */
812 emit_push_byte (TMP_REGNO, frame_related_p);
816 /* OUT IO(SFR), __zero_reg__ */
817 insn = emit_move_insn (sfr, const0_rtx);
819 RTX_FRAME_RELATED_P (insn) = 1;
   /* Worker for expand_prologue: save registers in SET and create a stack
      frame of SIZE bytes.  Either calls the __prologue_saves__ library
      helper (-mcall-prologues, "minimize" path) or pushes registers
      explicitly, then adjusts SP via the shorter of two strategies. */
824 avr_prologue_setup_frame (HOST_WIDE_INT size, HARD_REG_SET set)
827 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
828 int live_seq = sequent_regs_live ();
830 bool minimize = (TARGET_CALL_PROLOGUES
833 && !cfun->machine->is_OS_task
834 && !cfun->machine->is_OS_main);
837 && (frame_pointer_needed
838 || avr_outgoing_args_size() > 8
839 || (AVR_2_BYTE_PC && live_seq > 6)
843 int first_reg, reg, offset;
845 emit_move_insn (gen_rtx_REG (HImode, REG_X),
846 gen_int_mode (size, HImode));
848 pattern = gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
849 gen_int_mode (live_seq+size, HImode));
850 insn = emit_insn (pattern);
851 RTX_FRAME_RELATED_P (insn) = 1;
853 /* Describe the effect of the unspec_volatile call to prologue_saves.
854 Note that this formulation assumes that add_reg_note pushes the
855 notes to the front. Thus we build them in the reverse order of
856 how we want dwarf2out to process them. */
858 /* The function does always set frame_pointer_rtx, but whether that
859 is going to be permanent in the function is frame_pointer_needed. */
861 add_reg_note (insn, REG_CFA_ADJUST_CFA,
862 gen_rtx_SET (VOIDmode, (frame_pointer_needed
864 : stack_pointer_rtx),
865 plus_constant (stack_pointer_rtx,
866 -(size + live_seq))));
868 /* Note that live_seq always contains r28+r29, but the other
869 registers to be saved are all below 18. */
871 first_reg = 18 - (live_seq - 2);
873 for (reg = 29, offset = -live_seq + 1;
875 reg = (reg == 28 ? 17 : reg - 1), ++offset)
879 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
880 r = gen_rtx_REG (QImode, reg);
881 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
884 cfun->machine->stack_usage += size + live_seq;
   /* Non-minimize path: push each register in SET explicitly. */
890 for (reg = 0; reg < 32; ++reg)
891 if (TEST_HARD_REG_BIT (set, reg))
892 emit_push_byte (reg, true);
894 if (frame_pointer_needed
895 && (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main)))
897 /* Push frame pointer. Always be consistent about the
898 ordering of pushes -- epilogue_restores expects the
899 register pair to be pushed low byte first. */
901 emit_push_byte (REG_Y, true);
902 emit_push_byte (REG_Y + 1, true);
905 if (frame_pointer_needed
908 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
909 RTX_FRAME_RELATED_P (insn) = 1;
914 /* Creating a frame can be done by direct manipulation of the
915 stack or via the frame pointer. These two methods are:
922 the optimum method depends on function type, stack and
923 frame size. To avoid a complex logic, both methods are
924 tested and shortest is selected.
926 There is also the case where SIZE != 0 and no frame pointer is
927 needed; this can occur if ACCUMULATE_OUTGOING_ARGS is on.
928 In that case, insn (*) is not needed.
929 We use the X register as scratch. This is safe because in X
931 In an interrupt routine, the case of SIZE != 0 together with
932 !frame_pointer_needed can only occur if the function is not a
933 leaf function and thus X has already been saved. */
936 rtx fp_plus_insns, fp, my_fp;
938 gcc_assert (frame_pointer_needed
940 || !current_function_is_leaf);
942 fp = my_fp = (frame_pointer_needed
944 : gen_rtx_REG (Pmode, REG_X));
946 if (AVR_HAVE_8BIT_SP)
948 /* The high byte (r29) does not change:
949 Prefer SUBI (1 cycle) over SBIW (2 cycles, same size). */
951 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
954 /************ Method 1: Adjust frame pointer ************/
958 /* Normally, the dwarf2out frame-related-expr interpreter does
959 not expect to have the CFA change once the frame pointer is
960 set up. Thus, we avoid marking the move insn below and
961 instead indicate that the entire operation is complete after
962 the frame pointer subtraction is done. */
964 insn = emit_move_insn (fp, stack_pointer_rtx);
965 if (frame_pointer_needed)
967 RTX_FRAME_RELATED_P (insn) = 1;
968 add_reg_note (insn, REG_CFA_ADJUST_CFA,
969 gen_rtx_SET (VOIDmode, fp, stack_pointer_rtx));
972 insn = emit_move_insn (my_fp, plus_constant (my_fp, -size));
973 if (frame_pointer_needed)
975 RTX_FRAME_RELATED_P (insn) = 1;
976 add_reg_note (insn, REG_CFA_ADJUST_CFA,
977 gen_rtx_SET (VOIDmode, fp,
978 plus_constant (fp, -size)));
981 /* Copy to stack pointer. Note that since we've already
982 changed the CFA to the frame pointer this operation
983 need not be annotated if frame pointer is needed.
984 Always move through unspec, see PR50063.
985 For meaning of irq_state see movhi_sp_r insn. */
987 if (cfun->machine->is_interrupt)
990 if (TARGET_NO_INTERRUPTS
991 || cfun->machine->is_signal
992 || cfun->machine->is_OS_main)
995 if (AVR_HAVE_8BIT_SP)
998 insn = emit_insn (gen_movhi_sp_r (stack_pointer_rtx,
999 fp, GEN_INT (irq_state)));
1000 if (!frame_pointer_needed)
1002 RTX_FRAME_RELATED_P (insn) = 1;
1003 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1004 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1005 plus_constant (stack_pointer_rtx,
1009 fp_plus_insns = get_insns ();
1012 /************ Method 2: Adjust Stack pointer ************/
1014 /* Stack adjustment by means of RCALL . and/or PUSH __TMP_REG__
1015 can only handle specific offsets. */
1017 if (avr_sp_immediate_operand (gen_int_mode (-size, HImode), HImode))
1023 insn = emit_move_insn (stack_pointer_rtx,
1024 plus_constant (stack_pointer_rtx, -size));
1025 RTX_FRAME_RELATED_P (insn) = 1;
1026 add_reg_note (insn, REG_CFA_ADJUST_CFA,
1027 gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1028 plus_constant (stack_pointer_rtx,
1030 if (frame_pointer_needed)
1032 insn = emit_move_insn (fp, stack_pointer_rtx);
1033 RTX_FRAME_RELATED_P (insn) = 1;
1036 sp_plus_insns = get_insns ();
1039 /************ Use shortest method ************/
1041 emit_insn (get_sequence_length (sp_plus_insns)
1042 < get_sequence_length (fp_plus_insns)
1048 emit_insn (fp_plus_insns);
1051 cfun->machine->stack_usage += size;
1052 } /* !minimize && size != 0 */
1057 /* Output function prologue.
   Initializes cfun->machine from the function attributes, handles the
   ISR preamble (sei, save zero/tmp/SREG/RAMP* registers), then calls
   avr_prologue_setup_frame to save registers and build the frame. */
1060 expand_prologue (void)
1065 size = get_frame_size() + avr_outgoing_args_size();
1067 /* Init cfun->machine. */
1068 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
1069 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
1070 cfun->machine->is_signal = signal_function_p (current_function_decl);
1071 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
1072 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
1073 cfun->machine->stack_usage = 0;
1075 /* Prologue: naked. */
1076 if (cfun->machine->is_naked)
1081 avr_regs_to_save (&set);
1083 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1085 /* Enable interrupts. */
1086 if (cfun->machine->is_interrupt)
1087 emit_insn (gen_enable_interrupt ());
1089 /* Push zero reg. */
1090 emit_push_byte (ZERO_REGNO, true)
1093 emit_push_byte (TMP_REGNO, true);
1096 /* ??? There's no dwarf2 column reserved for SREG. */
1097 emit_push_sfr (sreg_rtx, false, false /* clr */);
1099 /* Clear zero reg. */
1100 emit_move_insn (zero_reg_rtx, const0_rtx);
1102 /* Prevent any attempt to delete the setting of ZERO_REG! */
1103 emit_use (zero_reg_rtx);
1105 /* Push and clear RAMPD/X/Y/Z if present and low-part register is used.
1106 ??? There are no dwarf2 columns reserved for RAMPD/X/Y/Z. */
1109 emit_push_sfr (rampd_rtx, false /* frame-related */, true /* clr */);
1112 && TEST_HARD_REG_BIT (set, REG_X)
1113 && TEST_HARD_REG_BIT (set, REG_X + 1))
1115 emit_push_sfr (rampx_rtx, false /* frame-related */, true /* clr */);
1119 && (frame_pointer_needed
1120 || (TEST_HARD_REG_BIT (set, REG_Y)
1121 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1123 emit_push_sfr (rampy_rtx, false /* frame-related */, true /* clr */);
1127 && TEST_HARD_REG_BIT (set, REG_Z)
1128 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1130 emit_push_sfr (rampz_rtx, false /* frame-related */, AVR_HAVE_RAMPD);
1132 } /* is_interrupt is_signal */
1134 avr_prologue_setup_frame (size, set);
1136 if (flag_stack_usage_info)
1137 current_function_static_stack_size = cfun->machine->stack_usage;
1140 /* Output summary at end of function prologue:
   human-readable comments (function kind, frame/stack sizes) plus the
   .L__stack_usage symbol consumed by avr_return_addr_rtx. */
1143 avr_asm_function_end_prologue (FILE *file)
1145 if (cfun->machine->is_naked)
1147 fputs ("/* prologue: naked */\n", file);
1151 if (cfun->machine->is_interrupt)
1153 fputs ("/* prologue: Interrupt */\n", file);
1155 else if (cfun->machine->is_signal)
1157 fputs ("/* prologue: Signal */\n", file);
1160 fputs ("/* prologue: function */\n", file);
1163 if (ACCUMULATE_OUTGOING_ARGS)
1164 fprintf (file, "/* outgoing args size = %d */\n",
1165 avr_outgoing_args_size());
1167 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
1169 fprintf (file, "/* stack size = %d */\n",
1170 cfun->machine->stack_usage);
1171 /* Create symbol stack offset here so all functions have it. Add 1 to stack
1172 usage for offset so that SP + .L__stack_offset = return address. */
1173 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
1177 /* Implement EPILOGUE_USES.
   After reload, ISR epilogues use (restore) extra registers; the exact
   value returned on the missing line follows from the full source. */
1180 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
1182 if (reload_completed
1184 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
1189 /* Helper for expand_epilogue. Emit a pop of a byte register.
   Mirror of emit_push_byte: pre-increment load from the stack. */
1192 emit_pop_byte (unsigned regno)
1196 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
1197 mem = gen_frame_mem (QImode, mem);
1198 reg = gen_rtx_REG (QImode, regno);
1200 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
1203 /* Output RTL epilogue.
   SIBCALL_P suppresses the final return insn.  Mirrors expand_prologue:
   tear down the frame (library helper or explicit pops, shorter of two
   SP-adjust methods), restore saved registers, then RAMP*/SREG/tmp/zero
   for ISRs, and finally emit the return. */
1206 expand_epilogue (bool sibcall_p)
1213 bool isr_p = cfun->machine->is_interrupt || cfun->machine->is_signal;
1215 size = get_frame_size() + avr_outgoing_args_size();
1217 /* epilogue: naked */
1218 if (cfun->machine->is_naked)
1220 gcc_assert (!sibcall_p);
1222 emit_jump_insn (gen_return ());
1226 avr_regs_to_save (&set);
1227 live_seq = sequent_regs_live ();
1229 minimize = (TARGET_CALL_PROLOGUES
1232 && !cfun->machine->is_OS_task
1233 && !cfun->machine->is_OS_main);
1237 || frame_pointer_needed
1240 /* Get rid of frame. */
1242 if (!frame_pointer_needed)
1244 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
1249 emit_move_insn (frame_pointer_rtx,
1250 plus_constant (frame_pointer_rtx, size));
1253 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
1259 /* Try two methods to adjust stack and select shortest. */
1265 gcc_assert (frame_pointer_needed
1267 || !current_function_is_leaf);
1269 fp = my_fp = (frame_pointer_needed
1271 : gen_rtx_REG (Pmode, REG_X));
1273 if (AVR_HAVE_8BIT_SP)
1275 /* The high byte (r29) does not change:
1276 Prefer SUBI (1 cycle) over SBIW (2 cycles). */
1278 my_fp = all_regs_rtx[FRAME_POINTER_REGNUM];
1281 /********** Method 1: Adjust fp register **********/
1285 if (!frame_pointer_needed)
1286 emit_move_insn (fp, stack_pointer_rtx);
1288 emit_move_insn (my_fp, plus_constant (my_fp, size));
1290 /* Copy to stack pointer. */
1292 if (TARGET_NO_INTERRUPTS)
1295 if (AVR_HAVE_8BIT_SP)
1298 emit_insn (gen_movhi_sp_r (stack_pointer_rtx, fp,
1299 GEN_INT (irq_state)));
1301 fp_plus_insns = get_insns ();
1304 /********** Method 2: Adjust Stack pointer **********/
1306 if (avr_sp_immediate_operand (gen_int_mode (size, HImode), HImode))
1312 emit_move_insn (stack_pointer_rtx,
1313 plus_constant (stack_pointer_rtx, size));
1315 sp_plus_insns = get_insns ();
1318 /************ Use shortest method ************/
1320 emit_insn (get_sequence_length (sp_plus_insns)
1321 < get_sequence_length (fp_plus_insns)
1326 emit_insn (fp_plus_insns);
1329 if (frame_pointer_needed
1330 && !(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1332 /* Restore previous frame_pointer. See expand_prologue for
1333 rationale for not using pophi. */
1335 emit_pop_byte (REG_Y + 1);
1336 emit_pop_byte (REG_Y);
1339 /* Restore used registers. */
1341 for (reg = 31; reg >= 0; --reg)
1342 if (TEST_HARD_REG_BIT (set, reg))
1343 emit_pop_byte (reg);
1347 /* Restore RAMPZ/Y/X/D using tmp_reg as scratch.
1348 The conditions to restore them must be the same as in prologue. */
1351 && TEST_HARD_REG_BIT (set, REG_Z)
1352 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1354 emit_pop_byte (TMP_REGNO);
1355 emit_move_insn (rampz_rtx, tmp_reg_rtx);
1359 && (frame_pointer_needed
1360 || (TEST_HARD_REG_BIT (set, REG_Y)
1361 && TEST_HARD_REG_BIT (set, REG_Y + 1))))
1363 emit_pop_byte (TMP_REGNO);
1364 emit_move_insn (rampy_rtx, tmp_reg_rtx);
1368 && TEST_HARD_REG_BIT (set, REG_X)
1369 && TEST_HARD_REG_BIT (set, REG_X + 1))
1371 emit_pop_byte (TMP_REGNO);
1372 emit_move_insn (rampx_rtx, tmp_reg_rtx);
1377 emit_pop_byte (TMP_REGNO);
1378 emit_move_insn (rampd_rtx, tmp_reg_rtx);
1381 /* Restore SREG using tmp_reg as scratch. */
1383 emit_pop_byte (TMP_REGNO);
1384 emit_move_insn (sreg_rtx, tmp_reg_rtx);
1386 /* Restore tmp REG. */
1387 emit_pop_byte (TMP_REGNO);
1389 /* Restore zero REG. */
1390 emit_pop_byte (ZERO_REGNO);
1394 emit_jump_insn (gen_return ());
1397 /* Output summary messages at beginning of function epilogue. */
/* Implement TARGET_ASM_FUNCTION_BEGIN_EPILOGUE: write a marker comment
   into the assembler output at the point the epilogue begins.  */
1400 avr_asm_function_begin_epilogue (FILE *file)
1402   fprintf (file, "/* epilogue start */\n");
1406 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P */
/* Implement TARGET_CANNOT_MODIFY_JUMPS_P: forbid the jump optimizer from
   touching jumps once reload has finished in a naked function, because a
   naked function's epilogue must be the very last code emitted.
   NOTE(review): interior source lines are elided in this excerpt.  */
1409 avr_cannot_modify_jumps_p (void)
1412   /* Naked Functions must not have any instructions after
1413      their epilogue, see PR42240 */
1415   if (reload_completed
1417       && cfun->machine->is_naked)
1426 /* Helper function for `avr_legitimate_address_p'. */
/* Return whether REG is acceptable as a base register for an address in
   address space AS used in context OUTER_CODE.  STRICT selects strict
   checking; in the non-strict case pseudo registers (REGNO >=
   FIRST_PSEUDO_REGISTER) are also accepted.
   NOTE(review): interior source lines are elided in this excerpt.  */
1429 avr_reg_ok_for_addr_p (rtx reg, addr_space_t as,
1430                        RTX_CODE outer_code, bool strict)
1433           && (avr_regno_mode_code_ok_for_base_p (REGNO (reg), QImode,
1434                                                  as, outer_code, UNKNOWN)
1436                   && REGNO (reg) >= FIRST_PSEUDO_REGISTER)));
1440 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1441 machine for a memory operand of mode MODE. */
/* Implement TARGET_LEGITIMATE_ADDRESS_P: return true iff X is a valid
   memory address for an operand of mode MODE.  Handles plain registers,
   auto-inc/dec addresses, and reg+const displacements (the latter only
   when the offset fits MAX_LD_OFFSET or the base is the frame/arg
   pointer).  Ends with optional debug dumping via avr_edump.
   NOTE(review): interior source lines are elided in this excerpt;
   comments cover only the visible code.  */
1444 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
     /* A constant address is legitimate by default.  */
1446   bool ok = CONSTANT_ADDRESS_P (x);
1448   switch (GET_CODE (x))
1451       ok = avr_reg_ok_for_addr_p (x, ADDR_SPACE_GENERIC,
1456           && REG_X == REGNO (x))
1464       ok = avr_reg_ok_for_addr_p (XEXP (x, 0), ADDR_SPACE_GENERIC,
1465                                   GET_CODE (x), strict);
     /* PLUS case: base register plus non-negative constant offset.  */
1470         rtx reg = XEXP (x, 0);
1471         rtx op1 = XEXP (x, 1);
1474             && CONST_INT_P (op1)
1475             && INTVAL (op1) >= 0)
             /* Offset must fit the LD/LDD displacement range for MODE.  */
1477             bool fit = IN_RANGE (INTVAL (op1), 0, MAX_LD_OFFSET (mode));
1482                     || avr_reg_ok_for_addr_p (reg, ADDR_SPACE_GENERIC,
1485                 if (reg == frame_pointer_rtx
1486                     || reg == arg_pointer_rtx)
1491             else if (frame_pointer_needed
1492                      && reg == frame_pointer_rtx)
     /* Optional debug output of the decision.  */
1504   if (avr_log.legitimate_address_p)
1506       avr_edump ("\n%?: ret=%d, mode=%m strict=%d "
1507                  "reload_completed=%d reload_in_progress=%d %s:",
1508                  ok, mode, strict, reload_completed, reload_in_progress,
1509                  reg_renumber ? "(reg_renumber)" : "");
1511       if (GET_CODE (x) == PLUS
1512           && REG_P (XEXP (x, 0))
1513           && CONST_INT_P (XEXP (x, 1))
1514           && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
1517           avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1518                      true_regnum (XEXP (x, 0)));
1521       avr_edump ("\n%r\n", x);
1528 /* Former implementation of TARGET_LEGITIMIZE_ADDRESS,
1529 now only a helper for avr_addr_space_legitimize_address. */
1530 /* Attempts to replace X with a valid
1531 memory address for an operand of mode MODE */
/* Try to turn X (originally OLDX) into a valid address for mode MODE.
   A reg+reg sum, or a reg+const sum whose offset exceeds the LD/LDD
   displacement range (and whose base is not the frame pointer), is
   forced into a register.  Returns the possibly-rewritten address.
   NOTE(review): interior source lines are elided in this excerpt.  */
1534 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1536   bool big_offset_p = false;
1540   if (GET_CODE (oldx) == PLUS
1541       && REG_P (XEXP (oldx, 0)))
1543       if (REG_P (XEXP (oldx, 1)))
             /* reg+reg is never directly addressable: load it.  */
1544         x = force_reg (GET_MODE (oldx), oldx);
1545       else if (CONST_INT_P (XEXP (oldx, 1)))
1547           int offs = INTVAL (XEXP (oldx, 1));
1548           if (frame_pointer_rtx != XEXP (oldx, 0)
1549               && offs > MAX_LD_OFFSET (mode))
1551               big_offset_p = true;
1552               x = force_reg (GET_MODE (oldx), oldx);
     /* Optional debug output.  */
1557   if (avr_log.legitimize_address)
1559       avr_edump ("\n%?: mode=%m\n %r\n", mode, oldx);
1562         avr_edump (" %s --> %r\n", big_offset_p ? "(big offset)" : "", x);
1569 /* Implement `LEGITIMIZE_RELOAD_ADDRESS'. */
1570 /* This will allow register R26/27 to be used where it is no worse than normal
1571 base pointers R28/29 or R30/31. For example, if base offset is greater
1572 than 63 bytes or for R++ or --R addressing. */
/* Implement LEGITIMIZE_RELOAD_ADDRESS.  Push reloads so that X/Y/Z
   pointer registers can be used for POST_INC/PRE_DEC addressing and for
   reg+const addresses: small offsets are reloaded into BASE_POINTER_REGS,
   oversized ones into POINTER_REGS.  MK_MEMLOC builds a stack slot for a
   base register that has an equivalent memory address.
   NOTE(review): interior source lines are elided in this excerpt;
   comments cover only the visible code.  */
1575 avr_legitimize_reload_address (rtx *px, enum machine_mode mode,
1576                                int opnum, int type, int addr_type,
1577                                int ind_levels ATTRIBUTE_UNUSED,
1578                                rtx (*mk_memloc)(rtx,int))
1582   if (avr_log.legitimize_reload_address)
1583     avr_edump ("\n%?:%m %r\n", mode, x);
     /* Auto-inc/dec: reload the pointer itself into POINTER_REGS.  */
1585   if (1 && (GET_CODE (x) == POST_INC
1586             || GET_CODE (x) == PRE_DEC))
1588       push_reload (XEXP (x, 0), XEXP (x, 0), &XEXP (x, 0), &XEXP (x, 0),
1589                    POINTER_REGS, GET_MODE (x), GET_MODE (x), 0, 0,
1590                    opnum, RELOAD_OTHER);
1592       if (avr_log.legitimize_reload_address)
1593         avr_edump (" RCLASS.1 = %R\n IN = %r\n OUT = %r\n",
1594                    POINTER_REGS, XEXP (x, 0), XEXP (x, 0));
     /* reg+const with positive offset and no constant equivalence.  */
1599   if (GET_CODE (x) == PLUS
1600       && REG_P (XEXP (x, 0))
1601       && 0 == reg_equiv_constant (REGNO (XEXP (x, 0)))
1602       && CONST_INT_P (XEXP (x, 1))
1603       && INTVAL (XEXP (x, 1)) >= 1)
         /* Does the offset fit the LDD displacement range for MODE?  */
1605       bool fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1609           if (reg_equiv_address (REGNO (XEXP (x, 0))) != 0)
1611               int regno = REGNO (XEXP (x, 0));
1612               rtx mem = mk_memloc (x, regno);
                 /* First reload the address of the stack slot ...  */
1614               push_reload (XEXP (mem, 0), NULL_RTX, &XEXP (mem, 0), NULL,
1615                            POINTER_REGS, Pmode, VOIDmode, 0, 0,
1618               if (avr_log.legitimize_reload_address)
1619                 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1620                            POINTER_REGS, XEXP (mem, 0), NULL_RTX);
                 /* ... then reload the slot's value as the base reg.  */
1622               push_reload (mem, NULL_RTX, &XEXP (x, 0), NULL,
1623                            BASE_POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1626               if (avr_log.legitimize_reload_address)
1627                 avr_edump (" RCLASS.2 = %R\n IN = %r\n OUT = %r\n",
1628                            BASE_POINTER_REGS, mem, NULL_RTX);
         /* Oversized offset: reload the whole address, unless the base is
            the (needed) frame pointer, which is handled elsewhere.  */
1633       else if (! (frame_pointer_needed
1634                   && XEXP (x, 0) == frame_pointer_rtx))
1636           push_reload (x, NULL_RTX, px, NULL,
1637                        POINTER_REGS, GET_MODE (x), VOIDmode, 0, 0,
1640           if (avr_log.legitimize_reload_address)
1641             avr_edump (" RCLASS.3 = %R\n IN = %r\n OUT = %r\n",
1642                        POINTER_REGS, x, NULL_RTX);
1652 /* Helper function to print assembler resp. track instruction
1653 sequence lengths. Always return "".
1656 Output assembler code from template TPL with operands supplied
1657 by OPERANDS. This is just forwarding to output_asm_insn.
1660 If N_WORDS >= 0 Add N_WORDS to *PLEN.
1661 If N_WORDS < 0 Set *PLEN to -N_WORDS.
1662 Don't output anything.
/* Emit assembler template TPL with OPERANDS (via output_asm_insn) and/or
   account N_WORDS into *PLEN, per the contract described above.
   NOTE(review): interior source lines are elided in this excerpt.  */
1666 avr_asm_len (const char* tpl, rtx* operands, int* plen, int n_words)
1670       output_asm_insn (tpl, operands);
1684 /* Return a pointer register name as a string. */
/* Map a pointer register number (REG_X/REG_Y/REG_Z) to its assembler
   name "X"/"Y"/"Z"; complain for anything else.  */
1687 ptrreg_to_str (int regno)
1691     case REG_X: return "X";
1692     case REG_Y: return "Y";
1693     case REG_Z: return "Z";
1695       output_operand_lossage ("address operand requires constraint for"
1696                               " X, Y, or Z register");
1701 /* Return the condition name as a string.
1702 Used in conditional jump constructing */
/* Return the branch-condition mnemonic suffix for CODE; some cases
   consult cc_prev_status to know whether the V flag is usable.
   NOTE(review): most of this function is elided in this excerpt.  */
1705 cond_string (enum rtx_code code)
1714       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1719       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1735 /* Implement `TARGET_PRINT_OPERAND_ADDRESS'. */
1736 /* Output ADDR to FILE as address. */
/* Implement TARGET_PRINT_OPERAND_ADDRESS: print ADDR to FILE.  Registers
   print as X/Y/Z, PRE_DEC as "-R", POST_INC as "R+"; constant program
   memory addresses are wrapped in the assembler's gs() operator.
   NOTE(review): interior source lines are elided in this excerpt.  */
1739 avr_print_operand_address (FILE *file, rtx addr)
1741   switch (GET_CODE (addr))
1744       fprintf (file, ptrreg_to_str (REGNO (addr)));
1748       fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1752       fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1756       if (CONSTANT_ADDRESS_P (addr)
1757           && text_segment_operand (addr, VOIDmode))
1760           if (GET_CODE (x) == CONST)
1762           if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1764               /* Assembler gs() will implant word address.  Make offset
1765                  a byte offset inside gs() for assembler.  This is
1766                  needed because the more logical (constant+gs(sym)) is not
1767                  accepted by gas.  For 128K and lower devices this is ok.
1768                  For large devices it will create a Trampoline to offset
1769                  from symbol which may not be what the user really wanted.  */
1770               fprintf (file, "gs(");
1771               output_addr_const (file, XEXP (x,0));
1772               fprintf (file, "+" HOST_WIDE_INT_PRINT_DEC ")",
1773                        2 * INTVAL (XEXP (x, 1)));
1775                 if (warning (0, "pointer offset from symbol maybe incorrect"))
1777                     output_addr_const (stderr, addr);
1778                     fprintf(stderr,"\n");
1783               fprintf (file, "gs(");
1784               output_addr_const (file, addr);
1785               fprintf (file, ")");
1789         output_addr_const (file, addr);
1794 /* Implement `TARGET_PRINT_OPERAND_PUNCT_VALID_P'. */
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P: only '~' and '!' are
   recognized punctuation codes in operand output.  */
1797 avr_print_operand_punct_valid_p (unsigned char code)
1799   return code == '~' || code == '!';
1803 /* Implement `TARGET_PRINT_OPERAND'. */
1804 /* Output X as assembler operand to file FILE.
1805 For a description of supported %-codes, see top of avr.md. */
/* Implement TARGET_PRINT_OPERAND: print operand X to FILE under %-CODE.
   Codes seen here include A..D (byte selection), ~/! (device-dependent
   jump/call forms), t/T (bit-test register+bit pair), i (I/O address),
   o/p/r (parts of MEM addresses), x (program-memory address), j/k
   (condition strings).  See top of avr.md for the full list.
   NOTE(review): interior source lines are elided in this excerpt;
   comments cover only the visible code.  */
1808 avr_print_operand (FILE *file, rtx x, int code)
     /* 'A'..'D' select byte 0..3 of a multi-byte operand.  */
1812   if (code >= 'A' && code <= 'D')
1817       if (!AVR_HAVE_JMP_CALL)
1820   else if (code == '!')
1822       if (AVR_HAVE_EIJMP_EICALL)
1825   else if (code == 't'
         /* %T remembers a register, a following %t prints reg+bit.  */
1828       static int t_regno = -1;
1829       static int t_nbits = -1;
1831       if (REG_P (x) && t_regno < 0 && code == 'T')
1833           t_regno = REGNO (x);
1834           t_nbits = GET_MODE_BITSIZE (GET_MODE (x));
1836       else if (CONST_INT_P (x) && t_regno >= 0
1837                && IN_RANGE (INTVAL (x), 0, t_nbits - 1))
1839           int bpos = INTVAL (x);
1841           fprintf (file, "%s", reg_names[t_regno + bpos / 8]);
1843             fprintf (file, ",%d", bpos % 8);
1848           fatal_insn ("operands to %T/%t must be reg + const_int:", x);
1852       if (x == zero_reg_rtx)
1853         fprintf (file, "__zero_reg__");
1855         fprintf (file, reg_names[true_regnum (x) + abcd]);
1857   else if (CONST_INT_P (x))
1859       HOST_WIDE_INT ival = INTVAL (x);
1862         fprintf (file, HOST_WIDE_INT_PRINT_DEC, ival + abcd);
1863       else if (low_io_address_operand (x, VOIDmode)
1864                || high_io_address_operand (x, VOIDmode))
           /* Well-known SFR addresses print as their symbolic names.  */
1866           if (AVR_HAVE_RAMPZ && ival == avr_addr.rampz)
1867             fprintf (file, "__RAMPZ__");
1868           else if (AVR_HAVE_RAMPY && ival == avr_addr.rampy)
1869             fprintf (file, "__RAMPY__");
1870           else if (AVR_HAVE_RAMPX && ival == avr_addr.rampx)
1871             fprintf (file, "__RAMPX__");
1872           else if (AVR_HAVE_RAMPD && ival == avr_addr.rampd)
1873             fprintf (file, "__RAMPD__");
1874           else if (AVR_XMEGA && ival == avr_addr.ccp)
1875             fprintf (file, "__CCP__");
1876           else if (ival == avr_addr.sreg)   fprintf (file, "__SREG__");
1877           else if (ival == avr_addr.sp_l)   fprintf (file, "__SP_L__");
1878           else if (ival == avr_addr.sp_h)   fprintf (file, "__SP_H__");
             /* Otherwise print the raw I/O address (SFR offset removed).  */
1881               fprintf (file, HOST_WIDE_INT_PRINT_HEX,
1882                        ival - avr_current_arch->sfr_offset);
1886         fatal_insn ("bad address, not an I/O address:", x);
     /* MEM operand: dispatch on the sub-code.  */
1890       rtx addr = XEXP (x, 0);
1894           if (!CONSTANT_P (addr))
1895             fatal_insn ("bad address, not a constant:", addr);
1896           /* Assembler template with m-code is data - not progmem section */
1897           if (text_segment_operand (addr, VOIDmode))
1898             if (warning (0, "accessing data memory with"
1899                          " program memory address"))
1901                 output_addr_const (stderr, addr);
1902                 fprintf(stderr,"\n");
1904           output_addr_const (file, addr);
1906       else if (code == 'i')
1908           avr_print_operand (file, addr, 'i');
1910       else if (code == 'o')
1912           if (GET_CODE (addr) != PLUS)
1913             fatal_insn ("bad address, not (reg+disp):", addr);
1915           avr_print_operand (file, XEXP (addr, 1), 0);
1917       else if (code == 'p' || code == 'r')
1919           if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1920             fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1923             avr_print_operand_address (file, XEXP (addr, 0));  /* X, Y, Z */
1925             avr_print_operand (file, XEXP (addr, 0), 0);  /* r26, r28, r30 */
1927       else if (GET_CODE (addr) == PLUS)
1929           avr_print_operand_address (file, XEXP (addr,0));
1930           if (REGNO (XEXP (addr, 0)) == REG_X)
1931             fatal_insn ("internal compiler error.  Bad address:"
1934           avr_print_operand (file, XEXP (addr,1), code);
1937         avr_print_operand_address (file, addr);
1939   else if (code == 'i')
1941       fatal_insn ("bad address, not an I/O address:", x);
1943   else if (code == 'x')
1945       /* Constant progmem address - like used in jmp or call */
1946       if (0 == text_segment_operand (x, VOIDmode))
1947         if (warning (0, "accessing program memory"
1948                      " with data memory address"))
1950             output_addr_const (stderr, x);
1951             fprintf(stderr,"\n");
1953       /* Use normal symbol for direct address no linker trampoline needed */
1954       output_addr_const (file, x);
1956   else if (GET_CODE (x) == CONST_DOUBLE)
         /* Only SFmode floats are supported; print the raw bit pattern.  */
1960       if (GET_MODE (x) != SFmode)
1961         fatal_insn ("internal compiler error.  Unknown mode:", x);
1962       REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1963       REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1964       fprintf (file, "0x%lx", val);
1966   else if (GET_CODE (x) == CONST_STRING)
1967     fputs (XSTR (x, 0), file);
1968   else if (code == 'j')
1969     fputs (cond_string (GET_CODE (x)), file);
1970   else if (code == 'k')
1971     fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1973     avr_print_operand_address (file, x);
1976 /* Update the condition code in the INSN. */
/* Update cc_status to reflect how INSN affects the condition code, based
   on the insn's "cc" attribute.  Special attribute values (e.g.
   CC_OUT_PLUS*) are resolved by re-running the output function with a
   dummy length to learn the real CC effect.
   NOTE(review): interior source lines are elided in this excerpt.  */
1979 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1982   enum attr_cc cc = get_attr_cc (insn);
1990     case CC_OUT_PLUS_NOCLOBBER:
1993         rtx *op = recog_data.operand;
1996         /* Extract insn's operands.  */
1997         extract_constrain_insn_cached (insn);
2005             avr_out_plus (op, &len_dummy, &icc);
2006             cc = (enum attr_cc) icc;
2009           case CC_OUT_PLUS_NOCLOBBER:
2010             avr_out_plus_noclobber (op, &len_dummy, &icc);
2011             cc = (enum attr_cc) icc;
2016             cc = (op[1] == CONST0_RTX (GET_MODE (op[0]))
2017                   && reg_overlap_mentioned_p (op[0], zero_reg_rtx))
2018               /* Loading zero-reg with 0 uses CLI and thus clobbers cc0.  */
2020               /* Any other "r,rL" combination does not alter cc0.  */
2024           } /* inner switch */
2028     } /* outer switch */
2033       /* Special values like CC_OUT_PLUS from above have been
2034          mapped to "standard" CC_* values so we never come here.  */
2040       /* Insn does not affect CC at all.  */
2048       set = single_set (insn);
2052           cc_status.flags |= CC_NO_OVERFLOW;
2053           cc_status.value1 = SET_DEST (set);
2058       /* Insn sets the Z,N,C flags of CC to recog_operand[0].
2059          The V flag may or may not be known but that's ok because
2060          alter_cond will change tests to use EQ/NE.  */
2061       set = single_set (insn);
2065         cc_status.value1 = SET_DEST (set);
2066       cc_status.flags |= CC_OVERFLOW_UNUSABLE;
2071       set = single_set (insn);
2074           cc_status.value1 = SET_SRC (set);
2078       /* Insn doesn't leave CC in a usable state.  */
2084 /* Choose mode for jump insn:
2085 1 - relative jump in range -63 <= x <= 62 ;
2086 2 - relative jump in range -2046 <= x <= 2045 ;
2087 3 - absolute jump (only for ATmega[16]03). */
/* Classify the jump from INSN to target X by distance: 1 = short RJMP
   range, 2 = full RJMP range, 3 = absolute JMP (when available).
   Distances come from INSN_ADDRESSES.
   NOTE(review): interior source lines are elided in this excerpt.  */
2090 avr_jump_mode (rtx x, rtx insn)
2092   int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
2093                                             ? XEXP (x, 0) : x));
2094   int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
2095   int jump_distance = cur_addr - dest_addr;
2097   if (-63 <= jump_distance && jump_distance <= 62)
2099   else if (-2046 <= jump_distance && jump_distance <= 2045)
2101   else if (AVR_HAVE_JMP_CALL)
2107 /* return an AVR condition jump commands.
2108 X is a comparison RTX.
2109 LEN is a number returned by avr_jump_mode function.
2110 if REVERSE nonzero then condition code in X must be reversed. */
/* Return the assembler sequence for a conditional branch on comparison X.
   LEN is the distance class from avr_jump_mode; REVERSE inverts the
   condition.  Conditions without a single-branch encoding are built from
   a BREQ skip plus the complementary branch/jump.
   NOTE(review): interior source lines are elided in this excerpt.  */
2113 ret_cond_branch (rtx x, int len, int reverse)
2115   RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
2120       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2121         return (len == 1 ? ("breq .+2" CR_TAB
2123                 len == 2 ? ("breq .+4" CR_TAB
2131         return (len == 1 ? ("breq .+2" CR_TAB
2133                 len == 2 ? ("breq .+4" CR_TAB
2140       return (len == 1 ? ("breq .+2" CR_TAB
2142               len == 2 ? ("breq .+4" CR_TAB
2149       if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
2150         return (len == 1 ? ("breq %0" CR_TAB
2152                 len == 2 ? ("breq .+2" CR_TAB
2159         return (len == 1 ? ("breq %0" CR_TAB
2161                 len == 2 ? ("breq .+2" CR_TAB
2168       return (len == 1 ? ("breq %0" CR_TAB
2170               len == 2 ? ("breq .+2" CR_TAB
2184           return ("br%j1 .+2" CR_TAB
2187           return ("br%j1 .+4" CR_TAB
2198           return ("br%k1 .+2" CR_TAB
2201           return ("br%k1 .+4" CR_TAB
2209 /* Output insn cost for next insn. */
/* Hook run before each insn is output: when rtx-cost logging is enabled,
   emit the insn's cost as an assembler comment.  */
2212 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
2213                     int num_operands ATTRIBUTE_UNUSED)
2215   if (avr_log.rtx_costs)
2217       rtx set = single_set (insn);
2220         fprintf (asm_out_file, "/* DEBUG: cost = %d.  */\n",
2221                  set_src_cost (SET_SRC (set), optimize_insn_for_speed_p ()));
2223         fprintf (asm_out_file, "/* DEBUG: pattern-cost = %d.  */\n",
2224                  rtx_cost (PATTERN (insn), INSN, 0,
2225                            optimize_insn_for_speed_p()));
2229 /* Return 0 if undefined, 1 if always true or always false. */
/* Return 1 when comparison OP of a value of MODE against constant X is
   always true or always false (so it can be simplified), 0 otherwise.
   MAX is the all-ones value of MODE; unsupported modes yield MAX == 0.
   NOTE(review): interior source lines are elided in this excerpt.  */
2232 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
2234   unsigned int max = (mode == QImode ? 0xff :
2235                       mode == HImode ? 0xffff :
2236                       mode == PSImode ? 0xffffff :
2237                       mode == SImode ? 0xffffffff : 0);
2238   if (max && op && GET_CODE (x) == CONST_INT)
2240       if (unsigned_condition (op) != op)
2243       if (max != (INTVAL (x) & max)
2244           && INTVAL (x) != 0xff)
2251 /* Returns nonzero if REGNO is the number of a hard
2252 register in which function arguments are sometimes passed. */
/* Return nonzero iff hard register R (r8..r25) may carry a function
   argument under the AVR calling convention.  */
2255 function_arg_regno_p(int r)
2257   return (r >= 8 && r <= 25);
2260 /* Initializing the variable cum for the state at the beginning
2261 of the argument list. */
/* Initialize CUM for scanning an argument list of FNTYPE/LIBNAME.
   Varargs functions (stdarg_p) get special handling.  Also reset the
   per-function sibcall-failure flag.
   NOTE(review): interior source lines are elided in this excerpt.  */
2264 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
2265                       tree fndecl ATTRIBUTE_UNUSED)
2268   cum->regno = FIRST_CUM_REG;
2269   if (!libname && stdarg_p (fntype))
2272   /* Assume the callee may be tail-called.  */
2274   cfun->machine->sibcall_fails = 0;
2277 /* Returns the number of registers to allocate for a function argument. */
/* Return how many registers an argument of MODE/TYPE occupies: its size
   in bytes rounded up to an even number, so arguments start in
   even-numbered registers.  */
2280 avr_num_arg_regs (enum machine_mode mode, const_tree type)
2284   if (mode == BLKmode)
2285     size = int_size_in_bytes (type);
2287     size = GET_MODE_SIZE (mode);
2289   /* Align all function arguments to start in even-numbered registers.
2290      Odd-sized arguments leave holes above them.  */
2292   return (size + 1) & ~1;
2295 /* Controls whether a function argument is passed
2296 in a register, and which register. */
/* Implement TARGET_FUNCTION_ARG: return the register in which to pass an
   argument of MODE/TYPE given state CUM, or (implicitly) the stack when
   it no longer fits in the remaining argument registers.
   NOTE(review): interior source lines are elided in this excerpt.  */
2299 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
2300                   const_tree type, bool named ATTRIBUTE_UNUSED)
2302   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2303   int bytes = avr_num_arg_regs (mode, type);
2305   if (cum->nregs && bytes <= cum->nregs)
       /* Registers are allocated downward from cum->regno.  */
2306     return gen_rtx_REG (mode, cum->regno - bytes);
2311 /* Update the summarizer variable CUM to advance past an argument
2312 in the argument list. */
/* Implement TARGET_FUNCTION_ARG_ADVANCE: step CUM past an argument of
   MODE/TYPE.  Also records when an argument lands in a call-saved or
   user-fixed register, which inhibits tail calls resp. triggers a
   warning (see PR45099).
   NOTE(review): interior source lines are elided in this excerpt.  */
2315 avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
2316                           const_tree type, bool named ATTRIBUTE_UNUSED)
2318   CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2319   int bytes = avr_num_arg_regs (mode, type);
2321   cum->nregs -= bytes;
2322   cum->regno -= bytes;
2324   /* A parameter is being passed in a call-saved register.  As the original
2325      contents of these regs has to be restored before leaving the function,
2326      a function must not pass arguments in call-saved regs in order to get
2331       && !call_used_regs[cum->regno])
2333       /* FIXME: We ship info on failing tail-call in struct machine_function.
2334          This uses internals of calls.c:expand_call() and the way args_so_far
2335          is used.  targetm.function_ok_for_sibcall() needs to be extended to
2336          pass &args_so_far, too.  At present, CUMULATIVE_ARGS is target
2337          dependent so that such an extension is not wanted.  */
2339       cfun->machine->sibcall_fails = 1;
2342   /* Test if all registers needed by the ABI are actually available.  If the
2343      user has fixed a GPR needed to pass an argument, an (implicit) function
2344      call will clobber that fixed register.  See PR45099 for an example.  */
2351       for (regno = cum->regno; regno < cum->regno + bytes; regno++)
2352         if (fixed_regs[regno])
2353           warning (0, "fixed register %s used to pass parameter to function",
2357   if (cum->nregs <= 0)
2360       cum->regno = FIRST_CUM_REG;
2364 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
2365 /* Decide whether we can make a sibling call to a function. DECL is the
2366 declaration of the function being targeted by the call and EXP is the
2367 CALL_EXPR representing the call. */
/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL: decide whether the call to
   DECL_CALLEE/EXP_CALLEE may be a sibling call.  Rejects tail calls when
   args went to call-saved regs, under -mcall-prologues, and whenever
   caller and callee would have incompatible epilogues (interrupt,
   signal, naked, OS_task/OS_main mismatches).
   NOTE(review): interior source lines are elided in this excerpt.  */
2370 avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
2374   /* Tail-calling must fail if callee-saved regs are used to pass
2375      function args.  We must not tail-call when `epilogue_restores'
2376      is used.  Unfortunately, we cannot tell at this point if that
2377      actually will happen or not, and we cannot step back from
2378      tail-calling.  Thus, we inhibit tail-calling with -mcall-prologues.  */
2380   if (cfun->machine->sibcall_fails
2381       || TARGET_CALL_PROLOGUES)
2386   fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));
2390       decl_callee = TREE_TYPE (decl_callee);
2394       decl_callee = fntype_callee;
       /* Strip down to the FUNCTION_TYPE / METHOD_TYPE node.  */
2396       while (FUNCTION_TYPE != TREE_CODE (decl_callee)
2397              && METHOD_TYPE != TREE_CODE (decl_callee))
2399           decl_callee = TREE_TYPE (decl_callee);
2403   /* Ensure that caller and callee have compatible epilogues */
2405   if (interrupt_function_p (current_function_decl)
2406       || signal_function_p (current_function_decl)
2407       || avr_naked_function_p (decl_callee)
2408       || avr_naked_function_p (current_function_decl)
2409       /* FIXME: For OS_task and OS_main, we are over-conservative.
2410          This is due to missing documentation of these attributes
2411          and what they actually should do and should not do.  */
2412       || (avr_OS_task_function_p (decl_callee)
2413           != avr_OS_task_function_p (current_function_decl))
2414       || (avr_OS_main_function_p (decl_callee)
2415           != avr_OS_main_function_p (current_function_decl)))
2423 /***********************************************************************
2424 Functions for outputting various mov's for a various modes
2425 ************************************************************************/
2427 /* Return true if a value of mode MODE is read from flash by
2428 __load_* function from libgcc. */
/* Return true if OP is a flash MEM whose mode is loaded through a
   libgcc __load_* helper (size-dependent; exact bound elided here).  */
2431 avr_load_libgcc_p (rtx op)
2433   enum machine_mode mode = GET_MODE (op);
2434   int n_bytes = GET_MODE_SIZE (mode);
2438           && avr_mem_flash_p (op));
2441 /* Return true if a value of mode MODE is read by __xload_* function. */
/* Return true if a value of MODE is read via a libgcc __xload_* helper,
   e.g. on devices with more than one flash segment.  */
2444 avr_xload_libgcc_p (enum machine_mode mode)
2446   int n_bytes = GET_MODE_SIZE (mode);
2449           || avr_current_device->n_flash > 1);
2453 /* Find an unused d-register to be used as scratch in INSN.
2454 EXCLUDE is either NULL_RTX or some register. In the case where EXCLUDE
2455 is a register, skip all possible return values that overlap EXCLUDE.
2456 The policy for the returned register is similar to that of
2457 `reg_unused_after', i.e. the returned register may overlap the SET_DEST
2460 Return a QImode d-register or NULL_RTX if nothing found. */
/* Find a d-register (r16..r31) usable as scratch in INSN, skipping any
   register overlapping EXCLUDE and all fixed regs.  Prefers registers
   not live at all; otherwise accepts a live register that is unused
   after INSN.  Returns the QImode reg rtx or NULL_RTX.
   NOTE(review): interior source lines are elided in this excerpt.  */
2463 avr_find_unused_d_reg (rtx insn, rtx exclude)
     /* ISRs restrict which call-used regs may be clobbered.  */
2466   bool isr_p = (interrupt_function_p (current_function_decl)
2467                 || signal_function_p (current_function_decl));
2469   for (regno = 16; regno < 32; regno++)
2471       rtx reg = all_regs_rtx[regno];
2474            && reg_overlap_mentioned_p (exclude, reg))
2475           || fixed_regs[regno])
2480       /* Try non-live register */
2482       if (!df_regs_ever_live_p (regno)
2483           && (TREE_THIS_VOLATILE (current_function_decl)
2484               || cfun->machine->is_OS_task
2485               || cfun->machine->is_OS_main
2486               || (!isr_p && call_used_regs[regno])))
2491       /* Any live register can be used if it is unused after.
2492          Prologue/epilogue will care for it as needed.  */
2494       if (df_regs_ever_live_p (regno)
2495           && reg_unused_after (insn, reg))
2505 /* Helper function for the next function in the case where only restricted
2506 version of LPM instruction is available. */
/* Helper for avr_out_lpm on devices with only the plain (non-X) LPM
   instruction: LPM always targets r0, so results are moved to the
   destination afterwards; POST_INC is emulated with explicit ADIW.
   NOTE(review): interior source lines are elided in this excerpt.  */
2509 avr_out_lpm_no_lpmx (rtx insn, rtx *xop, int *plen)
2513   int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2516   regno_dest = REGNO (dest);
2518   /* The implicit target register of LPM.  */
2519   xop[3] = lpm_reg_rtx;
2521   switch (GET_CODE (addr))
2528       gcc_assert (REG_Z == REGNO (addr));
2536           avr_asm_len ("%4lpm", xop, plen, 1);
2538           if (regno_dest != LPM_REGNO)
2539             avr_asm_len ("mov %0,%3", xop, plen, 1);
         /* Destination overlaps Z: go through push/pop.  */
2544           if (REGNO (dest) == REG_Z)
2545             return avr_asm_len ("%4lpm" CR_TAB
2550                                 "pop %A0", xop, plen, 6);
2552           avr_asm_len ("%4lpm" CR_TAB
2556                        "mov %B0,%3", xop, plen, 5);
         /* Restore Z when the caller still needs it.  */
2558           if (!reg_unused_after (insn, addr))
2559             avr_asm_len ("sbiw %2,1", xop, plen, 1);
2568       gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2571           if (regno_dest == LPM_REGNO)
2572             avr_asm_len ("%4lpm" CR_TAB
2573                          "adiw %2,1", xop, plen, 2);
2575             avr_asm_len ("%4lpm" CR_TAB
2577                          "adiw %2,1", xop, plen, 3);
2580             avr_asm_len ("%4lpm" CR_TAB
2582                          "adiw %2,1", xop, plen, 3);
2585             avr_asm_len ("%4lpm" CR_TAB
2587                          "adiw %2,1", xop, plen, 3);
2590             avr_asm_len ("%4lpm" CR_TAB
2592                          "adiw %2,1", xop, plen, 3);
2594       break; /* POST_INC */
2596     } /* switch CODE (addr) */
2602 /* If PLEN == NULL: Output instructions to load a value from a memory location
2603 OP[1] in AS1 to register OP[0].
2604 If PLEN != 0 set *PLEN to the length in words of the instruction sequence.
/* Output (or measure, when PLEN != NULL) the instruction sequence that
   loads a value from flash (LPM/ELPM family) into a register.  Sets
   RAMPZ for segments > 0, dispatches to avr_out_lpm_no_lpmx on devices
   without [E]LPMX, and otherwise emits the LPM Rd,Z[+] forms.
   NOTE(review): interior source lines are elided in this excerpt;
   comments cover only the visible code.  */
2608 avr_out_lpm (rtx insn, rtx *op, int *plen)
2612   rtx src = SET_SRC (single_set (insn));
2614   int n_bytes = GET_MODE_SIZE (GET_MODE (dest));
2618   addr_space_t as = MEM_ADDR_SPACE (src);
     /* Stores to flash address spaces are not supported.  */
2625       warning (0, "writing to address space %qs not supported",
2626                avr_addrspace[MEM_ADDR_SPACE (dest)].name);
2631   addr = XEXP (src, 0);
2632   code = GET_CODE (addr);
2634   gcc_assert (REG_P (dest));
2635   gcc_assert (REG == code || POST_INC == code);
2639   xop[2] = lpm_addr_reg_rtx;
2640   xop[4] = xstring_empty;
2641   xop[5] = tmp_reg_rtx;
2643   regno_dest = REGNO (dest);
2645   segment = avr_addrspace[as].segment;
2647   /* Set RAMPZ as needed.  */
2651       xop[4] = GEN_INT (segment);
       /* Prefer a free d-reg for LDI; fall back to clr/inc or save-restore
          tricks through tmp_reg when none is available.  */
2653       if (xop[3] = avr_find_unused_d_reg (insn, lpm_addr_reg_rtx),
2656           avr_asm_len ("ldi %3,%4" CR_TAB
2657                        "out __RAMPZ__,%3", xop, plen, 2);
2659       else if (segment == 1)
2661           avr_asm_len ("clr %5" CR_TAB
2663                        "out __RAMPZ__,%5", xop, plen, 3);
2667           avr_asm_len ("mov %5,%2" CR_TAB
2669                        "out __RAMPZ__,%2" CR_TAB
2670                        "mov %2,%5", xop, plen, 4);
2675       if (!AVR_HAVE_ELPMX)
2676         return avr_out_lpm_no_lpmx (insn, xop, plen);
2678   else if (!AVR_HAVE_LPMX)
2680       return avr_out_lpm_no_lpmx (insn, xop, plen);
2683   /* We have [E]LPMX: Output reading from Flash the comfortable way.  */
2685   switch (GET_CODE (addr))
2692       gcc_assert (REG_Z == REGNO (addr));
2700           return avr_asm_len ("%4lpm %0,%a2", xop, plen, 1);
         /* 2-byte load with destination overlapping Z: use tmp_reg.  */
2703           if (REGNO (dest) == REG_Z)
2704             return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2705                                 "%4lpm %B0,%a2" CR_TAB
2706                                 "mov %A0,%5", xop, plen, 3);
2709               avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2710                            "%4lpm %B0,%a2", xop, plen, 2);
             /* Undo the post-increment if Z is still live.  */
2712               if (!reg_unused_after (insn, addr))
2713                 avr_asm_len ("sbiw %2,1", xop, plen, 1);
2720           avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2721                        "%4lpm %B0,%a2+" CR_TAB
2722                        "%4lpm %C0,%a2", xop, plen, 3);
2724           if (!reg_unused_after (insn, addr))
2725             avr_asm_len ("sbiw %2,2", xop, plen, 1);
2731           avr_asm_len ("%4lpm %A0,%a2+" CR_TAB
2732                        "%4lpm %B0,%a2+", xop, plen, 2);
         /* 4-byte load whose top half overlaps Z: route via tmp_reg.  */
2734           if (REGNO (dest) == REG_Z - 2)
2735             return avr_asm_len ("%4lpm %5,%a2+" CR_TAB
2736                                 "%4lpm %C0,%a2" CR_TAB
2737                                 "mov %D0,%5", xop, plen, 3);
2740               avr_asm_len ("%4lpm %C0,%a2+" CR_TAB
2741                            "%4lpm %D0,%a2", xop, plen, 2);
2743               if (!reg_unused_after (insn, addr))
2744                 avr_asm_len ("sbiw %2,3", xop, plen, 1);
2754       gcc_assert (REG_Z == REGNO (XEXP (addr, 0))
2757       avr_asm_len ("%4lpm %A0,%a2+", xop, plen, 1);
2758       if (n_bytes >= 2) avr_asm_len ("%4lpm %B0,%a2+", xop, plen, 1);
2759       if (n_bytes >= 3) avr_asm_len ("%4lpm %C0,%a2+", xop, plen, 1);
2760       if (n_bytes >= 4) avr_asm_len ("%4lpm %D0,%a2+", xop, plen, 1);
2762       break; /* POST_INC */
2764     } /* switch CODE (addr) */
2766   if (xop[4] == xstring_e && AVR_HAVE_RAMPD)
2768       /* Reset RAMPZ to 0 so that EBI devices don't read garbage from RAM */
2770       avr_asm_len ("out __RAMPZ__,__zero_reg__", xop, plen, 1);
2777 /* Worker function for xload_8 insn. */
/* Worker for the xload_8 insn: read one byte from 24-bit address space,
   choosing LPM vs. the RAM path by testing bit 7 of the high address
   byte, then move the result to the destination if needed.
   NOTE(review): interior source lines are elided in this excerpt.  */
2780 avr_out_xload (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
2786   xop[2] = lpm_addr_reg_rtx;
2787   xop[3] = AVR_HAVE_LPMX ? op[0] : lpm_reg_rtx;
2792   avr_asm_len ("sbrc %1,7" CR_TAB
2794                "sbrs %1,7", xop, plen, 3);
2796   avr_asm_len (AVR_HAVE_LPMX ? "lpm %3,%a2" : "lpm", xop, plen, 1);
2798   if (REGNO (xop[0]) != REGNO (xop[3]))
2799     avr_asm_len ("mov %0,%3", xop, plen, 1);
/* Output assembler for a QImode move.  Dispatches on operand kinds:
   flash source/dest -> avr_out_lpm, reg<-reg (incl. stack pointer),
   reg<-constant, reg<-mem and mem<-reg (a const0 store goes through
   __zero_reg__).  L, when non-NULL, receives the length in words.
   NOTE(review): interior source lines are elided in this excerpt.  */
2806 output_movqi (rtx insn, rtx operands[], int *l)
2809   rtx dest = operands[0];
2810   rtx src = operands[1];
2813   if (avr_mem_flash_p (src)
2814       || avr_mem_flash_p (dest))
2816       return avr_out_lpm (insn, operands, real_l);
2824   if (register_operand (dest, QImode))
2826       if (register_operand (src, QImode)) /* mov r,r */
2828           if (test_hard_reg_class (STACK_REG, dest))
2830           else if (test_hard_reg_class (STACK_REG, src))
2835       else if (CONSTANT_P (src))
2837           output_reload_in_const (operands, NULL_RTX, real_l, false);
2840       else if (GET_CODE (src) == MEM)
2841         return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
2843   else if (GET_CODE (dest) == MEM)
         /* Storing constant zero reuses __zero_reg__ instead of a load.  */
2848       xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2850       return out_movqi_mr_r (insn, xop, real_l);
/* Output assembler for a HImode move.  Handles flash loads, moves to and
   from the stack pointer (with SREG save/restore when interrupts may
   occur), reg<-reg via MOVW or two MOVs, constants, and memory forms.
   NOTE(review): interior source lines are elided in this excerpt.  */
2857 output_movhi (rtx insn, rtx xop[], int *plen)
2862   gcc_assert (GET_MODE_SIZE (GET_MODE (dest)) == 2);
2864   if (avr_mem_flash_p (src)
2865       || avr_mem_flash_p (dest))
2867       return avr_out_lpm (insn, xop, plen);
2872   if (REG_P (src)) /* mov r,r */
2874       if (test_hard_reg_class (STACK_REG, dest))
2876           if (AVR_HAVE_8BIT_SP)
2877             return avr_asm_len ("out __SP_L__,%A1", xop, plen, -1);
2880             return avr_asm_len ("out __SP_L__,%A1" CR_TAB
2881                                 "out __SP_H__,%B1", xop, plen, -2);
2883           /* Use simple load of SP if no interrupts are used.  */
2885           return TARGET_NO_INTERRUPTS
2886             ? avr_asm_len ("out __SP_H__,%B1" CR_TAB
2887                            "out __SP_L__,%A1", xop, plen, -2)
             /* Otherwise disable interrupts around the 2-byte SP write.  */
2889             : avr_asm_len ("in __tmp_reg__,__SREG__"  CR_TAB
2891                            "out __SP_H__,%B1"         CR_TAB
2892                            "out __SREG__,__tmp_reg__" CR_TAB
2893                            "out __SP_L__,%A1", xop, plen, -5);
2895       else if (test_hard_reg_class (STACK_REG, src))
2897           return AVR_HAVE_8BIT_SP
2898             ? avr_asm_len ("in %A0,__SP_L__" CR_TAB
2899                            "clr %B0", xop, plen, -2)
2901             : avr_asm_len ("in %A0,__SP_L__" CR_TAB
2902                            "in %B0,__SP_H__", xop, plen, -2);
2905       return AVR_HAVE_MOVW
2906         ? avr_asm_len ("movw %0,%1", xop, plen, -1)
2908         : avr_asm_len ("mov %A0,%A1" CR_TAB
2909                        "mov %B0,%B1", xop, plen, -2);
2911   else if (CONSTANT_P (src))
2913       return output_reload_inhi (xop, NULL, plen);
2915   else if (MEM_P (src))
2917       return out_movhi_r_mr (insn, xop, plen); /* mov r,m */
2920   else if (MEM_P (dest))
         /* Storing constant zero reuses __zero_reg__ instead of a load.  */
2925       xop[1] = src == const0_rtx ? zero_reg_rtx : src;
2927       return out_movhi_mr_r (insn, xop, plen);
2930   fatal_insn ("invalid insn:", insn);
/* Output assembler for a QImode load register <- memory.  Constant
   addresses use IN (I/O range) or LDS; reg+disp uses LDD, or, when the
   displacement exceeds the LDD range, temporarily adjusts Y (or X).
   NOTE(review): interior source lines are elided in this excerpt.  */
2936 out_movqi_r_mr (rtx insn, rtx op[], int *plen)
2940   rtx x = XEXP (src, 0);
2942   if (CONSTANT_ADDRESS_P (x))
2944       return optimize > 0 && io_address_operand (x, QImode)
2945         ? avr_asm_len ("in %0,%i1", op, plen, -1)
2946         : avr_asm_len ("lds %0,%m1", op, plen, -2);
2948   else if (GET_CODE (x) == PLUS
2949            && REG_P (XEXP (x, 0))
2950            && CONST_INT_P (XEXP (x, 1)))
2952       /* memory access by reg+disp */
2954       int disp = INTVAL (XEXP (x, 1));
2956       if (disp - GET_MODE_SIZE (GET_MODE (src)) >= 63)
           /* Oversized displacements are only supported on Y.  */
2958           if (REGNO (XEXP (x, 0)) != REG_Y)
2959             fatal_insn ("incorrect insn:",insn);
2961           if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2962             return avr_asm_len ("adiw r28,%o1-63" CR_TAB
2963                                 "ldd %0,Y+63"     CR_TAB
2964                                 "sbiw r28,%o1-63", op, plen, -3);
2966           return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
2967                               "sbci r29,hi8(-%o1)" CR_TAB
2969                               "subi r28,lo8(%o1)"  CR_TAB
2970                               "sbci r29,hi8(%o1)", op, plen, -5);
2972       else if (REGNO (XEXP (x, 0)) == REG_X)
2974           /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2975              it but I have this situation with extremal optimizing options.  */
2977           avr_asm_len ("adiw r26,%o1" CR_TAB
2978                        "ld %0,X", op, plen, -2);
             /* Restore X unless it is dead or the destination.  */
2980           if (!reg_overlap_mentioned_p (dest, XEXP (x,0))
2981               && !reg_unused_after (insn, XEXP (x,0)))
2983               avr_asm_len ("sbiw r26,%o1", op, plen, 1);
2989       return avr_asm_len ("ldd %0,%1", op, plen, -1);
2992   return avr_asm_len ("ld %0,%1", op, plen, -1);
/* Output assembler for a HImode load register <- memory.  Covers plain
   (R), (R+disp), PRE_DEC, POST_INC and constant addresses, with special
   sequences when the destination overlaps the base register and when the
   base is X (which has no LDD form).  Volatile accesses are forced to
   read the low byte first for 16-bit I/O registers.
   NOTE(review): interior source lines are elided in this excerpt.  */
2996 out_movhi_r_mr (rtx insn, rtx op[], int *plen)
3000   rtx base = XEXP (src, 0);
3001   int reg_dest = true_regnum (dest);
3002   int reg_base = true_regnum (base);
3003   /* "volatile" forces reading low byte first, even if less efficient,
3004      for correct operation with 16-bit I/O registers.  */
3005   int mem_volatile_p = MEM_VOLATILE_P (src);
3009       if (reg_dest == reg_base)         /* R = (R) */
           /* Destination overlaps base: buffer the low byte in tmp_reg.  */
3010         return avr_asm_len ("ld __tmp_reg__,%1+" CR_TAB
3012                             "mov %A0,__tmp_reg__", op, plen, -3);
3014       if (reg_base != REG_X)
3015         return avr_asm_len ("ld %A0,%1" CR_TAB
3016                             "ldd %B0,%1+1", op, plen, -2);
3018       avr_asm_len ("ld %A0,X+" CR_TAB
3019                    "ld %B0,X", op, plen, -2);
       /* Undo the post-increment when X stays live.  */
3021       if (!reg_unused_after (insn, base))
3022         avr_asm_len ("sbiw r26,1", op, plen, 1);
3026   else if (GET_CODE (base) == PLUS)      /* (R + i) */
3028       int disp = INTVAL (XEXP (base, 1));
3029       int reg_base = true_regnum (XEXP (base, 0));
3031       if (disp > MAX_LD_OFFSET (GET_MODE (src)))
           /* Oversized displacements are only supported on Y.  */
3033           if (REGNO (XEXP (base, 0)) != REG_Y)
3034             fatal_insn ("incorrect insn:",insn);
3036           return disp <= 63 + MAX_LD_OFFSET (GET_MODE (src))
3037             ? avr_asm_len ("adiw r28,%o1-62" CR_TAB
3038                            "ldd %A0,Y+62"    CR_TAB
3039                            "ldd %B0,Y+63"    CR_TAB
3040                            "sbiw r28,%o1-62", op, plen, -4)
3042             : avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3043                            "sbci r29,hi8(-%o1)" CR_TAB
3045                            "ldd %B0,Y+1"        CR_TAB
3046                            "subi r28,lo8(%o1)"  CR_TAB
3047                            "sbci r29,hi8(%o1)", op, plen, -6);
3050       /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
3051          it but I have this situation with extremal
3052          optimization options.  */
3054       if (reg_base == REG_X)
3055         return reg_base == reg_dest
3056           ? avr_asm_len ("adiw r26,%o1"      CR_TAB
3057                          "ld __tmp_reg__,X+" CR_TAB
3059                          "mov %A0,__tmp_reg__", op, plen, -4)
3061           : avr_asm_len ("adiw r26,%o1" CR_TAB
3064                          "sbiw r26,%o1+1", op, plen, -4);
3066       return reg_base == reg_dest
3067         ? avr_asm_len ("ldd __tmp_reg__,%A1" CR_TAB
3068                        "ldd %B0,%B1"         CR_TAB
3069                        "mov %A0,__tmp_reg__", op, plen, -3)
3071         : avr_asm_len ("ldd %A0,%A1" CR_TAB
3072                        "ldd %B0,%B1", op, plen, -2);
3074   else if (GET_CODE (base) == PRE_DEC)    /* (--R) */
3076       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3077         fatal_insn ("incorrect insn:", insn);
3079       if (!mem_volatile_p)
3080         return avr_asm_len ("ld %B0,%1" CR_TAB
3081                             "ld %A0,%1", op, plen, -2);
       /* Volatile: must read low byte first, so pre-adjust the pointer.  */
3083       return REGNO (XEXP (base, 0)) == REG_X
3084         ? avr_asm_len ("sbiw r26,2"  CR_TAB
3087                        "sbiw r26,1", op, plen, -4)
3089         : avr_asm_len ("sbiw %r1,2"  CR_TAB
3091                        "ldd %B0,%p1+1", op, plen, -3);
3093   else if (GET_CODE (base) == POST_INC)   /* (R++) */
3095       if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
3096         fatal_insn ("incorrect insn:", insn);
3098       return avr_asm_len ("ld %A0,%1"  CR_TAB
3099                           "ld %B0,%1", op, plen, -2);
3101   else if (CONSTANT_ADDRESS_P (base))
3103       return optimize > 0 && io_address_operand (base, HImode)
3104         ? avr_asm_len ("in %A0,%i1" CR_TAB
3105                        "in %B0,%i1+1", op, plen, -2)
3107         : avr_asm_len ("lds %A0,%m1" CR_TAB
3108                        "lds %B0,%m1+1", op, plen, -4);
3111   fatal_insn ("unknown move insn:",insn);
/* Emit assembler to load a 4-byte (SImode) value from memory into
   registers.  op[0] = destination, op[1] = memory source; *l receives
   the instruction count (older style: returns the template string and
   sets *l directly rather than using avr_asm_len).
   NOTE(review): excerpted listing -- interior lines of the original are
   missing.  */
3116 out_movsi_r_mr (rtx insn, rtx op[], int *l)
3120 rtx base = XEXP (src, 0);
3121 int reg_dest = true_regnum (dest);
3122 int reg_base = true_regnum (base);
3130 if (reg_base == REG_X) /* (R26) */
/* dest == X itself: "ld r26,-X" is undefined, so load backwards from
   the top via __tmp_reg__.  */
3132 if (reg_dest == REG_X)
3133 /* "ld r26,-X" is undefined */
3134 return *l=7, ("adiw r26,3" CR_TAB
3137 "ld __tmp_reg__,-X" CR_TAB
3140 "mov r27,__tmp_reg__");
3141 else if (reg_dest == REG_X - 2)
3142 return *l=5, ("ld %A0,X+" CR_TAB
3144 "ld __tmp_reg__,X+" CR_TAB
3146 "mov %C0,__tmp_reg__");
3147 else if (reg_unused_after (insn, base))
3148 return *l=4, ("ld %A0,X+" CR_TAB
3153 return *l=5, ("ld %A0,X+" CR_TAB
/* Base is Y or Z: LDD with small offsets; order of byte loads depends
   on the overlap between dest and base register.  */
3161 if (reg_dest == reg_base)
3162 return *l=5, ("ldd %D0,%1+3" CR_TAB
3163 "ldd %C0,%1+2" CR_TAB
3164 "ldd __tmp_reg__,%1+1" CR_TAB
3166 "mov %B0,__tmp_reg__");
3167 else if (reg_base == reg_dest + 2)
3168 return *l=5, ("ld %A0,%1" CR_TAB
3169 "ldd %B0,%1+1" CR_TAB
3170 "ldd __tmp_reg__,%1+2" CR_TAB
3171 "ldd %D0,%1+3" CR_TAB
3172 "mov %C0,__tmp_reg__");
3174 return *l=4, ("ld %A0,%1" CR_TAB
3175 "ldd %B0,%1+1" CR_TAB
3176 "ldd %C0,%1+2" CR_TAB
3180 else if (GET_CODE (base) == PLUS) /* (R + i) */
3182 int disp = INTVAL (XEXP (base, 1));
3184 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3186 if (REGNO (XEXP (base, 0)) != REG_Y)
3187 fatal_insn ("incorrect insn:",insn);
3189 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3190 return *l = 6, ("adiw r28,%o1-60" CR_TAB
3191 "ldd %A0,Y+60" CR_TAB
3192 "ldd %B0,Y+61" CR_TAB
3193 "ldd %C0,Y+62" CR_TAB
3194 "ldd %D0,Y+63" CR_TAB
3197 return *l = 8, ("subi r28,lo8(-%o1)" CR_TAB
3198 "sbci r29,hi8(-%o1)" CR_TAB
3200 "ldd %B0,Y+1" CR_TAB
3201 "ldd %C0,Y+2" CR_TAB
3202 "ldd %D0,Y+3" CR_TAB
3203 "subi r28,lo8(%o1)" CR_TAB
3204 "sbci r29,hi8(%o1)");
3207 reg_base = true_regnum (XEXP (base, 0));
3208 if (reg_base == REG_X)
3211 if (reg_dest == REG_X)
3214 /* "ld r26,-X" is undefined */
3215 return ("adiw r26,%o1+3" CR_TAB
3218 "ld __tmp_reg__,-X" CR_TAB
3221 "mov r27,__tmp_reg__");
3224 if (reg_dest == REG_X - 2)
3225 return ("adiw r26,%o1" CR_TAB
3228 "ld __tmp_reg__,X+" CR_TAB
3230 "mov r26,__tmp_reg__");
3232 return ("adiw r26,%o1" CR_TAB
/* Y/Z + small displacement: plain LDD sequences, byte order again
   chosen to survive dest/base overlap.  */
3239 if (reg_dest == reg_base)
3240 return *l=5, ("ldd %D0,%D1" CR_TAB
3241 "ldd %C0,%C1" CR_TAB
3242 "ldd __tmp_reg__,%B1" CR_TAB
3243 "ldd %A0,%A1" CR_TAB
3244 "mov %B0,__tmp_reg__");
3245 else if (reg_dest == reg_base - 2)
3246 return *l=5, ("ldd %A0,%A1" CR_TAB
3247 "ldd %B0,%B1" CR_TAB
3248 "ldd __tmp_reg__,%C1" CR_TAB
3249 "ldd %D0,%D1" CR_TAB
3250 "mov %C0,__tmp_reg__");
3251 return *l=4, ("ldd %A0,%A1" CR_TAB
3252 "ldd %B0,%B1" CR_TAB
3253 "ldd %C0,%C1" CR_TAB
3256 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3257 return *l=4, ("ld %D0,%1" CR_TAB
3261 else if (GET_CODE (base) == POST_INC) /* (R++) */
3262 return *l=4, ("ld %A0,%1" CR_TAB
3266 else if (CONSTANT_ADDRESS_P (base))
3267 return *l=8, ("lds %A0,%m1" CR_TAB
3268 "lds %B0,%m1+1" CR_TAB
3269 "lds %C0,%m1+2" CR_TAB
3272 fatal_insn ("unknown move insn:",insn);
/* Emit assembler to store a 4-byte (SImode) register value to memory.
   op[0] = memory destination, op[1] = source register; *l receives the
   instruction count.  Mirrors out_movsi_r_mr for the store direction.
   NOTE(review): excerpted listing -- interior lines of the original are
   missing.  */
3277 out_movsi_mr_r (rtx insn, rtx op[], int *l)
3281 rtx base = XEXP (dest, 0);
3282 int reg_base = true_regnum (base);
3283 int reg_src = true_regnum (src);
/* Absolute address: four STS instructions.  */
3289 if (CONSTANT_ADDRESS_P (base))
3290 return *l=8,("sts %m0,%A1" CR_TAB
3291 "sts %m0+1,%B1" CR_TAB
3292 "sts %m0+2,%C1" CR_TAB
3294 if (reg_base > 0) /* (r) */
3296 if (reg_base == REG_X) /* (R26) */
/* src == X: "st X+,r26" is undefined, so r27 (and presumably r26 in the
   hidden lines) is staged through __tmp_reg__ first.  */
3298 if (reg_src == REG_X)
3300 /* "st X+,r26" is undefined */
3301 if (reg_unused_after (insn, base))
3302 return *l=6, ("mov __tmp_reg__,r27" CR_TAB
3305 "st X+,__tmp_reg__" CR_TAB
3309 return *l=7, ("mov __tmp_reg__,r27" CR_TAB
3312 "st X+,__tmp_reg__" CR_TAB
/* src overlaps the upper half of the X pointer: stage %C1/%D1 in
   __zero_reg__/__tmp_reg__, then restore __zero_reg__ to 0.  */
3317 else if (reg_base == reg_src + 2)
3319 if (reg_unused_after (insn, base))
3320 return *l=7, ("mov __zero_reg__,%C1" CR_TAB
3321 "mov __tmp_reg__,%D1" CR_TAB
3324 "st %0+,__zero_reg__" CR_TAB
3325 "st %0,__tmp_reg__" CR_TAB
3326 "clr __zero_reg__");
3328 return *l=8, ("mov __zero_reg__,%C1" CR_TAB
3329 "mov __tmp_reg__,%D1" CR_TAB
3332 "st %0+,__zero_reg__" CR_TAB
3333 "st %0,__tmp_reg__" CR_TAB
3334 "clr __zero_reg__" CR_TAB
3337 return *l=5, ("st %0+,%A1" CR_TAB
3344 return *l=4, ("st %0,%A1" CR_TAB
3345 "std %0+1,%B1" CR_TAB
3346 "std %0+2,%C1" CR_TAB
3349 else if (GET_CODE (base) == PLUS) /* (R + i) */
3351 int disp = INTVAL (XEXP (base, 1));
3352 reg_base = REGNO (XEXP (base, 0));
/* Displacement beyond STD's 0..63 range: temporarily adjust Y.  */
3353 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3355 if (reg_base != REG_Y)
3356 fatal_insn ("incorrect insn:",insn);
3358 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3359 return *l = 6, ("adiw r28,%o0-60" CR_TAB
3360 "std Y+60,%A1" CR_TAB
3361 "std Y+61,%B1" CR_TAB
3362 "std Y+62,%C1" CR_TAB
3363 "std Y+63,%D1" CR_TAB
3366 return *l = 8, ("subi r28,lo8(-%o0)" CR_TAB
3367 "sbci r29,hi8(-%o0)" CR_TAB
3369 "std Y+1,%B1" CR_TAB
3370 "std Y+2,%C1" CR_TAB
3371 "std Y+3,%D1" CR_TAB
3372 "subi r28,lo8(%o0)" CR_TAB
3373 "sbci r29,hi8(%o0)");
3375 if (reg_base == REG_X)
3378 if (reg_src == REG_X)
3381 return ("mov __tmp_reg__,r26" CR_TAB
3382 "mov __zero_reg__,r27" CR_TAB
3383 "adiw r26,%o0" CR_TAB
3384 "st X+,__tmp_reg__" CR_TAB
3385 "st X+,__zero_reg__" CR_TAB
3388 "clr __zero_reg__" CR_TAB
3391 else if (reg_src == REG_X - 2)
3394 return ("mov __tmp_reg__,r26" CR_TAB
3395 "mov __zero_reg__,r27" CR_TAB
3396 "adiw r26,%o0" CR_TAB
3399 "st X+,__tmp_reg__" CR_TAB
3400 "st X,__zero_reg__" CR_TAB
3401 "clr __zero_reg__" CR_TAB
3405 return ("adiw r26,%o0" CR_TAB
3412 return *l=4, ("std %A0,%A1" CR_TAB
3413 "std %B0,%B1" CR_TAB
3414 "std %C0,%C1" CR_TAB
3417 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3418 return *l=4, ("st %0,%D1" CR_TAB
3422 else if (GET_CODE (base) == POST_INC) /* (R++) */
3423 return *l=4, ("st %0,%A1" CR_TAB
3427 fatal_insn ("unknown move insn:",insn);
/* Top-level dispatcher for 32-bit (SImode/SFmode) moves: reg-reg moves
   are emitted inline (MOVW pairs when available, else single MOVs in an
   order safe for overlapping register ranges), constants go through
   output_reload_insisf, and memory operands are delegated to
   out_movsi_r_mr / out_movsi_mr_r.  Flash-resident operands are handled
   entirely by avr_out_lpm.
   NOTE(review): excerpted listing -- interior lines of the original are
   missing.  */
3432 output_movsisf (rtx insn, rtx operands[], int *l)
3435 rtx dest = operands[0];
3436 rtx src = operands[1];
3439 if (avr_mem_flash_p (src)
3440 || avr_mem_flash_p (dest))
3442 return avr_out_lpm (insn, operands, real_l);
3448 if (register_operand (dest, VOIDmode))
3450 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy high-to-low when dest regno is above src so overlapping pairs are
   not clobbered mid-copy; otherwise low-to-high.  */
3452 if (true_regnum (dest) > true_regnum (src))
3457 return ("movw %C0,%C1" CR_TAB
3461 return ("mov %D0,%D1" CR_TAB
3462 "mov %C0,%C1" CR_TAB
3463 "mov %B0,%B1" CR_TAB
3471 return ("movw %A0,%A1" CR_TAB
3475 return ("mov %A0,%A1" CR_TAB
3476 "mov %B0,%B1" CR_TAB
3477 "mov %C0,%C1" CR_TAB
3481 else if (CONSTANT_P (src))
3483 return output_reload_insisf (operands, NULL_RTX, real_l);
3485 else if (GET_CODE (src) == MEM)
3486 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
3488 else if (GET_CODE (dest) == MEM)
/* Storing the constant 0 reuses __zero_reg__ as source.  */
3492 if (src == CONST0_RTX (GET_MODE (dest)))
3493 operands[1] = zero_reg_rtx;
3495 templ = out_movsi_mr_r (insn, operands, real_l);
3498 output_asm_insn (templ, operands);
3503 fatal_insn ("invalid insn:", insn);
3508 /* Handle loads of 24-bit types from memory to register. */
/* op[0] = destination register, op[1] = memory source; *plen collects
   the length via avr_asm_len.  Same addressing-mode cases as the HI/SI
   loaders, for 3-byte (PSImode) values.
   NOTE(review): excerpted listing -- interior lines of the original are
   missing.  */
3511 avr_out_load_psi (rtx insn, rtx *op, int *plen)
3515 rtx base = XEXP (src, 0);
3516 int reg_dest = true_regnum (dest);
3517 int reg_base = true_regnum (base);
3521 if (reg_base == REG_X) /* (R26) */
3523 if (reg_dest == REG_X)
3524 /* "ld r26,-X" is undefined */
3525 return avr_asm_len ("adiw r26,2" CR_TAB
3527 "ld __tmp_reg__,-X" CR_TAB
3530 "mov r27,__tmp_reg__", op, plen, -6);
3533 avr_asm_len ("ld %A0,X+" CR_TAB
3535 "ld %C0,X", op, plen, -3);
/* Restore X unless dest covers it (REG_X - 2 .. ) or X is dead.  */
3537 if (reg_dest != REG_X - 2
3538 && !reg_unused_after (insn, base))
3540 avr_asm_len ("sbiw r26,2", op, plen, 1);
3546 else /* reg_base != REG_X */
3548 if (reg_dest == reg_base)
3549 return avr_asm_len ("ldd %C0,%1+2" CR_TAB
3550 "ldd __tmp_reg__,%1+1" CR_TAB
3552 "mov %B0,__tmp_reg__", op, plen, -4);
3554 return avr_asm_len ("ld %A0,%1" CR_TAB
3555 "ldd %B0,%1+1" CR_TAB
3556 "ldd %C0,%1+2", op, plen, -3);
3559 else if (GET_CODE (base) == PLUS) /* (R + i) */
3561 int disp = INTVAL (XEXP (base, 1));
3563 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
3565 if (REGNO (XEXP (base, 0)) != REG_Y)
3566 fatal_insn ("incorrect insn:",insn);
3568 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
3569 return avr_asm_len ("adiw r28,%o1-61" CR_TAB
3570 "ldd %A0,Y+61" CR_TAB
3571 "ldd %B0,Y+62" CR_TAB
3572 "ldd %C0,Y+63" CR_TAB
3573 "sbiw r28,%o1-61", op, plen, -5);
3575 return avr_asm_len ("subi r28,lo8(-%o1)" CR_TAB
3576 "sbci r29,hi8(-%o1)" CR_TAB
3578 "ldd %B0,Y+1" CR_TAB
3579 "ldd %C0,Y+2" CR_TAB
3580 "subi r28,lo8(%o1)" CR_TAB
3581 "sbci r29,hi8(%o1)", op, plen, -7);
3584 reg_base = true_regnum (XEXP (base, 0));
3585 if (reg_base == REG_X)
3588 if (reg_dest == REG_X)
3590 /* "ld r26,-X" is undefined */
3591 return avr_asm_len ("adiw r26,%o1+2" CR_TAB
3593 "ld __tmp_reg__,-X" CR_TAB
3596 "mov r27,__tmp_reg__", op, plen, -6);
3599 avr_asm_len ("adiw r26,%o1" CR_TAB
3602 "ld r26,X", op, plen, -4);
3604 if (reg_dest != REG_X - 2)
3605 avr_asm_len ("sbiw r26,%o1+2", op, plen, 1);
3610 if (reg_dest == reg_base)
3611 return avr_asm_len ("ldd %C0,%C1" CR_TAB
3612 "ldd __tmp_reg__,%B1" CR_TAB
3613 "ldd %A0,%A1" CR_TAB
3614 "mov %B0,__tmp_reg__", op, plen, -4);
3616 return avr_asm_len ("ldd %A0,%A1" CR_TAB
3617 "ldd %B0,%B1" CR_TAB
3618 "ldd %C0,%C1", op, plen, -3);
3620 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3621 return avr_asm_len ("ld %C0,%1" CR_TAB
3623 "ld %A0,%1", op, plen, -3);
3624 else if (GET_CODE (base) == POST_INC) /* (R++) */
3625 return avr_asm_len ("ld %A0,%1" CR_TAB
3627 "ld %C0,%1", op, plen, -3);
3629 else if (CONSTANT_ADDRESS_P (base))
3630 return avr_asm_len ("lds %A0,%m1" CR_TAB
3631 "lds %B0,%m1+1" CR_TAB
3632 "lds %C0,%m1+2", op, plen , -6);
3634 fatal_insn ("unknown move insn:",insn);
3638 /* Handle store of 24-bit type from register or zero to memory. */
/* op[0] = memory destination, op[1] = source register (possibly
   __zero_reg__ for a zero store, see avr_out_movpsi); *plen collects the
   length via avr_asm_len.
   NOTE(review): excerpted listing -- interior lines of the original are
   missing.  */
3641 avr_out_store_psi (rtx insn, rtx *op, int *plen)
3645 rtx base = XEXP (dest, 0);
3646 int reg_base = true_regnum (base);
3648 if (CONSTANT_ADDRESS_P (base))
3649 return avr_asm_len ("sts %m0,%A1" CR_TAB
3650 "sts %m0+1,%B1" CR_TAB
3651 "sts %m0+2,%C1", op, plen, -6);
3653 if (reg_base > 0) /* (r) */
3655 if (reg_base == REG_X) /* (R26) */
/* Unlike the SI store, overlap of src with X is asserted impossible
   here rather than handled.  */
3657 gcc_assert (!reg_overlap_mentioned_p (base, src));
3659 avr_asm_len ("st %0+,%A1" CR_TAB
3661 "st %0,%C1", op, plen, -3);
3663 if (!reg_unused_after (insn, base))
3664 avr_asm_len ("sbiw r26,2", op, plen, 1);
3669 return avr_asm_len ("st %0,%A1" CR_TAB
3670 "std %0+1,%B1" CR_TAB
3671 "std %0+2,%C1", op, plen, -3);
3673 else if (GET_CODE (base) == PLUS) /* (R + i) */
3675 int disp = INTVAL (XEXP (base, 1));
3676 reg_base = REGNO (XEXP (base, 0));
3678 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3680 if (reg_base != REG_Y)
3681 fatal_insn ("incorrect insn:",insn);
3683 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3684 return avr_asm_len ("adiw r28,%o0-61" CR_TAB
3685 "std Y+61,%A1" CR_TAB
3686 "std Y+62,%B1" CR_TAB
3687 "std Y+63,%C1" CR_TAB
3688 "sbiw r28,%o0-60", op, plen, -5);
3690 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3691 "sbci r29,hi8(-%o0)" CR_TAB
3693 "std Y+1,%B1" CR_TAB
3694 "std Y+2,%C1" CR_TAB
3695 "subi r28,lo8(%o0)" CR_TAB
3696 "sbci r29,hi8(%o0)", op, plen, -7);
3698 if (reg_base == REG_X)
3701 gcc_assert (!reg_overlap_mentioned_p (XEXP (base, 0), src));
3703 avr_asm_len ("adiw r26,%o0" CR_TAB
3706 "st X,%C1", op, plen, -4);
3708 if (!reg_unused_after (insn, XEXP (base, 0)))
3709 avr_asm_len ("sbiw r26,%o0+2", op, plen, 1);
3714 return avr_asm_len ("std %A0,%A1" CR_TAB
3715 "std %B0,%B1" CR_TAB
3716 "std %C0,%C1", op, plen, -3);
3718 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3719 return avr_asm_len ("st %0,%C1" CR_TAB
3721 "st %0,%A1", op, plen, -3);
3722 else if (GET_CODE (base) == POST_INC) /* (R++) */
3723 return avr_asm_len ("st %0,%A1" CR_TAB
3725 "st %0,%C1", op, plen, -3);
3727 fatal_insn ("unknown move insn:",insn);
3732 /* Move around 24-bit stuff. */
/* Dispatcher for PSImode (3-byte) moves: reg-reg inline (MOVW for the
   low pair when possible, then a MOV for the third byte), constants via
   avr_out_reload_inpsi, memory via avr_out_load_psi /
   avr_out_store_psi.  Storing constant 0 substitutes __zero_reg__.
   NOTE(review): excerpted listing -- interior lines of the original are
   missing.  */
3735 avr_out_movpsi (rtx insn, rtx *op, int *plen)
3740 if (avr_mem_flash_p (src)
3741 || avr_mem_flash_p (dest))
3743 return avr_out_lpm (insn, op, plen);
3746 if (register_operand (dest, VOIDmode))
3748 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order depends on which regno is higher, so overlapping source
   and destination ranges are not clobbered mid-copy.  */
3750 if (true_regnum (dest) > true_regnum (src))
3752 avr_asm_len ("mov %C0,%C1", op, plen, -1);
3755 return avr_asm_len ("movw %A0,%A1", op, plen, 1);
3757 return avr_asm_len ("mov %B0,%B1" CR_TAB
3758 "mov %A0,%A1", op, plen, 2);
3763 avr_asm_len ("movw %A0,%A1", op, plen, -1);
3765 avr_asm_len ("mov %A0,%A1" CR_TAB
3766 "mov %B0,%B1", op, plen, -2);
3768 return avr_asm_len ("mov %C0,%C1", op, plen, 1);
3771 else if (CONSTANT_P (src))
3773 return avr_out_reload_inpsi (op, NULL_RTX, plen);
3775 else if (MEM_P (src))
3776 return avr_out_load_psi (insn, op, plen); /* mov r,m */
3778 else if (MEM_P (dest))
3783 xop[1] = src == CONST0_RTX (GET_MODE (dest)) ? zero_reg_rtx : src;
3785 return avr_out_store_psi (insn, xop, plen);
3788 fatal_insn ("invalid insn:", insn);
/* Emit assembler to store a single byte (QImode) register to memory.
   op[0] = memory destination, op[1] = source register; *plen collects
   the length via avr_asm_len.  Uses OUT for I/O addresses when
   optimizing, STS for other absolute addresses, STD for reg+disp, and
   ADIW/ST/SBIW workarounds for X-based addressing.
   NOTE(review): excerpted listing -- interior lines of the original are
   missing.  */
3794 out_movqi_mr_r (rtx insn, rtx op[], int *plen)
3798 rtx x = XEXP (dest, 0);
3800 if (CONSTANT_ADDRESS_P (x))
3802 return optimize > 0 && io_address_operand (x, QImode)
3803 ? avr_asm_len ("out %i0,%1", op, plen, -1)
3804 : avr_asm_len ("sts %m0,%1", op, plen, -2);
3806 else if (GET_CODE (x) == PLUS
3807 && REG_P (XEXP (x, 0))
3808 && CONST_INT_P (XEXP (x, 1)))
3810 /* memory access by reg+disp */
3812 int disp = INTVAL (XEXP (x, 1));
3814 if (disp - GET_MODE_SIZE (GET_MODE (dest)) >= 63)
3816 if (REGNO (XEXP (x, 0)) != REG_Y)
3817 fatal_insn ("incorrect insn:",insn);
3819 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
3820 return avr_asm_len ("adiw r28,%o0-63" CR_TAB
3821 "std Y+63,%1" CR_TAB
3822 "sbiw r28,%o0-63", op, plen, -3);
3824 return avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3825 "sbci r29,hi8(-%o0)" CR_TAB
3827 "subi r28,lo8(%o0)" CR_TAB
3828 "sbci r29,hi8(%o0)", op, plen, -5);
3830 else if (REGNO (XEXP (x,0)) == REG_X)
/* If the source overlaps X it would be clobbered by the ADIW, so copy
   it to __tmp_reg__ first.  */
3832 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
3834 avr_asm_len ("mov __tmp_reg__,%1" CR_TAB
3835 "adiw r26,%o0" CR_TAB
3836 "st X,__tmp_reg__", op, plen, -3);
3840 avr_asm_len ("adiw r26,%o0" CR_TAB
3841 "st X,%1", op, plen, -2);
3844 if (!reg_unused_after (insn, XEXP (x,0)))
3845 avr_asm_len ("sbiw r26,%o0", op, plen, 1);
3850 return avr_asm_len ("std %0,%1", op, plen, -1);
3853 return avr_asm_len ("st %0,%1", op, plen, -1);
3857 /* Helper for the next function for XMEGA. It does the same
3858 but with low byte first. */
/* XMEGA variant of the 16-bit store: writes the LOW byte first (the
   non-XMEGA path in out_movhi_mr_r writes high byte first).  op[0] =
   memory destination, op[1] = source register pair.
   NOTE(review): excerpted listing -- interior lines of the original are
   missing.  */
3861 avr_out_movhi_mr_r_xmega (rtx insn, rtx op[], int *plen)
3865 rtx base = XEXP (dest, 0);
3866 int reg_base = true_regnum (base);
3867 int reg_src = true_regnum (src);
3869 /* "volatile" forces writing low byte first, even if less efficient,
3870 for correct operation with 16-bit I/O registers like SP. */
3871 int mem_volatile_p = MEM_VOLATILE_P (dest);
3873 if (CONSTANT_ADDRESS_P (base))
3874 return optimize > 0 && io_address_operand (base, HImode)
3875 ? avr_asm_len ("out %i0,%A1" CR_TAB
3876 "out %i0+1,%B1", op, plen, -2)
3878 : avr_asm_len ("sts %m0,%A1" CR_TAB
3879 "sts %m0+1,%B1", op, plen, -4);
3883 if (reg_base != REG_X)
3884 return avr_asm_len ("st %0,%A1" CR_TAB
3885 "std %0+1,%B1", op, plen, -2);
3887 if (reg_src == REG_X)
3888 /* "st X+,r26" and "st -X,r26" are undefined. */
3889 avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
3892 "st X,__tmp_reg__", op, plen, -4);
3894 avr_asm_len ("st X+,%A1" CR_TAB
3895 "st X,%B1", op, plen, -2);
3897 return reg_unused_after (insn, base)
3899 : avr_asm_len ("sbiw r26,1", op, plen, 1);
3901 else if (GET_CODE (base) == PLUS)
3903 int disp = INTVAL (XEXP (base, 1));
3904 reg_base = REGNO (XEXP (base, 0));
3905 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
3907 if (reg_base != REG_Y)
3908 fatal_insn ("incorrect insn:",insn);
3910 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
3911 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
3912 "std Y+62,%A1" CR_TAB
3913 "std Y+63,%B1" CR_TAB
3914 "sbiw r28,%o0-62", op, plen, -4)
3916 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
3917 "sbci r29,hi8(-%o0)" CR_TAB
3919 "std Y+1,%B1" CR_TAB
3920 "subi r28,lo8(%o0)" CR_TAB
3921 "sbci r29,hi8(%o0)", op, plen, -6);
3924 if (reg_base != REG_X)
3925 return avr_asm_len ("std %A0,%A1" CR_TAB
3926 "std %B0,%B1", op, plen, -2);
3928 return reg_src == REG_X
3929 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
3930 "mov __zero_reg__,r27" CR_TAB
3931 "adiw r26,%o0" CR_TAB
3932 "st X+,__tmp_reg__" CR_TAB
3933 "st X,__zero_reg__" CR_TAB
3934 "clr __zero_reg__" CR_TAB
3935 "sbiw r26,%o0+1", op, plen, -7)
3937 : avr_asm_len ("adiw r26,%o0" CR_TAB
3940 "sbiw r26,%o0+1", op, plen, -4);
3942 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
3944 if (!mem_volatile_p)
3945 return avr_asm_len ("st %0,%B1" CR_TAB
3946 "st %0,%A1", op, plen, -2);
3948 return REGNO (XEXP (base, 0)) == REG_X
3949 ? avr_asm_len ("sbiw r26,2" CR_TAB
3952 "sbiw r26,1", op, plen, -4)
3954 : avr_asm_len ("sbiw %r0,2" CR_TAB
3956 "std %p0+1,%B1", op, plen, -3);
3958 else if (GET_CODE (base) == POST_INC) /* (R++) */
3960 return avr_asm_len ("st %0,%A1" CR_TAB
3961 "st %0,%B1", op, plen, -2);
3964 fatal_insn ("unknown move insn:",insn);
/* Emit assembler to store a 16-bit (HImode) register pair to memory.
   Delegates to avr_out_movhi_mr_r_xmega on XMEGA (low byte first); the
   sequences here write the HIGH byte first, which matters for 16-bit
   I/O registers.  op[0] = memory destination, op[1] = source.
   NOTE(review): excerpted listing -- interior lines of the original are
   missing.  */
3970 out_movhi_mr_r (rtx insn, rtx op[], int *plen)
3974 rtx base = XEXP (dest, 0);
3975 int reg_base = true_regnum (base);
3976 int reg_src = true_regnum (src);
3979 /* "volatile" forces writing high-byte first (no-xmega) resp.
3980 low-byte first (xmega) even if less efficient, for correct
3981 operation with 16-bit I/O registers like. */
3984 return avr_out_movhi_mr_r_xmega (insn, op, plen);
3986 mem_volatile_p = MEM_VOLATILE_P (dest);
3988 if (CONSTANT_ADDRESS_P (base))
3989 return optimize > 0 && io_address_operand (base, HImode)
3990 ? avr_asm_len ("out %i0+1,%B1" CR_TAB
3991 "out %i0,%A1", op, plen, -2)
3993 : avr_asm_len ("sts %m0+1,%B1" CR_TAB
3994 "sts %m0,%A1", op, plen, -4);
3998 if (reg_base != REG_X)
3999 return avr_asm_len ("std %0+1,%B1" CR_TAB
4000 "st %0,%A1", op, plen, -2);
4002 if (reg_src == REG_X)
4003 /* "st X+,r26" and "st -X,r26" are undefined. */
4004 return !mem_volatile_p && reg_unused_after (insn, src)
4005 ? avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4008 "st X,__tmp_reg__", op, plen, -4)
4010 : avr_asm_len ("mov __tmp_reg__,r27" CR_TAB
4012 "st X,__tmp_reg__" CR_TAB
4014 "st X,r26", op, plen, -5);
4016 return !mem_volatile_p && reg_unused_after (insn, base)
4017 ? avr_asm_len ("st X+,%A1" CR_TAB
4018 "st X,%B1", op, plen, -2)
4019 : avr_asm_len ("adiw r26,1" CR_TAB
4021 "st -X,%A1", op, plen, -3);
4023 else if (GET_CODE (base) == PLUS)
4025 int disp = INTVAL (XEXP (base, 1));
4026 reg_base = REGNO (XEXP (base, 0));
4027 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
4029 if (reg_base != REG_Y)
4030 fatal_insn ("incorrect insn:",insn);
4032 return disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest))
4033 ? avr_asm_len ("adiw r28,%o0-62" CR_TAB
4034 "std Y+63,%B1" CR_TAB
4035 "std Y+62,%A1" CR_TAB
4036 "sbiw r28,%o0-62", op, plen, -4)
4038 : avr_asm_len ("subi r28,lo8(-%o0)" CR_TAB
4039 "sbci r29,hi8(-%o0)" CR_TAB
4040 "std Y+1,%B1" CR_TAB
4042 "subi r28,lo8(%o0)" CR_TAB
4043 "sbci r29,hi8(%o0)", op, plen, -6);
4046 if (reg_base != REG_X)
4047 return avr_asm_len ("std %B0,%B1" CR_TAB
4048 "std %A0,%A1", op, plen, -2);
4050 return reg_src == REG_X
4051 ? avr_asm_len ("mov __tmp_reg__,r26" CR_TAB
4052 "mov __zero_reg__,r27" CR_TAB
4053 "adiw r26,%o0+1" CR_TAB
4054 "st X,__zero_reg__" CR_TAB
4055 "st -X,__tmp_reg__" CR_TAB
4056 "clr __zero_reg__" CR_TAB
4057 "sbiw r26,%o0", op, plen, -7)
4059 : avr_asm_len ("adiw r26,%o0+1" CR_TAB
4062 "sbiw r26,%o0", op, plen, -4);
4064 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
4066 return avr_asm_len ("st %0,%B1" CR_TAB
4067 "st %0,%A1", op, plen, -2);
4069 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Non-volatile post-increment may use the natural low-then-high order;
   volatile must keep high byte first, hence the reversed sequences
   with explicit pointer adjustment below.  */
4071 if (!mem_volatile_p)
4072 return avr_asm_len ("st %0,%A1" CR_TAB
4073 "st %0,%B1", op, plen, -2);
4075 return REGNO (XEXP (base, 0)) == REG_X
4076 ? avr_asm_len ("adiw r26,1" CR_TAB
4079 "adiw r26,2", op, plen, -4)
4081 : avr_asm_len ("std %p0+1,%B1" CR_TAB
4083 "adiw %r0,2", op, plen, -3);
4085 fatal_insn ("unknown move insn:",insn);
4089 /* Return 1 if frame pointer for current function required. */
/* A frame pointer is needed for alloca, setjmp, nonlocal labels, a
   nonzero frame size, or when no arguments are passed in registers
   (crtl->args.info.nregs == 0).
   NOTE(review): excerpt -- the original may have additional conditions
   on lines not shown here.  */
4092 avr_frame_pointer_required_p (void)
4094 return (cfun->calls_alloca
4095 || cfun->calls_setjmp
4096 || cfun->has_nonlocal_label
4097 || crtl->args.info.nregs == 0
4098 || get_frame_size () > 0);
4101 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Looks at the next real insn; if it is a conditional jump
   (IF_THEN_ELSE source), return its comparison code.
   NOTE(review): the fallthrough return (presumably UNKNOWN) is on a
   line not shown in this excerpt.  */
4104 compare_condition (rtx insn)
4106 rtx next = next_real_insn (insn);
4108 if (next && JUMP_P (next))
4110 rtx pat = PATTERN (next);
4111 rtx src = SET_SRC (pat);
4113 if (IF_THEN_ELSE == GET_CODE (src))
4114 return GET_CODE (XEXP (src, 0));
4121 /* Returns true iff INSN is a tst insn that only tests the sign. */
/* True when the following jump uses GE or LT, i.e. only the sign bit
   of the compared value matters.  */
4124 compare_sign_p (rtx insn)
4126 RTX_CODE cond = compare_condition (insn);
4127 return (cond == GE || cond == LT);
4131 /* Returns true iff the next insn is a JUMP_INSN with a condition
4132 that needs to be swapped (GT, GTU, LE, LEU). */
/* NOTE(review): despite "returns true", the code returns the condition
   code itself (nonzero) on a match and 0 otherwise.  */
4135 compare_diff_p (rtx insn)
4137 RTX_CODE cond = compare_condition (insn);
4138 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
4141 /* Returns true iff INSN is a compare insn with the EQ or NE condition. */
/* True when only (in)equality matters, enabling shorter compare
   sequences in avr_out_compare.  */
4144 compare_eq_p (rtx insn)
4146 RTX_CODE cond = compare_condition (insn);
4147 return (cond == EQ || cond == NE);
4151 /* Output compare instruction
4153 compare (XOP[0], XOP[1])
4155 for an HI/SI register XOP[0] and an integer XOP[1]. Return "".
4156 XOP[2] is an 8-bit scratch register as needed.
4158 PLEN == NULL: Output instructions.
4159 PLEN != NULL: Set *PLEN to the length (in words) of the sequence.
4160 Don't output anything. */
4163 avr_out_compare (rtx insn, rtx *xop, int *plen)
4165 /* Register to compare and value to compare against. */
4169 /* MODE of the comparison. */
4170 enum machine_mode mode = GET_MODE (xreg);
4172 /* Number of bytes to operate on. */
4173 int i, n_bytes = GET_MODE_SIZE (mode);
4175 /* Value (0..0xff) held in clobber register xop[2] or -1 if unknown. */
4176 int clobber_val = -1;
4178 gcc_assert (REG_P (xreg));
4179 gcc_assert ((CONST_INT_P (xval) && n_bytes <= 4)
4180 || (const_double_operand (xval, VOIDmode) && n_bytes == 8));
4185 /* Comparisons == +/-1 and != +/-1 can be done similar to camparing
4186 against 0 by ORing the bytes. This is one instruction shorter.
4187 Notice that DImode comparisons are always against reg:DI 18
4188 and therefore don't use this. */
4190 if (!test_hard_reg_class (LD_REGS, xreg)
4191 && compare_eq_p (insn)
4192 && reg_unused_after (insn, xreg))
/* Compare against +1: DEC the low byte then OR in the higher bytes;
   the whole value was 1 iff the result is 0.  */
4194 if (xval == const1_rtx)
4196 avr_asm_len ("dec %A0" CR_TAB
4197 "or %A0,%B0", xop, plen, 2);
4200 avr_asm_len ("or %A0,%C0", xop, plen, 1);
4203 avr_asm_len ("or %A0,%D0", xop, plen, 1);
/* Compare against -1: AND all bytes; result is 0xff iff all were 0xff,
   then COM turns that into 0.  */
4207 else if (xval == constm1_rtx)
4210 avr_asm_len ("and %A0,%D0", xop, plen, 1);
4213 avr_asm_len ("and %A0,%C0", xop, plen, 1);
4215 return avr_asm_len ("and %A0,%B0" CR_TAB
4216 "com %A0", xop, plen, 2);
/* General case: byte-wise CP/CPI/CPC chain.  */
4220 for (i = 0; i < n_bytes; i++)
4222 /* We compare byte-wise. */
4223 rtx reg8 = simplify_gen_subreg (QImode, xreg, mode, i);
4224 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
4226 /* 8-bit value to compare with this byte. */
4227 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
4229 /* Registers R16..R31 can operate with immediate. */
4230 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
4233 xop[1] = gen_int_mode (val8, QImode);
4235 /* Word registers >= R24 can use SBIW/ADIW with 0..63. */
4238 && test_hard_reg_class (ADDW_REGS, reg8))
4240 int val16 = trunc_int_for_mode (INTVAL (xval), HImode);
4242 if (IN_RANGE (val16, 0, 63)
4244 || reg_unused_after (insn, xreg)))
4246 avr_asm_len ("sbiw %0,%1", xop, plen, 1);
/* Negative 0..-63 against a dead reg with EQ/NE: ADIW of the negated
   value sets Z exactly when equal.  */
4252 && IN_RANGE (val16, -63, -1)
4253 && compare_eq_p (insn)
4254 && reg_unused_after (insn, xreg))
4256 return avr_asm_len ("adiw %0,%n1", xop, plen, 1);
4260 /* Comparing against 0 is easy. */
4265 ? "cp %0,__zero_reg__"
4266 : "cpc %0,__zero_reg__", xop, plen, 1);
4270 /* Upper registers can compare and subtract-with-carry immediates.
4271 Notice that compare instructions do the same as respective subtract
4272 instruction; the only difference is that comparisons don't write
4273 the result back to the target register. */
4279 avr_asm_len ("cpi %0,%1", xop, plen, 1);
4282 else if (reg_unused_after (insn, xreg))
4284 avr_asm_len ("sbci %0,%1", xop, plen, 1);
4289 /* Must load the value into the scratch register. */
4291 gcc_assert (REG_P (xop[2]));
/* Reuse the scratch if it already holds this byte value.  */
4293 if (clobber_val != (int) val8)
4294 avr_asm_len ("ldi %2,%1", xop, plen, 1);
4295 clobber_val = (int) val8;
4299 : "cpc %0,%2", xop, plen, 1);
4306 /* Prepare operands of compare_const_di2 to be used with avr_out_compare. */
/* DImode comparisons always use the fixed register r18 (reg:DI 18);
   build that operand and delegate to avr_out_compare.  */
4309 avr_out_compare64 (rtx insn, rtx *op, int *plen)
4313 xop[0] = gen_rtx_REG (DImode, 18);
4317 return avr_out_compare (insn, xop, plen);
4320 /* Output test instruction for HImode. */
/* Sign-only test: TST the high byte suffices.  EQ/NE with a dead
   operand: OR low into high (shorter than SBIW).  Otherwise fall back
   to a full compare against zero via avr_out_compare.  */
4323 avr_out_tsthi (rtx insn, rtx *op, int *plen)
4325 if (compare_sign_p (insn))
4327 avr_asm_len ("tst %B0", op, plen, -1);
4329 else if (reg_unused_after (insn, op[0])
4330 && compare_eq_p (insn))
4332 /* Faster than sbiw if we can clobber the operand. */
4333 avr_asm_len ("or %A0,%B0", op, plen, -1);
4337 avr_out_compare (insn, op, plen);
4344 /* Output test instruction for PSImode. */
/* 24-bit analogue of avr_out_tsthi: TST %C0 for sign, OR all three
   bytes for a clobberable EQ/NE test, else full compare.  */
4347 avr_out_tstpsi (rtx insn, rtx *op, int *plen)
4349 if (compare_sign_p (insn))
4351 avr_asm_len ("tst %C0", op, plen, -1);
4353 else if (reg_unused_after (insn, op[0])
4354 && compare_eq_p (insn))
4356 /* Faster than sbiw if we can clobber the operand. */
4357 avr_asm_len ("or %A0,%B0" CR_TAB
4358 "or %A0,%C0", op, plen, -2);
4362 avr_out_compare (insn, op, plen);
4369 /* Output test instruction for SImode. */
/* 32-bit analogue of avr_out_tsthi: TST %D0 for sign, OR all four
   bytes for a clobberable EQ/NE test, else full compare.  */
4372 avr_out_tstsi (rtx insn, rtx *op, int *plen)
4374 if (compare_sign_p (insn))
4376 avr_asm_len ("tst %D0", op, plen, -1);
4378 else if (reg_unused_after (insn, op[0])
4379 && compare_eq_p (insn))
4381 /* Faster than sbiw if we can clobber the operand. */
4382 avr_asm_len ("or %A0,%B0" CR_TAB
4384 "or %A0,%D0", op, plen, -3);
4388 avr_out_compare (insn, op, plen);
4395 /* Generate asm equivalent for various shifts. This only handles cases
4396 that are not already carefully hand-optimized in ?sh??i3_out.
4398 OPERANDS[0] resp. %0 in TEMPL is the operand to be shifted.
4399 OPERANDS[2] is the shift count as CONST_INT, MEM or REG.
4400 OPERANDS[3] is a QImode scratch register from LD regs if
4401 available and SCRATCH, otherwise (no scratch available)
4403 TEMPL is an assembler template that shifts by one position.
4404 T_LEN is the length of this template. */
4407 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
4408 int *plen, int t_len)
4410 bool second_label = true;
4411 bool saved_in_tmp = false;
4412 bool use_zero_reg = false;
4415 op[0] = operands[0];
4416 op[1] = operands[1];
4417 op[2] = operands[2];
4418 op[3] = operands[3];
/* Constant shift count: emit inline repetitions when that is no longer
   than the loop setup, otherwise build a counted loop.  */
4423 if (CONST_INT_P (operands[2]))
4425 bool scratch = (GET_CODE (PATTERN (insn)) == PARALLEL
4426 && REG_P (operands[3]));
4427 int count = INTVAL (operands[2]);
4428 int max_len = 10; /* If larger than this, always use a loop. */
4433 if (count < 8 && !scratch)
4434 use_zero_reg = true;
4437 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
4439 if (t_len * count <= max_len)
4441 /* Output shifts inline with no loop - faster. */
4444 avr_asm_len (templ, op, plen, t_len);
4451 avr_asm_len ("ldi %3,%2", op, plen, 1);
4453 else if (use_zero_reg)
4455 /* Hack to save one word: use __zero_reg__ as loop counter.
4456 Set one bit, then shift in a loop until it is 0 again. */
4458 op[3] = zero_reg_rtx;
4460 avr_asm_len ("set" CR_TAB
4461 "bld %3,%2-1", op, plen, 2);
4465 /* No scratch register available, use one from LD_REGS (saved in
4466 __tmp_reg__) that doesn't overlap with registers to shift. */
4468 op[3] = all_regs_rtx[((REGNO (op[0]) - 1) & 15) + 16];
4469 op[4] = tmp_reg_rtx;
4470 saved_in_tmp = true;
4472 avr_asm_len ("mov %4,%3" CR_TAB
4473 "ldi %3,%2", op, plen, 2);
4476 second_label = false;
/* Shift count in memory: load it into __tmp_reg__ first.  */
4478 else if (MEM_P (op[2]))
4482 op_mov[0] = op[3] = tmp_reg_rtx;
4485 out_movqi_r_mr (insn, op_mov, plen);
/* Shift count in a register: copy it if it is still live afterwards or
   overlaps the shifted operand.  */
4487 else if (register_operand (op[2], QImode))
4491 if (!reg_unused_after (insn, op[2])
4492 || reg_overlap_mentioned_p (op[0], op[2]))
4494 op[3] = tmp_reg_rtx;
4495 avr_asm_len ("mov %3,%2", op, plen, 1);
4499 fatal_insn ("bad shift insn:", insn);
/* Loop skeleton: jump to the test first so a count of 0 shifts
   nothing.  */
4502 avr_asm_len ("rjmp 2f", op, plen, 1);
4504 avr_asm_len ("1:", op, plen, 0);
4505 avr_asm_len (templ, op, plen, t_len);
4508 avr_asm_len ("2:", op, plen, 0);
4510 avr_asm_len (use_zero_reg ? "lsr %3" : "dec %3", op, plen, 1);
4511 avr_asm_len (second_label ? "brpl 1b" : "brne 1b", op, plen, 1);
4514 avr_asm_len ("mov %3,%4", op, plen, 1);
4518 /* 8bit shift left ((char)x << i) */
/* Hand-optimized sequences per constant shift count; SWAP-based tricks
   for counts >= 4 on LD_REGS; falls back to out_shift_with_cnt with a
   single "lsl" template for non-constant counts.
   NOTE(review): excerpted listing -- the case labels and several lines
   of each sequence are not shown here.  */
4521 ashlqi3_out (rtx insn, rtx operands[], int *len)
4523 if (GET_CODE (operands[2]) == CONST_INT)
4530 switch (INTVAL (operands[2]))
4533 if (INTVAL (operands[2]) < 8)
4545 return ("lsl %0" CR_TAB
4550 return ("lsl %0" CR_TAB
4555 if (test_hard_reg_class (LD_REGS, operands[0]))
4558 return ("swap %0" CR_TAB
4562 return ("lsl %0" CR_TAB
4568 if (test_hard_reg_class (LD_REGS, operands[0]))
4571 return ("swap %0" CR_TAB
4576 return ("lsl %0" CR_TAB
4583 if (test_hard_reg_class (LD_REGS, operands[0]))
4586 return ("swap %0" CR_TAB
4592 return ("lsl %0" CR_TAB
4601 return ("ror %0" CR_TAB
4606 else if (CONSTANT_P (operands[2]))
4607 fatal_insn ("internal compiler error. Incorrect shift:", insn);
4609 out_shift_with_cnt ("lsl %0",
4610 insn, operands, len, 1);
4615 /* 16bit shift left ((short)x << i) */
/* Hand-optimized per-count sequences for HImode left shift, including
   SWAP/ANDI tricks, MUL-based shifts when AVR_HAVE_MUL, and
   size-vs-speed variants chosen by optimize_size, scratch availability
   and LD_REGS membership.  Non-constant counts go through
   out_shift_with_cnt with an "lsl/rol" template.
   NOTE(review): excerpted listing -- case labels and many lines of each
   sequence are not shown here.  */
4618 ashlhi3_out (rtx insn, rtx operands[], int *len)
4620 if (GET_CODE (operands[2]) == CONST_INT)
4622 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
4623 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
4630 switch (INTVAL (operands[2]))
4633 if (INTVAL (operands[2]) < 16)
4637 return ("clr %B0" CR_TAB
4641 if (optimize_size && scratch)
4646 return ("swap %A0" CR_TAB
4648 "andi %B0,0xf0" CR_TAB
4649 "eor %B0,%A0" CR_TAB
4650 "andi %A0,0xf0" CR_TAB
4656 return ("swap %A0" CR_TAB
4658 "ldi %3,0xf0" CR_TAB
4660 "eor %B0,%A0" CR_TAB
4664 break; /* optimize_size ? 6 : 8 */
4668 break; /* scratch ? 5 : 6 */
4672 return ("lsl %A0" CR_TAB
4676 "andi %B0,0xf0" CR_TAB
4677 "eor %B0,%A0" CR_TAB
4678 "andi %A0,0xf0" CR_TAB
4684 return ("lsl %A0" CR_TAB
4688 "ldi %3,0xf0" CR_TAB
4690 "eor %B0,%A0" CR_TAB
4698 break; /* scratch ? 5 : 6 */
4700 return ("clr __tmp_reg__" CR_TAB
4703 "ror __tmp_reg__" CR_TAB
4706 "ror __tmp_reg__" CR_TAB
4707 "mov %B0,%A0" CR_TAB
4708 "mov %A0,__tmp_reg__");
4712 return ("lsr %B0" CR_TAB
4713 "mov %B0,%A0" CR_TAB
/* Shift by 8: move low byte to high, clear low.  */
4719 return *len = 2, ("mov %B0,%A1" CR_TAB
4724 return ("mov %B0,%A0" CR_TAB
4730 return ("mov %B0,%A0" CR_TAB
4737 return ("mov %B0,%A0" CR_TAB
4747 return ("mov %B0,%A0" CR_TAB
4755 return ("mov %B0,%A0" CR_TAB
4758 "ldi %3,0xf0" CR_TAB
4762 return ("mov %B0,%A0" CR_TAB
4773 return ("mov %B0,%A0" CR_TAB
4779 if (AVR_HAVE_MUL && scratch)
4782 return ("ldi %3,0x20" CR_TAB
4786 "clr __zero_reg__");
4788 if (optimize_size && scratch)
4793 return ("mov %B0,%A0" CR_TAB
4797 "ldi %3,0xe0" CR_TAB
4803 return ("set" CR_TAB
4808 "clr __zero_reg__");
4811 return ("mov %B0,%A0" CR_TAB
4820 if (AVR_HAVE_MUL && ldi_ok)
4823 return ("ldi %B0,0x40" CR_TAB
4824 "mul %A0,%B0" CR_TAB
4827 "clr __zero_reg__");
4829 if (AVR_HAVE_MUL && scratch)
4832 return ("ldi %3,0x40" CR_TAB
4836 "clr __zero_reg__");
4838 if (optimize_size && ldi_ok)
4841 return ("mov %B0,%A0" CR_TAB
4842 "ldi %A0,6" "\n1:\t"
4847 if (optimize_size && scratch)
4850 return ("clr %B0" CR_TAB
4859 return ("clr %B0" CR_TAB
4866 out_shift_with_cnt ("lsl %A0" CR_TAB
4867 "rol %B0", insn, operands, len, 2);
4872 /* 24-bit shift left */
/* Output AVR assembler for a 24-bit (PSImode) left shift.
   OP are the insn operands; PLEN as for avr_asm_len.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
4875 avr_out_ashlpsi3 (rtx insn, rtx *op, int *plen)
4880 if (CONST_INT_P (op[2]))
4882 switch (INTVAL (op[2]))
4885 if (INTVAL (op[2]) < 24)
4888 return avr_asm_len ("clr %A0" CR_TAB
4890 "clr %C0", op, plen, 3);
/* Shift by 8: whole-byte moves, direction depends on register overlap.  */
4894 int reg0 = REGNO (op[0]);
4895 int reg1 = REGNO (op[1]);
4898 return avr_asm_len ("mov %C0,%B1" CR_TAB
4899 "mov %B0,%A1" CR_TAB
4900 "clr %A0", op, plen, 3);
4902 return avr_asm_len ("clr %A0" CR_TAB
4903 "mov %B0,%A1" CR_TAB
4904 "mov %C0,%B1", op, plen, 3);
4909 int reg0 = REGNO (op[0]);
4910 int reg1 = REGNO (op[1]);
4912 if (reg0 + 2 != reg1)
4913 avr_asm_len ("mov %C0,%A0", op, plen, 1);
4915 return avr_asm_len ("clr %B0" CR_TAB
4916 "clr %A0", op, plen, 2);
4920 return avr_asm_len ("clr %C0" CR_TAB
4924 "clr %A0", op, plen, 5);
/* Fallback: generic 3-byte shift loop.  */
4928 out_shift_with_cnt ("lsl %A0" CR_TAB
4930 "rol %C0", insn, op, plen, 3);
4935 /* 32bit shift left ((long)x << i) */
/* Output AVR assembler for a 32-bit (SImode) left shift %0 <<= %2.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
4938 ashlsi3_out (rtx insn, rtx operands[], int *len)
4940 if (GET_CODE (operands[2]) == CONST_INT)
4948 switch (INTVAL (operands[2]))
4951 if (INTVAL (operands[2]) < 32)
4955 return *len = 3, ("clr %D0" CR_TAB
4959 return ("clr %D0" CR_TAB
/* Shift by 8: byte moves, order chosen by true register numbers.  */
4966 int reg0 = true_regnum (operands[0]);
4967 int reg1 = true_regnum (operands[1]);
4970 return ("mov %D0,%C1" CR_TAB
4971 "mov %C0,%B1" CR_TAB
4972 "mov %B0,%A1" CR_TAB
4975 return ("clr %A0" CR_TAB
4976 "mov %B0,%A1" CR_TAB
4977 "mov %C0,%B1" CR_TAB
4983 int reg0 = true_regnum (operands[0]);
4984 int reg1 = true_regnum (operands[1]);
4985 if (reg0 + 2 == reg1)
4986 return *len = 2, ("clr %B0" CR_TAB
/* MOVW (if available) moves a register pair in one insn.  */
4989 return *len = 3, ("movw %C0,%A1" CR_TAB
4993 return *len = 4, ("mov %C0,%A1" CR_TAB
4994 "mov %D0,%B1" CR_TAB
5001 return ("mov %D0,%A1" CR_TAB
5008 return ("clr %D0" CR_TAB
5017 out_shift_with_cnt ("lsl %A0" CR_TAB
5020 "rol %D0", insn, operands, len, 4);
5024 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Output AVR assembler for an 8-bit arithmetic right shift
   (signed char)%0 >> %2.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
5027 ashrqi3_out (rtx insn, rtx operands[], int *len)
5029 if (GET_CODE (operands[2]) == CONST_INT)
5036 switch (INTVAL (operands[2]))
5044 return ("asr %0" CR_TAB
5049 return ("asr %0" CR_TAB
5055 return ("asr %0" CR_TAB
5062 return ("asr %0" CR_TAB
/* Shift by 6: copy bit 6 via the T flag rather than six ASRs.  */
5070 return ("bst %0,6" CR_TAB
5076 if (INTVAL (operands[2]) < 8)
5083 return ("lsl %0" CR_TAB
5087 else if (CONSTANT_P (operands[2]))
5088 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5090 out_shift_with_cnt ("asr %0",
5091 insn, operands, len, 1);
5096 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Output AVR assembler for a 16-bit arithmetic right shift
   (signed short)%0 >> %2.  SCRATCH/LDI_OK as in ashlhi3_out.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
5099 ashrhi3_out (rtx insn, rtx operands[], int *len)
5101 if (GET_CODE (operands[2]) == CONST_INT)
5103 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5104 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5111 switch (INTVAL (operands[2]))
5115 /* XXX try to optimize this too? */
5120 break; /* scratch ? 5 : 6 */
5122 return ("mov __tmp_reg__,%A0" CR_TAB
5123 "mov %A0,%B0" CR_TAB
5124 "lsl __tmp_reg__" CR_TAB
/* SBC of a register with itself replicates the carry (sign) bit.  */
5126 "sbc %B0,%B0" CR_TAB
5127 "lsl __tmp_reg__" CR_TAB
5133 return ("lsl %A0" CR_TAB
5134 "mov %A0,%B0" CR_TAB
5140 int reg0 = true_regnum (operands[0]);
5141 int reg1 = true_regnum (operands[1]);
5144 return *len = 3, ("mov %A0,%B0" CR_TAB
5148 return *len = 4, ("mov %A0,%B1" CR_TAB
5156 return ("mov %A0,%B0" CR_TAB
5158 "sbc %B0,%B0" CR_TAB
5163 return ("mov %A0,%B0" CR_TAB
5165 "sbc %B0,%B0" CR_TAB
/* MULS by a power of two implements the shift when a multiplier exists.  */
5170 if (AVR_HAVE_MUL && ldi_ok)
5173 return ("ldi %A0,0x20" CR_TAB
5174 "muls %B0,%A0" CR_TAB
5176 "sbc %B0,%B0" CR_TAB
5177 "clr __zero_reg__");
5179 if (optimize_size && scratch)
5182 return ("mov %A0,%B0" CR_TAB
5184 "sbc %B0,%B0" CR_TAB
5190 if (AVR_HAVE_MUL && ldi_ok)
5193 return ("ldi %A0,0x10" CR_TAB
5194 "muls %B0,%A0" CR_TAB
5196 "sbc %B0,%B0" CR_TAB
5197 "clr __zero_reg__");
5199 if (optimize_size && scratch)
5202 return ("mov %A0,%B0" CR_TAB
5204 "sbc %B0,%B0" CR_TAB
5211 if (AVR_HAVE_MUL && ldi_ok)
5214 return ("ldi %A0,0x08" CR_TAB
5215 "muls %B0,%A0" CR_TAB
5217 "sbc %B0,%B0" CR_TAB
5218 "clr __zero_reg__");
5221 break; /* scratch ? 5 : 7 */
5223 return ("mov %A0,%B0" CR_TAB
5225 "sbc %B0,%B0" CR_TAB
5234 return ("lsl %B0" CR_TAB
5235 "sbc %A0,%A0" CR_TAB
5237 "mov %B0,%A0" CR_TAB
5241 if (INTVAL (operands[2]) < 16)
/* Shift by 15: result is all sign bits.  */
5247 return *len = 3, ("lsl %B0" CR_TAB
5248 "sbc %A0,%A0" CR_TAB
5253 out_shift_with_cnt ("asr %B0" CR_TAB
5254 "ror %A0", insn, operands, len, 2);
5259 /* 24-bit arithmetic shift right */
/* Output AVR assembler for a 24-bit (PSImode) arithmetic right shift.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
5262 avr_out_ashrpsi3 (rtx insn, rtx *op, int *plen)
5264 int dest = REGNO (op[0]);
5265 int src = REGNO (op[1]);
5267 if (CONST_INT_P (op[2]))
5272 switch (INTVAL (op[2]))
5276 return avr_asm_len ("mov %A0,%B1" CR_TAB
5277 "mov %B0,%C1" CR_TAB
5280 "dec %C0", op, plen, 5);
5282 return avr_asm_len ("clr %C0" CR_TAB
5285 "mov %B0,%C1" CR_TAB
5286 "mov %A0,%B1", op, plen, 5);
/* Shift by 16: only the top byte survives; move it if not in place.  */
5289 if (dest != src + 2)
5290 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5292 return avr_asm_len ("clr %B0" CR_TAB
5295 "mov %C0,%B0", op, plen, 4);
5298 if (INTVAL (op[2]) < 24)
/* Shift by 23: spread the sign bit over all three bytes.  */
5304 return avr_asm_len ("lsl %C0" CR_TAB
5305 "sbc %A0,%A0" CR_TAB
5306 "mov %B0,%A0" CR_TAB
5307 "mov %C0,%A0", op, plen, 4);
5311 out_shift_with_cnt ("asr %C0" CR_TAB
5313 "ror %A0", insn, op, plen, 3);
5318 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Output AVR assembler for a 32-bit arithmetic right shift
   (signed long)%0 >> %2.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
5321 ashrsi3_out (rtx insn, rtx operands[], int *len)
5323 if (GET_CODE (operands[2]) == CONST_INT)
5331 switch (INTVAL (operands[2]))
5335 int reg0 = true_regnum (operands[0]);
5336 int reg1 = true_regnum (operands[1]);
5339 return ("mov %A0,%B1" CR_TAB
5340 "mov %B0,%C1" CR_TAB
5341 "mov %C0,%D1" CR_TAB
5346 return ("clr %D0" CR_TAB
5349 "mov %C0,%D1" CR_TAB
5350 "mov %B0,%C1" CR_TAB
5356 int reg0 = true_regnum (operands[0]);
5357 int reg1 = true_regnum (operands[1]);
5359 if (reg0 == reg1 + 2)
5360 return *len = 4, ("clr %D0" CR_TAB
5365 return *len = 5, ("movw %A0,%C1" CR_TAB
5371 return *len = 6, ("mov %B0,%D1" CR_TAB
5372 "mov %A0,%C1" CR_TAB
5380 return *len = 6, ("mov %A0,%D1" CR_TAB
5384 "mov %B0,%D0" CR_TAB
5388 if (INTVAL (operands[2]) < 32)
/* Shift by 30/31: all result bytes become copies of the sign.  */
5395 return *len = 4, ("lsl %D0" CR_TAB
5396 "sbc %A0,%A0" CR_TAB
5397 "mov %B0,%A0" CR_TAB
5400 return *len = 5, ("lsl %D0" CR_TAB
5401 "sbc %A0,%A0" CR_TAB
5402 "mov %B0,%A0" CR_TAB
5403 "mov %C0,%A0" CR_TAB
5408 out_shift_with_cnt ("asr %D0" CR_TAB
5411 "ror %A0", insn, operands, len, 4);
5415 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Output AVR assembler for an 8-bit logical right shift
   (unsigned char)%0 >> %2.  Mirrors ashlqi3_out with LSR/SWAP.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
5418 lshrqi3_out (rtx insn, rtx operands[], int *len)
5420 if (GET_CODE (operands[2]) == CONST_INT)
5427 switch (INTVAL (operands[2]))
5430 if (INTVAL (operands[2]) < 8)
5442 return ("lsr %0" CR_TAB
5446 return ("lsr %0" CR_TAB
5451 if (test_hard_reg_class (LD_REGS, operands[0]))
5454 return ("swap %0" CR_TAB
5458 return ("lsr %0" CR_TAB
5464 if (test_hard_reg_class (LD_REGS, operands[0]))
5467 return ("swap %0" CR_TAB
5472 return ("lsr %0" CR_TAB
5479 if (test_hard_reg_class (LD_REGS, operands[0]))
5482 return ("swap %0" CR_TAB
5488 return ("lsr %0" CR_TAB
5497 return ("rol %0" CR_TAB
5502 else if (CONSTANT_P (operands[2]))
5503 fatal_insn ("internal compiler error. Incorrect shift:", insn);
5505 out_shift_with_cnt ("lsr %0",
5506 insn, operands, len, 1);
5510 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Output AVR assembler for a 16-bit logical right shift
   (unsigned short)%0 >> %2.  Mirror image of ashlhi3_out.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
5513 lshrhi3_out (rtx insn, rtx operands[], int *len)
5515 if (GET_CODE (operands[2]) == CONST_INT)
5517 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
5518 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
5525 switch (INTVAL (operands[2]))
5528 if (INTVAL (operands[2]) < 16)
5532 return ("clr %B0" CR_TAB
5536 if (optimize_size && scratch)
/* Shift by 4: SWAP both bytes and merge nibbles with 0x0f masks.  */
5541 return ("swap %B0" CR_TAB
5543 "andi %A0,0x0f" CR_TAB
5544 "eor %A0,%B0" CR_TAB
5545 "andi %B0,0x0f" CR_TAB
5551 return ("swap %B0" CR_TAB
5553 "ldi %3,0x0f" CR_TAB
5555 "eor %A0,%B0" CR_TAB
5559 break; /* optimize_size ? 6 : 8 */
5563 break; /* scratch ? 5 : 6 */
5567 return ("lsr %B0" CR_TAB
5571 "andi %A0,0x0f" CR_TAB
5572 "eor %A0,%B0" CR_TAB
5573 "andi %B0,0x0f" CR_TAB
5579 return ("lsr %B0" CR_TAB
5583 "ldi %3,0x0f" CR_TAB
5585 "eor %A0,%B0" CR_TAB
5593 break; /* scratch ? 5 : 6 */
5595 return ("clr __tmp_reg__" CR_TAB
5598 "rol __tmp_reg__" CR_TAB
5601 "rol __tmp_reg__" CR_TAB
5602 "mov %A0,%B0" CR_TAB
5603 "mov %B0,__tmp_reg__");
5607 return ("lsl %A0" CR_TAB
5608 "mov %A0,%B0" CR_TAB
5610 "sbc %B0,%B0" CR_TAB
/* Shift by 8: byte move plus clearing the high byte.  */
5614 return *len = 2, ("mov %A0,%B1" CR_TAB
5619 return ("mov %A0,%B0" CR_TAB
5625 return ("mov %A0,%B0" CR_TAB
5632 return ("mov %A0,%B0" CR_TAB
5642 return ("mov %A0,%B0" CR_TAB
5650 return ("mov %A0,%B0" CR_TAB
5653 "ldi %3,0x0f" CR_TAB
5657 return ("mov %A0,%B0" CR_TAB
5668 return ("mov %A0,%B0" CR_TAB
/* Large counts via hardware multiply when available.  */
5674 if (AVR_HAVE_MUL && scratch)
5677 return ("ldi %3,0x08" CR_TAB
5681 "clr __zero_reg__");
5683 if (optimize_size && scratch)
5688 return ("mov %A0,%B0" CR_TAB
5692 "ldi %3,0x07" CR_TAB
5698 return ("set" CR_TAB
5703 "clr __zero_reg__");
5706 return ("mov %A0,%B0" CR_TAB
5715 if (AVR_HAVE_MUL && ldi_ok)
5718 return ("ldi %A0,0x04" CR_TAB
5719 "mul %B0,%A0" CR_TAB
5722 "clr __zero_reg__");
5724 if (AVR_HAVE_MUL && scratch)
5727 return ("ldi %3,0x04" CR_TAB
5731 "clr __zero_reg__");
5733 if (optimize_size && ldi_ok)
5736 return ("mov %A0,%B0" CR_TAB
5737 "ldi %B0,6" "\n1:\t"
5742 if (optimize_size && scratch)
5745 return ("clr %A0" CR_TAB
5754 return ("clr %A0" CR_TAB
5761 out_shift_with_cnt ("lsr %B0" CR_TAB
5762 "ror %A0", insn, operands, len, 2);
5767 /* 24-bit logic shift right */
/* Output AVR assembler for a 24-bit (PSImode) logical right shift.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
5770 avr_out_lshrpsi3 (rtx insn, rtx *op, int *plen)
5772 int dest = REGNO (op[0]);
5773 int src = REGNO (op[1]);
5775 if (CONST_INT_P (op[2]))
5780 switch (INTVAL (op[2]))
5784 return avr_asm_len ("mov %A0,%B1" CR_TAB
5785 "mov %B0,%C1" CR_TAB
5786 "clr %C0", op, plen, 3);
5788 return avr_asm_len ("clr %C0" CR_TAB
5789 "mov %B0,%C1" CR_TAB
5790 "mov %A0,%B1", op, plen, 3);
/* Shift by 16: only the top byte survives; move it if not in place.  */
5793 if (dest != src + 2)
5794 avr_asm_len ("mov %A0,%C1", op, plen, 1);
5796 return avr_asm_len ("clr %B0" CR_TAB
5797 "clr %C0", op, plen, 2);
5800 if (INTVAL (op[2]) < 24)
5806 return avr_asm_len ("clr %A0" CR_TAB
5810 "clr %C0", op, plen, 5);
5814 out_shift_with_cnt ("lsr %C0" CR_TAB
5816 "ror %A0", insn, op, plen, 3);
5821 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Output AVR assembler for a 32-bit logical right shift
   (unsigned int)%0 >> %2.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
5824 lshrsi3_out (rtx insn, rtx operands[], int *len)
5826 if (GET_CODE (operands[2]) == CONST_INT)
5834 switch (INTVAL (operands[2]))
5837 if (INTVAL (operands[2]) < 32)
5841 return *len = 3, ("clr %D0" CR_TAB
5845 return ("clr %D0" CR_TAB
5852 int reg0 = true_regnum (operands[0]);
5853 int reg1 = true_regnum (operands[1]);
5856 return ("mov %A0,%B1" CR_TAB
5857 "mov %B0,%C1" CR_TAB
5858 "mov %C0,%D1" CR_TAB
5861 return ("clr %D0" CR_TAB
5862 "mov %C0,%D1" CR_TAB
5863 "mov %B0,%C1" CR_TAB
5869 int reg0 = true_regnum (operands[0]);
5870 int reg1 = true_regnum (operands[1]);
5872 if (reg0 == reg1 + 2)
5873 return *len = 2, ("clr %C0" CR_TAB
/* MOVW moves the word pair at once where available.  */
5876 return *len = 3, ("movw %A0,%C1" CR_TAB
5880 return *len = 4, ("mov %B0,%D1" CR_TAB
5881 "mov %A0,%C1" CR_TAB
5887 return *len = 4, ("mov %A0,%D1" CR_TAB
5894 return ("clr %A0" CR_TAB
5903 out_shift_with_cnt ("lsr %D0" CR_TAB
5906 "ror %A0", insn, operands, len, 4);
5911 /* Output addition of register XOP[0] and compile time constant XOP[2]:
5913 XOP[0] = XOP[0] + XOP[2]
5915 and return "". If PLEN == NULL, print assembler instructions to perform the
5916 addition; otherwise, set *PLEN to the length of the instruction sequence (in
5917 words) printed with PLEN == NULL. XOP[3] is an 8-bit scratch register.
5918 CODE == PLUS: perform addition by using ADD instructions.
5919 CODE == MINUS: perform addition by using SUB instructions.
5920 Set *PCC to effect on cc0 according to respective CC_* insn attribute. */
/* Worker for avr_out_plus: emit XOP[0] += XOP[2] byte-by-byte using
   either ADD/ADC (CODE == PLUS) or SUB/SBC (CODE == MINUS), choosing
   ADIW/SBIW, INC/DEC or immediate forms where the register class
   allows.  *PCC receives the resulting cc0 effect.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
5923 avr_out_plus_1 (rtx *xop, int *plen, enum rtx_code code, int *pcc)
5925 /* MODE of the operation. */
5926 enum machine_mode mode = GET_MODE (xop[0]);
5928 /* Number of bytes to operate on. */
5929 int i, n_bytes = GET_MODE_SIZE (mode);
5931 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
5932 int clobber_val = -1;
5934 /* op[0]: 8-bit destination register
5935 op[1]: 8-bit const int
5936 op[2]: 8-bit scratch register */
5939 /* Started the operation? Before starting the operation we may skip
5940 adding 0. This is no more true after the operation started because
5941 carry must be taken into account. */
5942 bool started = false;
5944 /* Value to add. There are two ways to add VAL: R += VAL and R -= -VAL. */
5947 /* Except in the case of ADIW with 16-bit register (see below)
5948 addition does not set cc0 in a usable way. */
5950 *pcc = (MINUS == code) ? CC_SET_CZN : CC_CLOBBER;
/* For MINUS, negate the constant so we can emit SUB/SBC of -VAL.  */
5953 xval = simplify_unary_operation (NEG, mode, xval, mode);
5960 for (i = 0; i < n_bytes; i++)
5962 /* We operate byte-wise on the destination. */
5963 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
5964 rtx xval8 = simplify_gen_subreg (QImode, xval, mode, i);
5966 /* 8-bit value to operate with this byte. */
5967 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
5969 /* Registers R16..R31 can operate with immediate. */
5970 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
5973 op[1] = gen_int_mode (val8, QImode);
5975 /* To get usable cc0 no low-bytes must have been skipped. */
5983 && test_hard_reg_class (ADDW_REGS, reg8))
5985 rtx xval16 = simplify_gen_subreg (HImode, xval, mode, i);
5986 unsigned int val16 = UINTVAL (xval16) & GET_MODE_MASK (HImode);
5988 /* Registers R24, X, Y, Z can use ADIW/SBIW with constants < 64
5989 i.e. operate word-wise. */
5996 avr_asm_len (code == PLUS ? "adiw %0,%1" : "sbiw %0,%1",
5999 if (n_bytes == 2 && PLUS == code)
/* Byte of the constant is zero: only propagate the carry.  */
6011 avr_asm_len (code == PLUS
6012 ? "adc %0,__zero_reg__" : "sbc %0,__zero_reg__",
6016 else if ((val8 == 1 || val8 == 0xff)
6018 && i == n_bytes - 1)
6020 avr_asm_len ((code == PLUS) ^ (val8 == 1) ? "dec %0" : "inc %0",
/* Non-LD register: the constant must be loaded into the scratch.  */
6029 gcc_assert (plen != NULL || REG_P (op[2]));
6031 if (clobber_val != (int) val8)
6032 avr_asm_len ("ldi %2,%1", op, plen, 1);
6033 clobber_val = (int) val8;
6035 avr_asm_len (started ? "adc %0,%2" : "add %0,%2", op, plen, 1);
6042 avr_asm_len (started ? "sbci %0,%1" : "subi %0,%1", op, plen, 1);
6045 gcc_assert (plen != NULL || REG_P (op[2]));
6047 if (clobber_val != (int) val8)
6048 avr_asm_len ("ldi %2,%1", op, plen, 1);
6049 clobber_val = (int) val8;
6051 avr_asm_len (started ? "sbc %0,%2" : "sub %0,%2", op, plen, 1);
6063 } /* for all sub-bytes */
6065 /* No output doesn't change cc0. */
6067 if (plen && *plen == 0)
6072 /* Output addition of register XOP[0] and compile time constant XOP[2]:
6074 XOP[0] = XOP[0] + XOP[2]
6076 and return "". If PLEN == NULL, print assembler instructions to perform the
6077 addition; otherwise, set *PLEN to the length of the instruction sequence (in
6078 words) printed with PLEN == NULL.
6079 If PCC != 0 then set *PCC to the instruction sequence's effect on the
6080 condition code (with respect to XOP[0]). */
/* Emit XOP[0] += XOP[2], trying both ADD and SUB-of-negated forms and
   keeping whichever sequence is shorter (MINUS wins ties because it
   sets cc0 usefully).
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
6083 avr_out_plus (rtx *xop, int *plen, int *pcc)
6085 int len_plus, len_minus;
6086 int cc_plus, cc_minus, cc_dummy;
6091 /* Work out if XOP[0] += XOP[2] is better or XOP[0] -= -XOP[2]. */
6093 avr_out_plus_1 (xop, &len_plus, PLUS, &cc_plus);
6094 avr_out_plus_1 (xop, &len_minus, MINUS, &cc_minus);
6096 /* Prefer MINUS over PLUS if size is equal because it sets cc0. */
6100 *plen = (len_minus <= len_plus) ? len_minus : len_plus;
6101 *pcc = (len_minus <= len_plus) ? cc_minus : cc_plus;
6103 else if (len_minus <= len_plus)
6104 avr_out_plus_1 (xop, NULL, MINUS, pcc);
6106 avr_out_plus_1 (xop, NULL, PLUS, pcc);
6112 /* Same as above but XOP has just 3 entries.
6113 Supply a dummy 4th operand. */
/* Like avr_out_plus but for a 3-operand XOP; a dummy 4th (scratch)
   operand is supplied before delegating.
   NOTE(review): elided excerpt -- the lines building OP[] from XOP
   are missing here; code kept byte-identical.  */
6116 avr_out_plus_noclobber (rtx *xop, int *plen, int *pcc)
6125 return avr_out_plus (op, plen, pcc);
6129 /* Prepare operands of adddi3_const_insn to be used with avr_out_plus_1. */
/* Emit a 64-bit addition of constant ADDEND to the register file
   starting at R18 (DImode), reusing avr_out_plus_1 with MINUS.
   NOTE(review): elided excerpt -- operand setup lines are missing;
   code kept byte-identical.  */
6132 avr_out_plus64 (rtx addend, int *plen)
6137 op[0] = gen_rtx_REG (DImode, 18);
6142 avr_out_plus_1 (op, plen, MINUS, &cc_dummy);
6147 /* Output bit operation (IOR, AND, XOR) with register XOP[0] and compile
6148 time constant XOP[2]:
6150 XOP[0] = XOP[0] <op> XOP[2]
6152 and return "". If PLEN == NULL, print assembler instructions to perform the
6153 operation; otherwise, set *PLEN to the length of the instruction sequence
6154 (in words) printed with PLEN == NULL. XOP[3] is either an 8-bit clobber
6155 register or SCRATCH if no clobber register is needed for the operation. */
/* Emit a byte-wise IOR/AND/XOR of register XOP[0] with constant
   XOP[2], using immediates for LD_REGS, T-flag bit ops for single-bit
   masks, and a clobber register otherwise.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
6158 avr_out_bitop (rtx insn, rtx *xop, int *plen)
6160 /* CODE and MODE of the operation. */
6161 enum rtx_code code = GET_CODE (SET_SRC (single_set (insn)));
6162 enum machine_mode mode = GET_MODE (xop[0]);
6164 /* Number of bytes to operate on. */
6165 int i, n_bytes = GET_MODE_SIZE (mode);
6167 /* Value of T-flag (0 or 1) or -1 if unknown. */
6170 /* Value (0..0xff) held in clobber register op[3] or -1 if unknown. */
6171 int clobber_val = -1;
6173 /* op[0]: 8-bit destination register
6174 op[1]: 8-bit const int
6175 op[2]: 8-bit clobber register or SCRATCH
6176 op[3]: 8-bit register containing 0xff or NULL_RTX */
6185 for (i = 0; i < n_bytes; i++)
6187 /* We operate byte-wise on the destination. */
6188 rtx reg8 = simplify_gen_subreg (QImode, xop[0], mode, i);
6189 rtx xval8 = simplify_gen_subreg (QImode, xop[2], mode, i);
6191 /* 8-bit value to operate with this byte. */
6192 unsigned int val8 = UINTVAL (xval8) & GET_MODE_MASK (QImode);
6194 /* Number of bits set in the current byte of the constant. */
6195 int pop8 = avr_popcount (val8);
6197 /* Registers R16..R31 can operate with immediate. */
6198 bool ld_reg_p = test_hard_reg_class (LD_REGS, reg8);
6201 op[1] = GEN_INT (val8);
/* IOR cases.  */
6210 avr_asm_len ("ori %0,%1", op, plen, 1);
/* Single-bit IOR: set T and copy it into the target bit.  */
6214 avr_asm_len ("set", op, plen, 1);
6217 op[1] = GEN_INT (exact_log2 (val8));
6218 avr_asm_len ("bld %0,%1", op, plen, 1);
/* IOR with 0xff: load all-ones, via op[3] if available.  */
6222 if (op[3] != NULL_RTX)
6223 avr_asm_len ("mov %0,%3", op, plen, 1);
6225 avr_asm_len ("clr %0" CR_TAB
6226 "dec %0", op, plen, 2);
6232 if (clobber_val != (int) val8)
6233 avr_asm_len ("ldi %2,%1", op, plen, 1);
6234 clobber_val = (int) val8;
6236 avr_asm_len ("or %0,%2", op, plen, 1);
/* AND cases.  */
6246 avr_asm_len ("clr %0", op, plen, 1);
6248 avr_asm_len ("andi %0,%1", op, plen, 1);
/* Single-zero-bit AND: clear T and copy it into the target bit.  */
6252 avr_asm_len ("clt", op, plen, 1);
6255 op[1] = GEN_INT (exact_log2 (GET_MODE_MASK (QImode) & ~val8));
6256 avr_asm_len ("bld %0,%1", op, plen, 1);
6260 if (clobber_val != (int) val8)
6261 avr_asm_len ("ldi %2,%1", op, plen, 1);
6262 clobber_val = (int) val8;
6264 avr_asm_len ("and %0,%2", op, plen, 1);
/* XOR cases: COM for 0xff, SUBI trick for 0x80 on LD_REGS.  */
6274 avr_asm_len ("com %0", op, plen, 1);
6275 else if (ld_reg_p && val8 == (1 << 7))
6276 avr_asm_len ("subi %0,%1", op, plen, 1);
6279 if (clobber_val != (int) val8)
6280 avr_asm_len ("ldi %2,%1", op, plen, 1);
6281 clobber_val = (int) val8;
6283 avr_asm_len ("eor %0,%2", op, plen, 1);
6289 /* Unknown rtx_code */
6292 } /* for all sub-bytes */
6298 /* PLEN == NULL: Output code to add CONST_INT OP[0] to SP.
6299 PLEN != NULL: Set *PLEN to the length of that sequence.
/* Adjust the stack pointer by constant OP[0]: shrink it with RCALL .
   (pushes a return address of PC_LEN bytes) and PUSHes, grow it with
   POPs.  PLEN semantics as described in the preceding comment.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
6303 avr_out_addto_sp (rtx *op, int *plen)
6305 int pc_len = AVR_2_BYTE_PC ? 2 : 3;
6306 int addend = INTVAL (op[0]);
6313 if (flag_verbose_asm || flag_print_asm_name)
6314 avr_asm_len (ASM_COMMENT_START "SP -= %n0", op, plen, 0);
/* "rcall ." pushes the PC, lowering SP by pc_len bytes per insn.  */
6316 while (addend <= -pc_len)
6319 avr_asm_len ("rcall .", op, plen, 1);
6322 while (addend++ < 0)
6323 avr_asm_len ("push __zero_reg__", op, plen, 1);
6325 else if (addend > 0)
6327 if (flag_verbose_asm || flag_print_asm_name)
6328 avr_asm_len (ASM_COMMENT_START "SP += %0", op, plen, 0);
6330 while (addend-- > 0)
6331 avr_asm_len ("pop __tmp_reg__", op, plen, 1);
6338 /* Create RTL split patterns for byte sized rotate expressions. This
6339 produces a series of move instructions and considers overlap situations.
6340 Overlapping non-HImode operands need a scratch register. */
/* Split a byte-granular rotate into a sequence of move insns,
   resolving cyclic dependencies between the moves with a scratch
   register when needed (see preceding comment block).
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
6343 avr_rotate_bytes (rtx operands[])
6346 enum machine_mode mode = GET_MODE (operands[0]);
6347 bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
6348 bool same_reg = rtx_equal_p (operands[0], operands[1]);
6349 int num = INTVAL (operands[2]);
6350 rtx scratch = operands[3];
6351 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
6352 Word move if no scratch is needed, otherwise use size of scratch. */
6353 enum machine_mode move_mode = QImode;
6354 int move_size, offset, size;
6358 else if ((mode == SImode && !same_reg) || !overlapped)
6361 move_mode = GET_MODE (scratch);
6363 /* Force DI rotate to use QI moves since other DI moves are currently split
6364 into QI moves so forward propagation works better. */
6367 /* Make scratch smaller if needed. */
6368 if (SCRATCH != GET_CODE (scratch)
6369 && HImode == GET_MODE (scratch)
6370 && QImode == move_mode)
6371 scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);
6373 move_size = GET_MODE_SIZE (move_mode);
6374 /* Number of bytes/words to rotate. */
6375 offset = (num >> 3) / move_size;
6376 /* Number of moves needed. */
6377 size = GET_MODE_SIZE (mode) / move_size;
6378 /* HImode byte swap is special case to avoid a scratch register. */
6379 if (mode == HImode && same_reg)
6381 /* HImode byte swap, using xor. This is as quick as using scratch. */
6383 src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
6384 dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
6385 if (!rtx_equal_p (dst, src))
/* Classic three-XOR swap of the two bytes.  */
6387 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6388 emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
6389 emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
6394 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
6395 /* Create linked list of moves to determine move order. */
6399 } move[MAX_SIZE + 8];
6402 gcc_assert (size <= MAX_SIZE);
6403 /* Generate list of subreg moves. */
6404 for (i = 0; i < size; i++)
6407 int to = (from + offset) % size;
6408 move[i].src = simplify_gen_subreg (move_mode, operands[1],
6409 mode, from * move_size);
6410 move[i].dst = simplify_gen_subreg (move_mode, operands[0],
6411 mode, to * move_size);
6414 /* Mark dependence where a dst of one move is the src of another move.
6415 The first move is a conflict as it must wait until second is
6416 performed. We ignore moves to self - we catch this later. */
6418 for (i = 0; i < size; i++)
6419 if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
6420 for (j = 0; j < size; j++)
6421 if (j != i && rtx_equal_p (move[j].src, move[i].dst))
6423 /* The dst of move i is the src of move j. */
6430 /* Go through move list and perform non-conflicting moves. As each
6431 non-overlapping move is made, it may remove other conflicts
6432 so the process is repeated until no conflicts remain. */
6437 /* Emit move where dst is not also a src or we have used that
6439 for (i = 0; i < size; i++)
6440 if (move[i].src != NULL_RTX)
6442 if (move[i].links == -1
6443 || move[move[i].links].src == NULL_RTX)
6446 /* Ignore NOP moves to self. */
6447 if (!rtx_equal_p (move[i].dst, move[i].src))
6448 emit_move_insn (move[i].dst, move[i].src);
6450 /* Remove conflict from list. */
6451 move[i].src = NULL_RTX;
6457 /* Check for deadlock. This is when no moves occurred and we have
6458 at least one blocked move. */
6459 if (moves == 0 && blocked != -1)
6461 /* Need to use scratch register to break deadlock.
6462 Add move to put dst of blocked move into scratch.
6463 When this move occurs, it will break chain deadlock.
6464 The scratch register is substituted for real move. */
6466 gcc_assert (SCRATCH != GET_CODE (scratch));
6468 move[size].src = move[blocked].dst;
6469 move[size].dst = scratch;
6470 /* Scratch move is never blocked. */
6471 move[size].links = -1;
6472 /* Make sure we have valid link. */
6473 gcc_assert (move[blocked].links != -1);
6474 /* Replace src of blocking move with scratch reg. */
6475 move[move[blocked].links].src = scratch;
6476 /* Make dependent on scratch move occurring. */
6477 move[blocked].links = size;
6481 while (blocked != -1);
6486 /* Modifies the length assigned to instruction INSN
6487 LEN is the initially computed length of the insn. */
/* Adjust the length of INSN: dispatch on the insn's "adjust_len"
   attribute to the output function that can compute an exact length,
   overwriting the generic LEN where applicable.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
6490 adjust_insn_length (rtx insn, int len)
6492 rtx *op = recog_data.operand;
6493 enum attr_adjust_len adjust_len;
6495 /* Some complex insns don't need length adjustment and therefore
6496 the length need not/must not be adjusted for these insns.
6497 It is easier to state this in an insn attribute "adjust_len" than
6498 to clutter up code here... */
6500 if (-1 == recog_memoized (insn))
6505 /* Read from insn attribute "adjust_len" if/how length is to be adjusted. */
6507 adjust_len = get_attr_adjust_len (insn);
6509 if (adjust_len == ADJUST_LEN_NO)
6511 /* Nothing to adjust: The length from attribute "length" is fine.
6512 This is the default. */
6517 /* Extract insn's operands. */
6519 extract_constrain_insn_cached (insn);
6521 /* Dispatch to right function. */
6525 case ADJUST_LEN_RELOAD_IN16: output_reload_inhi (op, op[2], &len); break;
6526 case ADJUST_LEN_RELOAD_IN24: avr_out_reload_inpsi (op, op[2], &len); break;
6527 case ADJUST_LEN_RELOAD_IN32: output_reload_insisf (op, op[2], &len); break;
6529 case ADJUST_LEN_OUT_BITOP: avr_out_bitop (insn, op, &len); break;
6531 case ADJUST_LEN_OUT_PLUS: avr_out_plus (op, &len, NULL); break;
6532 case ADJUST_LEN_PLUS64: avr_out_plus64 (op[0], &len); break;
6533 case ADJUST_LEN_OUT_PLUS_NOCLOBBER:
6534 avr_out_plus_noclobber (op, &len, NULL); break;
6536 case ADJUST_LEN_ADDTO_SP: avr_out_addto_sp (op, &len); break;
6538 case ADJUST_LEN_MOV8: output_movqi (insn, op, &len); break;
6539 case ADJUST_LEN_MOV16: output_movhi (insn, op, &len); break;
6540 case ADJUST_LEN_MOV24: avr_out_movpsi (insn, op, &len); break;
6541 case ADJUST_LEN_MOV32: output_movsisf (insn, op, &len); break;
6542 case ADJUST_LEN_MOVMEM: avr_out_movmem (insn, op, &len); break;
6543 case ADJUST_LEN_XLOAD: avr_out_xload (insn, op, &len); break;
6545 case ADJUST_LEN_TSTHI: avr_out_tsthi (insn, op, &len); break;
6546 case ADJUST_LEN_TSTPSI: avr_out_tstpsi (insn, op, &len); break;
6547 case ADJUST_LEN_TSTSI: avr_out_tstsi (insn, op, &len); break;
6548 case ADJUST_LEN_COMPARE: avr_out_compare (insn, op, &len); break;
6549 case ADJUST_LEN_COMPARE64: avr_out_compare64 (insn, op, &len); break;
6551 case ADJUST_LEN_LSHRQI: lshrqi3_out (insn, op, &len); break;
6552 case ADJUST_LEN_LSHRHI: lshrhi3_out (insn, op, &len); break;
6553 case ADJUST_LEN_LSHRSI: lshrsi3_out (insn, op, &len); break;
6555 case ADJUST_LEN_ASHRQI: ashrqi3_out (insn, op, &len); break;
6556 case ADJUST_LEN_ASHRHI: ashrhi3_out (insn, op, &len); break;
6557 case ADJUST_LEN_ASHRSI: ashrsi3_out (insn, op, &len); break;
6559 case ADJUST_LEN_ASHLQI: ashlqi3_out (insn, op, &len); break;
6560 case ADJUST_LEN_ASHLHI: ashlhi3_out (insn, op, &len); break;
6561 case ADJUST_LEN_ASHLSI: ashlsi3_out (insn, op, &len); break;
6563 case ADJUST_LEN_ASHLPSI: avr_out_ashlpsi3 (insn, op, &len); break;
6564 case ADJUST_LEN_ASHRPSI: avr_out_ashrpsi3 (insn, op, &len); break;
6565 case ADJUST_LEN_LSHRPSI: avr_out_lshrpsi3 (insn, op, &len); break;
6567 case ADJUST_LEN_CALL: len = AVR_HAVE_JMP_CALL ? 2 : 1; break;
6569 case ADJUST_LEN_INSERT_BITS: avr_out_insert_bits (op, &len); break;
6578 /* Return nonzero if register REG dead after INSN. */
/* Return nonzero if REG is dead after INSN: either INSN itself kills
   it, or the scan in _reg_unused_after finds no later use.  */
6581 reg_unused_after (rtx insn, rtx reg)
6583 return (dead_or_set_p (insn, reg)
6584 || (REG_P(reg) && _reg_unused_after (insn, reg)));
6587 /* Return nonzero if REG is not used after INSN.
6588 We assume REG is a reload reg, and therefore does
6589 not live past labels. It may live past calls or jumps though. */
/* Scan forward from INSN looking for a use of REG; return nonzero if
   none is found before REG is clobbered, a label, or end of stream.
   REG is assumed to be a reload reg (does not live past labels).
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
6592 _reg_unused_after (rtx insn, rtx reg)
6597 /* If the reg is set by this instruction, then it is safe for our
6598 case. Disregard the case where this is a store to memory, since
6599 we are checking a register used in the store address. */
6600 set = single_set (insn);
6601 if (set && GET_CODE (SET_DEST (set)) != MEM
6602 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6605 while ((insn = NEXT_INSN (insn)))
6608 code = GET_CODE (insn);
6611 /* If this is a label that existed before reload, then the register
6612 is dead here. However, if this is a label added by reorg, then
6613 the register may still be live here. We can't tell the difference,
6614 so we just ignore labels completely. */
6615 if (code == CODE_LABEL)
6623 if (code == JUMP_INSN)
6626 /* If this is a sequence, we must handle them all at once.
6627 We could have for instance a call that sets the target register,
6628 and an insn in a delay slot that uses the register. In this case,
6629 we must return 0. */
6630 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
6635 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
6637 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
6638 rtx set = single_set (this_insn);
6640 if (GET_CODE (this_insn) == CALL_INSN)
6642 else if (GET_CODE (this_insn) == JUMP_INSN)
6644 if (INSN_ANNULLED_BRANCH_P (this_insn))
6649 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6651 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6653 if (GET_CODE (SET_DEST (set)) != MEM)
6659 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
6664 else if (code == JUMP_INSN)
/* Calls: REG may be passed as argument or be call-clobbered.  */
6668 if (code == CALL_INSN)
6671 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
6672 if (GET_CODE (XEXP (tem, 0)) == USE
6673 && REG_P (XEXP (XEXP (tem, 0), 0))
6674 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
6676 if (call_used_regs[REGNO (reg)])
6680 set = single_set (insn);
6682 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
6684 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
6685 return GET_CODE (SET_DEST (set)) != MEM;
6686 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
6693 /* Return RTX that represents the lower 16 bits of a constant address.
6694 Unfortunately, simplify_gen_subreg does not handle this case. */
/* Return an RTX for the lower 16 bits of constant address X
   (SYMBOL_REF or CONST of SYMBOL_REF + offset); see preceding
   comment.  Falls through to avr_edump on unexpected codes.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
6697 avr_const_address_lo16 (rtx x)
6701 switch (GET_CODE (x))
6707 if (PLUS == GET_CODE (XEXP (x, 0))
6708 && SYMBOL_REF == GET_CODE (XEXP (XEXP (x, 0), 0))
6709 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
6711 HOST_WIDE_INT offset = INTVAL (XEXP (XEXP (x, 0), 1));
6712 const char *name = XSTR (XEXP (XEXP (x, 0), 0), 0);
/* Rebuild the symbol in Pmode so only 16 address bits remain.  */
6714 lo16 = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6715 lo16 = gen_rtx_CONST (Pmode, plus_constant (lo16, offset));
6724 const char *name = XSTR (x, 0);
6726 return gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (name));
6730 avr_edump ("\n%?: %r\n", x);
6735 /* Target hook for assembling integer objects. The AVR version needs
6736 special handling for references to certain labels. */
/* TARGET_ASM_INTEGER hook: emit integer object X of SIZE bytes.
   Code addresses are emitted with the gs() modifier; PSImode (24-bit)
   addresses emit the low 16 bits plus a hh8 warning/placeholder byte
   because binutils lacks the relocation.
   NOTE(review): elided excerpt -- intervening source lines missing;
   code kept byte-identical.  */
6739 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
6741 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
6742 && text_segment_operand (x, VOIDmode) )
6744 fputs ("\t.word\tgs(", asm_out_file);
6745 output_addr_const (asm_out_file, x);
6746 fputs (")\n", asm_out_file);
6750 else if (GET_MODE (x) == PSImode)
6752 default_assemble_integer (avr_const_address_lo16 (x),
6753 GET_MODE_SIZE (HImode), aligned_p);
6755 fputs ("\t.warning\t\"assembling 24-bit address needs binutils"
6756 " extension for hh8(", asm_out_file);
6757 output_addr_const (asm_out_file, x);
6758 fputs (")\"\n", asm_out_file);
6760 fputs ("\t.byte\t0\t" ASM_COMMENT_START " hh8(", asm_out_file);
6761 output_addr_const (asm_out_file, x);
6762 fputs (")\n", asm_out_file);
6767 return default_assemble_integer (x, size, aligned_p);
6771 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
6774 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
6777   /* If the function has the 'signal' or 'interrupt' attribute, test to
6778      make sure that the name of the function is "__vector_NN" so as to
6779      catch when the user misspells the interrupt vector name.  */
6781   if (cfun->machine->is_interrupt)
6783       if (!STR_PREFIX_P (name, "__vector"))
6785           warning_at (DECL_SOURCE_LOCATION (decl), 0,
6786                       "%qs appears to be a misspelled interrupt handler",
6790   else if (cfun->machine->is_signal)
6792       if (!STR_PREFIX_P (name, "__vector"))
6794           warning_at (DECL_SOURCE_LOCATION (decl), 0,
6795                       "%qs appears to be a misspelled signal handler",
/* Emit the .type directive and the function label itself.  */
6800   ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
6801   ASM_OUTPUT_LABEL (file, name);
6805 /* Return value is nonzero if pseudos that have been
6806    assigned to registers of class CLASS would likely be spilled
6807    because registers of CLASS are needed for spill registers.  */
/* Only the two large classes are considered safe from spilling.  */
6810 avr_class_likely_spilled_p (reg_class_t c)
6812   return (c != ALL_REGS && c != ADDW_REGS);
6815 /* Valid attributes:
6816 progmem - put data to program memory;
6817 signal - make a function to be hardware interrupt. After function
6818 prologue interrupts are disabled;
6819 interrupt - make a function to be hardware interrupt. After function
6820 prologue interrupts are enabled;
6821 naked - don't generate function prologue/epilogue and `ret' command.
6823 Only `progmem' attribute valid for type. */
6825 /* Handle a "progmem" attribute; arguments as in
6826    struct attribute_spec.handler. */
6828 avr_handle_progmem_attribute (tree *node, tree name,
6829                               tree args ATTRIBUTE_UNUSED,
6830                               int flags ATTRIBUTE_UNUSED,
6835   if (TREE_CODE (*node) == TYPE_DECL)
6837       /* This is really a decl attribute, not a type attribute,
6838          but try to handle it for GCC 3.0 backwards compatibility.  */
6840       tree type = TREE_TYPE (*node);
6841       tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
6842       tree newtype = build_type_attribute_variant (type, attr);
6844       TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
6845       TREE_TYPE (*node) = newtype;
6846       *no_add_attrs = true;
/* Static-storage variables keep the attribute; anything else (e.g. an
   automatic variable) gets a warning and the attribute is dropped.  */
6848   else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
6850       *no_add_attrs = false;
6854       warning (OPT_Wattributes, "%qE attribute ignored",
6856       *no_add_attrs = true;
6863 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
6864    struct attribute_spec.handler. */
6866 avr_handle_fndecl_attribute (tree *node, tree name,
6868                              tree args ATTRIBUTE_UNUSED,
6869                              int flags ATTRIBUTE_UNUSED,
/* Reject the attribute (with a warning) on anything that is not a
   function declaration.  */
6872   if (TREE_CODE (*node) != FUNCTION_DECL)
6874       warning (OPT_Wattributes, "%qE attribute only applies to functions",
6876       *no_add_attrs = true;
/* Handle an attribute requiring a FUNCTION_TYPE; warn and drop the
   attribute when applied to anything else.  */
6883 avr_handle_fntype_attribute (tree *node, tree name,
6884                              tree args ATTRIBUTE_UNUSED,
6885                              int flags ATTRIBUTE_UNUSED,
6888   if (TREE_CODE (*node) != FUNCTION_TYPE)
6890       warning (OPT_Wattributes, "%qE attribute only applies to functions",
6892       *no_add_attrs = true;
6899 /* AVR attributes.  */
/* Table wired into TARGET_ATTRIBUTE_TABLE.  The affects_type_identity
   field of each entry falls on lines not visible in this extract.  */
6900 static const struct attribute_spec
6901 avr_attribute_table[] =
6903   /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
6904        affects_type_identity } */
6905   { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute,
6907   { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute,
6909   { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute,
6911   { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute,
6913   { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
6915   { "OS_main",   0, 0, false, true,  true,   avr_handle_fntype_attribute,
6917   { NULL,        0, 0, false, false, false, NULL, false }
6921 /* Look if DECL shall be placed in program memory space by
6922    means of attribute `progmem' or some address-space qualifier.
6923    Return non-zero if DECL is data that must end up in Flash and
6924    zero if the data lives in RAM (.bss, .data, .rodata, ...).
6926    Return 2 if DECL is located in 24-bit flash address-space
6927    Return 1 if DECL is located in 16-bit flash address-space
6928    Return -1 if attribute `progmem' occurs in DECL or ATTRIBUTES
6929    Return 0 otherwise  */
6932 avr_progmem_p (tree decl, tree attributes)
/* Only variables can live in progmem; other decls bail out early.  */
6936   if (TREE_CODE (decl) != VAR_DECL)
6939   if (avr_decl_memx_p (decl))
6942   if (avr_decl_flash_p (decl))
/* NOTE(review): the lookup below compares DECL's own attribute list
   against ATTRIBUTES; its left operand is on a missing line.  */
6946       != lookup_attribute ("progmem", attributes))
/* Peel array types to find `progmem' on the element type.  */
6953   while (TREE_CODE (a) == ARRAY_TYPE);
6955   if (a == error_mark_node)
6958   if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
6965 /* Scan type TYP for pointer references to address space ASn.
6966    Return ADDR_SPACE_GENERIC (i.e. 0) if all pointers targeting
6967    the AS are also declared to be CONST.
6968    Otherwise, return the respective addres space, i.e. a value != 0.  */
6971 avr_nonconst_pointer_addrspace (tree typ)
6973   while (ARRAY_TYPE == TREE_CODE (typ))
6974     typ = TREE_TYPE (typ);
6976   if (POINTER_TYPE_P (typ))
6979       tree target = TREE_TYPE (typ);
6981       /* Pointer to function: Test the function's return type.  */
6983       if (FUNCTION_TYPE == TREE_CODE (target))
6984         return avr_nonconst_pointer_addrspace (TREE_TYPE (target));
6986       /* "Ordinary" pointers... */
6988       while (TREE_CODE (target) == ARRAY_TYPE)
6989         target = TREE_TYPE (target);
6991       /* Pointers to non-generic address space must be const.
6992          Refuse address spaces outside the device's flash.  */
6994       as = TYPE_ADDR_SPACE (target);
6996       if (!ADDR_SPACE_GENERIC_P (as)
6997           && (!TYPE_READONLY (target)
6998               || avr_addrspace[as].segment >= avr_current_device->n_flash))
/* Recurse into the pointed-to type so nested pointers are checked too.  */
7003       /* Scan pointer's target type.  */
7005       return avr_nonconst_pointer_addrspace (target);
7008   return ADDR_SPACE_GENERIC;
7012 /* Sanity check NODE so that all pointers targeting non-generic addres spaces
7013    go along with CONST qualifier.  Writing to these address spaces should
7014    be detected and complained about as early as possible.  */
/* Returns true when NODE is OK (no diagnostic issued).  The `case'
   labels distinguishing VAR_DECL / PARM_DECL / FIELD_DECL etc. fall on
   lines missing from this extract.  */
7017 avr_pgm_check_var_decl (tree node)
7019   const char *reason = NULL;
7021   addr_space_t as = ADDR_SPACE_GENERIC;
7023   gcc_assert (as == 0);
7025   if (avr_log.progmem)
7026     avr_edump ("%?: %t\n", node);
7028   switch (TREE_CODE (node))
/* Each arm stores the offending address space in AS (comma expression)
   and records a human-readable REASON for the diagnostics below.  */
7034       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7035         reason = "variable";
7039       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7040         reason = "function parameter";
7044       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (node)), as)
7045         reason = "structure field";
7049       if (as = avr_nonconst_pointer_addrspace (TREE_TYPE (TREE_TYPE (node))),
7051         reason = "return type of function";
7055       if (as = avr_nonconst_pointer_addrspace (node), as)
7062         avr_edump ("%?: %s, %d, %d\n",
7063                    avr_addrspace[as].name,
7064                    avr_addrspace[as].segment, avr_current_device->n_flash);
/* Two diagnostic families: address space beyond the device's flash,
   and non-const pointer into a flash address space.  */
7065       if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7068             error ("%qT uses address space %qs beyond flash of %qs",
7069                    node, avr_addrspace[as].name, avr_current_device->name);
7071             error ("%s %q+D uses address space %qs beyond flash of %qs",
7072                    reason, node, avr_addrspace[as].name,
7073                    avr_current_device->name);
7078             error ("pointer targeting address space %qs must be const in %qT",
7079                    avr_addrspace[as].name, node);
7081             error ("pointer targeting address space %qs must be const"
7083                    avr_addrspace[as].name, reason, node);
7087   return reason == NULL;
7091 /* Add the section attribute if the variable is in progmem.  */
/* Implements TARGET_INSERT_ATTRIBUTES; also performs progmem-related
   sanity checks on NODE.  */
7094 avr_insert_attributes (tree node, tree *attributes)
7096   avr_pgm_check_var_decl (node);
7098   if (TREE_CODE (node) == VAR_DECL
7099       && (TREE_STATIC (node) || DECL_EXTERNAL (node))
7100       && avr_progmem_p (node, *attributes))
7105       /* For C++, we have to peel arrays in order to get correct
7106          determination of readonlyness.  */
7109         node0 = TREE_TYPE (node0);
7110       while (TREE_CODE (node0) == ARRAY_TYPE);
7112       if (error_mark_node == node0)
7115       as = TYPE_ADDR_SPACE (TREE_TYPE (node));
7117       if (avr_addrspace[as].segment >= avr_current_device->n_flash)
7119           error ("variable %q+D located in address space %qs"
7120                  " beyond flash of %qs",
7121                  node, avr_addrspace[as].name, avr_current_device->name);
/* Data destined for flash must be const; otherwise diagnose, naming
   either the attribute or the address-space keyword responsible.  */
7124       if (!TYPE_READONLY (node0)
7125           && !TREE_READONLY (node))
7127           const char *reason = "__attribute__((progmem))";
7129           if (!ADDR_SPACE_GENERIC_P (as))
7130             reason = avr_addrspace[as].name;
7132           if (avr_log.progmem)
7133             avr_edump ("\n%?: %t\n%t\n", node, node0);
7135           error ("variable %q+D must be const in order to be put into"
7136                  " read-only section by means of %qs", node, reason);
7142 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'.  */
7143 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'.  */
7144 /* Track need of __do_clear_bss.  */
7147 avr_asm_output_aligned_decl_common (FILE * stream,
7148                                     const_tree decl ATTRIBUTE_UNUSED,
7150                                     unsigned HOST_WIDE_INT size,
7151                                     unsigned int align, bool local_p)
7153   /* __gnu_lto_v1 etc. are just markers for the linker injected by toplev.c.
7154      There is no need to trigger __do_clear_bss code for them.  */
7156   if (!STR_PREFIX_P (name, "__gnu_lto"))
7157     avr_need_clear_bss_p = true;
/* LOCAL_P selects between a local and a common symbol.  */
7160     ASM_OUTPUT_ALIGNED_LOCAL (stream, name, size, align);
7162     ASM_OUTPUT_ALIGNED_COMMON (stream, name, size, align);
7166 /* Unnamed section callback for data_section
7167    to track need of __do_copy_data.  */
7170 avr_output_data_section_asm_op (const void *data)
7172   avr_need_copy_data_p = true;
7174   /* Dispatch to default.  */
7175   output_section_asm_op (data);
7179 /* Unnamed section callback for bss_section
7180    to track need of __do_clear_bss.  */
7183 avr_output_bss_section_asm_op (const void *data)
7185   avr_need_clear_bss_p = true;
7187   /* Dispatch to default.  */
7188   output_section_asm_op (data);
7192 /* Unnamed section callback for progmem*.data sections.  */
/* DATA is the section name passed in by get_unnamed_section.  */
7195 avr_output_progmem_section_asm_op (const void *data)
7197   fprintf (asm_out_file, "\t.section\t%s,\"a\",@progbits\n",
7198            (const char*) data);
7202 /* Implement `TARGET_ASM_INIT_SECTIONS'.  */
7205 avr_asm_init_sections (void)
7209   /* Set up a section for jump tables.  Alignment is handled by
7210      ASM_OUTPUT_BEFORE_CASE_LABEL.  */
7212   if (AVR_HAVE_JMP_CALL)
7214       progmem_swtable_section
7215         = get_unnamed_section (0, output_section_asm_op,
7216                                "\t.section\t.progmem.gcc_sw_table"
7217                                ",\"a\",@progbits");
/* Devices without JMP/CALL address jump tables as code ("ax").  */
7221       progmem_swtable_section
7222         = get_unnamed_section (SECTION_CODE, output_section_asm_op,
7223                                "\t.section\t.progmem.gcc_sw_table"
7224                                ",\"ax\",@progbits");
/* One unnamed section per flash segment prefix.  */
7227   for (n = 0; n < sizeof (progmem_section) / sizeof (*progmem_section); n++)
7230       = get_unnamed_section (0, avr_output_progmem_section_asm_op,
7231                              progmem_section_prefix[n]);
7234   /* Override section callbacks to keep track of `avr_need_clear_bss_p'
7235      resp. `avr_need_copy_data_p'.  */
7237   readonly_data_section->unnamed.callback = avr_output_data_section_asm_op;
7238   data_section->unnamed.callback = avr_output_data_section_asm_op;
7239   bss_section->unnamed.callback = avr_output_bss_section_asm_op;
7243 /* Implement `TARGET_ASM_FUNCTION_RODATA_SECTION'.  */
7246 avr_asm_function_rodata_section (tree decl)
7248   /* If a function is unused and optimized out by -ffunction-sections
7249      and --gc-sections, ensure that the same will happen for its jump
7250      tables by putting them into individual sections.  */
7255   /* Get the frodata section from the default function in varasm.c
7256      but treat function-associated data-like jump tables as code
7257      rather than as user defined data.  AVR has no constant pools.  */
/* Temporarily let flag_data_sections mirror flag_function_sections so
   default_function_rodata_section makes a per-function section.  */
7259     int fdata = flag_data_sections;
7261     flag_data_sections = flag_function_sections;
7262     frodata = default_function_rodata_section (decl);
7263     flag_data_sections = fdata;
7264     flags = frodata->common.flags;
7267   if (frodata != readonly_data_section
7268       && flags & SECTION_NAMED)
7270       /* Adjust section flags and replace section name prefix.  */
/* PREFIX pairs map an .rodata-style prefix to its progmem twin.  */
7274       static const char* const prefix[] =
7276           ".rodata", ".progmem.gcc_sw_table",
7277           ".gnu.linkonce.r.", ".gnu.linkonce.t."
7280       for (i = 0; i < sizeof (prefix) / sizeof (*prefix); i += 2)
7282           const char * old_prefix = prefix[i];
7283           const char * new_prefix = prefix[i+1];
7284           const char * name = frodata->named.name;
7286           if (STR_PREFIX_P (name, old_prefix))
7288               const char *rname = ACONCAT ((new_prefix,
7289                                             name + strlen (old_prefix), NULL));
7290               flags &= ~SECTION_CODE;
7291               flags |= AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE;
7293               return get_section (rname, flags, frodata->named.decl);
/* Fallback: the shared progmem jump-table section.  */
7298   return progmem_swtable_section;
7302 /* Implement `TARGET_ASM_NAMED_SECTION'.  */
7303 /* Track need of __do_clear_bss, __do_copy_data for named sections.  */
7306 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
7308   if (flags & AVR_SECTION_PROGMEM)
/* Decode the address space stored in the machine-dependent flag bits
   and rewrite the ".rodata" prefix into the matching progmem prefix.  */
7310       addr_space_t as = (flags & AVR_SECTION_PROGMEM) / SECTION_MACH_DEP;
7311       int segment = avr_addrspace[as].segment;
7312       const char *old_prefix = ".rodata";
7313       const char *new_prefix = progmem_section_prefix[segment];
7315       if (STR_PREFIX_P (name, old_prefix))
7317           const char *sname = ACONCAT ((new_prefix,
7318                                         name + strlen (old_prefix), NULL));
7319           default_elf_asm_named_section (sname, flags, decl);
7323       default_elf_asm_named_section (new_prefix, flags, decl);
/* Record whether startup code must copy .data / clear .bss.  */
7327   if (!avr_need_copy_data_p)
7328     avr_need_copy_data_p = (STR_PREFIX_P (name, ".data")
7329                             || STR_PREFIX_P (name, ".rodata")
7330                             || STR_PREFIX_P (name, ".gnu.linkonce.d"));
7332   if (!avr_need_clear_bss_p)
7333     avr_need_clear_bss_p = STR_PREFIX_P (name, ".bss");
7335   default_elf_asm_named_section (name, flags, decl);
/* Implement `TARGET_SECTION_TYPE_FLAGS'.  */
7339 avr_section_type_flags (tree decl, const char *name, int reloc)
7341   unsigned int flags = default_section_type_flags (decl, name, reloc);
7343   if (STR_PREFIX_P (name, ".noinit"))
7345       if (decl && TREE_CODE (decl) == VAR_DECL
7346           && DECL_INITIAL (decl) == NULL_TREE)
7347         flags |= SECTION_BSS;  /* @nobits */
7349         warning (0, "only uninitialized variables can be placed in the "
7353   if (decl && DECL_P (decl)
7354       && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7356       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7358       /* Attribute progmem puts data in generic address space.
7359          Set section flags as if it was in __flash to get the right
7360          section prefix in the remainder.  */
7362       if (ADDR_SPACE_GENERIC_P (as))
7363         as = ADDR_SPACE_FLASH;
/* Encode the address space in the machine-dependent flag bits; progmem
   data is neither writable nor BSS.  */
7365       flags |= as * SECTION_MACH_DEP;
7366       flags &= ~SECTION_WRITE;
7367       flags &= ~SECTION_BSS;
7374 /* Implement `TARGET_ENCODE_SECTION_INFO'.  */
7377 avr_encode_section_info (tree decl, rtx rtl, int new_decl_p)
7379   /* In avr_handle_progmem_attribute, DECL_INITIAL is not yet
7380      readily available, see PR34734.  So we postpone the warning
7381      about uninitialized data in program memory section until here.  */
7384       && decl && DECL_P (decl)
7385       && NULL_TREE == DECL_INITIAL (decl)
7386       && !DECL_EXTERNAL (decl)
7387       && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7389       warning (OPT_Wuninitialized,
7390                "uninitialized variable %q+D put into "
7391                "program memory area", decl);
7394   default_encode_section_info (decl, rtl, new_decl_p);
/* Stamp the variable's address space into its SYMBOL_REF flags so
   later address/move handling can tell flash from RAM symbols.  */
7396   if (decl && DECL_P (decl)
7397       && TREE_CODE (decl) != FUNCTION_DECL
7399       && SYMBOL_REF == GET_CODE (XEXP (rtl, 0)))
7401       rtx sym = XEXP (rtl, 0);
7402       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7404       /* PSTR strings are in generic space but located in flash:
7405          patch address space.  */
7407       if (-1 == avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7408         as = ADDR_SPACE_FLASH;
7410       AVR_SYMBOL_SET_ADDR_SPACE (sym, as);
7415 /* Implement `TARGET_ASM_SELECT_SECTION' */
7418 avr_asm_select_section (tree decl, int reloc, unsigned HOST_WIDE_INT align)
7420   section * sect = default_elf_select_section (decl, reloc, align);
7422   if (decl && DECL_P (decl)
7423       && avr_progmem_p (decl, DECL_ATTRIBUTES (decl)))
7425       addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
7426       int segment = avr_addrspace[as].segment;
/* Named sections: swap the ".rodata" prefix for the segment's progmem
   prefix; unnamed ones fall through to the per-segment section.  */
7428       if (sect->common.flags & SECTION_NAMED)
7430           const char * name = sect->named.name;
7431           const char * old_prefix = ".rodata";
7432           const char * new_prefix = progmem_section_prefix[segment];
7434           if (STR_PREFIX_P (name, old_prefix))
7436               const char *sname = ACONCAT ((new_prefix,
7437                                             name + strlen (old_prefix), NULL));
7438               return get_section (sname, sect->common.flags, sect->named.decl);
7442       return progmem_section[segment];
7448 /* Implement `TARGET_ASM_FILE_START'.  */
7449 /* Outputs some text at the start of each assembler file.  */
7452 avr_file_start (void)
7454   int sfr_offset = avr_current_arch->sfr_offset;
7456   if (avr_current_arch->asm_only)
7457     error ("MCU %qs supported for assembler only", avr_current_device->name);
7459   default_file_start ();
7461   /* Print I/O addresses of some SFRs used with IN and OUT.  */
/* Each symbol below is conditional on the device actually having the
   SFR; the guarding `if's fall on lines missing from this extract.  */
7463   if (!AVR_HAVE_8BIT_SP)
7464     fprintf (asm_out_file, "__SP_H__ = 0x%02x\n", avr_addr.sp_h - sfr_offset);
7466   fprintf (asm_out_file, "__SP_L__ = 0x%02x\n", avr_addr.sp_l - sfr_offset);
7467   fprintf (asm_out_file, "__SREG__ = 0x%02x\n", avr_addr.sreg - sfr_offset);
7469     fprintf (asm_out_file, "__RAMPZ__ = 0x%02x\n", avr_addr.rampz - sfr_offset);
7471     fprintf (asm_out_file, "__RAMPY__ = 0x%02x\n", avr_addr.rampy - sfr_offset);
7473     fprintf (asm_out_file, "__RAMPX__ = 0x%02x\n", avr_addr.rampx - sfr_offset);
7475     fprintf (asm_out_file, "__RAMPD__ = 0x%02x\n", avr_addr.rampd - sfr_offset);
7477     fprintf (asm_out_file, "__CCP__ = 0x%02x\n", avr_addr.ccp - sfr_offset);
7478   fprintf (asm_out_file, "__tmp_reg__ = %d\n", TMP_REGNO);
7479   fprintf (asm_out_file, "__zero_reg__ = %d\n", ZERO_REGNO);
7483 /* Implement `TARGET_ASM_FILE_END'.  */
7484 /* Outputs to the stdio stream FILE some
7485    appropriate text to go at the end of an assembler file.  */
/* NOTE(review): the function signature itself is on lines missing from
   this extract; presumably `static void avr_file_end (void)'.  */
7490   /* Output these only if there is anything in the
7491      .data* / .rodata* / .gnu.linkonce.* resp. .bss*
7492      input section(s) - some code size can be saved by not
7493      linking in the initialization code from libgcc if resp.
7494      sections are empty.  */
7496   if (avr_need_copy_data_p)
7497     fputs (".global __do_copy_data\n", asm_out_file);
7499   if (avr_need_clear_bss_p)
7500     fputs (".global __do_clear_bss\n", asm_out_file);
7503 /* Choose the order in which to allocate hard registers for
7504    pseudo-registers local to a basic block.
7506    Store the desired register order in the array `reg_alloc_order'.
7507    Element 0 should be the register to allocate first; element 1, the
7508    next register; and so on.  */
7511 order_regs_for_local_alloc (void)
/* Three alternative orders, selected by -morder1/-morder2; most of each
   table's entries are on lines missing from this extract.  */
7514   static const int order_0[] = {
7522     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7526   static const int order_1[] = {
7534     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7538   static const int order_2[] = {
7547     15,14,13,12,11,10,9,8,7,6,5,4,3,2,
7552   const int *order = (TARGET_ORDER_1 ? order_1 :
7553                       TARGET_ORDER_2 ? order_2 :
7555   for (i=0; i < ARRAY_SIZE (order_0); ++i)
7556       reg_alloc_order[i] = order[i];
7560 /* Implement `TARGET_REGISTER_MOVE_COST' */
7563 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
7564                         reg_class_t from, reg_class_t to)
/* Moves involving the stack pointer are costly; the default arm of this
   conditional is on a line missing from this extract.  */
7566   return (from == STACK_REG ? 6
7567           : to == STACK_REG ? 12
7572 /* Implement `TARGET_MEMORY_MOVE_COST' */
7575 avr_memory_move_cost (enum machine_mode mode,
7576                       reg_class_t rclass ATTRIBUTE_UNUSED,
7577                       bool in ATTRIBUTE_UNUSED)
/* Cost grows with access width; the chain's final default arm is on a
   line missing from this extract.  */
7579   return (mode == QImode ? 2
7580           : mode == HImode ? 4
7581           : mode == SImode ? 8
7582           : mode == SFmode ? 8
7587 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
7588    cost of an RTX operand given its context.  X is the rtx of the
7589    operand, MODE is its mode, and OUTER is the rtx_code of this
7590    operand's parent operator.  */
7593 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
7594                       int opno, bool speed)
7596   enum rtx_code code = GET_CODE (x);
/* Simple operands cost one insn per byte; anything else defers to the
   full avr_rtx_costs walk below.  */
7607       return COSTS_N_INSNS (GET_MODE_SIZE (mode));
7614   avr_rtx_costs (x, code, outer, opno, &total, speed);
7618 /* Worker function for AVR backend's rtx_cost function.
7619    X is rtx expression whose cost is to be calculated.
7620    Return true if the complete cost has been computed.
7621    Return false if subexpressions should be scanned.
7622    In either case, *TOTAL contains the cost result. */
/* NOTE(review): this extract is line-sampled -- the `case' labels of
   the big switch over CODE (PLUS, MINUS, MULT, the shift codes,
   COMPARE, ...) are missing, so the per-arm grouping below can only be
   inferred, not confirmed, from the surviving lines.  Costs are stated
   in COSTS_N_INSNS units, usually split by SPEED vs. size.  */
7625 avr_rtx_costs_1 (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED,
7626                  int opno ATTRIBUTE_UNUSED, int *total, bool speed)
7628   enum rtx_code code = (enum rtx_code) codearg;
7629   enum machine_mode mode = GET_MODE (x);
7639       /* Immediate constants are as cheap as registers.  */
7644       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7652       *total = COSTS_N_INSNS (1);
7658       *total = COSTS_N_INSNS (2 * GET_MODE_SIZE (mode) - 1);
7664       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7672           *total = COSTS_N_INSNS (1);
7678       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7682       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7683       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7687       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
7688                               - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7689       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7693       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
7694                               - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
7695       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Addition arms, including multiply-add (MUL hardware) special cases.  */
7703           && MULT == GET_CODE (XEXP (x, 0))
7704           && register_operand (XEXP (x, 1), QImode))
7707           *total = COSTS_N_INSNS (speed ? 4 : 3);
7708           /* multiply-add with constant: will be split and load constant. */
7709           if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7710             *total = COSTS_N_INSNS (1) + *total;
7713         *total = COSTS_N_INSNS (1);
7714         if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7715           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7720           && (MULT == GET_CODE (XEXP (x, 0))
7721               || ASHIFT == GET_CODE (XEXP (x, 0)))
7722           && register_operand (XEXP (x, 1), HImode)
7723           && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))
7724               || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 0), 0))))
7727           *total = COSTS_N_INSNS (speed ? 5 : 4);
7728           /* multiply-add with constant: will be split and load constant. */
7729           if (CONST_INT_P (XEXP (XEXP (x, 0), 1)))
7730             *total = COSTS_N_INSNS (1) + *total;
7733       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7735           *total = COSTS_N_INSNS (2);
7736           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7739       else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7740         *total = COSTS_N_INSNS (1);
7742         *total = COSTS_N_INSNS (2);
7746       if (!CONST_INT_P (XEXP (x, 1)))
7748           *total = COSTS_N_INSNS (3);
7749           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7752       else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7753         *total = COSTS_N_INSNS (2);
7755         *total = COSTS_N_INSNS (3);
7759       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7761           *total = COSTS_N_INSNS (4);
7762           *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7765       else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
7766         *total = COSTS_N_INSNS (1);
7768         *total = COSTS_N_INSNS (4);
7774       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Subtraction arms, including multiply-sub special cases.  */
7780           && register_operand (XEXP (x, 0), QImode)
7781           && MULT == GET_CODE (XEXP (x, 1)))
7784           *total = COSTS_N_INSNS (speed ? 4 : 3);
7785           /* multiply-sub with constant: will be split and load constant. */
7786           if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7787             *total = COSTS_N_INSNS (1) + *total;
7792           && register_operand (XEXP (x, 0), HImode)
7793           && (MULT == GET_CODE (XEXP (x, 1))
7794               || ASHIFT == GET_CODE (XEXP (x, 1)))
7795           && (ZERO_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))
7796               || SIGN_EXTEND == GET_CODE (XEXP (XEXP (x, 1), 0))))
7799           *total = COSTS_N_INSNS (speed ? 5 : 4);
7800           /* multiply-sub with constant: will be split and load constant. */
7801           if (CONST_INT_P (XEXP (XEXP (x, 1), 1)))
7802             *total = COSTS_N_INSNS (1) + *total;
7808       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7809       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7810       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7811         *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7815       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
7816       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7817       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
7825         *total = COSTS_N_INSNS (!speed ? 3 : 4);
7827         *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
/* Multiplication: costs depend on operand extension patterns and on
   whether the device has a hardware multiplier.  */
7835           rtx op0 = XEXP (x, 0);
7836           rtx op1 = XEXP (x, 1);
7837           enum rtx_code code0 = GET_CODE (op0);
7838           enum rtx_code code1 = GET_CODE (op1);
7839           bool ex0 = SIGN_EXTEND == code0 || ZERO_EXTEND == code0;
7840           bool ex1 = SIGN_EXTEND == code1 || ZERO_EXTEND == code1;
7843               && (u8_operand (op1, HImode)
7844                   || s8_operand (op1, HImode)))
7846               *total = COSTS_N_INSNS (!speed ? 4 : 6);
7850               && register_operand (op1, HImode))
7852               *total = COSTS_N_INSNS (!speed ? 5 : 8);
7855           else if (ex0 || ex1)
7857               *total = COSTS_N_INSNS (!speed ? 3 : 5);
7860           else if (register_operand (op0, HImode)
7861                    && (u8_operand (op1, HImode)
7862                        || s8_operand (op1, HImode)))
7864               *total = COSTS_N_INSNS (!speed ? 6 : 9);
7868             *total = COSTS_N_INSNS (!speed ? 7 : 10);
7871         *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7878             *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7888             /* Add some additional costs besides CALL like moves etc.  */
7890             *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7894             /* Just a rough estimate.  Even with -O2 we don't want bulky
7895                code expanded inline.  */
7897             *total = COSTS_N_INSNS (25);
7903               *total = COSTS_N_INSNS (300);
7905               /* Add some additional costs besides CALL like moves etc.  */
7906               *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 5 : 4);
7914       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7915       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
/* Division/modulo arms: expanded as library calls.  */
7923         *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
7925         *total = COSTS_N_INSNS (15 * GET_MODE_SIZE (mode));
7926       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
7927       /* For div/mod with const-int divisor we have at least the cost of
7928          loading the divisor. */
7929       if (CONST_INT_P (XEXP (x, 1)))
7930         *total += COSTS_N_INSNS (GET_MODE_SIZE (mode));
7931       /* Add some overall penaly for clobbering and moving around registers */
7932       *total += COSTS_N_INSNS (2);
/* Rotate arms: only a few fixed rotate counts are cheap on AVR.  */
7939           if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
7940             *total = COSTS_N_INSNS (1);
7945           if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
7946             *total = COSTS_N_INSNS (3);
7951           if (CONST_INT_P (XEXP (x, 1)))
7952             switch (INTVAL (XEXP (x, 1)))
7956                 *total = COSTS_N_INSNS (5);
7959                 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
7967       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Left-shift arms, per mode; variable shift counts are expensive.  */
7974           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
7976               *total = COSTS_N_INSNS (!speed ? 4 : 17);
7977               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
7982               val = INTVAL (XEXP (x, 1));
7984                 *total = COSTS_N_INSNS (3);
7985               else if (val >= 0 && val <= 7)
7986                 *total = COSTS_N_INSNS (val);
7988                 *total = COSTS_N_INSNS (1);
7995           if (const_2_to_7_operand (XEXP (x, 1), HImode)
7996               && (SIGN_EXTEND == GET_CODE (XEXP (x, 0))
7997                   || ZERO_EXTEND == GET_CODE (XEXP (x, 0))))
7999               *total = COSTS_N_INSNS (!speed ? 4 : 6);
8004           if (const1_rtx == (XEXP (x, 1))
8005               && SIGN_EXTEND == GET_CODE (XEXP (x, 0)))
8007               *total = COSTS_N_INSNS (2);
8011           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8013               *total = COSTS_N_INSNS (!speed ? 5 : 41);
8014               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8018             switch (INTVAL (XEXP (x, 1)))
8025                 *total = COSTS_N_INSNS (2);
8028                 *total = COSTS_N_INSNS (3);
8034                 *total = COSTS_N_INSNS (4);
8039                 *total = COSTS_N_INSNS (5);
8042                 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8045                 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8048                 *total = COSTS_N_INSNS (!speed ? 5 : 10);
8051                 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8052                 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8058           if (!CONST_INT_P (XEXP (x, 1)))
8060               *total = COSTS_N_INSNS (!speed ? 6 : 73);
8063             switch (INTVAL (XEXP (x, 1)))
8071                 *total = COSTS_N_INSNS (3);
8074                 *total = COSTS_N_INSNS (5);
8077                 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8083           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8085               *total = COSTS_N_INSNS (!speed ? 7 : 113);
8086               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8090             switch (INTVAL (XEXP (x, 1)))
8096                 *total = COSTS_N_INSNS (3);
8101                 *total = COSTS_N_INSNS (4);
8104                 *total = COSTS_N_INSNS (6);
8107                 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8110                 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8111                 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8119       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Arithmetic right-shift arms, per mode.  */
8126           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8128               *total = COSTS_N_INSNS (!speed ? 4 : 17);
8129               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8134               val = INTVAL (XEXP (x, 1));
8136                 *total = COSTS_N_INSNS (4);
8138                 *total = COSTS_N_INSNS (2);
8139               else if (val >= 0 && val <= 7)
8140                 *total = COSTS_N_INSNS (val);
8142                 *total = COSTS_N_INSNS (1);
8147           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8149               *total = COSTS_N_INSNS (!speed ? 5 : 41);
8150               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8154             switch (INTVAL (XEXP (x, 1)))
8160                 *total = COSTS_N_INSNS (2);
8163                 *total = COSTS_N_INSNS (3);
8169                 *total = COSTS_N_INSNS (4);
8173                 *total = COSTS_N_INSNS (5);
8176                 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8179                 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8183                 *total = COSTS_N_INSNS (!speed ? 5 : 8);
8186                 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8187                 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8193           if (!CONST_INT_P (XEXP (x, 1)))
8195               *total = COSTS_N_INSNS (!speed ? 6 : 73);
8198             switch (INTVAL (XEXP (x, 1)))
8204                 *total = COSTS_N_INSNS (3);
8208                 *total = COSTS_N_INSNS (5);
8211                 *total = COSTS_N_INSNS (4);
8214                 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8220           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8222               *total = COSTS_N_INSNS (!speed ? 7 : 113);
8223               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8227             switch (INTVAL (XEXP (x, 1)))
8233                 *total = COSTS_N_INSNS (4);
8238                 *total = COSTS_N_INSNS (6);
8241                 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8244                 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
8247                 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8248                 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8256       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* Logical right-shift arms, per mode.  */
8263           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8265               *total = COSTS_N_INSNS (!speed ? 4 : 17);
8266               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8271               val = INTVAL (XEXP (x, 1));
8273                 *total = COSTS_N_INSNS (3);
8274               else if (val >= 0 && val <= 7)
8275                 *total = COSTS_N_INSNS (val);
8277                 *total = COSTS_N_INSNS (1);
8282           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8284               *total = COSTS_N_INSNS (!speed ? 5 : 41);
8285               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8289             switch (INTVAL (XEXP (x, 1)))
8296                 *total = COSTS_N_INSNS (2);
8299                 *total = COSTS_N_INSNS (3);
8304                 *total = COSTS_N_INSNS (4);
8308                 *total = COSTS_N_INSNS (5);
8314                 *total = COSTS_N_INSNS (!speed ? 5 : 6);
8317                 *total = COSTS_N_INSNS (!speed ? 5 : 7);
8321                 *total = COSTS_N_INSNS (!speed ? 5 : 9);
8324                 *total = COSTS_N_INSNS (!speed ? 5 : 41);
8325                 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8331           if (!CONST_INT_P (XEXP (x, 1)))
8333               *total = COSTS_N_INSNS (!speed ? 6 : 73);
8336             switch (INTVAL (XEXP (x, 1)))
8344                 *total = COSTS_N_INSNS (3);
8347                 *total = COSTS_N_INSNS (5);
8350                 *total = COSTS_N_INSNS (!speed ? 5 : 3 * INTVAL (XEXP (x, 1)));
8356           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8358               *total = COSTS_N_INSNS (!speed ? 7 : 113);
8359               *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8363             switch (INTVAL (XEXP (x, 1)))
8369                 *total = COSTS_N_INSNS (4);
8372                 *total = COSTS_N_INSNS (!speed ? 7 : 8);
8377                 *total = COSTS_N_INSNS (4);
8380                 *total = COSTS_N_INSNS (6);
8383                 *total = COSTS_N_INSNS (!speed ? 7 : 113);
8384                 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1,
8392       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* COMPARE arm: cost by the mode of the first operand.  */
8396       switch (GET_MODE (XEXP (x, 0)))
8399           *total = COSTS_N_INSNS (1);
8400           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8401             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8405           *total = COSTS_N_INSNS (2);
8406           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8407             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8408           else if (INTVAL (XEXP (x, 1)) != 0)
8409             *total += COSTS_N_INSNS (1);
8413           *total = COSTS_N_INSNS (3);
8414           if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
8415             *total += COSTS_N_INSNS (2);
8419           *total = COSTS_N_INSNS (4);
8420           if (GET_CODE (XEXP (x, 1)) != CONST_INT)
8421             *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, 1, speed);
8422           else if (INTVAL (XEXP (x, 1)) != 0)
8423             *total += COSTS_N_INSNS (3);
8429       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, 0, speed);
/* TRUNCATE of (LSHIFTRT (MULT ...)): recognized as a cheap high-part
   multiply in QImode/HImode.  */
8434           && LSHIFTRT == GET_CODE (XEXP (x, 0))
8435           && MULT == GET_CODE (XEXP (XEXP (x, 0), 0))
8436           && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
8438           if (QImode == mode || HImode == mode)
8440               *total = COSTS_N_INSNS (2);
8453 /* Implement `TARGET_RTX_COSTS'. */
/* Thin wrapper: delegate the real work to avr_rtx_costs_1 and, when
   -mlog=rtx_costs debugging is enabled, dump the verdict, the computed
   total cost, the outer code and the RTX under consideration.  */
8456 avr_rtx_costs (rtx x, int codearg, int outer_code,
8457 int opno, int *total, bool speed)
8459 bool done = avr_rtx_costs_1 (x, codearg, outer_code,
8460 opno, total, speed);
8462 if (avr_log.rtx_costs)
8464 avr_edump ("\n%?=%b (%s) total=%d, outer=%C:\n%r\n",
8465 done, speed ? "speed" : "size", *total, outer_code, x);
8472 /* Implement `TARGET_ADDRESS_COST'. */
/* Penalize base+displacement addresses whose offset is too large for a
   single LDD/STD (>= 61) and plain constant addresses that are not in
   the low I/O range; dump the result under -mlog=address_cost.  */
8475 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
8479 if (GET_CODE (x) == PLUS
8480 && CONST_INT_P (XEXP (x, 1))
8481 && (REG_P (XEXP (x, 0))
8482 || GET_CODE (XEXP (x, 0)) == SUBREG))
8484 if (INTVAL (XEXP (x, 1)) >= 61)
8487 else if (CONSTANT_ADDRESS_P (x))
8490 && io_address_operand (x, QImode))
8494 if (avr_log.address_cost)
8495 avr_edump ("\n%?: %d = %r\n", cost, x);
8500 /* Test for extra memory constraint 'Q'.
8501 It's a memory address based on Y or Z pointer with valid displacement. */
/* X is a MEM; accept (mem (plus (reg) (const_int d))) where d fits the
   LD/LDD offset range and the base is Y, Z, a pseudo, or the frame/arg
   pointer.  Logs the decision under -mlog=constraints.  */
8504 extra_constraint_Q (rtx x)
8508 if (GET_CODE (XEXP (x,0)) == PLUS
8509 && REG_P (XEXP (XEXP (x,0), 0))
8510 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
8511 && (INTVAL (XEXP (XEXP (x,0), 1))
8512 <= MAX_LD_OFFSET (GET_MODE (x))))
8514 rtx xx = XEXP (XEXP (x,0), 0);
8515 int regno = REGNO (xx);
8517 ok = (/* allocate pseudos */
8518 regno >= FIRST_PSEUDO_REGISTER
8519 /* strictly check */
8520 || regno == REG_Z || regno == REG_Y
8521 /* XXX frame & arg pointer checks */
8522 || xx == frame_pointer_rtx
8523 || xx == arg_pointer_rtx);
8525 if (avr_log.constraints)
8526 avr_edump ("\n%?=%d reload_completed=%d reload_in_progress=%d\n %r\n",
8527 ok, reload_completed, reload_in_progress, x);
8533 /* Convert condition code CONDITION to the valid AVR condition code. */
/* Per the caller's comment in avr_reorg_remove_redundant_compare below,
   this maps GT/GTU to GE/GEU and LE/LEU to LT/LTU.  */
8536 avr_normalize_condition (RTX_CODE condition)
8553 /* Helper function for `avr_reorg'. */
/* If INSN is a non-jump single_set of cc0 from a non-DImode COMPARE,
   return that SET pattern; the (elided) fallthrough path handles the
   non-matching case.  */
8556 avr_compare_pattern (rtx insn)
8558 rtx pattern = single_set (insn);
8561 && NONJUMP_INSN_P (insn)
8562 && SET_DEST (pattern) == cc0_rtx
8563 && GET_CODE (SET_SRC (pattern)) == COMPARE
8564 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 0))
8565 && DImode != GET_MODE (XEXP (SET_SRC (pattern), 1)))
8573 /* Helper function for `avr_reorg'. */
8575 /* Expansion of switch/case decision trees leads to code like
8577 cc0 = compare (Reg, Num)
8581 cc0 = compare (Reg, Num)
8585 The second comparison is superfluous and can be deleted.
8586 The second jump condition can be transformed from a
8587 "difficult" one to a "simple" one because "cc0 > 0" and
8588 "cc0 >= 0" will have the same effect here.
8590 This function relies on the way switch/case is being expanded
8591 as binary decision tree. For example code see PR 49903.
8593 Return TRUE if optimization performed.
8594 Return FALSE if nothing changed.
8596 INSN1 is a comparison, i.e. avr_compare_pattern != 0.
8598 We don't want to do this in text peephole because it is
8599 tedious to work out jump offsets there and the second comparison
8600 might have been transformed by `avr_reorg'.
8602 RTL peephole won't do because peephole2 does not scan across
8606 avr_reorg_remove_redundant_compare (rtx insn1)
8608 rtx comp1, ifelse1, xcond1, branch1;
8609 rtx comp2, ifelse2, xcond2, branch2, insn2;
8611 rtx jump, target, cond;
8613 /* Look out for: compare1 - branch1 - compare2 - branch2 */
8615 branch1 = next_nonnote_nondebug_insn (insn1);
8616 if (!branch1 || !JUMP_P (branch1))
8619 insn2 = next_nonnote_nondebug_insn (branch1);
8620 if (!insn2 || !avr_compare_pattern (insn2))
8623 branch2 = next_nonnote_nondebug_insn (insn2);
8624 if (!branch2 || !JUMP_P (branch2))
8627 comp1 = avr_compare_pattern (insn1);
8628 comp2 = avr_compare_pattern (insn2);
8629 xcond1 = single_set (branch1);
8630 xcond2 = single_set (branch2);
8632 if (!comp1 || !comp2
8633 || !rtx_equal_p (comp1, comp2)
8634 || !xcond1 || SET_DEST (xcond1) != pc_rtx
8635 || !xcond2 || SET_DEST (xcond2) != pc_rtx
8636 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond1))
8637 || IF_THEN_ELSE != GET_CODE (SET_SRC (xcond2)))
8642 comp1 = SET_SRC (comp1);
8643 ifelse1 = SET_SRC (xcond1);
8644 ifelse2 = SET_SRC (xcond2);
8646 /* comp<n> is COMPARE now and ifelse<n> is IF_THEN_ELSE. */
/* Filter: first branch must test (eq cc0 0), both branches must be
   plain conditional jumps to labels, and the compared value must be a
   register against a CONST_INT.  */
8648 if (EQ != GET_CODE (XEXP (ifelse1, 0))
8649 || !REG_P (XEXP (comp1, 0))
8650 || !CONST_INT_P (XEXP (comp1, 1))
8651 || XEXP (ifelse1, 2) != pc_rtx
8652 || XEXP (ifelse2, 2) != pc_rtx
8653 || LABEL_REF != GET_CODE (XEXP (ifelse1, 1))
8654 || LABEL_REF != GET_CODE (XEXP (ifelse2, 1))
8655 || !COMPARISON_P (XEXP (ifelse2, 0))
8656 || cc0_rtx != XEXP (XEXP (ifelse1, 0), 0)
8657 || cc0_rtx != XEXP (XEXP (ifelse2, 0), 0)
8658 || const0_rtx != XEXP (XEXP (ifelse1, 0), 1)
8659 || const0_rtx != XEXP (XEXP (ifelse2, 0), 1))
8664 /* We filtered the insn sequence to look like
8670 (if_then_else (eq (cc0)
8679 (if_then_else (CODE (cc0)
8685 code = GET_CODE (XEXP (ifelse2, 0));
8687 /* Map GT/GTU to GE/GEU which is easier for AVR.
8688 The first two instructions compare/branch on EQ
8689 so we may replace the difficult
8691 if (x == VAL) goto L1;
8692 if (x > VAL) goto L2;
8696 if (x == VAL) goto L1;
8697 if (x >= VAL) goto L2;
8699 Similarly, replace LE/LEU by LT/LTU. */
8710 code = avr_normalize_condition (code);
8717 /* Wrap the branches into UNSPECs so they won't be changed or
8718 optimized in the remainder. */
8720 target = XEXP (XEXP (ifelse1, 1), 0);
8721 cond = XEXP (ifelse1, 0);
8722 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn1);
8724 JUMP_LABEL (jump) = JUMP_LABEL (branch1);
8726 target = XEXP (XEXP (ifelse2, 1), 0);
8727 cond = gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
8728 jump = emit_jump_insn_after (gen_branch_unspec (target, cond), insn2);
8730 JUMP_LABEL (jump) = JUMP_LABEL (branch2);
8732 /* The comparisons in insn1 and insn2 are exactly the same;
8733 insn2 is superfluous so delete it. */
8735 delete_insn (insn2);
8736 delete_insn (branch1);
8737 delete_insn (branch2);
8743 /* Implement `TARGET_MACHINE_DEPENDENT_REORG'. */
8744 /* Optimize conditional jumps. */
/* Walk all real insns.  For each comparison feeding a "difficult"
   branch, try (in order): removing a redundant second compare (see
   avr_reorg_remove_redundant_compare), swapping reg-reg operands,
   reversing a tst, or bumping a CONST_INT operand by one so the
   condition can be normalized to a cheap AVR branch.  */
8749 rtx insn = get_insns();
8751 for (insn = next_real_insn (insn); insn; insn = next_real_insn (insn))
8753 rtx pattern = avr_compare_pattern (insn);
8759 && avr_reorg_remove_redundant_compare (insn))
8764 if (compare_diff_p (insn))
8766 /* Now we work under compare insn with difficult branch. */
8768 rtx next = next_real_insn (insn);
8769 rtx pat = PATTERN (next);
8771 pattern = SET_SRC (pattern);
8773 if (true_regnum (XEXP (pattern, 0)) >= 0
8774 && true_regnum (XEXP (pattern, 1)) >= 0)
/* Both operands are registers: swap them and the branch condition.  */
8776 rtx x = XEXP (pattern, 0);
8777 rtx src = SET_SRC (pat);
8778 rtx t = XEXP (src,0);
8779 PUT_CODE (t, swap_condition (GET_CODE (t)));
8780 XEXP (pattern, 0) = XEXP (pattern, 1);
8781 XEXP (pattern, 1) = x;
8782 INSN_CODE (next) = -1;
8784 else if (true_regnum (XEXP (pattern, 0)) >= 0
8785 && XEXP (pattern, 1) == const0_rtx)
8787 /* This is a tst insn, we can reverse it. */
8788 rtx src = SET_SRC (pat);
8789 rtx t = XEXP (src,0);
8791 PUT_CODE (t, swap_condition (GET_CODE (t)));
8792 XEXP (pattern, 1) = XEXP (pattern, 0);
8793 XEXP (pattern, 0) = const0_rtx;
8794 INSN_CODE (next) = -1;
8795 INSN_CODE (insn) = -1;
8797 else if (true_regnum (XEXP (pattern, 0)) >= 0
8798 && CONST_INT_P (XEXP (pattern, 1)))
/* Register vs. constant: if legal, compare against VAL+1 with a
   normalized (simpler) condition instead.  */
8800 rtx x = XEXP (pattern, 1);
8801 rtx src = SET_SRC (pat);
8802 rtx t = XEXP (src,0);
8803 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
8805 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
8807 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
8808 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
8809 INSN_CODE (next) = -1;
8810 INSN_CODE (insn) = -1;
8817 /* Returns register number for function return value.*/
8819 static inline unsigned int
8820 avr_ret_register (void)
8825 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
/* Only the canonical return-value register (avr_ret_register) qualifies. */
8828 avr_function_value_regno_p (const unsigned int regno)
8830 return (regno == avr_ret_register ());
8833 /* Create an RTX representing the place where a
8834 library function returns a value of mode MODE. */
/* The value ends at avr_ret_register()+1; OFFS is rounded up to an even
   number of bytes so multi-byte values start in an even register.  */
8837 avr_libcall_value (enum machine_mode mode,
8838 const_rtx func ATTRIBUTE_UNUSED)
8840 int offs = GET_MODE_SIZE (mode);
8843 offs = (offs + 1) & ~1;
8845 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
8848 /* Create an RTX representing the place where a
8849 function returns a value of data type VALTYPE. */
/* Non-BLKmode values defer to avr_libcall_value; BLKmode sizes are
   rounded up to the next of {1,2,4,8} bytes before picking the register. */
8852 avr_function_value (const_tree type,
8853 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
8854 bool outgoing ATTRIBUTE_UNUSED)
8858 if (TYPE_MODE (type) != BLKmode)
8859 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
8861 offs = int_size_in_bytes (type);
8864 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
8865 offs = GET_MODE_SIZE (SImode);
8866 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
8867 offs = GET_MODE_SIZE (DImode);
8869 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
/* Return nonzero iff X is (or was allocated to) a hard register that is
   a member of register class RCLASS.  */
8873 test_hard_reg_class (enum reg_class rclass, rtx x)
8875 int regno = true_regnum (x);
8879 if (TEST_HARD_REG_CLASS (rclass, regno))
8886 /* Helper for jump_over_one_insn_p: Test if INSN is a 2-word instruction
8887 and thus is suitable to be skipped by CPSE, SBRC, etc. */
8890 avr_2word_insn_p (rtx insn)
8892 if (avr_current_device->errata_skip
8894 || 2 != get_attr_length (insn))
8899 switch (INSN_CODE (insn))
8904 case CODE_FOR_movqi_insn:
8906 rtx set = single_set (insn);
8907 rtx src = SET_SRC (set);
8908 rtx dest = SET_DEST (set);
8910 /* Factor out LDS and STS from movqi_insn. */
/* A 2-word movqi is LDS/STS, i.e. a reg move to/from a constant
   address.  */
8913 && (REG_P (src) || src == const0_rtx))
8915 return CONSTANT_ADDRESS_P (XEXP (dest, 0));
8917 else if (REG_P (dest)
8920 return CONSTANT_ADDRESS_P (XEXP (src, 0));
8926 case CODE_FOR_call_insn:
8927 case CODE_FOR_call_value_insn:
/* Return nonzero if the jump INSN to DEST skips exactly one insn:
   either a 1-word insn, or a 2-word insn that is safe to skip
   (see avr_2word_insn_p).  Uses INSN_ADDRESSES, so only valid once
   insn addresses have been computed.  */
8934 jump_over_one_insn_p (rtx insn, rtx dest)
8936 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
8939 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
8940 int dest_addr = INSN_ADDRESSES (uid);
8941 int jump_offset = dest_addr - jump_addr - get_attr_length (insn);
8943 return (jump_offset == 1
8944 || (jump_offset == 2
8945 && avr_2word_insn_p (next_active_insn (insn))));
8948 /* Returns 1 if a value of mode MODE can be stored starting with hard
8949 register number REGNO. On the enhanced core, anything larger than
8950 1 byte must start in even numbered register for "movw" to work
8951 (this way we don't have to check for odd registers everywhere). */
8954 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
8956 /* NOTE: 8-bit values must not be disallowed for R28 or R29.
8957 Disallowing QI et al. in these regs might lead to code like
8958 (set (subreg:QI (reg:HI 28) n) ...)
8959 which will result in wrong code because reload does not
8960 handle SUBREGs of hard registers like this.
8961 This could be fixed in reload. However, it appears
8962 that fixing reload is not wanted by reload people. */
8964 /* Any GENERAL_REGS register can hold 8-bit values. */
8966 if (GET_MODE_SIZE (mode) == 1)
8969 /* FIXME: Ideally, the following test is not needed.
8970 However, it turned out that it can reduce the number
8971 of spill fails. AVR and it's poor endowment with
8972 address registers is extreme stress test for reload. */
8974 if (GET_MODE_SIZE (mode) >= 4
8978 /* All modes larger than 8 bits should start in an even register. */
8980 return !(regno & 1);
8984 /* Implement `MODE_CODE_BASE_REG_CLASS'. */
/* Non-generic (flash) address spaces can only be addressed via Z;
   otherwise restrict to the base pointer regs depending on reload
   state and whether the address is reg+offset (PLUS).  */
8987 avr_mode_code_base_reg_class (enum machine_mode mode ATTRIBUTE_UNUSED,
8988 addr_space_t as, RTX_CODE outer_code,
8989 RTX_CODE index_code ATTRIBUTE_UNUSED)
8991 if (!ADDR_SPACE_GENERIC_P (as))
8993 return POINTER_Z_REGS;
8997 return reload_completed ? BASE_POINTER_REGS : POINTER_REGS;
8999 return PLUS == outer_code ? BASE_POINTER_REGS : POINTER_REGS;
9003 /* Implement `REGNO_MODE_CODE_OK_FOR_BASE_P'. */
/* Decide whether hard or (renumbered) pseudo REGNO may serve as a base
   register; non-generic address spaces are more restrictive, and
   reg+offset (PLUS) addressing narrows the choice further.  */
9006 avr_regno_mode_code_ok_for_base_p (int regno,
9007 enum machine_mode mode ATTRIBUTE_UNUSED,
9008 addr_space_t as ATTRIBUTE_UNUSED,
9009 RTX_CODE outer_code,
9010 RTX_CODE index_code ATTRIBUTE_UNUSED)
9014 if (!ADDR_SPACE_GENERIC_P (as))
9016 if (regno < FIRST_PSEUDO_REGISTER
9024 regno = reg_renumber[regno];
9035 if (regno < FIRST_PSEUDO_REGISTER
9039 || regno == ARG_POINTER_REGNUM))
9043 else if (reg_renumber)
9045 regno = reg_renumber[regno];
9050 || regno == ARG_POINTER_REGNUM)
9057 && PLUS == outer_code
9067 /* A helper for `output_reload_insisf' and `output_reload_inhi'. */
9068 /* Set 32-bit register OP[0] to compile-time constant OP[1].
9069 CLOBBER_REG is a QI clobber register or NULL_RTX.
9070 LEN == NULL: output instructions.
9071 LEN != NULL: set *LEN to the length of the instruction sequence
9072 (in words) printed with LEN = NULL.
9073 If CLEAR_P is true, OP[0] had been cleared to Zero already.
9074 If CLEAR_P is false, nothing is known about OP[0].
9076 The effect on cc0 is as follows:
9078 Load 0 to any register except ZERO_REG : NONE
9079 Load ld register with any value : NONE
9080 Anything else: : CLOBBER */
9083 output_reload_in_const (rtx *op, rtx clobber_reg, int *len, bool clear_p)
9089 int clobber_val = 1234;
9090 bool cooked_clobber_p = false;
9092 enum machine_mode mode = GET_MODE (dest);
9093 int n, n_bytes = GET_MODE_SIZE (mode);
9095 gcc_assert (REG_P (dest)
9096 && CONSTANT_P (src));
9101 /* (REG:SI 14) is special: It's neither in LD_REGS nor in NO_LD_REGS
9102 but has some subregs that are in LD_REGS. Use the MSB (REG:QI 17). */
9104 if (REGNO (dest) < 16
9105 && REGNO (dest) + GET_MODE_SIZE (mode) > 16)
9107 clobber_reg = all_regs_rtx[REGNO (dest) + n_bytes - 1];
9110 /* We might need a clobber reg but don't have one. Look at the value to
9111 be loaded more closely. A clobber is only needed if it is a symbol
9112 or contains a byte that is neither 0, -1 or a power of 2. */
9114 if (NULL_RTX == clobber_reg
9115 && !test_hard_reg_class (LD_REGS, dest)
9116 && (! (CONST_INT_P (src) || CONST_DOUBLE_P (src))
9117 || !avr_popcount_each_byte (src, n_bytes,
9118 (1 << 0) | (1 << 1) | (1 << 8))))
9120 /* We have no clobber register but need one. Cook one up.
9121 That's cheaper than loading from constant pool. */
9123 cooked_clobber_p = true;
9124 clobber_reg = all_regs_rtx[REG_Z + 1];
9125 avr_asm_len ("mov __tmp_reg__,%0", &clobber_reg, len, 1);
9128 /* Now start filling DEST from LSB to MSB. */
9130 for (n = 0; n < n_bytes; n++)
9133 bool done_byte = false;
9137 /* Crop the n-th destination byte. */
9139 xdest[n] = simplify_gen_subreg (QImode, dest, mode, n);
9140 ldreg_p = test_hard_reg_class (LD_REGS, xdest[n]);
9142 if (!CONST_INT_P (src)
9143 && !CONST_DOUBLE_P (src))
/* Symbolic constant: load each byte via ldi (LD_REGS) or
   ldi+mov through the clobber reg.  */
9145 static const char* const asm_code[][2] =
9147 { "ldi %2,lo8(%1)" CR_TAB "mov %0,%2", "ldi %0,lo8(%1)" },
9148 { "ldi %2,hi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hi8(%1)" },
9149 { "ldi %2,hlo8(%1)" CR_TAB "mov %0,%2", "ldi %0,hlo8(%1)" },
9150 { "ldi %2,hhi8(%1)" CR_TAB "mov %0,%2", "ldi %0,hhi8(%1)" }
9155 xop[2] = clobber_reg;
9157 avr_asm_len (asm_code[n][ldreg_p], xop, len, ldreg_p ? 1 : 2);
9162 /* Crop the n-th source byte. */
9164 xval = simplify_gen_subreg (QImode, src, mode, n);
9165 ival[n] = INTVAL (xval);
9167 /* Look if we can reuse the low word by means of MOVW. */
9173 rtx lo16 = simplify_gen_subreg (HImode, src, mode, 0);
9174 rtx hi16 = simplify_gen_subreg (HImode, src, mode, 2);
9176 if (INTVAL (lo16) == INTVAL (hi16))
9178 if (0 != INTVAL (lo16)
9181 avr_asm_len ("movw %C0,%A0", &op[0], len, 1);
9188 /* Don't use CLR so that cc0 is set as expected. */
9193 avr_asm_len (ldreg_p ? "ldi %0,0"
9194 : ZERO_REGNO == REGNO (xdest[n]) ? "clr %0"
9195 : "mov %0,__zero_reg__",
9200 if (clobber_val == ival[n]
9201 && REGNO (clobber_reg) == REGNO (xdest[n]))
9206 /* LD_REGS can use LDI to move a constant value */
9212 avr_asm_len ("ldi %0,lo8(%1)", xop, len, 1);
9216 /* Try to reuse value already loaded in some lower byte. */
9218 for (j = 0; j < n; j++)
9219 if (ival[j] == ival[n])
9224 avr_asm_len ("mov %0,%1", xop, len, 1);
9232 /* Need no clobber reg for -1: Use CLR/DEC */
9237 avr_asm_len ("clr %0", &xdest[n], len, 1);
9239 avr_asm_len ("dec %0", &xdest[n], len, 1);
9242 else if (1 == ival[n])
9245 avr_asm_len ("clr %0", &xdest[n], len, 1);
9247 avr_asm_len ("inc %0", &xdest[n], len, 1);
9251 /* Use T flag or INC to manage powers of 2 if we have
9254 if (NULL_RTX == clobber_reg
9255 && single_one_operand (xval, QImode))
9258 xop[1] = GEN_INT (exact_log2 (ival[n] & GET_MODE_MASK (QImode)));
9260 gcc_assert (constm1_rtx != xop[1]);
9265 avr_asm_len ("set", xop, len, 1);
9269 avr_asm_len ("clr %0", xop, len, 1);
9271 avr_asm_len ("bld %0,%1", xop, len, 1);
9275 /* We actually need the LD_REGS clobber reg. */
9277 gcc_assert (NULL_RTX != clobber_reg);
9281 xop[2] = clobber_reg;
9282 clobber_val = ival[n];
9284 avr_asm_len ("ldi %2,lo8(%1)" CR_TAB
9285 "mov %0,%2", xop, len, 2);
9288 /* If we cooked up a clobber reg above, restore it. */
9290 if (cooked_clobber_p)
9292 avr_asm_len ("mov %0,__tmp_reg__", &clobber_reg, len, 1);
9297 /* Reload the constant OP[1] into the HI register OP[0].
9298 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9299 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9300 need a clobber reg or have to cook one up.
9302 PLEN == NULL: Output instructions.
9303 PLEN != NULL: Output nothing. Set *PLEN to number of words occupied
9304 by the insns printed.
/* Simple forwarder to output_reload_in_const with CLEAR_P = false.  */
9309 output_reload_inhi (rtx *op, rtx clobber_reg, int *plen)
9311 output_reload_in_const (op, clobber_reg, plen, false);
9316 /* Reload a SI or SF compile time constant OP[1] into the register OP[0].
9317 CLOBBER_REG is a QI clobber reg needed to move vast majority of consts
9318 into a NO_LD_REGS register. If CLOBBER_REG is NULL_RTX we either don't
9319 need a clobber reg or have to cook one up.
9321 LEN == NULL: Output instructions.
9323 LEN != NULL: Output nothing. Set *LEN to number of words occupied
9324 by the insns printed.
9329 output_reload_insisf (rtx *op, rtx clobber_reg, int *len)
9332 && !test_hard_reg_class (LD_REGS, op[0])
9333 && (CONST_INT_P (op[1])
9334 || CONST_DOUBLE_P (op[1])))
9336 int len_clr, len_noclr;
9338 /* In some cases it is better to clear the destination beforehand, e.g.
9340 CLR R2 CLR R3 MOVW R4,R2 INC R2
9344 CLR R2 INC R2 CLR R3 CLR R4 CLR R5
9346 We find it too tedious to work that out in the print function.
9347 Instead, we call the print function twice to get the lengths of
9348 both methods and use the shortest one. */
9350 output_reload_in_const (op, clobber_reg, &len_clr, true);
9351 output_reload_in_const (op, clobber_reg, &len_noclr, false);
9353 if (len_noclr - len_clr == 4)
9355 /* Default needs 4 CLR instructions: clear register beforehand. */
9357 avr_asm_len ("mov %A0,__zero_reg__" CR_TAB
9358 "mov %B0,__zero_reg__" CR_TAB
9359 "movw %C0,%A0", &op[0], len, 3);
9361 output_reload_in_const (op, clobber_reg, len, true);
9370 /* Default: destination not pre-cleared. */
9372 output_reload_in_const (op, clobber_reg, len, false);
/* Reload a PSI (24-bit) constant OP[1] into register OP[0]; same
   contract as output_reload_inhi.  */
9377 avr_out_reload_inpsi (rtx *op, rtx clobber_reg, int *len)
9379 output_reload_in_const (op, clobber_reg, len, false);
/* Emit one jump-table element for label number VALUE: a word-sized
   gs() reference on devices with JMP/CALL, an RJMP otherwise.  */
9385 avr_output_addr_vec_elt (FILE *stream, int value)
9387 if (AVR_HAVE_JMP_CALL)
9388 fprintf (stream, "\t.word gs(.L%d)\n", value);
9390 fprintf (stream, "\trjmp .L%d\n", value);
9393 /* Returns true if SCRATCH are safe to be allocated as a scratch
9394 registers (for a define_peephole2) in the current function. */
9397 avr_hard_regno_scratch_ok (unsigned int regno)
9399 /* Interrupt functions can only use registers that have already been saved
9400 by the prologue, even if they would normally be call-clobbered. */
9402 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9403 && !df_regs_ever_live_p (regno))
9406 /* Don't allow hard registers that might be part of the frame pointer.
9407 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9408 and don't care for a frame pointer that spans more than one register. */
9410 if ((!reload_completed || frame_pointer_needed)
9411 && (regno == REG_Y || regno == REG_Y + 1))
9419 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
9422 avr_hard_regno_rename_ok (unsigned int old_reg,
9423 unsigned int new_reg)
9425 /* Interrupt functions can only use registers that have already been
9426 saved by the prologue, even if they would normally be
9429 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
9430 && !df_regs_ever_live_p (new_reg))
9433 /* Don't allow hard registers that might be part of the frame pointer.
9434 Some places in the compiler just test for [HARD_]FRAME_POINTER_REGNUM
9435 and don't care for a frame pointer that spans more than one register. */
9437 if ((!reload_completed || frame_pointer_needed)
9438 && (old_reg == REG_Y || old_reg == REG_Y + 1
9439 || new_reg == REG_Y || new_reg == REG_Y + 1))
9447 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
9448 or memory location in the I/O space (QImode only).
9450 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
9451 Operand 1: register operand to test, or CONST_INT memory address.
9452 Operand 2: bit number.
9453 Operand 3: label to jump to if the test is true. */
9456 avr_out_sbxx_branch (rtx insn, rtx operands[])
9458 enum rtx_code comp = GET_CODE (operands[0]);
9459 bool long_jump = get_attr_length (insn) >= 4;
9460 bool reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
9464 else if (comp == LT)
/* For a long jump or 1-insn skip, emit the opposite skip and branch
   over an RJMP/JMP instead.  */
9468 comp = reverse_condition (comp);
9470 switch (GET_CODE (operands[1]))
9477 if (low_io_address_operand (operands[1], QImode))
/* Low I/O addresses can use SBIS/SBIC directly ...  */
9480 output_asm_insn ("sbis %i1,%2", operands);
9482 output_asm_insn ("sbic %i1,%2", operands);
/* ... otherwise read the port into __tmp_reg__ and use SBRS/SBRC.  */
9486 output_asm_insn ("in __tmp_reg__,%i1", operands);
9488 output_asm_insn ("sbrs __tmp_reg__,%2", operands);
9490 output_asm_insn ("sbrc __tmp_reg__,%2", operands);
9493 break; /* CONST_INT */
9498 output_asm_insn ("sbrs %T1%T2", operands);
9500 output_asm_insn ("sbrc %T1%T2", operands);
9506 return ("rjmp .+4" CR_TAB
9515 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* Pull in libgcc's __do_global_ctors, then emit the default ctor entry. */
9518 avr_asm_out_ctor (rtx symbol, int priority)
9520 fputs ("\t.global __do_global_ctors\n", asm_out_file);
9521 default_ctor_section_asm_out_constructor (symbol, priority);
9524 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* Pull in libgcc's __do_global_dtors, then emit the default dtor entry. */
9527 avr_asm_out_dtor (rtx symbol, int priority)
9529 fputs ("\t.global __do_global_dtors\n", asm_out_file);
9530 default_dtor_section_asm_out_destructor (symbol, priority);
9533 /* Worker function for TARGET_RETURN_IN_MEMORY. */
/* BLKmode values larger than 8 bytes (or of unknown size) are returned
   in memory.  */
9536 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9538 if (TYPE_MODE (type) == BLKmode)
9540 HOST_WIDE_INT size = int_size_in_bytes (type);
9541 return (size == -1 || size > 8);
9547 /* Worker function for CASE_VALUES_THRESHOLD. */
/* Jump tables are cheaper on devices with JMP/CALL unless call
   prologues are in use.  */
9550 avr_case_values_threshold (void)
9552 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
9556 /* Implement `TARGET_ADDR_SPACE_ADDRESS_MODE'. */
/* 3-byte (24-bit) address spaces use PSImode, all others HImode.  */
9558 static enum machine_mode
9559 avr_addr_space_address_mode (addr_space_t as)
9561 return avr_addrspace[as].pointer_size == 3 ? PSImode : HImode;
9565 /* Implement `TARGET_ADDR_SPACE_POINTER_MODE'. */
/* Pointer mode equals address mode on AVR.  */
9567 static enum machine_mode
9568 avr_addr_space_pointer_mode (addr_space_t as)
9570 return avr_addr_space_address_mode (as);
9574 /* Helper for following function. */
/* REG must (eventually) be the Z register to address program memory;
   strict checking requires Z itself.  */
9577 avr_reg_ok_for_pgm_addr (rtx reg, bool strict)
9579 gcc_assert (REG_P (reg));
9583 return REGNO (reg) == REG_Z;
9586 /* Avoid combine to propagate hard regs. */
9588 if (can_create_pseudo_p()
9589 && REGNO (reg) < REG_Z)
9598 /* Implement `TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P'. */
/* Generic space defers to avr_legitimate_address_p.  Flash spaces allow
   only (reg Z) style addresses; 24-bit MEMX additionally accepts a
   LO_SUM of a high-byte register with Z.  Logs the decision under
   -mlog=legitimate_address_p.  */
9601 avr_addr_space_legitimate_address_p (enum machine_mode mode, rtx x,
9602 bool strict, addr_space_t as)
9611 case ADDR_SPACE_GENERIC:
9612 return avr_legitimate_address_p (mode, x, strict);
9614 case ADDR_SPACE_FLASH:
9615 case ADDR_SPACE_FLASH1:
9616 case ADDR_SPACE_FLASH2:
9617 case ADDR_SPACE_FLASH3:
9618 case ADDR_SPACE_FLASH4:
9619 case ADDR_SPACE_FLASH5:
9621 switch (GET_CODE (x))
9624 ok = avr_reg_ok_for_pgm_addr (x, strict);
9628 ok = avr_reg_ok_for_pgm_addr (XEXP (x, 0), strict);
9637 case ADDR_SPACE_MEMX:
9640 && can_create_pseudo_p());
9642 if (LO_SUM == GET_CODE (x))
9644 rtx hi = XEXP (x, 0);
9645 rtx lo = XEXP (x, 1);
9648 && (!strict || REGNO (hi) < FIRST_PSEUDO_REGISTER)
9650 && REGNO (lo) == REG_Z);
9656 if (avr_log.legitimate_address_p)
9658 avr_edump ("\n%?: ret=%b, mode=%m strict=%d "
9659 "reload_completed=%d reload_in_progress=%d %s:",
9660 ok, mode, strict, reload_completed, reload_in_progress,
9661 reg_renumber ? "(reg_renumber)" : "");
9663 if (GET_CODE (x) == PLUS
9664 && REG_P (XEXP (x, 0))
9665 && CONST_INT_P (XEXP (x, 1))
9666 && IN_RANGE (INTVAL (XEXP (x, 1)), 0, MAX_LD_OFFSET (mode))
9669 avr_edump ("(r%d ---> r%d)", REGNO (XEXP (x, 0)),
9670 true_regnum (XEXP (x, 0)));
9673 avr_edump ("\n%r\n", x);
9680 /* Implement `TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS'. */
/* Generic space defers to avr_legitimize_address; non-generic spaces
   only log under -mlog=legitimize_address.  */
9683 avr_addr_space_legitimize_address (rtx x, rtx old_x,
9684 enum machine_mode mode, addr_space_t as)
9686 if (ADDR_SPACE_GENERIC_P (as))
9687 return avr_legitimize_address (x, old_x, mode);
9689 if (avr_log.legitimize_address)
9691 avr_edump ("\n%?: mode=%m\n %r\n", mode, old_x);
9698 /* Implement `TARGET_ADDR_SPACE_CONVERT'. */
/* Convert pointer SRC between address spaces.  Up-casting 16-bit ->
   24-bit MEMX extends with a segment byte (RAM gets bit 23 set);
   down-casting 24-bit -> 16-bit drops the high byte.  */
9701 avr_addr_space_convert (rtx src, tree type_from, tree type_to)
9703 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (type_from));
9704 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type_to));
9706 if (avr_log.progmem)
9707 avr_edump ("\n%!: op = %r\nfrom = %t\nto = %t\n",
9708 src, type_from, type_to);
9710 /* Up-casting from 16-bit to 24-bit pointer. */
9712 if (as_from != ADDR_SPACE_MEMX
9713 && as_to == ADDR_SPACE_MEMX)
9717 rtx reg = gen_reg_rtx (PSImode);
9719 while (CONST == GET_CODE (sym) || PLUS == GET_CODE (sym))
9720 sym = XEXP (sym, 0);
9722 /* Look at symbol flags: avr_encode_section_info set the flags
9723 also if attribute progmem was seen so that we get the right
9724 promotion for, e.g. PSTR-like strings that reside in generic space
9725 but are located in flash. In that case we patch the incoming
9728 if (SYMBOL_REF == GET_CODE (sym)
9729 && ADDR_SPACE_FLASH == AVR_SYMBOL_GET_ADDR_SPACE (sym))
9731 as_from = ADDR_SPACE_FLASH;
9734 /* Linearize memory: RAM has bit 23 set. */
9736 msb = ADDR_SPACE_GENERIC_P (as_from)
9738 : avr_addrspace[as_from].segment;
9740 src = force_reg (Pmode, src);
9743 ? gen_zero_extendhipsi2 (reg, src)
9744 : gen_n_extendhipsi2 (reg, gen_int_mode (msb, QImode), src));
9749 /* Down-casting from 24-bit to 16-bit throws away the high byte. */
9751 if (as_from == ADDR_SPACE_MEMX
9752 && as_to != ADDR_SPACE_MEMX)
9754 rtx new_src = gen_reg_rtx (Pmode);
9756 src = force_reg (PSImode, src);
9758 emit_move_insn (new_src,
9759 simplify_gen_subreg (Pmode, src, PSImode, 0));
9767 /* Implement `TARGET_ADDR_SPACE_SUBSET_P'. */
9770 avr_addr_space_subset_p (addr_space_t subset ATTRIBUTE_UNUSED,
9771 addr_space_t superset ATTRIBUTE_UNUSED)
9773 /* Allow any kind of pointer mess. */
9779 /* Worker function for movmemhi expander.
9780 XOP[0] Destination as MEM:BLK
9782 XOP[2] # Bytes to copy
9784 Return TRUE if the expansion is accomplished.
9785 Return FALSE if the operand combination is not supported. */
9788 avr_emit_movmemhi (rtx *xop)
9790 HOST_WIDE_INT count;
9791 enum machine_mode loop_mode;
9792 addr_space_t as = MEM_ADDR_SPACE (xop[1]);
9793 rtx loop_reg, addr1, a_src, a_dest, insn, xas;
9794 rtx a_hi8 = NULL_RTX;
/* Writing to flash is not supported; byte count must be constant.  */
9796 if (avr_mem_flash_p (xop[0]))
9799 if (!CONST_INT_P (xop[2]))
9802 count = INTVAL (xop[2]);
9806 a_src = XEXP (xop[1], 0);
9807 a_dest = XEXP (xop[0], 0);
9809 if (PSImode == GET_MODE (a_src))
/* 24-bit MEMX source: split into 16-bit address + high byte.  */
9811 gcc_assert (as == ADDR_SPACE_MEMX);
9813 loop_mode = (count < 0x100) ? QImode : HImode;
9814 loop_reg = gen_rtx_REG (loop_mode, 24);
9815 emit_move_insn (loop_reg, gen_int_mode (count, loop_mode));
9817 addr1 = simplify_gen_subreg (HImode, a_src, PSImode, 0);
9818 a_hi8 = simplify_gen_subreg (QImode, a_src, PSImode, 2);
9822 int segment = avr_addrspace[as].segment;
9825 && avr_current_device->n_flash > 1)
/* Multi-segment flash: set RAMPZ to the source segment.  */
9827 a_hi8 = GEN_INT (segment);
9828 emit_move_insn (rampz_rtx, a_hi8 = copy_to_mode_reg (QImode, a_hi8));
9830 else if (!ADDR_SPACE_GENERIC_P (as))
9832 as = ADDR_SPACE_FLASH;
9837 loop_mode = (count <= 0x100) ? QImode : HImode;
9838 loop_reg = copy_to_mode_reg (loop_mode, gen_int_mode (count, loop_mode));
9843 /* FIXME: Register allocator might come up with spill fails if it is left
9844 on its own. Thus, we allocate the pointer registers by hand:
9846 X = destination address */
9848 emit_move_insn (lpm_addr_reg_rtx, addr1);
9849 emit_move_insn (gen_rtx_REG (HImode, REG_X), a_dest);
9851 /* FIXME: Register allocator does a bad job and might spill address
9852 register(s) inside the loop leading to additional move instruction
9853 to/from stack which could clobber tmp_reg. Thus, do *not* emit
9854 load and store as separate insns. Instead, we perform the copy
9855 by means of one monolithic insn. */
9857 gcc_assert (TMP_REGNO == LPM_REGNO);
9859 if (as != ADDR_SPACE_MEMX)
9861 /* Load instruction ([E]LPM or LD) is known at compile time:
9862 Do the copy-loop inline. */
9864 rtx (*fun) (rtx, rtx, rtx)
9865 = QImode == loop_mode ? gen_movmem_qi : gen_movmem_hi;
9867 insn = fun (xas, loop_reg, loop_reg);
9871 rtx (*fun) (rtx, rtx)
9872 = QImode == loop_mode ? gen_movmemx_qi : gen_movmemx_hi;
9874 emit_move_insn (gen_rtx_REG (QImode, 23), a_hi8);
9876 insn = fun (xas, GEN_INT (avr_addr.rampz));
9879 set_mem_addr_space (SET_SRC (XVECEXP (insn, 0, 0)), as);
9886 /* Print assembler for movmem_qi, movmem_hi insns...
9888 $1, $2 : Loop register
9890 X : Destination address
/* Emit the copy loop: load with post-increment from Z (LD/LPM/ELPM by
   address space), store with post-increment to X, decrement the loop
   counter and branch while nonzero.  PLEN works as in avr_asm_len.  */
9894 avr_out_movmem (rtx insn ATTRIBUTE_UNUSED, rtx *op, int *plen)
9896 addr_space_t as = (addr_space_t) INTVAL (op[0]);
9897 enum machine_mode loop_mode = GET_MODE (op[1]);
9898 bool sbiw_p = test_hard_reg_class (ADDW_REGS, op[1]);
9906 xop[2] = tmp_reg_rtx;
9910 avr_asm_len ("0:", xop, plen, 0);
9912 /* Load with post-increment */
9919 case ADDR_SPACE_GENERIC:
9921 avr_asm_len ("ld %2,Z+", xop, plen, 1);
9924 case ADDR_SPACE_FLASH:
9927 avr_asm_len ("lpm %2,%Z+", xop, plen, 1);
9929 avr_asm_len ("lpm" CR_TAB
9930 "adiw r30,1", xop, plen, 2);
9933 case ADDR_SPACE_FLASH1:
9934 case ADDR_SPACE_FLASH2:
9935 case ADDR_SPACE_FLASH3:
9936 case ADDR_SPACE_FLASH4:
9937 case ADDR_SPACE_FLASH5:
9940 avr_asm_len ("elpm %2,Z+", xop, plen, 1);
9942 avr_asm_len ("elpm" CR_TAB
9943 "adiw r30,1", xop, plen, 2);
9947 /* Store with post-increment */
9949 avr_asm_len ("st X+,%2", xop, plen, 1);
9951 /* Decrement loop-counter and set Z-flag */
9953 if (QImode == loop_mode)
9955 avr_asm_len ("dec %1", xop, plen, 1);
9959 avr_asm_len ("sbiw %1,1", xop, plen, 1);
9963 avr_asm_len ("subi %A1,1" CR_TAB
9964 "sbci %B1,0", xop, plen, 2);
9967 /* Loop until zero */
9969 return avr_asm_len ("brne 0b", xop, plen, 1);
9974 /* Helper for __builtin_avr_delay_cycles */
/* Emit a sequence of delay loops (4-, 3-, 2- and 1-register variants,
   largest first) plus trailing NOPs so the total cycle count matches
   OPERANDS0 as closely as the loop granularities allow.  */
9977 avr_expand_delay_cycles (rtx operands0)
9979 unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
9980 unsigned HOST_WIDE_INT cycles_used;
9981 unsigned HOST_WIDE_INT loop_count;
9983 if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
9985 loop_count = ((cycles - 9) / 6) + 1;
9986 cycles_used = ((loop_count - 1) * 6) + 9;
9987 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
9988 cycles -= cycles_used;
9991 if (IN_RANGE (cycles, 262145, 83886081))
9993 loop_count = ((cycles - 7) / 5) + 1;
9994 if (loop_count > 0xFFFFFF)
9995 loop_count = 0xFFFFFF;
9996 cycles_used = ((loop_count - 1) * 5) + 7;
9997 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
9998 cycles -= cycles_used;
10001 if (IN_RANGE (cycles, 768, 262144))
10003 loop_count = ((cycles - 5) / 4) + 1;
10004 if (loop_count > 0xFFFF)
10005 loop_count = 0xFFFF;
10006 cycles_used = ((loop_count - 1) * 4) + 5;
10007 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
10008 cycles -= cycles_used;
10011 if (IN_RANGE (cycles, 6, 767))
10013 loop_count = cycles / 3;
10014 if (loop_count > 255)
10016 cycles_used = loop_count * 3;
10017 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
10018 cycles -= cycles_used;
10021 while (cycles >= 2)
10023 emit_insn (gen_nopv (GEN_INT(2)));
10029 emit_insn (gen_nopv (GEN_INT(1)));
10035 /* Return VAL * BASE + DIGIT. BASE = 0 is shortcut for BASE = 2^{32} */
/* Appends one "digit" to VAL in base BASE using GCC's double_int arithmetic.
   BASE == 0 shifts left by 32 instead of multiplying (2^32 exceeds int).
   NOTE(review): the conditional's test expression is lost to truncation.  */
10038 avr_double_int_push_digit (double_int val, int base,
10039 unsigned HOST_WIDE_INT digit)
10042 ? double_int_lshift (val, 32, 64, false)
10043 : double_int_mul (val, uhwi_to_double_int (base));
10045 return double_int_add (val, uhwi_to_double_int (digit));
10049 /* Compute the image of x under f, i.e. perform x --> f(x) */
/* F is treated as a packed array of eight 4-bit nibbles; return nibble X.  */
10052 avr_map (double_int f, int x)
10054 return 0xf & double_int_to_uhwi (double_int_rshift (f, 4*x, 64, false));
10058 /* Return some metrics of map A. */
/* Enumerators select which metric avr_map_metric computes over the eight
   nibbles of a map.  NOTE(review): the `enum { ... }` wrapper and some
   enumerator lines are missing from this truncated view.  */
10062 /* Number of fixed points in { 0 ... 7 } */
10065 /* Size of preimage of non-fixed points in { 0 ... 7 } */
10068 /* Mask representing the fixed points in { 0 ... 7 } */
10069 MAP_MASK_FIXED_0_7,
10071 /* Size of the preimage of { 0 ... 7 } */
10074 /* Mask that represents the preimage of { f } */
10075 MAP_MASK_PREIMAGE_F
/* Walk all 8 nibbles of A and accumulate the metric selected by MODE:
   counts use +=, masks use |= with one bit per nibble position.  */
10079 avr_map_metric (double_int a, int mode)
10081 unsigned i, metric = 0;
10083 for (i = 0; i < 8; i++)
10085 unsigned ai = avr_map (a, i);
10087 if (mode == MAP_FIXED_0_7)
10089 else if (mode == MAP_NONFIXED_0_7)
10090 metric += ai < 8 && ai != i;
10091 else if (mode == MAP_MASK_FIXED_0_7)
10092 metric |= ((unsigned) (ai == i)) << i;
10093 else if (mode == MAP_PREIMAGE_0_7)
10095 else if (mode == MAP_MASK_PREIMAGE_F)
10096 metric |= ((unsigned) (ai == 0xf)) << i;
10105 /* Return true if IVAL has a 0xf in its hexadecimal representation
10106 and false, otherwise. Only nibbles 0..7 are taken into account.
10107 Used as constraint helper for C0f and Cxf. */
/* Thin wrapper: non-zero MAP_MASK_PREIMAGE_F metric <=> some nibble is 0xf.  */
10110 avr_has_nibble_0xf (rtx ival)
10112 return 0 != avr_map_metric (rtx_to_double_int (ival), MAP_MASK_PREIMAGE_F);
10116 /* We have a set of bits that are mapped by a function F.
10117 Try to decompose F by means of a second function G so that
10123 cost (F o G^-1) + cost (G) < cost (F)
10125 Example: Suppose builtin insert_bits supplies us with the map
10126 F = 0x3210ffff. Instead of doing 4 bit insertions to get the high
10127 nibble of the result, we can just as well rotate the bits before inserting
10128 them and use the map 0x7654ffff which is cheaper than the original map.
10129 For this example G = G^-1 = 0x32107654 and F o G^-1 = 0x7654ffff. */
/* avr_map_op_t: one candidate operation G usable in the decomposition
   above.  NOTE(review): the typedef/struct header and the declarations for
   the ginv / cost / map fields are missing from this truncated view.  */
10133 /* tree code of binary function G */
10134 enum tree_code code;
10136 /* The constant second argument of G */
10139 /* G^-1, the inverse of G (*, arg) */
10142 /* The cost of appplying G (*, arg) */
10145 /* The composition F o G^-1 (*, arg) for some function F */
10148 /* For debug purpose only */
/* Candidate table: rotations by 0..7 and small left/right shifts.  Initializer
   order matches the field comments above: code, arg, ginv, cost, map, str.  */
10152 static const avr_map_op_t avr_map_op[] =
10154 { LROTATE_EXPR, 0, 0x76543210, 0, { 0, 0 }, "id" },
10155 { LROTATE_EXPR, 1, 0x07654321, 2, { 0, 0 }, "<<<" },
10156 { LROTATE_EXPR, 2, 0x10765432, 4, { 0, 0 }, "<<<" },
10157 { LROTATE_EXPR, 3, 0x21076543, 4, { 0, 0 }, "<<<" },
10158 { LROTATE_EXPR, 4, 0x32107654, 1, { 0, 0 }, "<<<" },
10159 { LROTATE_EXPR, 5, 0x43210765, 3, { 0, 0 }, "<<<" },
10160 { LROTATE_EXPR, 6, 0x54321076, 5, { 0, 0 }, "<<<" },
10161 { LROTATE_EXPR, 7, 0x65432107, 3, { 0, 0 }, "<<<" },
10162 { RSHIFT_EXPR, 1, 0x6543210c, 1, { 0, 0 }, ">>" },
10163 { RSHIFT_EXPR, 1, 0x7543210c, 1, { 0, 0 }, ">>" },
10164 { RSHIFT_EXPR, 2, 0x543210cc, 2, { 0, 0 }, ">>" },
10165 { RSHIFT_EXPR, 2, 0x643210cc, 2, { 0, 0 }, ">>" },
10166 { RSHIFT_EXPR, 2, 0x743210cc, 2, { 0, 0 }, ">>" },
10167 { LSHIFT_EXPR, 1, 0xc7654321, 1, { 0, 0 }, "<<" },
10168 { LSHIFT_EXPR, 2, 0xcc765432, 2, { 0, 0 }, "<<" }
10172 /* Try to decompose F as F = (F o G^-1) o G as described above.
10173 The result is a struct representing F o G^-1 and G.
10174 If result.cost < 0 then such a decomposition does not exist. */
/* NOTE(review): truncated — the early `cost = -1` return for bits outside
   the image of G, and some loop-body lines, are missing from this view.  */
10176 static avr_map_op_t
10177 avr_map_decompose (double_int f, const avr_map_op_t *g, bool val_const_p)
/* val_used_p: F references operand 3 (some nibble is 0xf).  */
10180 bool val_used_p = 0 != avr_map_metric (f, MAP_MASK_PREIMAGE_F);
10181 avr_map_op_t f_ginv = *g;
10182 double_int ginv = uhwi_to_double_int (g->ginv);
10186 /* Step 1: Computing F o G^-1 */
/* Build the composed map nibble-by-nibble, most significant first, by
   pushing hex digits onto f_ginv.map.  */
10188 for (i = 7; i >= 0; i--)
10190 int x = avr_map (f, i);
10194 x = avr_map (ginv, x);
10196 /* The bit is no element of the image of G: no avail (cost = -1) */
10202 f_ginv.map = avr_double_int_push_digit (f_ginv.map, 16, x);
10205 /* Step 2: Compute the cost of the operations.
10206 The overall cost of doing an operation prior to the insertion is
10207 the cost of the insertion plus the cost of the operation. */
10209 /* Step 2a: Compute cost of F o G^-1 */
10211 if (0 == avr_map_metric (f_ginv.map, MAP_NONFIXED_0_7))
10213 /* The mapping consists only of fixed points and can be folded
10214 to AND/OR logic in the remainder. Reasonable cost is 3. */
10216 f_ginv.cost = 2 + (val_used_p && !val_const_p);
10222 /* Get the cost of the insn by calling the output worker with some
10223 fake values. Mimic effect of reloading xop[3]: Unused operands
10224 are mapped to 0 and used operands are reloaded to xop[0]. */
10226 xop[0] = all_regs_rtx[24];
10227 xop[1] = gen_int_mode (double_int_to_uhwi (f_ginv.map), SImode);
10228 xop[2] = all_regs_rtx[25];
10229 xop[3] = val_used_p ? xop[0] : const0_rtx;
/* Output worker in length-computation mode: accumulates into f_ginv.cost.  */
10231 avr_out_insert_bits (xop, &f_ginv.cost);
10233 f_ginv.cost += val_const_p && val_used_p ? 1 : 0;
10236 /* Step 2b: Add cost of G */
10238 f_ginv.cost += g->cost;
10240 if (avr_log.builtin)
10241 avr_edump (" %s%d=%d", g->str, g->arg, f_ginv.cost);
10247 /* Insert bits from XOP[1] into XOP[0] according to MAP.
10248 XOP[0] and XOP[1] don't overlap.
10249 If FIXP_P = true: Move all bits according to MAP using BLD/BST sequences.
10250 If FIXP_P = false: Just move the bit if its position in the destination
10251 is different to its source position. */
10254 avr_move_bits (rtx *xop, double_int map, bool fixp_p, int *plen)
10258 /* T-flag contains this bit of the source, i.e. of XOP[1] */
/* -1 means "no bit cached in T yet"; avoids redundant BST insns.  */
10259 int t_bit_src = -1;
10261 /* We order the operations according to the requested source bit b. */
/* NOTE(review): truncated — the guard comparing bit_src against b (and the
   skip for nibbles outside 0..7) is missing between the lines below.  */
10263 for (b = 0; b < 8; b++)
10264 for (bit_dest = 0; bit_dest < 8; bit_dest++)
10266 int bit_src = avr_map (map, bit_dest);
10270 /* Same position: No need to copy as requested by FIXP_P. */
10271 || (bit_dest == bit_src && !fixp_p))
10274 if (t_bit_src != bit_src)
10276 /* Source bit is not yet in T: Store it to T. */
10278 t_bit_src = bit_src;
10280 xop[3] = GEN_INT (bit_src);
10281 avr_asm_len ("bst %T1%T3", xop, plen, 1);
10284 /* Load destination bit with T. */
10286 xop[3] = GEN_INT (bit_dest);
10287 avr_asm_len ("bld %T0%T3", xop, plen, 1);
10292 /* PLEN == 0: Print assembler code for `insert_bits'.
10293 PLEN != 0: Compute code length in bytes.
10296 OP[1]: The mapping composed of nibbles. If nibble no. N is
10297 0: Bit N of result is copied from bit OP[2].0
10299 7: Bit N of result is copied from bit OP[2].7
10300 0xf: Bit N of result is copied from bit OP[3].N
10301 OP[2]: Bits to be inserted
10302 OP[3]: Target value */
/* NOTE(review): truncated — xop[] setup, the `if (plen) *plen = 0;` reset,
   and several closing braces are missing from this view.  */
10305 avr_out_insert_bits (rtx *op, int *plen)
10307 double_int map = rtx_to_double_int (op[1]);
10308 unsigned mask_fixed;
10309 bool fixp_p = true;
10316 gcc_assert (REG_P (xop[2]) || CONST_INT_P (xop[2]));
/* When printing (not measuring) and -fverbose-asm, echo the map value.  */
10320 else if (flag_print_asm_name)
10321 fprintf (asm_out_file,
10322 ASM_COMMENT_START "map = 0x%08" HOST_LONG_FORMAT "x\n",
10323 double_int_to_uhwi (map) & GET_MODE_MASK (SImode));
10325 /* If MAP has fixed points it might be better to initialize the result
10326 with the bits to be inserted instead of moving all bits by hand. */
10328 mask_fixed = avr_map_metric (map, MAP_MASK_FIXED_0_7);
10330 if (REGNO (xop[0]) == REGNO (xop[1]))
10332 /* Avoid early-clobber conflicts */
10334 avr_asm_len ("mov __tmp_reg__,%1", xop, plen, 1);
10335 xop[1] = tmp_reg_rtx;
10339 if (avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10341 /* XOP[2] is used and reloaded to XOP[0] already */
10343 int n_fix = 0, n_nofix = 0;
10345 gcc_assert (REG_P (xop[2]));
10347 /* Get the code size of the bit insertions; once with all bits
10348 moved and once with fixed points omitted. */
10350 avr_move_bits (xop, map, true, &n_fix);
10351 avr_move_bits (xop, map, false, &n_nofix);
/* Use the EOR/ANDI/EOR masking trick only when it saves > 3 insns.  */
10353 if (fixp_p && n_fix - n_nofix > 3)
10355 xop[3] = gen_int_mode (~mask_fixed, QImode);
10357 avr_asm_len ("eor %0,%1" CR_TAB
10358 "andi %0,%3" CR_TAB
10359 "eor %0,%1", xop, plen, 3);
10365 /* XOP[2] is unused */
10367 if (fixp_p && mask_fixed)
10369 avr_asm_len ("mov %0,%1", xop, plen, 1);
10374 /* Move/insert remaining bits. */
10376 avr_move_bits (xop, map, fixp_p, plen);
10382 /* IDs for all the AVR builtins. */
/* Enumerators are generated by expanding DEF_BUILTIN over builtins.def.  */
10384 enum avr_builtin_id
10387 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) ID,
10388 #include "builtins.def"
/* Register __int24 / __uint24 as 24-bit (PSImode) builtin integer types.  */
10395 avr_init_builtin_int24 (void)
10397 tree int24_type = make_signed_type (GET_MODE_BITSIZE (PSImode));
10398 tree uint24_type = make_unsigned_type (GET_MODE_BITSIZE (PSImode));
10400 (*lang_hooks.types.register_builtin_type) (int24_type, "__int24");
10401 (*lang_hooks.types.register_builtin_type) (uint24_type, "__uint24");
10404 /* Implement `TARGET_INIT_BUILTINS' */
10405 /* Set up all builtin functions for this target. */
/* Builds the function-type trees referenced by builtins.def's TYPE column,
   then registers every builtin via the DEF_BUILTIN expansion below.
   NOTE(review): several NULL_TREE terminator lines of the type lists are
   missing from this truncated view.  */
10408 avr_init_builtins (void)
10410 tree void_ftype_void
10411 = build_function_type_list (void_type_node, NULL_TREE);
10412 tree uchar_ftype_uchar
10413 = build_function_type_list (unsigned_char_type_node,
10414 unsigned_char_type_node,
10416 tree uint_ftype_uchar_uchar
10417 = build_function_type_list (unsigned_type_node,
10418 unsigned_char_type_node,
10419 unsigned_char_type_node,
10421 tree int_ftype_char_char
10422 = build_function_type_list (integer_type_node,
10426 tree int_ftype_char_uchar
10427 = build_function_type_list (integer_type_node,
10429 unsigned_char_type_node,
10431 tree void_ftype_ulong
10432 = build_function_type_list (void_type_node,
10433 long_unsigned_type_node,
10436 tree uchar_ftype_ulong_uchar_uchar
10437 = build_function_type_list (unsigned_char_type_node,
10438 long_unsigned_type_node,
10439 unsigned_char_type_node,
10440 unsigned_char_type_node,
/* const-qualified void in the __memx address space, for __builtin flash
   reads through a 24-bit (PSImode) pointer.  */
10443 tree const_memx_void_node
10444 = build_qualified_type (void_type_node,
10446 | ENCODE_QUAL_ADDR_SPACE (ADDR_SPACE_MEMX));
10448 tree const_memx_ptr_type_node
10449 = build_pointer_type_for_mode (const_memx_void_node, PSImode, false);
10451 tree char_ftype_const_memx_ptr
10452 = build_function_type_list (char_type_node,
10453 const_memx_ptr_type_node,
10456 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, CODE) \
10457 add_builtin_function (NAME, TYPE, ID, BUILT_IN_MD, NULL, NULL_TREE);
10458 #include "builtins.def"
10461 avr_init_builtin_int24 ();
/* Descriptor tying a builtin's ID to the insn that implements it, plus its
   arity; the table is generated from builtins.def and NULL-name terminated.
   NOTE(review): the name / n_args field declarations are missing here.  */
10465 struct avr_builtin_description
10467 enum insn_code icode;
10469 enum avr_builtin_id id;
10473 static const struct avr_builtin_description
10477 #define DEF_BUILTIN(NAME, N_ARGS, ID, TYPE, ICODE) \
10478 { ICODE, NAME, ID, N_ARGS },
10479 #include "builtins.def"
/* Sentinel: terminates lookup loops that scan on d->name != NULL.  */
10482 { CODE_FOR_nothing, NULL, 0, -1 }
10486 /* Subroutine of avr_expand_builtin to take care of unop insns. */
/* Expands a 1-operand builtin: materialize the argument, coerce it to the
   insn's expected mode/predicate, and emit the insn writing TARGET.
   NOTE(review): truncated — `rtx pat;`, the target-NULL check head and the
   final emit/return of PAT are missing from this view.  */
10489 avr_expand_unop_builtin (enum insn_code icode, tree exp,
10493 tree arg0 = CALL_EXPR_ARG (exp, 0);
10494 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10495 enum machine_mode op0mode = GET_MODE (op0);
10496 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10497 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10500 || GET_MODE (target) != tmode
10501 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10503 target = gen_reg_rtx (tmode);
/* Narrow an int-promoted argument back to the insn's HImode operand.  */
10506 if (op0mode == SImode && mode0 == HImode)
10509 op0 = gen_lowpart (HImode, op0);
10512 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
10514 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10515 op0 = copy_to_mode_reg (mode0, op0);
10517 pat = GEN_FCN (icode) (target, op0);
10527 /* Subroutine of avr_expand_builtin to take care of binop insns. */
/* Same pattern as the unop helper, for two arguments.  NOTE(review):
   truncated — `rtx pat;`, the target-NULL check head and the final
   emit/return are missing from this view.  */
10530 avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
10533 tree arg0 = CALL_EXPR_ARG (exp, 0);
10534 tree arg1 = CALL_EXPR_ARG (exp, 1);
10535 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10536 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10537 enum machine_mode op0mode = GET_MODE (op0);
10538 enum machine_mode op1mode = GET_MODE (op1);
10539 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10540 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10541 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10544 || GET_MODE (target) != tmode
10545 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10547 target = gen_reg_rtx (tmode);
/* Narrow int-promoted (or constant, VOIDmode) args to the insn's HImode.  */
10550 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10553 op0 = gen_lowpart (HImode, op0);
10556 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10559 op1 = gen_lowpart (HImode, op1);
10562 /* In case the insn wants input operands in modes different from
10563 the result, abort. */
10565 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10566 && (op1mode == mode1 || op1mode == VOIDmode))
10568 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10569 op0 = copy_to_mode_reg (mode0, op0);
10571 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10572 op1 = copy_to_mode_reg (mode1, op1);
10574 pat = GEN_FCN (icode) (target, op0, op1);
10583 /* Subroutine of avr_expand_builtin to take care of 3-operand insns. */
/* Same pattern as the unop/binop helpers, for three arguments.
   NOTE(review): truncated — `rtx pat;`, the target-NULL check head and
   the final emit/return are missing from this view.  */
10586 avr_expand_triop_builtin (enum insn_code icode, tree exp, rtx target)
10589 tree arg0 = CALL_EXPR_ARG (exp, 0);
10590 tree arg1 = CALL_EXPR_ARG (exp, 1);
10591 tree arg2 = CALL_EXPR_ARG (exp, 2);
10592 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10593 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10594 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, EXPAND_NORMAL);
10595 enum machine_mode op0mode = GET_MODE (op0);
10596 enum machine_mode op1mode = GET_MODE (op1);
10597 enum machine_mode op2mode = GET_MODE (op2);
10598 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10599 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
10600 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
10601 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
10604 || GET_MODE (target) != tmode
10605 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10607 target = gen_reg_rtx (tmode);
/* Narrow int-promoted (or constant, VOIDmode) args to the insn's HImode.  */
10610 if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
10613 op0 = gen_lowpart (HImode, op0);
10616 if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
10619 op1 = gen_lowpart (HImode, op1);
10622 if ((op2mode == SImode || op2mode == VOIDmode) && mode2 == HImode)
10625 op2 = gen_lowpart (HImode, op2);
10628 /* In case the insn wants input operands in modes different from
10629 the result, abort. */
10631 gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
10632 && (op1mode == mode1 || op1mode == VOIDmode)
10633 && (op2mode == mode2 || op2mode == VOIDmode));
10635 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
10636 op0 = copy_to_mode_reg (mode0, op0);
10638 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
10639 op1 = copy_to_mode_reg (mode1, op1);
10641 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
10642 op2 = copy_to_mode_reg (mode2, op2);
10644 pat = GEN_FCN (icode) (target, op0, op1, op2);
10654 /* Expand an expression EXP that calls a built-in function,
10655 with result going to TARGET if that's convenient
10656 (and in mode MODE if that's convenient).
10657 SUBTARGET may be used as the target for computing one of EXP's operands.
10658 IGNORE is nonzero if the value is to be ignored. */
/* NOTE(review): truncated — the `switch (id)` head, break/return statements
   and the closing of the dispatch loop are missing from this view.  */
10661 avr_expand_builtin (tree exp, rtx target,
10662 rtx subtarget ATTRIBUTE_UNUSED,
10663 enum machine_mode mode ATTRIBUTE_UNUSED,
10664 int ignore ATTRIBUTE_UNUSED)
10667 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
10668 const char* bname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
10669 unsigned int id = DECL_FUNCTION_CODE (fndecl);
/* Builtins needing special (non-table-driven) expansion:  */
10675 case AVR_BUILTIN_NOP:
10676 emit_insn (gen_nopv (GEN_INT(1)));
10679 case AVR_BUILTIN_DELAY_CYCLES:
10681 arg0 = CALL_EXPR_ARG (exp, 0);
10682 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
/* delay_cycles demands a compile-time constant cycle count.  */
10684 if (!CONST_INT_P (op0))
10685 error ("%s expects a compile time integer constant", bname);
10687 avr_expand_delay_cycles (op0);
10692 case AVR_BUILTIN_INSERT_BITS:
10694 arg0 = CALL_EXPR_ARG (exp, 0);
10695 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
/* insert_bits demands a constant map as its first argument.  */
10697 if (!CONST_INT_P (op0))
10699 error ("%s expects a compile time long integer constant"
10700 " as first argument", bname);
/* Generic path: look the builtin up in avr_bdesc and dispatch on arity.  */
10706 for (i = 0; avr_bdesc[i].name; i++)
10708 const struct avr_builtin_description *d = &avr_bdesc[i];
10714 emit_insn ((GEN_FCN (d->icode)) (target));
10718 return avr_expand_unop_builtin (d->icode, exp, target);
10721 return avr_expand_binop_builtin (d->icode, exp, target);
10724 return avr_expand_triop_builtin (d->icode, exp, target);
10731 gcc_unreachable ();
10735 /* Implement `TARGET_FOLD_BUILTIN'. */
/* Tree-level folding of AVR builtins: __builtin_avr_swap folds to a rotate;
   __builtin_avr_insert_bits is simplified (constant propagation, dropping
   unused operands, plain AND/OR logic, and map decomposition via the
   avr_map_op table).  NOTE(review): truncated — the `switch (fcode)` head,
   several braces and some assignments (e.g. recording changed/best_g in the
   search loop) are missing from this view.  */
10738 avr_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *arg,
10739 bool ignore ATTRIBUTE_UNUSED)
10741 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
10742 tree val_type = TREE_TYPE (TREE_TYPE (fndecl));
10752 case AVR_BUILTIN_SWAP:
/* swap(x) == rotate-left by 4 bits.  */
10754 return fold_build2 (LROTATE_EXPR, val_type, arg[0],
10755 build_int_cst (val_type, 4));
10758 case AVR_BUILTIN_INSERT_BITS:
10760 tree tbits = arg[1];
10761 tree tval = arg[2];
10763 tree map_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
10765 bool changed = false;
10767 avr_map_op_t best_g;
10769 if (TREE_CODE (arg[0]) != INTEGER_CST)
10771 /* No constant as first argument: Don't fold this and run into
10772 error in avr_expand_builtin. */
10777 map = tree_to_double_int (arg[0]);
10778 tmap = double_int_to_tree (map_type, map);
10780 if (TREE_CODE (tval) != INTEGER_CST
10781 && 0 == avr_map_metric (map, MAP_MASK_PREIMAGE_F))
10783 /* There are no F in the map, i.e. 3rd operand is unused.
10784 Replace that argument with some constant to render
10785 respective input unused. */
10787 tval = build_int_cst (val_type, 0);
10791 if (TREE_CODE (tbits) != INTEGER_CST
10792 && 0 == avr_map_metric (map, MAP_PREIMAGE_0_7))
10794 /* Similar for the bits to be inserted. If they are unused,
10795 we can just as well pass 0. */
10797 tbits = build_int_cst (val_type, 0);
10800 if (TREE_CODE (tbits) == INTEGER_CST)
10802 /* Inserting bits known at compile time is easy and can be
10803 performed by AND and OR with appropriate masks. */
10805 int bits = TREE_INT_CST_LOW (tbits);
10806 int mask_ior = 0, mask_and = 0xff;
10808 for (i = 0; i < 8; i++)
10810 int mi = avr_map (map, i);
/* mi < 8: bit i comes from bit mi of the constant BITS.  */
10814 if (bits & (1 << mi)) mask_ior |= (1 << i);
10815 else mask_and &= ~(1 << i);
10819 tval = fold_build2 (BIT_IOR_EXPR, val_type, tval,
10820 build_int_cst (val_type, mask_ior));
10821 return fold_build2 (BIT_AND_EXPR, val_type, tval,
10822 build_int_cst (val_type, mask_and));
10826 return build_call_expr (fndecl, 3, tmap, tbits, tval);
10828 /* If bits don't change their position we can use vanilla logic
10829 to merge the two arguments. */
10831 if (0 == avr_map_metric (map, MAP_NONFIXED_0_7))
10833 int mask_f = avr_map_metric (map, MAP_MASK_PREIMAGE_F);
10834 tree tres, tmask = build_int_cst (val_type, mask_f ^ 0xff);
/* Classic bit-merge: (bits ^ val) & mask ^ val.  */
10836 tres = fold_build2 (BIT_XOR_EXPR, val_type, tbits, tval);
10837 tres = fold_build2 (BIT_AND_EXPR, val_type, tres, tmask);
10838 return fold_build2 (BIT_XOR_EXPR, val_type, tres, tval);
10841 /* Try to decomposing map to reduce overall cost. */
10843 if (avr_log.builtin)
10844 avr_edump ("\n%?: %X\n%?: ROL cost: ", map);
10846 best_g = avr_map_op[0];
10847 best_g.cost = 1000;
/* Linear search for the cheapest decomposition candidate.  */
10849 for (i = 0; i < sizeof (avr_map_op) / sizeof (*avr_map_op); i++)
10852 = avr_map_decompose (map, avr_map_op + i,
10853 TREE_CODE (tval) == INTEGER_CST);
10855 if (g.cost >= 0 && g.cost < best_g.cost)
10859 if (avr_log.builtin)
10862 if (best_g.arg == 0)
10863 /* No optimization found */
10866 /* Apply operation G to the 2nd argument. */
10868 if (avr_log.builtin)
10869 avr_edump ("%?: using OP(%s%d, %X) cost %d\n",
10870 best_g.str, best_g.arg, best_g.map, best_g.cost);
10872 /* Do right-shifts arithmetically: They copy the MSB instead of
10873 shifting in a non-usable value (0) as with logic right-shift. */
10875 tbits = fold_convert (signed_char_type_node, tbits);
10876 tbits = fold_build2 (best_g.code, signed_char_type_node, tbits,
10877 build_int_cst (val_type, best_g.arg));
10878 tbits = fold_convert (val_type, tbits);
10880 /* Use map o G^-1 instead of original map to undo the effect of G. */
10882 tmap = double_int_to_tree (map_type, best_g.map);
10884 return build_call_expr (fndecl, 3, tmap, tbits, tval);
10885 } /* AVR_BUILTIN_INSERT_BITS */
10893 /* Initialize the GCC target structure. */
/* Assembler output hooks.  */
10895 #undef TARGET_ASM_ALIGNED_HI_OP
10896 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
10897 #undef TARGET_ASM_ALIGNED_SI_OP
10898 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
10899 #undef TARGET_ASM_UNALIGNED_HI_OP
10900 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
10901 #undef TARGET_ASM_UNALIGNED_SI_OP
10902 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
10903 #undef TARGET_ASM_INTEGER
10904 #define TARGET_ASM_INTEGER avr_assemble_integer
10905 #undef TARGET_ASM_FILE_START
10906 #define TARGET_ASM_FILE_START avr_file_start
10907 #undef TARGET_ASM_FILE_END
10908 #define TARGET_ASM_FILE_END avr_file_end
10910 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
10911 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
10912 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
10913 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
/* Calling-convention / return-value hooks.  */
10915 #undef TARGET_FUNCTION_VALUE
10916 #define TARGET_FUNCTION_VALUE avr_function_value
10917 #undef TARGET_LIBCALL_VALUE
10918 #define TARGET_LIBCALL_VALUE avr_libcall_value
10919 #undef TARGET_FUNCTION_VALUE_REGNO_P
10920 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
/* Attributes and sections.  */
10922 #undef TARGET_ATTRIBUTE_TABLE
10923 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
10924 #undef TARGET_INSERT_ATTRIBUTES
10925 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
10926 #undef TARGET_SECTION_TYPE_FLAGS
10927 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
10929 #undef TARGET_ASM_NAMED_SECTION
10930 #define TARGET_ASM_NAMED_SECTION avr_asm_named_section
10931 #undef TARGET_ASM_INIT_SECTIONS
10932 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
10933 #undef TARGET_ENCODE_SECTION_INFO
10934 #define TARGET_ENCODE_SECTION_INFO avr_encode_section_info
10935 #undef TARGET_ASM_SELECT_SECTION
10936 #define TARGET_ASM_SELECT_SECTION avr_asm_select_section
/* Cost model and reorg.  */
10938 #undef TARGET_REGISTER_MOVE_COST
10939 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
10940 #undef TARGET_MEMORY_MOVE_COST
10941 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
10942 #undef TARGET_RTX_COSTS
10943 #define TARGET_RTX_COSTS avr_rtx_costs
10944 #undef TARGET_ADDRESS_COST
10945 #define TARGET_ADDRESS_COST avr_address_cost
10946 #undef TARGET_MACHINE_DEPENDENT_REORG
10947 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
10948 #undef TARGET_FUNCTION_ARG
10949 #define TARGET_FUNCTION_ARG avr_function_arg
10950 #undef TARGET_FUNCTION_ARG_ADVANCE
10951 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
10953 #undef TARGET_RETURN_IN_MEMORY
10954 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
10956 #undef TARGET_STRICT_ARGUMENT_NAMING
10957 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
10959 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
10960 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
10962 #undef TARGET_HARD_REGNO_SCRATCH_OK
10963 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
10964 #undef TARGET_CASE_VALUES_THRESHOLD
10965 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
10967 #undef TARGET_FRAME_POINTER_REQUIRED
10968 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
10969 #undef TARGET_CAN_ELIMINATE
10970 #define TARGET_CAN_ELIMINATE avr_can_eliminate
10972 #undef TARGET_CLASS_LIKELY_SPILLED_P
10973 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
10975 #undef TARGET_OPTION_OVERRIDE
10976 #define TARGET_OPTION_OVERRIDE avr_option_override
10978 #undef TARGET_CANNOT_MODIFY_JUMPS_P
10979 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
10981 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
10982 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
/* Builtin hooks (implemented above in this file).  */
10984 #undef TARGET_INIT_BUILTINS
10985 #define TARGET_INIT_BUILTINS avr_init_builtins
10987 #undef TARGET_EXPAND_BUILTIN
10988 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
10990 #undef TARGET_FOLD_BUILTIN
10991 #define TARGET_FOLD_BUILTIN avr_fold_builtin
10993 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
10994 #define TARGET_ASM_FUNCTION_RODATA_SECTION avr_asm_function_rodata_section
10996 #undef TARGET_SCALAR_MODE_SUPPORTED_P
10997 #define TARGET_SCALAR_MODE_SUPPORTED_P avr_scalar_mode_supported_p
/* Named address-space hooks (__flash / __memx etc.).  */
10999 #undef TARGET_ADDR_SPACE_SUBSET_P
11000 #define TARGET_ADDR_SPACE_SUBSET_P avr_addr_space_subset_p
11002 #undef TARGET_ADDR_SPACE_CONVERT
11003 #define TARGET_ADDR_SPACE_CONVERT avr_addr_space_convert
11005 #undef TARGET_ADDR_SPACE_ADDRESS_MODE
11006 #define TARGET_ADDR_SPACE_ADDRESS_MODE avr_addr_space_address_mode
11008 #undef TARGET_ADDR_SPACE_POINTER_MODE
11009 #define TARGET_ADDR_SPACE_POINTER_MODE avr_addr_space_pointer_mode
11011 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
11012 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P \
11013 avr_addr_space_legitimate_address_p
11015 #undef TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS
11016 #define TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS avr_addr_space_legitimize_address
/* Operand printing.  */
11018 #undef TARGET_PRINT_OPERAND
11019 #define TARGET_PRINT_OPERAND avr_print_operand
11020 #undef TARGET_PRINT_OPERAND_ADDRESS
11021 #define TARGET_PRINT_OPERAND_ADDRESS avr_print_operand_address
11022 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
11023 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P avr_print_operand_punct_valid_p
/* The target vector itself, filled from the macros above.  */
11025 struct gcc_target targetm = TARGET_INITIALIZER;
11028 #include "gt-avr.h"