1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
/* LDD Rd,Y+q / LDD Rd,Z+q take a 6-bit displacement (0..63); the usable
   offset shrinks by the operand size so the last byte still fits.  */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for the static helpers and target hooks defined
   later in this file.  */
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_regs_to_save (HARD_REG_SET *);
55 static int sequent_regs_live (void);
56 static const char *ptrreg_to_str (int);
57 static const char *cond_string (enum rtx_code);
58 static int avr_num_arg_regs (enum machine_mode, tree);
60 static RTX_CODE compare_condition (rtx insn);
61 static int compare_sign_p (rtx insn);
62 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
63 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
65 const struct attribute_spec avr_attribute_table[];
66 static bool avr_assemble_integer (rtx, unsigned int, int);
67 static void avr_file_start (void);
68 static void avr_file_end (void);
69 static void avr_asm_function_end_prologue (FILE *);
70 static void avr_asm_function_begin_epilogue (FILE *);
71 static void avr_insert_attributes (tree, tree *);
72 static void avr_asm_init_sections (void);
73 static unsigned int avr_section_type_flags (tree, const char *, int);
75 static void avr_reorg (void);
76 static void avr_asm_out_ctor (rtx, int);
77 static void avr_asm_out_dtor (rtx, int);
78 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
79 static bool avr_rtx_costs (rtx, int, int, int *);
80 static int avr_address_cost (rtx);
81 static bool avr_return_in_memory (const_tree, const_tree);
82 static struct machine_function * avr_init_machine_status (void);
83 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* FIRST_CUM_REG is one past the highest argument register (r25);
   argument allocation counts downward from it.  */
84 #define FIRST_CUM_REG 26
/* File-scope state for the AVR backend.  The two GTY-marked RTXes are
   created once in avr_override_options and shared by prologue/epilogue
   expansion.  */
86 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
87 static GTY(()) rtx tmp_reg_rtx;
89 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
90 static GTY(()) rtx zero_reg_rtx;
92 /* AVR register names {"r0", "r1", ..., "r31"} */
93 static const char *const avr_regnames[] = REGISTER_NAMES;
95 /* This holds the last insn address. */
96 static int last_insn_address = 0;
98 /* Preprocessor macros to define depending on MCU type. */
99 const char *avr_base_arch_macro;
100 const char *avr_extra_arch_macro;
102 section *progmem_section;
104 /* More than 8K of program memory: use "call" and "jmp". */
/* NOTE(review): the declaration belonging to the comment above
   (presumably `int avr_mega_p`, assigned in avr_override_options) is not
   visible in this listing — confirm against the complete file.  */
107 /* Core have 'MUL*' instructions. */
108 int avr_have_mul_p = 0;
110 /* Assembler only. */
111 int avr_asm_only_p = 0;
113 /* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
114 int avr_have_movw_lpmx_p = 0;
/* NOTE(review): this region is heavily elided — the struct headers for
   base_arch_s and mcu_type_s are missing from the listing; only stray
   members and the avr_arch_types initializer survive.  */
121 const char *const macro;
/* Per-architecture capability table.  Judging by the field reads in
   avr_override_options below, the columns are:
   { asm_only, have_mul, mega, have_movw_lpmx, macro }.  */
124 static const struct base_arch_s avr_arch_types[] = {
125 { 1, 0, 0, 0, NULL }, /* unknown device specified */
126 { 1, 0, 0, 0, "__AVR_ARCH__=1" },
127 { 0, 0, 0, 0, "__AVR_ARCH__=2" },
128 { 0, 0, 0, 1, "__AVR_ARCH__=25"},
129 { 0, 0, 1, 0, "__AVR_ARCH__=3" },
130 { 0, 1, 0, 1, "__AVR_ARCH__=4" },
131 { 0, 1, 1, 1, "__AVR_ARCH__=5" }
134 /* These names are used as the index into the avr_arch_types[] table
149 const char *const name;
150 int arch; /* index in avr_arch_types[] */
151 /* Must lie outside user's namespace. NULL == no macro. */
152 const char *const macro;
155 /* List of all known AVR MCU types - if updated, it has to be kept
156 in sync in several places (FIXME: is there a better way?):
158 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
159 - t-avr (MULTILIB_MATCHES)
160 - gas/config/tc-avr.c
/* Scanned linearly by avr_override_options; the table is terminated by
   the NULL-name sentinel entry at the end.  Each generic-family row
   ("avr2", "avr25", ...) has a NULL macro.  */
163 static const struct mcu_type_s avr_mcu_types[] = {
164 /* Classic, <= 8K. */
165 { "avr2", ARCH_AVR2, NULL },
166 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
167 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
168 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
169 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
170 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
171 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
172 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
173 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
174 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
175 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
176 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
177 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
178 /* Classic + MOVW, <= 8K. */
179 { "avr25", ARCH_AVR25, NULL },
180 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
181 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
182 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
183 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
184 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
185 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
186 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
187 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
188 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
189 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
190 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
191 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
192 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
193 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
/* NOTE(review): a section comment (presumably "Classic, > 8K.") appears
   to have been elided here — the avr3 group follows without one.  */
195 { "avr3", ARCH_AVR3, NULL },
196 { "atmega103", ARCH_AVR3, "__AVR_ATmega103__" },
197 { "atmega603", ARCH_AVR3, "__AVR_ATmega603__" },
198 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
199 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
200 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
201 /* Enhanced, <= 8K. */
202 { "avr4", ARCH_AVR4, NULL },
203 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
204 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
205 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
206 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
207 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
208 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
209 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
210 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
211 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
212 /* Enhanced, > 8K. */
213 { "avr5", ARCH_AVR5, NULL },
214 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
215 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
216 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
217 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
218 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
219 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
220 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
221 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
222 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
223 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
224 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
225 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
226 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
227 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
228 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
229 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
230 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
231 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
232 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
233 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
234 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
235 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
236 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
237 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
238 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
239 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
240 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
241 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
242 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
243 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
244 { "atmega128", ARCH_AVR5, "__AVR_ATmega128__" },
245 { "atmega1280", ARCH_AVR5, "__AVR_ATmega1280__" },
246 { "atmega1281", ARCH_AVR5, "__AVR_ATmega1281__" },
247 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
248 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
249 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
250 { "at90can128", ARCH_AVR5, "__AVR_AT90CAN128__" },
251 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
252 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
253 { "at90usb82", ARCH_AVR5, "__AVR_AT90USB82__" },
254 { "at90usb162", ARCH_AVR5, "__AVR_AT90USB162__" },
255 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
256 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
257 { "at90usb1286", ARCH_AVR5, "__AVR_AT90USB1286__" },
258 { "at90usb1287", ARCH_AVR5, "__AVR_AT90USB1287__" },
259 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
260 /* Assembler only. */
261 { "avr1", ARCH_AVR1, NULL },
262 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
263 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
264 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
265 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
266 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
267 { NULL, ARCH_UNKNOWN, NULL }
/* Deliberately huge default so tablejumps are never chosen unless
   avr_override_options lowers it (it sets 8 or 17 when optimizing).  */
270 int avr_case_values_threshold = 30000;
272 /* Initialize the GCC target structure. */
/* Each #undef/#define pair overrides a default target hook before
   targetm is instantiated from TARGET_INITIALIZER below.  */
273 #undef TARGET_ASM_ALIGNED_HI_OP
274 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
275 #undef TARGET_ASM_ALIGNED_SI_OP
276 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
277 #undef TARGET_ASM_UNALIGNED_HI_OP
278 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
279 #undef TARGET_ASM_UNALIGNED_SI_OP
280 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
281 #undef TARGET_ASM_INTEGER
282 #define TARGET_ASM_INTEGER avr_assemble_integer
283 #undef TARGET_ASM_FILE_START
284 #define TARGET_ASM_FILE_START avr_file_start
285 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
286 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
287 #undef TARGET_ASM_FILE_END
288 #define TARGET_ASM_FILE_END avr_file_end
290 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
291 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
292 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
293 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
294 #undef TARGET_ATTRIBUTE_TABLE
295 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
296 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
297 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
298 #undef TARGET_INSERT_ATTRIBUTES
299 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
300 #undef TARGET_SECTION_TYPE_FLAGS
301 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
302 #undef TARGET_RTX_COSTS
303 #define TARGET_RTX_COSTS avr_rtx_costs
304 #undef TARGET_ADDRESS_COST
305 #define TARGET_ADDRESS_COST avr_address_cost
306 #undef TARGET_MACHINE_DEPENDENT_REORG
307 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
309 #undef TARGET_RETURN_IN_MEMORY
310 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
312 #undef TARGET_STRICT_ARGUMENT_NAMING
313 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
315 struct gcc_target targetm = TARGET_INITIALIZER;
/* Resolve the -mmcu= selection (avr_mcu_name): look the device up in
   avr_mcu_types, copy the matching avr_arch_types capabilities into the
   file-scope flags, and create the shared tmp/zero register RTXes.
   Unknown MCU names are reported on stderr together with the list of
   known names.  NOTE(review): several lines (braces, the error-path
   control flow) are elided in this listing.  */
318 avr_override_options (void)
320 const struct mcu_type_s *t;
321 const struct base_arch_s *base;
323 flag_delete_null_pointer_checks = 0;
325 for (t = avr_mcu_types; t->name; t++)
326 if (strcmp (t->name, avr_mcu_name) == 0)
331 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
333 for (t = avr_mcu_types; t->name; t++)
334 fprintf (stderr," %s\n", t->name);
337 base = &avr_arch_types[t->arch];
338 avr_asm_only_p = base->asm_only;
339 avr_have_mul_p = base->have_mul;
340 avr_mega_p = base->mega;
341 avr_have_movw_lpmx_p = base->have_movw_lpmx;
342 avr_base_arch_macro = base->macro;
343 avr_extra_arch_macro = t->macro;
/* Tablejumps become profitable only when optimizing; the threshold
   depends on whether 2-byte or 3-byte table entries are needed.  */
345 if (optimize && !TARGET_NO_TABLEJUMP)
346 avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;
348 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO)
349 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
351 init_machine_status = avr_init_machine_status;
354 /* return register class from register number. */
/* Indexed by hard register number (0..33, incl. SPL/SPH); consumed by
   avr_regno_reg_class below.  */
356 static const int reg_class_tab[]={
357 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
358 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
359 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
360 GENERAL_REGS, /* r0 - r15 */
361 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
362 LD_REGS, /* r16 - 23 */
363 ADDW_REGS,ADDW_REGS, /* r24,r25 */
364 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
365 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
366 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
367 STACK_REG,STACK_REG /* SPL,SPH */
370 /* Function to set up the backend function structure. */
/* Installed as init_machine_status; returns a zero-filled, GC-allocated
   machine_function (all is_* flags start false).  */
372 static struct machine_function *
373 avr_init_machine_status (void)
375 return ((struct machine_function *)
376 ggc_alloc_cleared (sizeof (struct machine_function)));
379 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab above; R must be a valid hard
   register number (no bounds check here).  */
382 avr_regno_reg_class (int r)
385 return reg_class_tab[r];
389 /* Return nonzero if FUNC is a naked function. */
/* Note the asymmetry with interrupt/signal below: "naked" is looked up
   on the function TYPE's attributes, not on the decl.  */
392 avr_naked_function_p (tree func)
396 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
398 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
399 return a != NULL_TREE;
402 /* Return nonzero if FUNC is an interrupt function as specified
403 by the "interrupt" attribute. */
/* Unlike avr_naked_function_p, a non-FUNCTION_DECL is tolerated here
   (early-out, elided in this listing) and the attribute is looked up on
   the DECL, not the type.  */
406 interrupt_function_p (tree func)
410 if (TREE_CODE (func) != FUNCTION_DECL)
413 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
414 return a != NULL_TREE;
417 /* Return nonzero if FUNC is a signal function as specified
418 by the "signal" attribute. */
/* Mirror of interrupt_function_p for the "signal" attribute (ISR that
   does not re-enable interrupts on entry).  */
421 signal_function_p (tree func)
425 if (TREE_CODE (func) != FUNCTION_DECL)
428 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
429 return a != NULL_TREE;
432 /* Return the number of hard registers to push/pop in the prologue/epilogue
433 of the current function, and optionally store these registers in SET. */
/* SET may be NULL when only the count is wanted (see
   initial_elimination_offset / avr_simple_epilogue).  Interrupt/signal
   handlers that are not leaves must also save call-used registers,
   since the interrupted code may be using them.  */
436 avr_regs_to_save (HARD_REG_SET *set)
439 int int_or_sig_p = (interrupt_function_p (current_function_decl)
440 || signal_function_p (current_function_decl));
441 int leaf_func_p = leaf_function_p ();
444 CLEAR_HARD_REG_SET (*set);
447 /* No need to save any registers if the function never returns. */
448 if (TREE_THIS_VOLATILE (current_function_decl))
451 for (reg = 0; reg < 32; reg++)
453 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
454 any global register variables. */
/* The frame pointer pair (r28/r29) is saved separately by the
   prologue when frame_pointer_needed, so exclude it here.  */
458 if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
459 || (df_regs_ever_live_p (reg)
460 && (int_or_sig_p || !call_used_regs[reg])
461 && !(frame_pointer_needed
462 && (reg == REG_Y || reg == (REG_Y+1)))))
465 SET_HARD_REG_BIT (*set, reg);
472 /* Compute offset between arg_pointer and frame_pointer. */
475 initial_elimination_offset (int from, int to)
477 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the saved frame pointer when one is pushed.  */
481 int offset = frame_pointer_needed ? 2 : 0;
483 offset += avr_regs_to_save (NULL);
/* NOTE(review): the "+ 2 + 1" presumably accounts for the return
   address plus the post-decrement SP bias — confirm against the
   complete file's frame-layout comment.  */
484 return get_frame_size () + 2 + 1 + offset;
488 /* Return 1 if the function epilogue is just a single "ret". */
/* True only for a plain function with no frame, no saved registers, and
   none of the special cases (ISR, naked, main, noreturn).  */
491 avr_simple_epilogue (void)
493 return (! frame_pointer_needed
494 && get_frame_size () == 0
495 && avr_regs_to_save (NULL) == 0
496 && ! interrupt_function_p (current_function_decl)
497 && ! signal_function_p (current_function_decl)
498 && ! avr_naked_function_p (current_function_decl)
499 && ! MAIN_NAME_P (DECL_NAME (current_function_decl))
500 && ! TREE_THIS_VOLATILE (current_function_decl));
503 /* This function checks sequence of live registers. */
/* Used by the -mcall-prologues path: returns the length of the live
   register sequence when the live call-saved registers (plus Y if it is
   live or needed as frame pointer) form one contiguous run suitable for
   the library prologue/epilogue helpers, else 0.  NOTE(review): the
   cur_seq/live_seq bookkeeping lines are elided in this listing.  */
506 sequent_regs_live (void)
512 for (reg = 0; reg < 18; ++reg)
514 if (!call_used_regs[reg])
516 if (df_regs_ever_live_p (reg))
526 if (!frame_pointer_needed)
528 if (df_regs_ever_live_p (REG_Y))
536 if (df_regs_ever_live_p (REG_Y+1))
549 return (cur_seq == live_seq) ? live_seq : 0;
552 /* Output function prologue. */
/* Emits the RTL prologue.  Paths, in order: naked (nothing), then the
   interrupt/signal preamble (sei for interrupt, push zero/tmp/SREG,
   clear __zero_reg__), then one of: main (static stack init),
   -mcall-prologues library call, or the generic push loop plus frame
   setup.  All frame insns are flagged RTX_FRAME_RELATED_P for dwarf2
   unwind info.  NOTE(review): many lines (declarations, braces, else
   arms) are elided in this listing.  */
555 expand_prologue (void)
559 HOST_WIDE_INT size = get_frame_size();
560 /* Define templates for push instructions. */
561 rtx pushbyte = gen_rtx_MEM (QImode,
562 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
563 rtx pushword = gen_rtx_MEM (HImode,
564 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
567 last_insn_address = 0;
569 /* Init cfun->machine. */
570 cfun->machine->is_main = MAIN_NAME_P (DECL_NAME (current_function_decl));
571 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
572 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
573 cfun->machine->is_signal = signal_function_p (current_function_decl);
575 /* Prologue: naked. */
576 if (cfun->machine->is_naked)
581 live_seq = sequent_regs_live ();
582 minimize = (TARGET_CALL_PROLOGUES
583 && !(cfun->machine->is_interrupt || cfun->machine->is_signal)
586 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
588 if (cfun->machine->is_interrupt)
590 /* Enable interrupts. */
591 insn = emit_insn (gen_enable_interrupt ());
592 RTX_FRAME_RELATED_P (insn) = 1;
/* Push __zero_reg__, then __tmp_reg__, then SREG (read through
   tmp as scratch) so the epilogue can restore in reverse order.  */
596 insn = emit_move_insn (pushbyte, zero_reg_rtx);
597 RTX_FRAME_RELATED_P (insn) = 1;
600 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
601 RTX_FRAME_RELATED_P (insn) = 1;
604 insn = emit_move_insn (tmp_reg_rtx,
605 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
606 RTX_FRAME_RELATED_P (insn) = 1;
607 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
608 RTX_FRAME_RELATED_P (insn) = 1;
610 /* Clear zero reg. */
611 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
612 RTX_FRAME_RELATED_P (insn) = 1;
614 /* Prevent any attempt to delete the setting of ZERO_REG! */
615 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
617 if (cfun->machine->is_main)
/* main(): point FP (and SP) at avr_init_stack minus the frame.  */
620 sprintf (buffer, "%s - %d", avr_init_stack, (int) size);
621 rtx sym = gen_rtx_SYMBOL_REF (HImode, ggc_strdup (buffer));
622 /* Initialize stack pointer using frame pointer. */
623 insn = emit_move_insn (frame_pointer_rtx, sym);
624 RTX_FRAME_RELATED_P (insn) = 1;
625 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
626 RTX_FRAME_RELATED_P (insn) = 1;
628 else if (minimize && (frame_pointer_needed || live_seq > 6))
630 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
631 gen_int_mode (size, HImode));
632 RTX_FRAME_RELATED_P (insn) = 1;
635 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
636 gen_int_mode (size + live_seq, HImode)));
637 RTX_FRAME_RELATED_P (insn) = 1;
/* Generic path: push every register avr_regs_to_save selected.  */
642 avr_regs_to_save (&set);
644 for (reg = 0; reg < 32; ++reg)
646 if (TEST_HARD_REG_BIT (set, reg))
648 /* Emit push of register to save. */
649 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
650 RTX_FRAME_RELATED_P (insn) = 1;
653 if (frame_pointer_needed)
655 /* Push frame pointer. */
656 insn = emit_move_insn (pushword, frame_pointer_rtx);
657 RTX_FRAME_RELATED_P (insn) = 1;
660 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
661 RTX_FRAME_RELATED_P (insn) = 1;
665 /* Creating a frame can be done by direct manipulation of the
666 stack or via the frame pointer. These two methods are:
673 the optimum method depends on function type, stack and frame size.
674 To avoid a complex logic, both methods are tested and shortest
678 if (TARGET_TINY_STACK)
680 if (size < -63 || size > 63)
681 warning (0, "large frame pointer change (%d) with -mtiny-stack", size);
683 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
684 over 'sbiw' (2 cycles, same size). */
685 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
689 /* Normal sized addition. */
690 myfp = frame_pointer_rtx;
692 /* Calculate length. */
/* Method 1: SP->FP copy, FP -= size, FP->SP copy; lengths are summed
   via get_attr_length on throw-away move insns.  */
695 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
697 get_attr_length (gen_move_insn (myfp,
698 gen_rtx_PLUS (GET_MODE(myfp), myfp,
702 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
704 /* Method 2-Adjust Stack pointer. */
705 int sp_plus_length = 0;
709 get_attr_length (gen_move_insn (stack_pointer_rtx,
710 gen_rtx_PLUS (HImode, stack_pointer_rtx,
714 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
716 /* Use shortest method. */
717 if (size <= 6 && (sp_plus_length < method1_length))
719 insn = emit_move_insn (stack_pointer_rtx,
720 gen_rtx_PLUS (HImode, stack_pointer_rtx,
721 gen_int_mode (-size, HImode)));
722 RTX_FRAME_RELATED_P (insn) = 1;
723 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
724 RTX_FRAME_RELATED_P (insn) = 1;
728 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
729 RTX_FRAME_RELATED_P (insn) = 1;
730 insn = emit_move_insn (myfp,
731 gen_rtx_PLUS (GET_MODE(myfp), frame_pointer_rtx,
732 gen_int_mode (-size, GET_MODE(myfp))));
733 RTX_FRAME_RELATED_P (insn) = 1;
734 insn = emit_move_insn ( stack_pointer_rtx, frame_pointer_rtx);
735 RTX_FRAME_RELATED_P (insn) = 1;
742 /* Output summary at end of function prologue. */
/* TARGET_ASM_FUNCTION_END_PROLOGUE hook: writes a human-readable
   comment (prologue kind + frame size) into the asm output.  */
745 avr_asm_function_end_prologue (FILE *file)
747 if (cfun->machine->is_naked)
749 fputs ("/* prologue: naked */\n", file);
753 if (cfun->machine->is_interrupt)
755 fputs ("/* prologue: Interrupt */\n", file);
757 else if (cfun->machine->is_signal)
759 fputs ("/* prologue: Signal */\n", file);
761 else if (cfun->machine->is_main)
763 fputs ("/* prologue: main */\n", file);
766 fputs ("/* prologue: function */\n", file);
768 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
773 /* Implement EPILOGUE_USES. */
/* NOTE(review): the condition is truncated in this listing; visibly it
   returns nonzero (for some register test elided above) when the
   function is an interrupt or signal handler.  */
776 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
780 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
785 /* Output RTL epilogue. */
/* Mirror of expand_prologue: naked -> bare ret; main -> bare ret;
   -mcall-prologues -> library restore; otherwise unwind the frame
   (shorter of FP-adjust vs SP-adjust), pop saved registers in reverse,
   and for ISRs restore SREG/tmp/zero before the final ret(i).
   NOTE(review): declarations, braces and else arms are elided here.  */
788 expand_epilogue (void)
793 HOST_WIDE_INT size = get_frame_size();
795 /* epilogue: naked */
796 if (cfun->machine->is_naked)
798 emit_jump_insn (gen_return ());
802 live_seq = sequent_regs_live ();
803 minimize = (TARGET_CALL_PROLOGUES
804 && !(cfun->machine->is_interrupt || cfun->machine->is_signal)
807 if (cfun->machine->is_main)
809 /* Return value from main() is already in the correct registers
810 (r25:r24) as the exit() argument. */
811 emit_jump_insn (gen_return ());
813 else if (minimize && (frame_pointer_needed || live_seq > 4))
815 if (frame_pointer_needed)
817 /* Get rid of frame. */
818 emit_move_insn(frame_pointer_rtx,
819 gen_rtx_PLUS (HImode, frame_pointer_rtx,
820 gen_int_mode (size, HImode)));
824 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
827 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
831 if (frame_pointer_needed)
835 /* Try two methods to adjust stack and select shortest. */
837 /* Method 1-Adjust frame pointer. */
839 get_attr_length (gen_move_insn (frame_pointer_rtx,
840 gen_rtx_PLUS (HImode, frame_pointer_rtx,
843 /* Copy to stack pointer. */
845 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
847 /* Method 2-Adjust Stack pointer. */
848 int sp_plus_length = 0;
852 get_attr_length (gen_move_insn (stack_pointer_rtx,
853 gen_rtx_PLUS (HImode, stack_pointer_rtx,
857 /* Use shortest method. */
858 if (size <= 5 && (sp_plus_length < fp_plus_length))
860 emit_move_insn (stack_pointer_rtx,
861 gen_rtx_PLUS (HImode, stack_pointer_rtx,
862 gen_int_mode (size, HImode)));
866 emit_move_insn (frame_pointer_rtx,
867 gen_rtx_PLUS (HImode, frame_pointer_rtx,
868 gen_int_mode (size, HImode)));
869 /* Copy to stack pointer. */
870 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
874 /* Restore previous frame_pointer. */
875 emit_insn (gen_pophi (frame_pointer_rtx));
877 /* Restore used registers. */
/* Reverse order of the prologue's push loop.  */
879 avr_regs_to_save (&set);
880 for (reg = 31; reg >= 0; --reg)
882 if (TEST_HARD_REG_BIT (set, reg))
883 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
885 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
888 /* Restore SREG using tmp reg as scratch. */
889 emit_insn (gen_popqi (tmp_reg_rtx));
891 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
894 /* Restore tmp REG. */
895 emit_insn (gen_popqi (tmp_reg_rtx));
897 /* Restore zero REG. */
898 emit_insn (gen_popqi (zero_reg_rtx));
901 emit_jump_insn (gen_return ());
905 /* Output summary messages at beginning of function epilogue. */
/* TARGET_ASM_FUNCTION_BEGIN_EPILOGUE hook: emits a marker comment
   into the asm output.  */
908 avr_asm_function_begin_epilogue (FILE *file)
910 fprintf (file, "/* epilogue start */\n");
913 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
914 machine for a memory operand of mode MODE. */
/* Accepted forms: base register, constant address, base+const offset
   within MAX_LD_OFFSET, and PRE_DEC/POST_INC on a base register.
   The nonzero return value doubles as the register class of the base
   (see the final cast).  The TARGET_ALL_DEBUG sections only trace.  */
917 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
919 enum reg_class r = NO_REGS;
921 if (TARGET_ALL_DEBUG)
923 fprintf (stderr, "mode: (%s) %s %s %s %s:",
925 strict ? "(strict)": "",
926 reload_completed ? "(reload_completed)": "",
927 reload_in_progress ? "(reload_in_progress)": "",
928 reg_renumber ? "(reg_renumber)" : "");
929 if (GET_CODE (x) == PLUS
930 && REG_P (XEXP (x, 0))
931 && GET_CODE (XEXP (x, 1)) == CONST_INT
932 && INTVAL (XEXP (x, 1)) >= 0
933 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
936 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
937 true_regnum (XEXP (x, 0)));
940 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
941 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
943 else if (CONSTANT_ADDRESS_P (x))
945 else if (GET_CODE (x) == PLUS
946 && REG_P (XEXP (x, 0))
947 && GET_CODE (XEXP (x, 1)) == CONST_INT
948 && INTVAL (XEXP (x, 1)) >= 0)
950 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
/* Only Y and Z support LDD with displacement; X does not.  */
954 || REGNO (XEXP (x,0)) == REG_Y
955 || REGNO (XEXP (x,0)) == REG_Z)
956 r = BASE_POINTER_REGS;
957 if (XEXP (x,0) == frame_pointer_rtx
958 || XEXP (x,0) == arg_pointer_rtx)
959 r = BASE_POINTER_REGS;
961 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
964 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
965 && REG_P (XEXP (x, 0))
966 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
967 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
971 if (TARGET_ALL_DEBUG)
973 fprintf (stderr, " ret = %c\n", r + '0');
975 return r == NO_REGS ? 0 : (int)r;
978 /* Attempts to replace X with a valid
979 memory address for an operand of mode MODE */
/* Forces reg+reg, and reg+const with an offset beyond MAX_LD_OFFSET
   (except off the frame pointer), into a register.  Returns X, which
   may be OLDX unchanged when already legitimate.  */
982 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
985 if (TARGET_ALL_DEBUG)
987 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
991 if (GET_CODE (oldx) == PLUS
992 && REG_P (XEXP (oldx,0)))
994 if (REG_P (XEXP (oldx,1)))
995 x = force_reg (GET_MODE (oldx), oldx);
996 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
998 int offs = INTVAL (XEXP (oldx,1));
999 if (frame_pointer_rtx != XEXP (oldx,0))
1000 if (offs > MAX_LD_OFFSET (mode))
1002 if (TARGET_ALL_DEBUG)
1003 fprintf (stderr, "force_reg (big offset)\n");
1004 x = force_reg (GET_MODE (oldx), oldx);
1012 /* Return a pointer register name as a string. */
/* Maps REG_X/REG_Y/REG_Z to "X"/"Y"/"Z"; any other regno is an
   operand-constraint error reported via output_operand_lossage.  */
1015 ptrreg_to_str (int regno)
1019 case REG_X: return "X";
1020 case REG_Y: return "Y";
1021 case REG_Z: return "Z";
1023 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1028 /* Return the condition name as a string.
1029 Used in conditional jump constructing */
/* NOTE(review): the switch body is almost entirely elided in this
   listing; visibly some conditions pick a different branch mnemonic
   when the previous CC left the V flag unusable.  */
1032 cond_string (enum rtx_code code)
1041 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1046 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1059 /* Output ADDR to FILE as address. */
/* REG -> pointer name; PRE_DEC -> "-X"; POST_INC -> "X+"; constant
   function/label addresses are wrapped in pm() to convert the byte
   address to a program-memory word address.  NOTE(review): the fprintf
   on the REG case passes ptrreg_to_str's result as the format string —
   safe only because it is always a fixed "X"/"Y"/"Z"; fputs would be
   cleaner.  */
1062 print_operand_address (FILE *file, rtx addr)
1064 switch (GET_CODE (addr))
1067 fprintf (file, ptrreg_to_str (REGNO (addr)));
1071 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1075 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1079 if (CONSTANT_ADDRESS_P (addr)
1080 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1081 || GET_CODE (addr) == LABEL_REF))
1083 fprintf (file, "pm(");
1084 output_addr_const (file,addr);
1085 fprintf (file ,")");
1088 output_addr_const (file, addr);
1093 /* Output X as assembler operand to file FILE. */
/* CODE 'A'..'D' select byte 0..3 of a multi-byte operand (abcd holds
   the byte index); 'o' prints only the displacement of a reg+disp MEM;
   'p'/'r' print the pointer-register / plain-register form of a
   POST_INC/PRE_DEC address; 'j'/'k' print the (reversed) condition
   mnemonic.  NOTE(review): several early lines (abcd computation,
   braces) are elided in this listing.  */
1096 print_operand (FILE *file, rtx x, int code)
1100 if (code >= 'A' && code <= 'D')
1110 if (x == zero_reg_rtx)
1111 fprintf (file, "__zero_reg__");
1113 fprintf (file, reg_names[true_regnum (x) + abcd]);
1115 else if (GET_CODE (x) == CONST_INT)
1116 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1117 else if (GET_CODE (x) == MEM)
1119 rtx addr = XEXP (x,0);
1121 if (CONSTANT_P (addr) && abcd)
1124 output_address (addr);
1125 fprintf (file, ")+%d", abcd);
1127 else if (code == 'o')
1129 if (GET_CODE (addr) != PLUS)
1130 fatal_insn ("bad address, not (reg+disp):", addr);
1132 print_operand (file, XEXP (addr, 1), 0);
1134 else if (code == 'p' || code == 'r')
1136 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1137 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1140 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1142 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1144 else if (GET_CODE (addr) == PLUS)
1146 print_operand_address (file, XEXP (addr,0));
1147 if (REGNO (XEXP (addr, 0)) == REG_X)
1148 fatal_insn ("internal compiler error. Bad address:"
1151 print_operand (file, XEXP (addr,1), code)
1154 print_operand_address (file, addr);
1156 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only 32-bit float constants are representable on AVR.  */
1160 if (GET_MODE (x) != SFmode)
1161 fatal_insn ("internal compiler error. Unknown mode:", x);
1162 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1163 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1164 fprintf (file, "0x%lx", val);
1166 else if (code == 'j')
1167 fputs (cond_string (GET_CODE (x)), file);
1168 else if (code == 'k')
1169 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1171 print_operand_address (file, x);
1174 /* Update the condition code in the INSN. */
/* NOTICE_UPDATE_CC implementation: dispatches on the insn's "cc"
   attribute to record what is known about the flags after INSN in
   cc_status.  NOTE(review): the case labels themselves are elided in
   this listing; only the bodies survive.  */
1177 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1181 switch (get_attr_cc (insn))
1184 /* Insn does not affect CC at all. */
1192 set = single_set (insn);
1196 cc_status.flags |= CC_NO_OVERFLOW;
1197 cc_status.value1 = SET_DEST (set);
1202 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1203 The V flag may or may not be known but that's ok because
1204 alter_cond will change tests to use EQ/NE. */
1205 set = single_set (insn);
1209 cc_status.value1 = SET_DEST (set);
1210 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1215 set = single_set (insn);
1218 cc_status.value1 = SET_SRC (set);
1222 /* Insn doesn't leave CC in a usable state. */
1225 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1226 set = single_set (insn);
1229 rtx src = SET_SRC (set);
1231 if (GET_CODE (src) == ASHIFTRT
1232 && GET_MODE (src) == QImode)
1234 rtx x = XEXP (src, 1);
1236 if (GET_CODE (x) == CONST_INT
1240 cc_status.value1 = SET_DEST (set);
1241 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1249 /* Return maximum number of consecutive registers of
1250 class CLASS needed to hold a value of mode MODE. */
/* Class-independent on AVR: simply the mode size rounded up to whole
   words (UNITS_PER_WORD == 1 byte here, so this is the byte size).  */
1253 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1255 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1258 /* Choose mode for jump insn:
1259 1 - relative jump in range -63 <= x <= 62 ;
1260 2 - relative jump in range -2046 <= x <= 2045 ;
1261 3 - absolute jump (only for ATmega[16]03). */
/* Distances come from the INSN_ADDRESSES table filled during shorten-
   branches; X may be a LABEL_REF or the label itself.  */
1264 avr_jump_mode (rtx x, rtx insn)
1266 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1267 ? XEXP (x, 0) : x));
1268 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1269 int jump_distance = cur_addr - dest_addr;
1271 if (-63 <= jump_distance && jump_distance <= 62)
1273 else if (-2046 <= jump_distance && jump_distance <= 2045)
1281 /* return an AVR condition jump commands.
1282 X is a comparison RTX.
1283 LEN is a number returned by avr_jump_mode function.
1284 if REVERSE nonzero then condition code in X must be reversed. */
/* AVR has no single branch for the signed/unsigned "greater" family,
   so GT/GTU/LE/LEU are synthesized from breq plus brmi/brlt/brlo (or
   their complements when the V flag is unusable), with skip offsets
   (.+2/.+4/.+6) sized by LEN.  The tail handles the directly supported
   conditions via the %j/%k operand letters.  NOTE(review): the case
   labels and some alternatives are elided in this listing.  */
1287 ret_cond_branch (rtx x, int len, int reverse)
1289 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1294 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1295 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1297 len == 2 ? (AS1 (breq,.+4) CR_TAB
1298 AS1 (brmi,.+2) CR_TAB
1300 (AS1 (breq,.+6) CR_TAB
1301 AS1 (brmi,.+4) CR_TAB
1305 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1307 len == 2 ? (AS1 (breq,.+4) CR_TAB
1308 AS1 (brlt,.+2) CR_TAB
1310 (AS1 (breq,.+6) CR_TAB
1311 AS1 (brlt,.+4) CR_TAB
1314 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1316 len == 2 ? (AS1 (breq,.+4) CR_TAB
1317 AS1 (brlo,.+2) CR_TAB
1319 (AS1 (breq,.+6) CR_TAB
1320 AS1 (brlo,.+4) CR_TAB
1323 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1324 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1326 len == 2 ? (AS1 (breq,.+2) CR_TAB
1327 AS1 (brpl,.+2) CR_TAB
1329 (AS1 (breq,.+2) CR_TAB
1330 AS1 (brpl,.+4) CR_TAB
1333 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1335 len == 2 ? (AS1 (breq,.+2) CR_TAB
1336 AS1 (brge,.+2) CR_TAB
1338 (AS1 (breq,.+2) CR_TAB
1339 AS1 (brge,.+4) CR_TAB
1342 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1344 len == 2 ? (AS1 (breq,.+2) CR_TAB
1345 AS1 (brsh,.+2) CR_TAB
1347 (AS1 (breq,.+2) CR_TAB
1348 AS1 (brsh,.+4) CR_TAB
1356 return AS1 (br%k1,%0);
1358 return (AS1 (br%j1,.+2) CR_TAB
1361 return (AS1 (br%j1,.+4) CR_TAB
1370 return AS1 (br%j1,%0);
1372 return (AS1 (br%k1,.+2) CR_TAB
1375 return (AS1 (br%k1,.+4) CR_TAB
1383 /* Predicate function for immediate operand which fits to byte (8bit) */
/* Predicate: OP is a CONST_INT in [0, 255], i.e. fits in one byte.
   MODE is ignored, as is conventional for AVR predicates.  */
1386 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1388 return (GET_CODE (op) == CONST_INT
1389 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1392 /* Output all insn addresses and their sizes into the assembly language
1393 output file. This is helpful for debugging whether the length attributes
1394 in the md file are correct.
1395 Output insn cost for next insn. */
/* Hook run by final before each insn: when insn-size dumping is enabled,
   emit the insn address, its size (delta from the previous insn), and its
   rtx_cost as an assembler comment, then remember the current address in
   the file-scope last_insn_address.  */
1398 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1399 int num_operands ATTRIBUTE_UNUSED)
1401 int uid = INSN_UID (insn);
1403 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1405 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1406 INSN_ADDRESSES (uid),
1407 INSN_ADDRESSES (uid) - last_insn_address,
1408 rtx_cost (PATTERN (insn), INSN));
1410 last_insn_address = INSN_ADDRESSES (uid);
1413 /* Return 0 if undefined, 1 if always true or always false. */
/* Return nonzero when an unsigned comparison OPERATOR of MODE against the
   CONST_INT X is always true or always false (e.g. "u8 <= 0xff"), so the
   comparison can be simplified away; return 0 if undecidable.  Only
   unsigned conditions are considered — unsigned_condition (operator) ==
   operator filters out the signed ones.  NOTE(review): the 0xff check on
   line 1427 looks mode-independent even for HImode/SImode — the elided
   surrounding lines may qualify it; confirm against the full source.  */
1416 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1418 unsigned int max = (mode == QImode ? 0xff :
1419 mode == HImode ? 0xffff :
1420 mode == SImode ? 0xffffffff : 0);
1421 if (max && operator && GET_CODE (x) == CONST_INT)
1423 if (unsigned_condition (operator) != operator)
1426 if (max != (INTVAL (x) & max)
1427 && INTVAL (x) != 0xff)
1434 /* Returns nonzero if REGNO is the number of a hard
1435 register in which function arguments are sometimes passed. */
/* True iff hard register R (r8..r25) can carry a function argument.  */
1438 function_arg_regno_p(int r)
1440 return (r >= 8 && r <= 25);
1443 /* Initializing the variable cum for the state at the beginning
1444 of the argument list. */
/* Initialize CUM for the start of a call's argument scan.  The first
   argument register is FIRST_CUM_REG; libcalls (LIBNAME set) skip the
   stdarg check.  A function is stdarg when its last declared argument
   type is not void — such calls pass everything differently (handling
   on lines elided from this listing).  */
1447 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1448 tree fndecl ATTRIBUTE_UNUSED)
1451 cum->regno = FIRST_CUM_REG;
1452 if (!libname && fntype)
1454 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1455 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1456 != void_type_node));
1462 /* Returns the number of registers to allocate for a function argument. */
/* Number of argument registers needed for one argument of MODE/TYPE:
   the byte size (from TYPE for BLKmode aggregates, else from MODE),
   rounded up to an even count so arguments start in even registers.  */
1465 avr_num_arg_regs (enum machine_mode mode, tree type)
1469 if (mode == BLKmode)
1470 size = int_size_in_bytes (type);
1472 size = GET_MODE_SIZE (mode);
1474 /* Align all function arguments to start in even-numbered registers.
1475 Odd-sized arguments leave holes above them. */
1477 return (size + 1) & ~1;
1480 /* Controls whether a function argument is passed
1481 in a register, and which register. */
/* Decide where the next argument goes: a register (arguments are
   allocated downward from cum->regno, so the REG starts at
   regno - bytes) when enough argument registers remain; the
   on-stack/NULL fallback is on lines elided from this listing.  */
1484 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1485 int named ATTRIBUTE_UNUSED)
1487 int bytes = avr_num_arg_regs (mode, type);
1489 if (cum->nregs && bytes <= cum->nregs)
1490 return gen_rtx_REG (mode, cum->regno - bytes);
1495 /* Update the summarizer variable CUM to advance past an argument
1496 in the argument list. */
/* Advance CUM past one argument: consume its register bytes (registers
   are handed out downward).  Once the register file is exhausted,
   reset regno to FIRST_CUM_REG (further bookkeeping is on lines elided
   from this listing).  */
1499 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1500 int named ATTRIBUTE_UNUSED)
1502 int bytes = avr_num_arg_regs (mode, type);
1504 cum->nregs -= bytes;
1505 cum->regno -= bytes;
1507 if (cum->nregs <= 0)
1510 cum->regno = FIRST_CUM_REG;
1514 /***********************************************************************
1515 Functions for outputting various mov's for a various modes
1516 ************************************************************************/
/* Emit assembler for a QImode move insn.  Returns a template (or emits
   via output_asm_insn for the memory-destination path) and, through the
   elided *l plumbing, reports the instruction count.  Cases: reg<-reg
   (including stack-pointer in/out), reg<-constant (ldi for LD_REGS;
   clr/inc/dec tricks for 0, 1, -1; bld for single-bit values; otherwise
   borrow r31 via __tmp_reg__), reg<-mem and mem<-reg via the
   out_movqi_* helpers.  */
1518 output_movqi (rtx insn, rtx operands[], int *l)
1521 rtx dest = operands[0];
1522 rtx src = operands[1];
1530 if (register_operand (dest, QImode))
1532 if (register_operand (src, QImode)) /* mov r,r */
1534 if (test_hard_reg_class (STACK_REG, dest))
1535 return AS2 (out,%0,%1);
1536 else if (test_hard_reg_class (STACK_REG, src))
1537 return AS2 (in,%0,%1);
1539 return AS2 (mov,%0,%1);
1541 else if (CONSTANT_P (src))
1543 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1544 return AS2 (ldi,%0,lo8(%1));
1546 if (GET_CODE (src) == CONST_INT)
1548 if (src == const0_rtx) /* mov r,L */
1549 return AS1 (clr,%0);
1550 else if (src == const1_rtx)
1553 return (AS1 (clr,%0) CR_TAB
1556 else if (src == constm1_rtx)
1558 /* Immediate constants -1 to any register */
1560 return (AS1 (clr,%0) CR_TAB
/* Power-of-two constant: set the single bit with bld.  */
1565 int bit_nr = exact_log2 (INTVAL (src));
1571 output_asm_insn ((AS1 (clr,%0) CR_TAB
1574 avr_output_bld (operands, bit_nr);
1581 /* Last resort, larger than loading from memory. */
1583 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1584 AS2 (ldi,r31,lo8(%1)) CR_TAB
1585 AS2 (mov,%0,r31) CR_TAB
1586 AS2 (mov,r31,__tmp_reg__));
1588 else if (GET_CODE (src) == MEM)
1589 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1591 else if (GET_CODE (dest) == MEM)
1593 const char *template;
/* Storing zero: reuse __zero_reg__ instead of materializing 0.  */
1595 if (src == const0_rtx)
1596 operands[1] = zero_reg_rtx;
1598 template = out_movqi_mr_r (insn, operands, real_l);
1601 output_asm_insn (template, operands);
/* Emit assembler for an HImode move insn.  Writes to the stack pointer
   need both halves updated without an interrupt in between, so the
   sequence chosen depends on whether interrupts can be live here
   (TARGET_NO_INTERRUPTS, main/signal/interrupt prologues, or the
   general save-SREG/cli/restore path).  Other cases mirror
   output_movqi, widened to two bytes (movw when available).  */
1610 output_movhi (rtx insn, rtx operands[], int *l)
1613 rtx dest = operands[0];
1614 rtx src = operands[1];
1620 if (register_operand (dest, HImode))
1622 if (register_operand (src, HImode)) /* mov r,r */
1624 if (test_hard_reg_class (STACK_REG, dest))
/* Tiny stack: SP is 8 bits, only SPL exists.  */
1626 if (TARGET_TINY_STACK)
1629 return AS2 (out,__SP_L__,%A1);
1631 /* Use simple load of stack pointer if no interrupts are used
1632 or inside main or signal function prologue where they disabled. */
1633 else if (TARGET_NO_INTERRUPTS
1634 || (reload_completed
1635 && cfun->machine->is_main
1636 && prologue_epilogue_contains (insn))
1637 || (reload_completed
1638 && cfun->machine->is_signal
1639 && prologue_epilogue_contains (insn)))
1642 return (AS2 (out,__SP_H__,%B1) CR_TAB
1643 AS2 (out,__SP_L__,%A1));
1645 /* In interrupt prolog we know interrupts are enabled. */
1646 else if (reload_completed
1647 && cfun->machine->is_interrupt
1648 && prologue_epilogue_contains (insn))
1651 return ("cli" CR_TAB
1652 AS2 (out,__SP_H__,%B1) CR_TAB
1654 AS2 (out,__SP_L__,%A1));
/* General case: save SREG, disable interrupts (elided line),
   write both SP halves, restore SREG (re-enabling I-flag).  */
1657 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1659 AS2 (out,__SP_H__,%B1) CR_TAB
1660 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1661 AS2 (out,__SP_L__,%A1));
1663 else if (test_hard_reg_class (STACK_REG, src))
1666 return (AS2 (in,%A0,__SP_L__) CR_TAB
1667 AS2 (in,%B0,__SP_H__));
/* movw copies a register pair in one insn when the part supports it.  */
1673 return (AS2 (movw,%0,%1));
1678 return (AS2 (mov,%A0,%A1) CR_TAB
1682 else if (CONSTANT_P (src))
1684 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1687 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1688 AS2 (ldi,%B0,hi8(%1)));
1691 if (GET_CODE (src) == CONST_INT)
1693 if (src == const0_rtx) /* mov r,L */
1696 return (AS1 (clr,%A0) CR_TAB
1699 else if (src == const1_rtx)
1702 return (AS1 (clr,%A0) CR_TAB
1703 AS1 (clr,%B0) CR_TAB
1706 else if (src == constm1_rtx)
1708 /* Immediate constants -1 to any register */
1710 return (AS1 (clr,%0) CR_TAB
1711 AS1 (dec,%A0) CR_TAB
/* Single-bit constant: clear, then set that bit with bld.  */
1716 int bit_nr = exact_log2 (INTVAL (src));
1722 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1723 AS1 (clr,%B0) CR_TAB
1726 avr_output_bld (operands, bit_nr);
/* One byte of the constant is zero: only load the other via r31.  */
1732 if ((INTVAL (src) & 0xff) == 0)
1735 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1736 AS1 (clr,%A0) CR_TAB
1737 AS2 (ldi,r31,hi8(%1)) CR_TAB
1738 AS2 (mov,%B0,r31) CR_TAB
1739 AS2 (mov,r31,__tmp_reg__));
1741 else if ((INTVAL (src) & 0xff00) == 0)
1744 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1745 AS2 (ldi,r31,lo8(%1)) CR_TAB
1746 AS2 (mov,%A0,r31) CR_TAB
1747 AS1 (clr,%B0) CR_TAB
1748 AS2 (mov,r31,__tmp_reg__));
1752 /* Last resort, equal to loading from memory. */
1754 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1755 AS2 (ldi,r31,lo8(%1)) CR_TAB
1756 AS2 (mov,%A0,r31) CR_TAB
1757 AS2 (ldi,r31,hi8(%1)) CR_TAB
1758 AS2 (mov,%B0,r31) CR_TAB
1759 AS2 (mov,r31,__tmp_reg__));
1761 else if (GET_CODE (src) == MEM)
1762 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1764 else if (GET_CODE (dest) == MEM)
1766 const char *template;
1768 if (src == const0_rtx)
1769 operands[1] = zero_reg_rtx;
1771 template = out_movhi_mr_r (insn, operands, real_l);
1774 output_asm_insn (template, operands);
1779 fatal_insn ("invalid insn:", insn);
/* Load one byte from memory into a register.  Handles: constant
   addresses (SREG and other I/O space via "in", else "lds"), reg+disp
   addressing (ldd for small offsets; Y adjusted with adiw/sbiw or
   subi/sbci for large ones; X has no displacement form so it is
   adjusted and restored), and plain register-indirect "ld".  The
   elided *l plumbing reports the length in words.  */
1784 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1788 rtx x = XEXP (src, 0);
1794 if (CONSTANT_ADDRESS_P (x))
1796 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1799 return AS2 (in,%0,__SREG__);
/* I/O addresses are memory-mapped at +0x20; "in" takes the raw port.  */
1801 if (avr_io_address_p (x, 1))
1804 return AS2 (in,%0,%1-0x20);
1807 return AS2 (lds,%0,%1);
1809 /* memory access by reg+disp */
1810 else if (GET_CODE (x) == PLUS
1811 && REG_P (XEXP (x,0))
1812 && GET_CODE (XEXP (x,1)) == CONST_INT)
1814 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1816 int disp = INTVAL (XEXP (x,1));
/* Only Y may carry a large displacement here.  */
1817 if (REGNO (XEXP (x,0)) != REG_Y)
1818 fatal_insn ("incorrect insn:",insn);
1820 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1821 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1822 AS2 (ldd,%0,Y+63) CR_TAB
1823 AS2 (sbiw,r28,%o1-63));
1825 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1826 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1827 AS2 (ld,%0,Y) CR_TAB
1828 AS2 (subi,r28,lo8(%o1)) CR_TAB
1829 AS2 (sbci,r29,hi8(%o1)));
1831 else if (REGNO (XEXP (x,0)) == REG_X)
1833 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1834 it but I have this situation with extremal optimizing options. */
/* Skip the restoring sbiw when X is overwritten or dead afterwards.  */
1835 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1836 || reg_unused_after (insn, XEXP (x,0)))
1837 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1840 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1841 AS2 (ld,%0,X) CR_TAB
1842 AS2 (sbiw,r26,%o1));
1845 return AS2 (ldd,%0,%1);
1848 return AS2 (ld,%0,%1);
/* Load two bytes from memory into a register pair.  Low byte is read
   first (required for volatile 16-bit I/O registers); when the base
   register is also the destination, the low byte goes through
   __tmp_reg__ so the address survives the first load.  Handles
   register-indirect, X-based, reg+disp (Y adjusted for large offsets),
   pre-decrement, post-increment, and constant (I/O or lds) addresses.  */
1852 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1856 rtx base = XEXP (src, 0);
1857 int reg_dest = true_regnum (dest);
1858 int reg_base = true_regnum (base);
1859 /* "volatile" forces reading low byte first, even if less efficient,
1860 for correct operation with 16-bit I/O registers. */
1861 int mem_volatile_p = MEM_VOLATILE_P (src);
1869 if (reg_dest == reg_base) /* R = (R) */
1872 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1873 AS2 (ld,%B0,%1) CR_TAB
1874 AS2 (mov,%A0,__tmp_reg__));
1876 else if (reg_base == REG_X) /* (R26) */
/* X supports no displacement: use post-increment, restoring X
   afterwards (elided sbiw) unless it is dead.  */
1878 if (reg_unused_after (insn, base))
1881 return (AS2 (ld,%A0,X+) CR_TAB
1885 return (AS2 (ld,%A0,X+) CR_TAB
1886 AS2 (ld,%B0,X) CR_TAB
1892 return (AS2 (ld,%A0,%1) CR_TAB
1893 AS2 (ldd,%B0,%1+1));
1896 else if (GET_CODE (base) == PLUS) /* (R + i) */
1898 int disp = INTVAL (XEXP (base, 1));
1899 int reg_base = true_regnum (XEXP (base, 0));
1901 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1903 if (REGNO (XEXP (base, 0)) != REG_Y)
1904 fatal_insn ("incorrect insn:",insn);
1906 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1907 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1908 AS2 (ldd,%A0,Y+62) CR_TAB
1909 AS2 (ldd,%B0,Y+63) CR_TAB
1910 AS2 (sbiw,r28,%o1-62));
1912 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1913 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1914 AS2 (ld,%A0,Y) CR_TAB
1915 AS2 (ldd,%B0,Y+1) CR_TAB
1916 AS2 (subi,r28,lo8(%o1)) CR_TAB
1917 AS2 (sbci,r29,hi8(%o1)));
1919 if (reg_base == REG_X)
1921 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1922 it but I have this situation with extremal
1923 optimization options. */
1926 if (reg_base == reg_dest)
1927 return (AS2 (adiw,r26,%o1) CR_TAB
1928 AS2 (ld,__tmp_reg__,X+) CR_TAB
1929 AS2 (ld,%B0,X) CR_TAB
1930 AS2 (mov,%A0,__tmp_reg__));
1932 return (AS2 (adiw,r26,%o1) CR_TAB
1933 AS2 (ld,%A0,X+) CR_TAB
1934 AS2 (ld,%B0,X) CR_TAB
1935 AS2 (sbiw,r26,%o1+1));
1938 if (reg_base == reg_dest)
1941 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1942 AS2 (ldd,%B0,%B1) CR_TAB
1943 AS2 (mov,%A0,__tmp_reg__));
1947 return (AS2 (ldd,%A0,%A1) CR_TAB
1950 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
/* ld with pre-decrement would clobber the address we still need.  */
1952 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1953 fatal_insn ("incorrect insn:", insn);
1957 if (REGNO (XEXP (base, 0)) == REG_X)
1960 return (AS2 (sbiw,r26,2) CR_TAB
1961 AS2 (ld,%A0,X+) CR_TAB
1962 AS2 (ld,%B0,X) CR_TAB
1968 return (AS2 (sbiw,%r1,2) CR_TAB
1969 AS2 (ld,%A0,%p1) CR_TAB
1970 AS2 (ldd,%B0,%p1+1));
1975 return (AS2 (ld,%B0,%1) CR_TAB
1978 else if (GET_CODE (base) == POST_INC) /* (R++) */
1980 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1981 fatal_insn ("incorrect insn:", insn);
1984 return (AS2 (ld,%A0,%1) CR_TAB
1987 else if (CONSTANT_ADDRESS_P (base))
1989 if (avr_io_address_p (base, 2))
1992 return (AS2 (in,%A0,%A1-0x20) CR_TAB
1993 AS2 (in,%B0,%B1-0x20));
1996 return (AS2 (lds,%A0,%A1) CR_TAB
2000 fatal_insn ("unknown move insn:",insn);
/* Load four bytes from memory into a register quad.  The hard cases are
   overlap between the destination and the X/Y address register: then
   bytes are loaded in an order that keeps the address alive, going
   through __tmp_reg__ for the byte that would clobber it ("ld r26,-X"
   is undefined, hence the adiw/-X dance when dest is X itself).
   Covers register-indirect, reg+disp, pre-dec, post-inc and constant
   (lds) addresses; lengths reported via the *l convention.  */
2005 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2009 rtx base = XEXP (src, 0);
2010 int reg_dest = true_regnum (dest);
2011 int reg_base = true_regnum (base);
2019 if (reg_base == REG_X) /* (R26) */
2021 if (reg_dest == REG_X)
2022 /* "ld r26,-X" is undefined */
2023 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2024 AS2 (ld,r29,X) CR_TAB
2025 AS2 (ld,r28,-X) CR_TAB
2026 AS2 (ld,__tmp_reg__,-X) CR_TAB
2027 AS2 (sbiw,r26,1) CR_TAB
2028 AS2 (ld,r26,X) CR_TAB
2029 AS2 (mov,r27,__tmp_reg__));
/* dest = r24..r27: upper half overlaps X, buffer byte C.  */
2030 else if (reg_dest == REG_X - 2)
2031 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2032 AS2 (ld,%B0,X+) CR_TAB
2033 AS2 (ld,__tmp_reg__,X+) CR_TAB
2034 AS2 (ld,%D0,X) CR_TAB
2035 AS2 (mov,%C0,__tmp_reg__));
2036 else if (reg_unused_after (insn, base))
2037 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2038 AS2 (ld,%B0,X+) CR_TAB
2039 AS2 (ld,%C0,X+) CR_TAB
2042 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2043 AS2 (ld,%B0,X+) CR_TAB
2044 AS2 (ld,%C0,X+) CR_TAB
2045 AS2 (ld,%D0,X) CR_TAB
2050 if (reg_dest == reg_base)
2051 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2052 AS2 (ldd,%C0,%1+2) CR_TAB
2053 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2054 AS2 (ld,%A0,%1) CR_TAB
2055 AS2 (mov,%B0,__tmp_reg__));
2056 else if (reg_base == reg_dest + 2)
2057 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2058 AS2 (ldd,%B0,%1+1) CR_TAB
2059 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2060 AS2 (ldd,%D0,%1+3) CR_TAB
2061 AS2 (mov,%C0,__tmp_reg__));
2063 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2064 AS2 (ldd,%B0,%1+1) CR_TAB
2065 AS2 (ldd,%C0,%1+2) CR_TAB
2066 AS2 (ldd,%D0,%1+3));
2069 else if (GET_CODE (base) == PLUS) /* (R + i) */
2071 int disp = INTVAL (XEXP (base, 1));
2073 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2075 if (REGNO (XEXP (base, 0)) != REG_Y)
2076 fatal_insn ("incorrect insn:",insn);
2078 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2079 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2080 AS2 (ldd,%A0,Y+60) CR_TAB
2081 AS2 (ldd,%B0,Y+61) CR_TAB
2082 AS2 (ldd,%C0,Y+62) CR_TAB
2083 AS2 (ldd,%D0,Y+63) CR_TAB
2084 AS2 (sbiw,r28,%o1-60));
2086 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2087 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2088 AS2 (ld,%A0,Y) CR_TAB
2089 AS2 (ldd,%B0,Y+1) CR_TAB
2090 AS2 (ldd,%C0,Y+2) CR_TAB
2091 AS2 (ldd,%D0,Y+3) CR_TAB
2092 AS2 (subi,r28,lo8(%o1)) CR_TAB
2093 AS2 (sbci,r29,hi8(%o1)));
2096 reg_base = true_regnum (XEXP (base, 0));
2097 if (reg_base == REG_X)
2100 if (reg_dest == REG_X)
2103 /* "ld r26,-X" is undefined */
2104 return (AS2 (adiw,r26,%o1+3) CR_TAB
2105 AS2 (ld,r29,X) CR_TAB
2106 AS2 (ld,r28,-X) CR_TAB
2107 AS2 (ld,__tmp_reg__,-X) CR_TAB
2108 AS2 (sbiw,r26,1) CR_TAB
2109 AS2 (ld,r26,X) CR_TAB
2110 AS2 (mov,r27,__tmp_reg__));
2113 if (reg_dest == REG_X - 2)
2114 return (AS2 (adiw,r26,%o1) CR_TAB
2115 AS2 (ld,r24,X+) CR_TAB
2116 AS2 (ld,r25,X+) CR_TAB
2117 AS2 (ld,__tmp_reg__,X+) CR_TAB
2118 AS2 (ld,r27,X) CR_TAB
2119 AS2 (mov,r26,__tmp_reg__));
2121 return (AS2 (adiw,r26,%o1) CR_TAB
2122 AS2 (ld,%A0,X+) CR_TAB
2123 AS2 (ld,%B0,X+) CR_TAB
2124 AS2 (ld,%C0,X+) CR_TAB
2125 AS2 (ld,%D0,X) CR_TAB
2126 AS2 (sbiw,r26,%o1+3));
2128 if (reg_dest == reg_base)
2129 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2130 AS2 (ldd,%C0,%C1) CR_TAB
2131 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2132 AS2 (ldd,%A0,%A1) CR_TAB
2133 AS2 (mov,%B0,__tmp_reg__));
2134 else if (reg_dest == reg_base - 2)
2135 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2136 AS2 (ldd,%B0,%B1) CR_TAB
2137 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2138 AS2 (ldd,%D0,%D1) CR_TAB
2139 AS2 (mov,%C0,__tmp_reg__));
2140 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2141 AS2 (ldd,%B0,%B1) CR_TAB
2142 AS2 (ldd,%C0,%C1) CR_TAB
2145 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2146 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2147 AS2 (ld,%C0,%1) CR_TAB
2148 AS2 (ld,%B0,%1) CR_TAB
2150 else if (GET_CODE (base) == POST_INC) /* (R++) */
2151 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2152 AS2 (ld,%B0,%1) CR_TAB
2153 AS2 (ld,%C0,%1) CR_TAB
2155 else if (CONSTANT_ADDRESS_P (base))
2156 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2157 AS2 (lds,%B0,%B1) CR_TAB
2158 AS2 (lds,%C0,%C1) CR_TAB
2161 fatal_insn ("unknown move insn:",insn);
/* Store four bytes from a register quad to memory.  Mirror image of
   out_movsi_r_mr.  When the source registers overlap the X address
   register, the overlapping bytes are staged in __tmp_reg__ and
   __zero_reg__ first (and __zero_reg__ is re-cleared afterwards, since
   the ABI requires it to hold zero).  Covers constant (sts),
   register-indirect, reg+disp (large Y offsets via adiw or subi/sbci),
   pre-dec and post-inc addresses.  */
2166 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2170 rtx base = XEXP (dest, 0);
2171 int reg_base = true_regnum (base);
2172 int reg_src = true_regnum (src);
2178 if (CONSTANT_ADDRESS_P (base))
2179 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2180 AS2 (sts,%B0,%B1) CR_TAB
2181 AS2 (sts,%C0,%C1) CR_TAB
2183 if (reg_base > 0) /* (r) */
2185 if (reg_base == REG_X) /* (R26) */
2187 if (reg_src == REG_X)
2189 /* "st X+,r26" is undefined */
2190 if (reg_unused_after (insn, base))
2191 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2192 AS2 (st,X,r26) CR_TAB
2193 AS2 (adiw,r26,1) CR_TAB
2194 AS2 (st,X+,__tmp_reg__) CR_TAB
2195 AS2 (st,X+,r28) CR_TAB
2198 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2199 AS2 (st,X,r26) CR_TAB
2200 AS2 (adiw,r26,1) CR_TAB
2201 AS2 (st,X+,__tmp_reg__) CR_TAB
2202 AS2 (st,X+,r28) CR_TAB
2203 AS2 (st,X,r29) CR_TAB
/* src upper half overlaps X: stage bytes C/D before X is bumped.  */
2206 else if (reg_base == reg_src + 2)
2208 if (reg_unused_after (insn, base))
2209 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2210 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2211 AS2 (st,%0+,%A1) CR_TAB
2212 AS2 (st,%0+,%B1) CR_TAB
2213 AS2 (st,%0+,__zero_reg__) CR_TAB
2214 AS2 (st,%0,__tmp_reg__) CR_TAB
2215 AS1 (clr,__zero_reg__));
2217 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2218 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2219 AS2 (st,%0+,%A1) CR_TAB
2220 AS2 (st,%0+,%B1) CR_TAB
2221 AS2 (st,%0+,__zero_reg__) CR_TAB
2222 AS2 (st,%0,__tmp_reg__) CR_TAB
2223 AS1 (clr,__zero_reg__) CR_TAB
2226 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2227 AS2 (st,%0+,%B1) CR_TAB
2228 AS2 (st,%0+,%C1) CR_TAB
2229 AS2 (st,%0,%D1) CR_TAB
2233 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2234 AS2 (std,%0+1,%B1) CR_TAB
2235 AS2 (std,%0+2,%C1) CR_TAB
2236 AS2 (std,%0+3,%D1));
2238 else if (GET_CODE (base) == PLUS) /* (R + i) */
2240 int disp = INTVAL (XEXP (base, 1));
2241 reg_base = REGNO (XEXP (base, 0));
2242 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2244 if (reg_base != REG_Y)
2245 fatal_insn ("incorrect insn:",insn);
2247 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2248 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2249 AS2 (std,Y+60,%A1) CR_TAB
2250 AS2 (std,Y+61,%B1) CR_TAB
2251 AS2 (std,Y+62,%C1) CR_TAB
2252 AS2 (std,Y+63,%D1) CR_TAB
2253 AS2 (sbiw,r28,%o0-60));
2255 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2256 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2257 AS2 (st,Y,%A1) CR_TAB
2258 AS2 (std,Y+1,%B1) CR_TAB
2259 AS2 (std,Y+2,%C1) CR_TAB
2260 AS2 (std,Y+3,%D1) CR_TAB
2261 AS2 (subi,r28,lo8(%o0)) CR_TAB
2262 AS2 (sbci,r29,hi8(%o0)));
2264 if (reg_base == REG_X)
2267 if (reg_src == REG_X)
/* Source IS X: copy it to tmp/zero regs before clobbering X.  */
2270 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2271 AS2 (mov,__zero_reg__,r27) CR_TAB
2272 AS2 (adiw,r26,%o0) CR_TAB
2273 AS2 (st,X+,__tmp_reg__) CR_TAB
2274 AS2 (st,X+,__zero_reg__) CR_TAB
2275 AS2 (st,X+,r28) CR_TAB
2276 AS2 (st,X,r29) CR_TAB
2277 AS1 (clr,__zero_reg__) CR_TAB
2278 AS2 (sbiw,r26,%o0+3));
2280 else if (reg_src == REG_X - 2)
2283 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2284 AS2 (mov,__zero_reg__,r27) CR_TAB
2285 AS2 (adiw,r26,%o0) CR_TAB
2286 AS2 (st,X+,r24) CR_TAB
2287 AS2 (st,X+,r25) CR_TAB
2288 AS2 (st,X+,__tmp_reg__) CR_TAB
2289 AS2 (st,X,__zero_reg__) CR_TAB
2290 AS1 (clr,__zero_reg__) CR_TAB
2291 AS2 (sbiw,r26,%o0+3));
2294 return (AS2 (adiw,r26,%o0) CR_TAB
2295 AS2 (st,X+,%A1) CR_TAB
2296 AS2 (st,X+,%B1) CR_TAB
2297 AS2 (st,X+,%C1) CR_TAB
2298 AS2 (st,X,%D1) CR_TAB
2299 AS2 (sbiw,r26,%o0+3));
2301 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2302 AS2 (std,%B0,%B1) CR_TAB
2303 AS2 (std,%C0,%C1) CR_TAB
/* Pre-dec stores run high byte to low; post-inc low to high.  */
2306 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2307 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2308 AS2 (st,%0,%C1) CR_TAB
2309 AS2 (st,%0,%B1) CR_TAB
2311 else if (GET_CODE (base) == POST_INC) /* (R++) */
2312 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2313 AS2 (st,%0,%B1) CR_TAB
2314 AS2 (st,%0,%C1) CR_TAB
2316 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 4-byte (SImode/SFmode) move insn.  Register to
   register copies are ordered by regno so an overlapping source is not
   clobbered (pairwise movw when AVR_HAVE_MOVW).  Constants use ldi for
   LD_REGS; otherwise clr/inc/dec/bld shortcuts for 0, 1, -1 and single
   bits, falling back to materializing each byte through r31.  Memory
   cases delegate to out_movsi_r_mr / out_movsi_mr_r.  */
2321 output_movsisf(rtx insn, rtx operands[], int *l)
2324 rtx dest = operands[0];
2325 rtx src = operands[1];
2331 if (register_operand (dest, VOIDmode))
2333 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy downward when dest > src so unread source bytes survive.  */
2335 if (true_regnum (dest) > true_regnum (src))
2340 return (AS2 (movw,%C0,%C1) CR_TAB
2341 AS2 (movw,%A0,%A1));
2344 return (AS2 (mov,%D0,%D1) CR_TAB
2345 AS2 (mov,%C0,%C1) CR_TAB
2346 AS2 (mov,%B0,%B1) CR_TAB
2354 return (AS2 (movw,%A0,%A1) CR_TAB
2355 AS2 (movw,%C0,%C1));
2358 return (AS2 (mov,%A0,%A1) CR_TAB
2359 AS2 (mov,%B0,%B1) CR_TAB
2360 AS2 (mov,%C0,%C1) CR_TAB
2364 else if (CONSTANT_P (src))
2366 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2369 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2370 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2371 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2372 AS2 (ldi,%D0,hhi8(%1)));
2375 if (GET_CODE (src) == CONST_INT)
2377 const char *const clr_op0 =
2378 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2379 AS1 (clr,%B0) CR_TAB
2381 : (AS1 (clr,%A0) CR_TAB
2382 AS1 (clr,%B0) CR_TAB
2383 AS1 (clr,%C0) CR_TAB
2386 if (src == const0_rtx) /* mov r,L */
2388 *l = AVR_HAVE_MOVW ? 3 : 4;
2391 else if (src == const1_rtx)
2394 output_asm_insn (clr_op0, operands);
2395 *l = AVR_HAVE_MOVW ? 4 : 5;
2396 return AS1 (inc,%A0);
2398 else if (src == constm1_rtx)
2400 /* Immediate constants -1 to any register */
2404 return (AS1 (clr,%A0) CR_TAB
2405 AS1 (dec,%A0) CR_TAB
2406 AS2 (mov,%B0,%A0) CR_TAB
2407 AS2 (movw,%C0,%A0));
2410 return (AS1 (clr,%A0) CR_TAB
2411 AS1 (dec,%A0) CR_TAB
2412 AS2 (mov,%B0,%A0) CR_TAB
2413 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constant: clr all, set T flag, bld the bit in.  */
2418 int bit_nr = exact_log2 (INTVAL (src));
2422 *l = AVR_HAVE_MOVW ? 5 : 6;
2425 output_asm_insn (clr_op0, operands);
2426 output_asm_insn ("set", operands);
2429 avr_output_bld (operands, bit_nr);
2436 /* Last resort, better than loading from memory. */
2438 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2439 AS2 (ldi,r31,lo8(%1)) CR_TAB
2440 AS2 (mov,%A0,r31) CR_TAB
2441 AS2 (ldi,r31,hi8(%1)) CR_TAB
2442 AS2 (mov,%B0,r31) CR_TAB
2443 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2444 AS2 (mov,%C0,r31) CR_TAB
2445 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2446 AS2 (mov,%D0,r31) CR_TAB
2447 AS2 (mov,r31,__tmp_reg__));
2449 else if (GET_CODE (src) == MEM)
2450 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2452 else if (GET_CODE (dest) == MEM)
2454 const char *template;
2456 if (src == const0_rtx)
2457 operands[1] = zero_reg_rtx;
2459 template = out_movsi_mr_r (insn, operands, real_l);
2462 output_asm_insn (template, operands);
2467 fatal_insn ("invalid insn:", insn);
/* Store one byte from a register to memory.  Mirror of out_movqi_r_mr:
   constant addresses use "out" for SREG/I/O space or "sts"; reg+disp
   with a large offset adjusts Y (or X, restoring it unless dead);
   when the source register overlaps X it is staged in __tmp_reg__
   first.  Plain std/st otherwise.  */
2472 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2476 rtx x = XEXP (dest, 0);
2482 if (CONSTANT_ADDRESS_P (x))
2484 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2487 return AS2 (out,__SREG__,%1);
2489 if (avr_io_address_p (x, 1))
2492 return AS2 (out,%0-0x20,%1);
2495 return AS2 (sts,%0,%1);
2497 /* memory access by reg+disp */
2498 else if (GET_CODE (x) == PLUS
2499 && REG_P (XEXP (x,0))
2500 && GET_CODE (XEXP (x,1)) == CONST_INT)
2502 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2504 int disp = INTVAL (XEXP (x,1));
2505 if (REGNO (XEXP (x,0)) != REG_Y)
2506 fatal_insn ("incorrect insn:",insn);
2508 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2509 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2510 AS2 (std,Y+63,%1) CR_TAB
2511 AS2 (sbiw,r28,%o0-63));
2513 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2514 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2515 AS2 (st,Y,%1) CR_TAB
2516 AS2 (subi,r28,lo8(%o0)) CR_TAB
2517 AS2 (sbci,r29,hi8(%o0)));
2519 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: save the byte before adiw changes r26/r27.  */
2521 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2523 if (reg_unused_after (insn, XEXP (x,0)))
2524 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2525 AS2 (adiw,r26,%o0) CR_TAB
2526 AS2 (st,X,__tmp_reg__));
2528 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2529 AS2 (adiw,r26,%o0) CR_TAB
2530 AS2 (st,X,__tmp_reg__) CR_TAB
2531 AS2 (sbiw,r26,%o0));
2535 if (reg_unused_after (insn, XEXP (x,0)))
2536 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2539 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2540 AS2 (st,X,%1) CR_TAB
2541 AS2 (sbiw,r26,%o0));
2545 return AS2 (std,%0,%1);
2548 return AS2 (st,%0,%1);
/* Store two bytes from a register pair to memory.  High byte is written
   first (required for volatile 16-bit I/O registers such as timer
   compare registers); when the address allows only forward access the
   sequence walks backward with -X/adiw.  Handles constant addresses
   (paired "out" for I/O, else sts), X-based and reg+disp addressing,
   pre-decrement, and post-increment.  */
2552 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2556 rtx base = XEXP (dest, 0);
2557 int reg_base = true_regnum (base);
2558 int reg_src = true_regnum (src);
2559 /* "volatile" forces writing high byte first, even if less efficient,
2560 for correct operation with 16-bit I/O registers. */
2561 int mem_volatile_p = MEM_VOLATILE_P (dest);
2566 if (CONSTANT_ADDRESS_P (base))
2568 if (avr_io_address_p (base, 2))
2571 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2572 AS2 (out,%A0-0x20,%A1));
2574 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2579 if (reg_base == REG_X)
2581 if (reg_src == REG_X)
2583 /* "st X+,r26" and "st -X,r26" are undefined. */
2584 if (!mem_volatile_p && reg_unused_after (insn, src))
2585 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2586 AS2 (st,X,r26) CR_TAB
2587 AS2 (adiw,r26,1) CR_TAB
2588 AS2 (st,X,__tmp_reg__));
2590 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2591 AS2 (adiw,r26,1) CR_TAB
2592 AS2 (st,X,__tmp_reg__) CR_TAB
2593 AS2 (sbiw,r26,1) CR_TAB
2598 if (!mem_volatile_p && reg_unused_after (insn, base))
2599 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2602 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2603 AS2 (st,X,%B1) CR_TAB
2608 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2611 else if (GET_CODE (base) == PLUS)
2613 int disp = INTVAL (XEXP (base, 1));
2614 reg_base = REGNO (XEXP (base, 0));
2615 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2617 if (reg_base != REG_Y)
2618 fatal_insn ("incorrect insn:",insn);
2620 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2621 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2622 AS2 (std,Y+63,%B1) CR_TAB
2623 AS2 (std,Y+62,%A1) CR_TAB
2624 AS2 (sbiw,r28,%o0-62));
2626 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2627 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2628 AS2 (std,Y+1,%B1) CR_TAB
2629 AS2 (st,Y,%A1) CR_TAB
2630 AS2 (subi,r28,lo8(%o0)) CR_TAB
2631 AS2 (sbci,r29,hi8(%o0)));
2633 if (reg_base == REG_X)
2636 if (reg_src == REG_X)
2639 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2640 AS2 (mov,__zero_reg__,r27) CR_TAB
2641 AS2 (adiw,r26,%o0+1) CR_TAB
2642 AS2 (st,X,__zero_reg__) CR_TAB
2643 AS2 (st,-X,__tmp_reg__) CR_TAB
2644 AS1 (clr,__zero_reg__) CR_TAB
2645 AS2 (sbiw,r26,%o0));
2648 return (AS2 (adiw,r26,%o0+1) CR_TAB
2649 AS2 (st,X,%B1) CR_TAB
2650 AS2 (st,-X,%A1) CR_TAB
2651 AS2 (sbiw,r26,%o0));
2653 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2656 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2657 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2659 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Post-inc with X: write high byte at X+1 first, then low at X.  */
2663 if (REGNO (XEXP (base, 0)) == REG_X)
2666 return (AS2 (adiw,r26,1) CR_TAB
2667 AS2 (st,X,%B1) CR_TAB
2668 AS2 (st,-X,%A1) CR_TAB
2674 return (AS2 (std,%p0+1,%B1) CR_TAB
2675 AS2 (st,%p0,%A1) CR_TAB
2681 return (AS2 (st,%0,%A1) CR_TAB
2684 fatal_insn ("unknown move insn:",insn);
2688 /* Return 1 if frame pointer for current function required. */
/* Nonzero when the current function needs a frame pointer: it calls
   alloca, takes no register arguments (nregs == 0 — so incoming args
   live on the stack), or has a nonempty frame.  */
2691 frame_pointer_required_p (void)
2693 return (current_function_calls_alloca
2694 || current_function_args_info.nregs == 0
2695 || get_frame_size () > 0);
2698 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Condition code used by the conditional jump that consumes INSN's
   comparison result, or UNKNOWN if the next real insn is not a jump.
   Assumes the jump's SET_SRC is an if_then_else whose operand 0 is the
   condition RTX.  */
2701 compare_condition (rtx insn)
2703 rtx next = next_real_insn (insn);
2704 RTX_CODE cond = UNKNOWN;
2705 if (next && GET_CODE (next) == JUMP_INSN)
2707 rtx pat = PATTERN (next);
2708 rtx src = SET_SRC (pat);
2709 rtx t = XEXP (src, 0);
2710 cond = GET_CODE (t);
2715 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* Nonzero if INSN's result is only consumed by a sign test (GE/LT),
   so a tst of the high byte suffices.  */
2718 compare_sign_p (rtx insn)
2720 RTX_CODE cond = compare_condition (insn);
2721 return (cond == GE || cond == LT);
2724 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2725 that needs to be swapped (GT, GTU, LE, LEU). */
/* If the consuming jump uses a condition AVR must synthesize by swapping
   (GT, GTU, LE, LEU), return that condition code; otherwise 0.  */
2728 compare_diff_p (rtx insn)
2730 RTX_CODE cond = compare_condition (insn);
2731 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2734 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
/* Nonzero if INSN's result is only consumed by an equality test.  */
2737 compare_eq_p (rtx insn)
2739 RTX_CODE cond = compare_condition (insn);
2740 return (cond == EQ || cond == NE);
2744 /* Output test instruction for HImode. */
/* Emit a test-against-zero for an HImode value.  Cheapest form chosen
   by what the consuming branch needs: sign-only → tst high byte;
   equality with a dead operand → "or" the halves (clobbers operand);
   ADDW_REGS → sbiw 0; otherwise cp/cpc against __zero_reg__.  */
2747 out_tsthi (rtx insn, int *l)
2749 if (compare_sign_p (insn))
2752 return AS1 (tst,%B0);
2754 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2755 && compare_eq_p (insn))
2757 /* Faster than sbiw if we can clobber the operand. */
2759 return AS2 (or,%A0,%B0);
2761 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2764 return AS2 (sbiw,%0,0);
2767 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2768 AS2 (cpc,%B0,__zero_reg__));
2772 /* Output test instruction for SImode. */
/* Emit a test-against-zero for an SImode value: sign-only → tst of the
   top byte; ADDW_REGS → sbiw plus cpc for the upper half; otherwise
   cp/cpc of all four bytes against __zero_reg__.  */
2775 out_tstsi (rtx insn, int *l)
2777 if (compare_sign_p (insn))
2780 return AS1 (tst,%D0);
2782 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2785 return (AS2 (sbiw,%A0,0) CR_TAB
2786 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2787 AS2 (cpc,%D0,__zero_reg__));
2790 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2791 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2792 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2793 AS2 (cpc,%D0,__zero_reg__));
2797 /* Generate asm equivalent for various shifts.
2798 Shift count is a CONST_INT, MEM or REG.
2799 This only handles cases that are not already
2800 carefully hand-optimized in ?sh??i3_out. */
/* Emit a generic shift: repeat the single-shift TEMPLATE (T_LEN words
   each) either inline (small constant counts) or in a counted loop.
   The loop counter comes from, in order of preference: the insn's
   scratch operand, __zero_reg__ (bld a bit, lsr until zero — saves a
   word), or an LD_REGS register saved through __tmp_reg__.  A MEM or
   register count is first moved into __tmp_reg__/the counter, with a
   pre-loop rjmp so a zero count shifts nothing.  *LEN (if set) gets
   the total word count.  NOTE(review): several branches of this
   function are elided from this listing; the control flow between the
   visible strcat calls relies on those lines.  */
2803 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2804 int *len, int t_len)
2808 int second_label = 1;
2809 int saved_in_tmp = 0;
2810 int use_zero_reg = 0;
2812 op[0] = operands[0];
2813 op[1] = operands[1];
2814 op[2] = operands[2];
2815 op[3] = operands[3];
2821 if (GET_CODE (operands[2]) == CONST_INT)
2823 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
2824 int count = INTVAL (operands[2]);
2825 int max_len = 10; /* If larger than this, always use a loop. */
2834 if (count < 8 && !scratch)
2838 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2840 if (t_len * count <= max_len)
2842 /* Output shifts inline with no loop - faster. */
2844 *len = t_len * count;
2848 output_asm_insn (template, op);
2857 strcat (str, AS2 (ldi,%3,%2));
2859 else if (use_zero_reg)
2861 /* Hack to save one word: use __zero_reg__ as loop counter.
2862 Set one bit, then shift in a loop until it is 0 again. */
2864 op[3] = zero_reg_rtx;
2868 strcat (str, ("set" CR_TAB
2869 AS2 (bld,%3,%2-1)));
2873 /* No scratch register available, use one from LD_REGS (saved in
2874 __tmp_reg__) that doesn't overlap with registers to shift. */
2876 op[3] = gen_rtx_REG (QImode,
2877 ((true_regnum (operands[0]) - 1) & 15) + 16);
2878 op[4] = tmp_reg_rtx;
2882 *len = 3; /* Includes "mov %3,%4" after the loop. */
2884 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2890 else if (GET_CODE (operands[2]) == MEM)
/* Count in memory: load it into __tmp_reg__ via out_movqi_r_mr.  */
2894 op[3] = op_mov[0] = tmp_reg_rtx;
2898 out_movqi_r_mr (insn, op_mov, len);
2900 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2902 else if (register_operand (operands[2], QImode))
2904 if (reg_unused_after (insn, operands[2]))
2908 op[3] = tmp_reg_rtx;
2910 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2914 fatal_insn ("bad shift insn:", insn);
/* Jump to the counter test first so a zero count is a no-op.  */
2921 strcat (str, AS1 (rjmp,2f));
2925 *len += t_len + 2; /* template + dec + brXX */
2928 strcat (str, "\n1:\t");
2929 strcat (str, template);
2930 strcat (str, second_label ? "\n2:\t" : "\n\t");
2931 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2932 strcat (str, CR_TAB);
2933 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2935 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2936 output_asm_insn (str, op);
2941 /* 8bit shift left ((char)x << i) */
/* Emit (or measure, via LEN) the best QImode left-shift sequence for
   INSN.  Constant counts get hand-tuned sequences; anything else
   falls through to the generic loop in out_shift_with_cnt.  */
2944 ashlqi3_out (rtx insn, rtx operands[], int *len)
2946 if (GET_CODE (operands[2]) == CONST_INT)
2953 switch (INTVAL (operands[2]))
/* default: counts >= 8 clear the register outright.  */
2956 if (INTVAL (operands[2]) < 8)
2960 return AS1 (clr,%0);
2964 return AS1 (lsl,%0);
2968 return (AS1 (lsl,%0) CR_TAB
2973 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles and mask, when ANDI is usable (LD_REGS).  */
2978 if (test_hard_reg_class (LD_REGS, operands[0]))
2981 return (AS1 (swap,%0) CR_TAB
2982 AS2 (andi,%0,0xf0));
2985 return (AS1 (lsl,%0) CR_TAB
2991 if (test_hard_reg_class (LD_REGS, operands[0]))
2994 return (AS1 (swap,%0) CR_TAB
2996 AS2 (andi,%0,0xe0));
2999 return (AS1 (lsl,%0) CR_TAB
3006 if (test_hard_reg_class (LD_REGS, operands[0]))
3009 return (AS1 (swap,%0) CR_TAB
3012 AS2 (andi,%0,0xc0));
3015 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate the top bit around instead of seven shifts.  */
3024 return (AS1 (ror,%0) CR_TAB
3029 else if (CONSTANT_P (operands[2]))
3030 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Non-constant count: generic loop, one LSL per iteration.  */
3032 out_shift_with_cnt (AS1 (lsl,%0),
3033 insn, operands, len, 1);
3038 /* 16bit shift left ((short)x << i) */
/* Emit (or measure, via LEN) the best HImode left-shift sequence for
   INSN.  `scratch' means a PARALLEL pattern supplies a scratch reg %3;
   `ldi_ok' means %0 is in LD_REGS so ANDI/LDI immediates work.
   NOTE(review): the *len assignments for most cases fall in elided
   lines of this excerpt.  */
3041 ashlhi3_out (rtx insn, rtx operands[], int *len)
3043 if (GET_CODE (operands[2]) == CONST_INT)
3045 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3046 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3053 switch (INTVAL (operands[2]))
/* default: counts >= 16 clear both bytes.  */
3056 if (INTVAL (operands[2]) < 16)
3060 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: nibble-swap both bytes, then mask/merge.  */
3064 if (optimize_size && scratch)
3069 return (AS1 (swap,%A0) CR_TAB
3070 AS1 (swap,%B0) CR_TAB
3071 AS2 (andi,%B0,0xf0) CR_TAB
3072 AS2 (eor,%B0,%A0) CR_TAB
3073 AS2 (andi,%A0,0xf0) CR_TAB
3079 return (AS1 (swap,%A0) CR_TAB
3080 AS1 (swap,%B0) CR_TAB
3081 AS2 (ldi,%3,0xf0) CR_TAB
3082 AS2 (and,%B0,%3) CR_TAB
3083 AS2 (eor,%B0,%A0) CR_TAB
3084 AS2 (and,%A0,%3) CR_TAB
3087 break; /* optimize_size ? 6 : 8 */
3091 break; /* scratch ? 5 : 6 */
/* Shift by 5: one lsl/rol then the nibble-swap trick from case 4.  */
3095 return (AS1 (lsl,%A0) CR_TAB
3096 AS1 (rol,%B0) CR_TAB
3097 AS1 (swap,%A0) CR_TAB
3098 AS1 (swap,%B0) CR_TAB
3099 AS2 (andi,%B0,0xf0) CR_TAB
3100 AS2 (eor,%B0,%A0) CR_TAB
3101 AS2 (andi,%A0,0xf0) CR_TAB
3107 return (AS1 (lsl,%A0) CR_TAB
3108 AS1 (rol,%B0) CR_TAB
3109 AS1 (swap,%A0) CR_TAB
3110 AS1 (swap,%B0) CR_TAB
3111 AS2 (ldi,%3,0xf0) CR_TAB
3112 AS2 (and,%B0,%3) CR_TAB
3113 AS2 (eor,%B0,%A0) CR_TAB
3114 AS2 (and,%A0,%3) CR_TAB
3121 break; /* scratch ? 5 : 6 */
/* Shift by 6: implemented as two right-shifts through __tmp_reg__
   followed by a byte move (i.e. <<8 then >>2).  */
3123 return (AS1 (clr,__tmp_reg__) CR_TAB
3124 AS1 (lsr,%B0) CR_TAB
3125 AS1 (ror,%A0) CR_TAB
3126 AS1 (ror,__tmp_reg__) CR_TAB
3127 AS1 (lsr,%B0) CR_TAB
3128 AS1 (ror,%A0) CR_TAB
3129 AS1 (ror,__tmp_reg__) CR_TAB
3130 AS2 (mov,%B0,%A0) CR_TAB
3131 AS2 (mov,%A0,__tmp_reg__));
3135 return (AS1 (lsr,%B0) CR_TAB
3136 AS2 (mov,%B0,%A0) CR_TAB
3137 AS1 (clr,%A0) CR_TAB
3138 AS1 (ror,%B0) CR_TAB
/* Shift by 8 is just a byte move.  */
3142 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3147 return (AS2 (mov,%B0,%A0) CR_TAB
3148 AS1 (clr,%A0) CR_TAB
3153 return (AS2 (mov,%B0,%A0) CR_TAB
3154 AS1 (clr,%A0) CR_TAB
3155 AS1 (lsl,%B0) CR_TAB
3160 return (AS2 (mov,%B0,%A0) CR_TAB
3161 AS1 (clr,%A0) CR_TAB
3162 AS1 (lsl,%B0) CR_TAB
3163 AS1 (lsl,%B0) CR_TAB
/* Shift by 12: byte move plus the swap/mask trick on the high byte.  */
3170 return (AS2 (mov,%B0,%A0) CR_TAB
3171 AS1 (clr,%A0) CR_TAB
3172 AS1 (swap,%B0) CR_TAB
3173 AS2 (andi,%B0,0xf0));
3178 return (AS2 (mov,%B0,%A0) CR_TAB
3179 AS1 (clr,%A0) CR_TAB
3180 AS1 (swap,%B0) CR_TAB
3181 AS2 (ldi,%3,0xf0) CR_TAB
3185 return (AS2 (mov,%B0,%A0) CR_TAB
3186 AS1 (clr,%A0) CR_TAB
3187 AS1 (lsl,%B0) CR_TAB
3188 AS1 (lsl,%B0) CR_TAB
3189 AS1 (lsl,%B0) CR_TAB
/* Shift by 13: several strategies, cheapest first.  */
3196 return (AS2 (mov,%B0,%A0) CR_TAB
3197 AS1 (clr,%A0) CR_TAB
3198 AS1 (swap,%B0) CR_TAB
3199 AS1 (lsl,%B0) CR_TAB
3200 AS2 (andi,%B0,0xe0));
3202 if (AVR_HAVE_MUL && scratch)
/* Multiply by 0x20 puts bits 3..7 of %A0 into r0 = new high byte.  */
3205 return (AS2 (ldi,%3,0x20) CR_TAB
3206 AS2 (mul,%A0,%3) CR_TAB
3207 AS2 (mov,%B0,r0) CR_TAB
3208 AS1 (clr,%A0) CR_TAB
3209 AS1 (clr,__zero_reg__));
3211 if (optimize_size && scratch)
3216 return (AS2 (mov,%B0,%A0) CR_TAB
3217 AS1 (clr,%A0) CR_TAB
3218 AS1 (swap,%B0) CR_TAB
3219 AS1 (lsl,%B0) CR_TAB
3220 AS2 (ldi,%3,0xe0) CR_TAB
/* Build the 0x20 multiplier in r1 via SET/BLD when no scratch.  */
3226 return ("set" CR_TAB
3227 AS2 (bld,r1,5) CR_TAB
3228 AS2 (mul,%A0,r1) CR_TAB
3229 AS2 (mov,%B0,r0) CR_TAB
3230 AS1 (clr,%A0) CR_TAB
3231 AS1 (clr,__zero_reg__));
3234 return (AS2 (mov,%B0,%A0) CR_TAB
3235 AS1 (clr,%A0) CR_TAB
3236 AS1 (lsl,%B0) CR_TAB
3237 AS1 (lsl,%B0) CR_TAB
3238 AS1 (lsl,%B0) CR_TAB
3239 AS1 (lsl,%B0) CR_TAB
/* Shift by 14.  */
3243 if (AVR_HAVE_MUL && ldi_ok)
3246 return (AS2 (ldi,%B0,0x40) CR_TAB
3247 AS2 (mul,%A0,%B0) CR_TAB
3248 AS2 (mov,%B0,r0) CR_TAB
3249 AS1 (clr,%A0) CR_TAB
3250 AS1 (clr,__zero_reg__));
3252 if (AVR_HAVE_MUL && scratch)
3255 return (AS2 (ldi,%3,0x40) CR_TAB
3256 AS2 (mul,%A0,%3) CR_TAB
3257 AS2 (mov,%B0,r0) CR_TAB
3258 AS1 (clr,%A0) CR_TAB
3259 AS1 (clr,__zero_reg__));
3261 if (optimize_size && ldi_ok)
/* Small counted loop of six LSLs when optimizing for size.  */
3264 return (AS2 (mov,%B0,%A0) CR_TAB
3265 AS2 (ldi,%A0,6) "\n1:\t"
3266 AS1 (lsl,%B0) CR_TAB
3267 AS1 (dec,%A0) CR_TAB
3270 if (optimize_size && scratch)
/* Shift left by 14 done as shift right by 2 into the other byte.  */
3273 return (AS1 (clr,%B0) CR_TAB
3274 AS1 (lsr,%A0) CR_TAB
3275 AS1 (ror,%B0) CR_TAB
3276 AS1 (lsr,%A0) CR_TAB
3277 AS1 (ror,%B0) CR_TAB
/* Shift by 15: only the lowest bit survives into bit 15.  */
3282 return (AS1 (clr,%B0) CR_TAB
3283 AS1 (lsr,%A0) CR_TAB
3284 AS1 (ror,%B0) CR_TAB
/* Non-constant count: generic loop, lsl/rol pair per iteration.  */
3289 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3291 insn, operands, len, 2);
3296 /* 32bit shift left ((long)x << i) */
/* Emit (or measure, via LEN) the best SImode left-shift sequence for
   INSN.  Byte-multiple counts (8/16/24) are done with register moves;
   other constant counts not shown here fall through to the loop.  */
3299 ashlsi3_out (rtx insn, rtx operands[], int *len)
3301 if (GET_CODE (operands[2]) == CONST_INT)
3309 switch (INTVAL (operands[2]))
/* default: counts >= 32 clear all four bytes (MOVW pairs the clears
   on cores that have it).  */
3312 if (INTVAL (operands[2]) < 32)
3316 return *len = 3, (AS1 (clr,%D0) CR_TAB
3317 AS1 (clr,%C0) CR_TAB
3318 AS2 (movw,%A0,%C0));
3320 return (AS1 (clr,%D0) CR_TAB
3321 AS1 (clr,%C0) CR_TAB
3322 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes up one position; order depends on whether
   source and destination overlap.  */
3327 int reg0 = true_regnum (operands[0]);
3328 int reg1 = true_regnum (operands[1]);
3331 return (AS2 (mov,%D0,%C1) CR_TAB
3332 AS2 (mov,%C0,%B1) CR_TAB
3333 AS2 (mov,%B0,%A1) CR_TAB
3336 return (AS1 (clr,%A0) CR_TAB
3337 AS2 (mov,%B0,%A1) CR_TAB
3338 AS2 (mov,%C0,%B1) CR_TAB
/* Shift by 16: move the low word up; use MOVW when available.  */
3344 int reg0 = true_regnum (operands[0]);
3345 int reg1 = true_regnum (operands[1]);
3346 if (reg0 + 2 == reg1)
3347 return *len = 2, (AS1 (clr,%B0) CR_TAB
3350 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3351 AS1 (clr,%B0) CR_TAB
3354 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3355 AS2 (mov,%D0,%B1) CR_TAB
3356 AS1 (clr,%B0) CR_TAB
/* Shift by 24: only the low byte survives, in the top position.  */
3362 return (AS2 (mov,%D0,%A1) CR_TAB
3363 AS1 (clr,%C0) CR_TAB
3364 AS1 (clr,%B0) CR_TAB
/* Shift by 31: rotate bit 0 into bit 31 via carry.  */
3369 return (AS1 (clr,%D0) CR_TAB
3370 AS1 (lsr,%A0) CR_TAB
3371 AS1 (ror,%D0) CR_TAB
3372 AS1 (clr,%C0) CR_TAB
3373 AS1 (clr,%B0) CR_TAB
/* Non-constant count: generic loop, 4-instruction shift step.  */
3378 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3379 AS1 (rol,%B0) CR_TAB
3380 AS1 (rol,%C0) CR_TAB
3382 insn, operands, len, 4);
3386 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Emit (or measure, via LEN) the best QImode arithmetic-right-shift
   sequence for INSN.  Constant counts 1..5 are chains of ASR.  */
3389 ashrqi3_out (rtx insn, rtx operands[], int *len)
3391 if (GET_CODE (operands[2]) == CONST_INT)
3398 switch (INTVAL (operands[2]))
3402 return AS1 (asr,%0);
3406 return (AS1 (asr,%0) CR_TAB
3411 return (AS1 (asr,%0) CR_TAB
3417 return (AS1 (asr,%0) CR_TAB
3424 return (AS1 (asr,%0) CR_TAB
/* Shift by 6: copy bit 6 into T, sign-extend via SBC, restore T.  */
3432 return (AS2 (bst,%0,6) CR_TAB
3434 AS2 (sbc,%0,%0) CR_TAB
3438 if (INTVAL (operands[2]) < 8)
/* Count >= 7: the result is just the sign spread over all bits.  */
3445 return (AS1 (lsl,%0) CR_TAB
3449 else if (CONSTANT_P (operands[2]))
3450 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Non-constant count: generic loop, one ASR per iteration.  */
3452 out_shift_with_cnt (AS1 (asr,%0),
3453 insn, operands, len, 1);
3458 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Emit (or measure, via LEN) the best HImode arithmetic-right-shift
   sequence for INSN.  As in ashlhi3_out, `scratch' and `ldi_ok' gate
   the MUL/LDI based strategies.  NOTE(review): several cases and the
   *len assignments fall in elided lines of this excerpt.  */
3461 ashrhi3_out (rtx insn, rtx operands[], int *len)
3463 if (GET_CODE (operands[2]) == CONST_INT)
3465 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3466 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3473 switch (INTVAL (operands[2]))
3477 /* XXX try to optimize this too? */
3482 break; /* scratch ? 5 : 6 */
/* Shift by 6: rotate the two top bits down through __tmp_reg__,
   sign-extending the high byte with SBC.  */
3484 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3485 AS2 (mov,%A0,%B0) CR_TAB
3486 AS1 (lsl,__tmp_reg__) CR_TAB
3487 AS1 (rol,%A0) CR_TAB
3488 AS2 (sbc,%B0,%B0) CR_TAB
3489 AS1 (lsl,__tmp_reg__) CR_TAB
3490 AS1 (rol,%A0) CR_TAB
/* Shift by 7: one left shift brings bit 7 into carry, then the high
   byte becomes the low byte rotated, sign via SBC.  */
3495 return (AS1 (lsl,%A0) CR_TAB
3496 AS2 (mov,%A0,%B0) CR_TAB
3497 AS1 (rol,%A0) CR_TAB
/* Shift by 8: byte move plus sign extension of the high byte.  */
3502 int reg0 = true_regnum (operands[0]);
3503 int reg1 = true_regnum (operands[1]);
3506 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3507 AS1 (lsl,%B0) CR_TAB
3510 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3511 AS1 (clr,%B0) CR_TAB
3512 AS2 (sbrc,%A0,7) CR_TAB
3518 return (AS2 (mov,%A0,%B0) CR_TAB
3519 AS1 (lsl,%B0) CR_TAB
3520 AS2 (sbc,%B0,%B0) CR_TAB
3525 return (AS2 (mov,%A0,%B0) CR_TAB
3526 AS1 (lsl,%B0) CR_TAB
3527 AS2 (sbc,%B0,%B0) CR_TAB
3528 AS1 (asr,%A0) CR_TAB
/* Shift by 11: signed multiply by 0x20 when MUL is available.  */
3532 if (AVR_HAVE_MUL && ldi_ok)
3535 return (AS2 (ldi,%A0,0x20) CR_TAB
3536 AS2 (muls,%B0,%A0) CR_TAB
3537 AS2 (mov,%A0,r1) CR_TAB
3538 AS2 (sbc,%B0,%B0) CR_TAB
3539 AS1 (clr,__zero_reg__));
3541 if (optimize_size && scratch)
3544 return (AS2 (mov,%A0,%B0) CR_TAB
3545 AS1 (lsl,%B0) CR_TAB
3546 AS2 (sbc,%B0,%B0) CR_TAB
3547 AS1 (asr,%A0) CR_TAB
3548 AS1 (asr,%A0) CR_TAB
/* Shift by 12.  */
3552 if (AVR_HAVE_MUL && ldi_ok)
3555 return (AS2 (ldi,%A0,0x10) CR_TAB
3556 AS2 (muls,%B0,%A0) CR_TAB
3557 AS2 (mov,%A0,r1) CR_TAB
3558 AS2 (sbc,%B0,%B0) CR_TAB
3559 AS1 (clr,__zero_reg__));
3561 if (optimize_size && scratch)
3564 return (AS2 (mov,%A0,%B0) CR_TAB
3565 AS1 (lsl,%B0) CR_TAB
3566 AS2 (sbc,%B0,%B0) CR_TAB
3567 AS1 (asr,%A0) CR_TAB
3568 AS1 (asr,%A0) CR_TAB
3569 AS1 (asr,%A0) CR_TAB
/* Shift by 13.  */
3573 if (AVR_HAVE_MUL && ldi_ok)
3576 return (AS2 (ldi,%A0,0x08) CR_TAB
3577 AS2 (muls,%B0,%A0) CR_TAB
3578 AS2 (mov,%A0,r1) CR_TAB
3579 AS2 (sbc,%B0,%B0) CR_TAB
3580 AS1 (clr,__zero_reg__));
3583 break; /* scratch ? 5 : 7 */
3585 return (AS2 (mov,%A0,%B0) CR_TAB
3586 AS1 (lsl,%B0) CR_TAB
3587 AS2 (sbc,%B0,%B0) CR_TAB
3588 AS1 (asr,%A0) CR_TAB
3589 AS1 (asr,%A0) CR_TAB
3590 AS1 (asr,%A0) CR_TAB
3591 AS1 (asr,%A0) CR_TAB
/* Shift by 14: spread the sign, then bring bit 14 back in.  */
3596 return (AS1 (lsl,%B0) CR_TAB
3597 AS2 (sbc,%A0,%A0) CR_TAB
3598 AS1 (lsl,%B0) CR_TAB
3599 AS2 (mov,%B0,%A0) CR_TAB
3603 if (INTVAL (operands[2]) < 16)
/* Count >= 15: result is the sign bit replicated everywhere.  */
3609 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3610 AS2 (sbc,%A0,%A0) CR_TAB
/* Non-constant count: generic loop, asr/ror pair per iteration.  */
3615 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3617 insn, operands, len, 2);
3622 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Emit (or measure, via LEN) the best SImode arithmetic-right-shift
   sequence for INSN.  Byte-multiple counts are moves plus explicit
   sign extension (SBRC/COM or DEC of a cleared byte).  */
3625 ashrsi3_out (rtx insn, rtx operands[], int *len)
3627 if (GET_CODE (operands[2]) == CONST_INT)
3635 switch (INTVAL (operands[2]))
/* Shift by 8: move bytes down one; ordering depends on overlap.  */
3639 int reg0 = true_regnum (operands[0]);
3640 int reg1 = true_regnum (operands[1]);
3643 return (AS2 (mov,%A0,%B1) CR_TAB
3644 AS2 (mov,%B0,%C1) CR_TAB
3645 AS2 (mov,%C0,%D1) CR_TAB
3646 AS1 (clr,%D0) CR_TAB
3647 AS2 (sbrc,%C0,7) CR_TAB
3650 return (AS1 (clr,%D0) CR_TAB
3651 AS2 (sbrc,%D1,7) CR_TAB
3652 AS1 (dec,%D0) CR_TAB
3653 AS2 (mov,%C0,%D1) CR_TAB
3654 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: move the high word down; MOVW when available.  */
3660 int reg0 = true_regnum (operands[0]);
3661 int reg1 = true_regnum (operands[1]);
3663 if (reg0 == reg1 + 2)
3664 return *len = 4, (AS1 (clr,%D0) CR_TAB
3665 AS2 (sbrc,%B0,7) CR_TAB
3666 AS1 (com,%D0) CR_TAB
3669 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3670 AS1 (clr,%D0) CR_TAB
3671 AS2 (sbrc,%B0,7) CR_TAB
3672 AS1 (com,%D0) CR_TAB
3675 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3676 AS2 (mov,%A0,%C1) CR_TAB
3677 AS1 (clr,%D0) CR_TAB
3678 AS2 (sbrc,%B0,7) CR_TAB
3679 AS1 (com,%D0) CR_TAB
/* Shift by 24: only the top byte survives, sign-extended.  */
3684 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3685 AS1 (clr,%D0) CR_TAB
3686 AS2 (sbrc,%A0,7) CR_TAB
3687 AS1 (com,%D0) CR_TAB
3688 AS2 (mov,%B0,%D0) CR_TAB
3692 if (INTVAL (operands[2]) < 32)
/* Count >= 31: replicate the sign bit into all four bytes.  */
3699 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3700 AS2 (sbc,%A0,%A0) CR_TAB
3701 AS2 (mov,%B0,%A0) CR_TAB
3702 AS2 (movw,%C0,%A0));
3704 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3705 AS2 (sbc,%A0,%A0) CR_TAB
3706 AS2 (mov,%B0,%A0) CR_TAB
3707 AS2 (mov,%C0,%A0) CR_TAB
/* Non-constant count: generic loop, 4-instruction shift step.  */
3712 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3713 AS1 (ror,%C0) CR_TAB
3714 AS1 (ror,%B0) CR_TAB
3716 insn, operands, len, 4);
3720 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Emit (or measure, via LEN) the best QImode logical-right-shift
   sequence for INSN; mirror image of ashlqi3_out.  */
3723 lshrqi3_out (rtx insn, rtx operands[], int *len)
3725 if (GET_CODE (operands[2]) == CONST_INT)
3732 switch (INTVAL (operands[2]))
/* default: counts >= 8 clear the register outright.  */
3735 if (INTVAL (operands[2]) < 8)
3739 return AS1 (clr,%0);
3743 return AS1 (lsr,%0);
3747 return (AS1 (lsr,%0) CR_TAB
3751 return (AS1 (lsr,%0) CR_TAB
/* Shift by 4: nibble swap and mask when ANDI is usable (LD_REGS).  */
3756 if (test_hard_reg_class (LD_REGS, operands[0]))
3759 return (AS1 (swap,%0) CR_TAB
3760 AS2 (andi,%0,0x0f));
3763 return (AS1 (lsr,%0) CR_TAB
3769 if (test_hard_reg_class (LD_REGS, operands[0]))
3772 return (AS1 (swap,%0) CR_TAB
3777 return (AS1 (lsr,%0) CR_TAB
3784 if (test_hard_reg_class (LD_REGS, operands[0]))
3787 return (AS1 (swap,%0) CR_TAB
3793 return (AS1 (lsr,%0) CR_TAB
/* Shift by 7: rotate the bottom bit around instead of seven LSRs.  */
3802 return (AS1 (rol,%0) CR_TAB
3807 else if (CONSTANT_P (operands[2]))
3808 fatal_insn ("internal compiler error. Incorrect shift:", insn);
/* Non-constant count: generic loop, one LSR per iteration.  */
3810 out_shift_with_cnt (AS1 (lsr,%0),
3811 insn, operands, len, 1);
3815 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Emit (or measure, via LEN) the best HImode logical-right-shift
   sequence for INSN; mirror image of ashlhi3_out, masking with 0x0f /
   0x07 etc. instead of 0xf0 / 0xe0.  NOTE(review): the *len
   assignments for most cases fall in elided lines of this excerpt.  */
3818 lshrhi3_out (rtx insn, rtx operands[], int *len)
3820 if (GET_CODE (operands[2]) == CONST_INT)
3822 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3823 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3830 switch (INTVAL (operands[2]))
/* default: counts >= 16 clear both bytes.  */
3833 if (INTVAL (operands[2]) < 16)
3837 return (AS1 (clr,%B0) CR_TAB
/* Shift by 4: nibble-swap both bytes, then mask/merge.  */
3841 if (optimize_size && scratch)
3846 return (AS1 (swap,%B0) CR_TAB
3847 AS1 (swap,%A0) CR_TAB
3848 AS2 (andi,%A0,0x0f) CR_TAB
3849 AS2 (eor,%A0,%B0) CR_TAB
3850 AS2 (andi,%B0,0x0f) CR_TAB
3856 return (AS1 (swap,%B0) CR_TAB
3857 AS1 (swap,%A0) CR_TAB
3858 AS2 (ldi,%3,0x0f) CR_TAB
3859 AS2 (and,%A0,%3) CR_TAB
3860 AS2 (eor,%A0,%B0) CR_TAB
3861 AS2 (and,%B0,%3) CR_TAB
3864 break; /* optimize_size ? 6 : 8 */
3868 break; /* scratch ? 5 : 6 */
/* Shift by 5: one lsr/ror then the nibble-swap trick from case 4.  */
3872 return (AS1 (lsr,%B0) CR_TAB
3873 AS1 (ror,%A0) CR_TAB
3874 AS1 (swap,%B0) CR_TAB
3875 AS1 (swap,%A0) CR_TAB
3876 AS2 (andi,%A0,0x0f) CR_TAB
3877 AS2 (eor,%A0,%B0) CR_TAB
3878 AS2 (andi,%B0,0x0f) CR_TAB
3884 return (AS1 (lsr,%B0) CR_TAB
3885 AS1 (ror,%A0) CR_TAB
3886 AS1 (swap,%B0) CR_TAB
3887 AS1 (swap,%A0) CR_TAB
3888 AS2 (ldi,%3,0x0f) CR_TAB
3889 AS2 (and,%A0,%3) CR_TAB
3890 AS2 (eor,%A0,%B0) CR_TAB
3891 AS2 (and,%B0,%3) CR_TAB
3898 break; /* scratch ? 5 : 6 */
/* Shift by 6: two left shifts through __tmp_reg__ then a byte move
   (i.e. >>8 then <<2).  */
3900 return (AS1 (clr,__tmp_reg__) CR_TAB
3901 AS1 (lsl,%A0) CR_TAB
3902 AS1 (rol,%B0) CR_TAB
3903 AS1 (rol,__tmp_reg__) CR_TAB
3904 AS1 (lsl,%A0) CR_TAB
3905 AS1 (rol,%B0) CR_TAB
3906 AS1 (rol,__tmp_reg__) CR_TAB
3907 AS2 (mov,%A0,%B0) CR_TAB
3908 AS2 (mov,%B0,__tmp_reg__));
/* Shift by 7: push bit 7 of %A0 into carry, rotate it into %A0
   from %B0, clear the high byte via SBC.  */
3912 return (AS1 (lsl,%A0) CR_TAB
3913 AS2 (mov,%A0,%B0) CR_TAB
3914 AS1 (rol,%A0) CR_TAB
3915 AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8 is just a byte move.  */
3919 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3924 return (AS2 (mov,%A0,%B0) CR_TAB
3925 AS1 (clr,%B0) CR_TAB
3930 return (AS2 (mov,%A0,%B0) CR_TAB
3931 AS1 (clr,%B0) CR_TAB
3932 AS1 (lsr,%A0) CR_TAB
3937 return (AS2 (mov,%A0,%B0) CR_TAB
3938 AS1 (clr,%B0) CR_TAB
3939 AS1 (lsr,%A0) CR_TAB
3940 AS1 (lsr,%A0) CR_TAB
/* Shift by 12: byte move plus the swap/mask trick on the low byte.  */
3947 return (AS2 (mov,%A0,%B0) CR_TAB
3948 AS1 (clr,%B0) CR_TAB
3949 AS1 (swap,%A0) CR_TAB
3950 AS2 (andi,%A0,0x0f));
3955 return (AS2 (mov,%A0,%B0) CR_TAB
3956 AS1 (clr,%B0) CR_TAB
3957 AS1 (swap,%A0) CR_TAB
3958 AS2 (ldi,%3,0x0f) CR_TAB
3962 return (AS2 (mov,%A0,%B0) CR_TAB
3963 AS1 (clr,%B0) CR_TAB
3964 AS1 (lsr,%A0) CR_TAB
3965 AS1 (lsr,%A0) CR_TAB
3966 AS1 (lsr,%A0) CR_TAB
/* Shift by 13: several strategies, cheapest first.  */
3973 return (AS2 (mov,%A0,%B0) CR_TAB
3974 AS1 (clr,%B0) CR_TAB
3975 AS1 (swap,%A0) CR_TAB
3976 AS1 (lsr,%A0) CR_TAB
3977 AS2 (andi,%A0,0x07));
3979 if (AVR_HAVE_MUL && scratch)
/* Multiply by 0x08 puts bits 5..7 of %B0 into r1 = new low byte.  */
3982 return (AS2 (ldi,%3,0x08) CR_TAB
3983 AS2 (mul,%B0,%3) CR_TAB
3984 AS2 (mov,%A0,r1) CR_TAB
3985 AS1 (clr,%B0) CR_TAB
3986 AS1 (clr,__zero_reg__));
3988 if (optimize_size && scratch)
3993 return (AS2 (mov,%A0,%B0) CR_TAB
3994 AS1 (clr,%B0) CR_TAB
3995 AS1 (swap,%A0) CR_TAB
3996 AS1 (lsr,%A0) CR_TAB
3997 AS2 (ldi,%3,0x07) CR_TAB
/* Build the 0x08 multiplier in r1 via SET/BLD when no scratch.  */
4003 return ("set" CR_TAB
4004 AS2 (bld,r1,3) CR_TAB
4005 AS2 (mul,%B0,r1) CR_TAB
4006 AS2 (mov,%A0,r1) CR_TAB
4007 AS1 (clr,%B0) CR_TAB
4008 AS1 (clr,__zero_reg__));
4011 return (AS2 (mov,%A0,%B0) CR_TAB
4012 AS1 (clr,%B0) CR_TAB
4013 AS1 (lsr,%A0) CR_TAB
4014 AS1 (lsr,%A0) CR_TAB
4015 AS1 (lsr,%A0) CR_TAB
4016 AS1 (lsr,%A0) CR_TAB
/* Shift by 14.  */
4020 if (AVR_HAVE_MUL && ldi_ok)
4023 return (AS2 (ldi,%A0,0x04) CR_TAB
4024 AS2 (mul,%B0,%A0) CR_TAB
4025 AS2 (mov,%A0,r1) CR_TAB
4026 AS1 (clr,%B0) CR_TAB
4027 AS1 (clr,__zero_reg__));
4029 if (AVR_HAVE_MUL && scratch)
4032 return (AS2 (ldi,%3,0x04) CR_TAB
4033 AS2 (mul,%B0,%3) CR_TAB
4034 AS2 (mov,%A0,r1) CR_TAB
4035 AS1 (clr,%B0) CR_TAB
4036 AS1 (clr,__zero_reg__));
4038 if (optimize_size && ldi_ok)
/* Small counted loop of six LSRs when optimizing for size.  */
4041 return (AS2 (mov,%A0,%B0) CR_TAB
4042 AS2 (ldi,%B0,6) "\n1:\t"
4043 AS1 (lsr,%A0) CR_TAB
4044 AS1 (dec,%B0) CR_TAB
4047 if (optimize_size && scratch)
/* Shift right by 14 done as shift left by 2 into the other byte.  */
4050 return (AS1 (clr,%A0) CR_TAB
4051 AS1 (lsl,%B0) CR_TAB
4052 AS1 (rol,%A0) CR_TAB
4053 AS1 (lsl,%B0) CR_TAB
4054 AS1 (rol,%A0) CR_TAB
/* Shift by 15: only the top bit survives into bit 0.  */
4059 return (AS1 (clr,%A0) CR_TAB
4060 AS1 (lsl,%B0) CR_TAB
4061 AS1 (rol,%A0) CR_TAB
/* Non-constant count: generic loop, lsr/ror pair per iteration.  */
4066 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4068 insn, operands, len, 2);
4072 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Emit (or measure, via LEN) the best SImode logical-right-shift
   sequence for INSN; mirror image of ashlsi3_out.  */
4075 lshrsi3_out (rtx insn, rtx operands[], int *len)
4077 if (GET_CODE (operands[2]) == CONST_INT)
4085 switch (INTVAL (operands[2]))
/* default: counts >= 32 clear all four bytes (MOVW when present).  */
4088 if (INTVAL (operands[2]) < 32)
4092 return *len = 3, (AS1 (clr,%D0) CR_TAB
4093 AS1 (clr,%C0) CR_TAB
4094 AS2 (movw,%A0,%C0));
4096 return (AS1 (clr,%D0) CR_TAB
4097 AS1 (clr,%C0) CR_TAB
4098 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes down one; ordering depends on overlap.  */
4103 int reg0 = true_regnum (operands[0]);
4104 int reg1 = true_regnum (operands[1]);
4107 return (AS2 (mov,%A0,%B1) CR_TAB
4108 AS2 (mov,%B0,%C1) CR_TAB
4109 AS2 (mov,%C0,%D1) CR_TAB
4112 return (AS1 (clr,%D0) CR_TAB
4113 AS2 (mov,%C0,%D1) CR_TAB
4114 AS2 (mov,%B0,%C1) CR_TAB
/* Shift by 16: move the high word down; MOVW when available.  */
4120 int reg0 = true_regnum (operands[0]);
4121 int reg1 = true_regnum (operands[1]);
4123 if (reg0 == reg1 + 2)
4124 return *len = 2, (AS1 (clr,%C0) CR_TAB
4127 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4128 AS1 (clr,%C0) CR_TAB
4131 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4132 AS2 (mov,%A0,%C1) CR_TAB
4133 AS1 (clr,%C0) CR_TAB
/* Shift by 24: only the top byte survives, in the low position.  */
4138 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4139 AS1 (clr,%B0) CR_TAB
4140 AS1 (clr,%C0) CR_TAB
/* Shift by 31: the result is 0 or 1 depending on the sign bit.  */
4145 return (AS1 (clr,%A0) CR_TAB
4146 AS2 (sbrc,%D0,7) CR_TAB
4147 AS1 (inc,%A0) CR_TAB
4148 AS1 (clr,%B0) CR_TAB
4149 AS1 (clr,%C0) CR_TAB
/* Non-constant count: generic loop, 4-instruction shift step.  */
4154 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4155 AS1 (ror,%C0) CR_TAB
4156 AS1 (ror,%B0) CR_TAB
4158 insn, operands, len, 4);
4162 /* Modifies the length assigned to instruction INSN
4163 LEN is the initially computed length of the insn. */
/* Implementation of the ADJUST_INSN_LENGTH target macro: re-run the
   relevant output routines in "measure only" mode (non-null len
   pointer) and return the corrected length.  The return statement is
   in an elided line of this excerpt -- presumably `return len;`,
   TODO confirm.  */
4166 adjust_insn_length (rtx insn, int len)
4168 rtx patt = PATTERN (insn);
/* Plain SET patterns: moves, cc0 tests, AND/IOR with constant.  */
4171 if (GET_CODE (patt) == SET)
4174 op[1] = SET_SRC (patt);
4175 op[0] = SET_DEST (patt);
4176 if (general_operand (op[1], VOIDmode)
4177 && general_operand (op[0], VOIDmode))
4179 switch (GET_MODE (op[0]))
4182 output_movqi (insn, op, &len);
4185 output_movhi (insn, op, &len);
4189 output_movsisf (insn, op, &len);
4195 else if (op[0] == cc0_rtx && REG_P (op[1]))
4197 switch (GET_MODE (op[1]))
4199 case HImode: out_tsthi (insn,&len); break;
4200 case SImode: out_tstsi (insn,&len); break;
/* AND with a constant mask: one instruction per byte not all-ones.  */
4204 else if (GET_CODE (op[1]) == AND)
4206 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4208 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4209 if (GET_MODE (op[1]) == SImode)
4210 len = (((mask & 0xff) != 0xff)
4211 + ((mask & 0xff00) != 0xff00)
4212 + ((mask & 0xff0000L) != 0xff0000L)
4213 + ((mask & 0xff000000L) != 0xff000000L));
4214 else if (GET_MODE (op[1]) == HImode)
4215 len = (((mask & 0xff) != 0xff)
4216 + ((mask & 0xff00) != 0xff00));
/* IOR with a constant mask: one instruction per nonzero byte.  */
4219 else if (GET_CODE (op[1]) == IOR)
4221 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4223 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4224 if (GET_MODE (op[1]) == SImode)
4225 len = (((mask & 0xff) != 0)
4226 + ((mask & 0xff00) != 0)
4227 + ((mask & 0xff0000L) != 0)
4228 + ((mask & 0xff000000L) != 0));
4229 else if (GET_MODE (op[1]) == HImode)
4230 len = (((mask & 0xff) != 0)
4231 + ((mask & 0xff00) != 0));
/* PARALLEL patterns: reloads with scratch, and the shift insns.  */
4235 set = single_set (insn);
4240 op[1] = SET_SRC (set);
4241 op[0] = SET_DEST (set);
4243 if (GET_CODE (patt) == PARALLEL
4244 && general_operand (op[1], VOIDmode)
4245 && general_operand (op[0], VOIDmode))
4247 if (XVECLEN (patt, 0) == 2)
4248 op[2] = XVECEXP (patt, 0, 1);
4250 switch (GET_MODE (op[0]))
4256 output_reload_inhi (insn, op, &len);
4260 output_reload_insisf (insn, op, &len);
4266 else if (GET_CODE (op[1]) == ASHIFT
4267 || GET_CODE (op[1]) == ASHIFTRT
4268 || GET_CODE (op[1]) == LSHIFTRT)
4272 ops[1] = XEXP (op[1],0);
4273 ops[2] = XEXP (op[1],1);
4274 switch (GET_CODE (op[1]))
4277 switch (GET_MODE (op[0]))
4279 case QImode: ashlqi3_out (insn,ops,&len); break;
4280 case HImode: ashlhi3_out (insn,ops,&len); break;
4281 case SImode: ashlsi3_out (insn,ops,&len); break;
4286 switch (GET_MODE (op[0]))
4288 case QImode: ashrqi3_out (insn,ops,&len); break;
4289 case HImode: ashrhi3_out (insn,ops,&len); break;
4290 case SImode: ashrsi3_out (insn,ops,&len); break;
4295 switch (GET_MODE (op[0]))
4297 case QImode: lshrqi3_out (insn,ops,&len); break;
4298 case HImode: lshrhi3_out (insn,ops,&len); break;
4299 case SImode: lshrsi3_out (insn,ops,&len); break;
4311 /* Return nonzero if register REG dead after INSN. */
/* Thin wrapper: REG is dead if INSN itself kills it, or if the
   forward scan in _reg_unused_after finds no later use.  */
4314 reg_unused_after (rtx insn, rtx reg)
4316 return (dead_or_set_p (insn, reg)
4317 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4320 /* Return nonzero if REG is not used after INSN.
4321 We assume REG is a reload reg, and therefore does
4322 not live past labels. It may live past calls or jumps though. */
/* Forward scan from INSN; conservative: any sighting of REG in a
   source, memory destination, or non-single-set pattern means "used".
   NOTE(review): several return statements fall in elided lines of
   this excerpt, so the exact outcomes per branch should be confirmed
   against the full source.  */
4325 _reg_unused_after (rtx insn, rtx reg)
4330 /* If the reg is set by this instruction, then it is safe for our
4331 case. Disregard the case where this is a store to memory, since
4332 we are checking a register used in the store address. */
4333 set = single_set (insn);
4334 if (set && GET_CODE (SET_DEST (set)) != MEM
4335 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4338 while ((insn = NEXT_INSN (insn)))
4341 code = GET_CODE (insn);
4344 /* If this is a label that existed before reload, then the register
4345 if dead here. However, if this is a label added by reorg, then
4346 the register may still be live here. We can't tell the difference,
4347 so we just ignore labels completely. */
4348 if (code == CODE_LABEL)
4356 if (code == JUMP_INSN)
4359 /* If this is a sequence, we must handle them all at once.
4360 We could have for instance a call that sets the target register,
4361 and an insn in a delay slot that uses the register. In this case,
4362 we must return 0. */
4363 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4368 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4370 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4371 rtx set = single_set (this_insn);
4373 if (GET_CODE (this_insn) == CALL_INSN)
4375 else if (GET_CODE (this_insn) == JUMP_INSN)
4377 if (INSN_ANNULLED_BRANCH_P (this_insn))
4382 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4384 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4386 if (GET_CODE (SET_DEST (set)) != MEM)
4392 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4397 else if (code == JUMP_INSN)
/* A call may use REG as an argument or clobber it if call-used.  */
4401 if (code == CALL_INSN)
4404 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4405 if (GET_CODE (XEXP (tem, 0)) == USE
4406 && REG_P (XEXP (XEXP (tem, 0), 0))
4407 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4409 if (call_used_regs[REGNO (reg)])
4413 set = single_set (insn);
4415 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
/* REG fully overwritten by a register set: dead from here on.  */
4417 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4418 return GET_CODE (SET_DEST (set)) != MEM;
4419 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4425 /* Target hook for assembling integer objects. The AVR version needs
4426 special handling for references to certain labels. */
/* Pointers to code must be emitted as word (program-memory) addresses,
   hence the pm() wrapper; everything else defers to the default hook.
   Returns true when the object was handled here.  */
4429 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4431 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4432 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4433 || GET_CODE (x) == LABEL_REF))
4435 fputs ("\t.word\tpm(", asm_out_file);
4436 output_addr_const (asm_out_file, x);
4437 fputs (")\n", asm_out_file);
4440 return default_assemble_integer (x, size, aligned_p);
4443 /* The routine used to output NUL terminated strings. We use a special
4444 version of this for most svr4 targets because doing so makes the
4445 generated assembly code more compact (and thus faster to assemble)
4446 as well as more readable, especially for targets like the i386
4447 (where the only alternative is to output character sequences as
4448 comma separated lists of numbers). */
/* Emit STR to FILE as a quoted .string directive, octal-escaping any
   byte the ESCAPES table marks as non-printable.  */
4451 gas_output_limited_string(FILE *file, const char *str)
4453 const unsigned char *_limited_str = (const unsigned char *) str;
4455 fprintf (file, "%s\"", STRING_ASM_OP)
4456 for (; (ch = *_limited_str); _limited_str++)
4459 switch (escape = ESCAPES[ch])
/* Non-printable byte: emit as three-digit octal escape.  */
4465 fprintf (file, "\\%03o", ch);
4469 putc (escape, file);
4473 fprintf (file, "\"\n");
4476 /* The routine used to output sequences of byte values. We use a special
4477 version of this for most svr4 targets because doing so makes the
4478 generated assembly code more compact (and thus faster to assemble)
4479 as well as more readable. Note that if we find subparts of the
4480 character sequence which end with NUL (and which are shorter than
4481 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* Emit LENGTH bytes of STR to FILE in .ascii chunks of at most ~60
   output characters, delegating NUL-terminated runs to
   gas_output_limited_string.  */
4484 gas_output_ascii(FILE *file, const char *str, size_t length)
4486 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4487 const unsigned char *limit = _ascii_bytes + length;
4488 unsigned bytes_in_chunk = 0;
4489 for (; _ascii_bytes < limit; _ascii_bytes++)
4491 const unsigned char *p;
/* Close the current .ascii chunk once it gets long.  */
4492 if (bytes_in_chunk >= 60)
4494 fprintf (file, "\"\n");
/* Look ahead for a NUL: short NUL-terminated runs can go out as a
   .string instead.  */
4497 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4499 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4501 if (bytes_in_chunk > 0)
4503 fprintf (file, "\"\n");
4506 gas_output_limited_string (file, (const char*)_ascii_bytes);
4513 if (bytes_in_chunk == 0)
4514 fprintf (file, "\t.ascii\t\"");
4515 switch (escape = ESCAPES[ch = *_ascii_bytes])
4522 fprintf (file, "\\%03o", ch);
4523 bytes_in_chunk += 4;
4527 putc (escape, file);
4528 bytes_in_chunk += 2;
4533 if (bytes_in_chunk > 0)
4534 fprintf (file, "\"\n");
4537 /* Return value is nonzero if pseudos that have been
4538 assigned to registers of class CLASS would likely be spilled
4539 because registers of CLASS are needed for spill registers. */
/* Every class except the two large ones (ALL_REGS, ADDW_REGS) is
   small enough on AVR to be spill-prone.  */
4542 class_likely_spilled_p (int c)
4544 return (c != ALL_REGS && c != ADDW_REGS);
4547 /* Valid attributes:
4548 progmem - put data to program memory;
4549 signal - make a function to be hardware interrupt. After function
4550 prologue interrupts are disabled;
4551 interrupt - make a function to be hardware interrupt. After function
4552 prologue interrupts are enabled;
4553 naked - don't generate function prologue/epilogue and `ret' command.
4555 Only `progmem' attribute valid for type. */
/* Attribute table consumed via TARGET_ATTRIBUTE_TABLE; terminated by
   the all-NULL sentinel entry.  */
4557 const struct attribute_spec avr_attribute_table[] =
4559 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4560 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4561 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4562 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4563 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4564 { NULL, 0, 0, false, false, false, NULL }
4567 /* Handle a "progmem" attribute; arguments as in
4568 struct attribute_spec.handler. */
/* Accepts the attribute on initialized static/external variables;
   for TYPE_DECLs it is re-attached to the type variant instead.
   Sets *no_add_attrs when the attribute should be dropped.  */
4570 avr_handle_progmem_attribute (tree *node, tree name,
4571 tree args ATTRIBUTE_UNUSED,
4572 int flags ATTRIBUTE_UNUSED,
4577 if (TREE_CODE (*node) == TYPE_DECL)
4579 /* This is really a decl attribute, not a type attribute,
4580 but try to handle it for GCC 3.0 backwards compatibility. */
4582 tree type = TREE_TYPE (*node);
4583 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4584 tree newtype = build_type_attribute_variant (type, attr);
4586 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4587 TREE_TYPE (*node) = newtype;
4588 *no_add_attrs = true;
4590 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* Uninitialized non-extern data cannot live in flash.  */
4592 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4594 warning (0, "only initialized variables can be placed into "
4595 "program memory area");
4596 *no_add_attrs = true;
4601 warning (OPT_Wattributes, "%qs attribute ignored",
4602 IDENTIFIER_POINTER (name));
4603 *no_add_attrs = true;
4610 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4611 struct attribute_spec.handler. */
/* Rejects "signal"/"interrupt" on non-functions; additionally warns
   when the handler's assembler name does not start with "__vector",
   which usually means a misspelled vector name.  */
4614 avr_handle_fndecl_attribute (tree *node, tree name,
4615 tree args ATTRIBUTE_UNUSED,
4616 int flags ATTRIBUTE_UNUSED,
4619 if (TREE_CODE (*node) != FUNCTION_DECL)
4621 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4622 IDENTIFIER_POINTER (name));
4623 *no_add_attrs = true;
4627 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4628 const char *attr = IDENTIFIER_POINTER (name);
4630 /* If the function has the 'signal' or 'interrupt' attribute, test to
4631 make sure that the name of the function is "__vector_NN" so as to
4632 catch when the user misspells the interrupt vector name. */
4634 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4636 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4638 warning (0, "%qs appears to be a misspelled interrupt handler",
4642 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4644 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4646 warning (0, "%qs appears to be a misspelled signal handler",
/* Handle an attribute requiring a FUNCTION_TYPE (e.g. "naked");
   arguments as in struct attribute_spec.handler.  Drops the attribute
   with a warning when applied to anything else.  */
4656 avr_handle_fntype_attribute (tree *node, tree name,
4657 tree args ATTRIBUTE_UNUSED,
4658 int flags ATTRIBUTE_UNUSED,
4661 if (TREE_CODE (*node) != FUNCTION_TYPE)
4663 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4664 IDENTIFIER_POINTER (name));
4665 *no_add_attrs = true;
4671 /* Look for attribute `progmem' in DECL
4672 if found return 1, otherwise 0. */
/* Checks DECL's own attribute list (ATTRIBUTES) first, then strips
   array types and checks the element type's attributes.  */
4675 avr_progmem_p (tree decl, tree attributes)
4679 if (TREE_CODE (decl) != VAR_DECL)
4683 != lookup_attribute ("progmem", attributes))
/* Walk down through array-of-array types to the element type.  */
4689 while (TREE_CODE (a) == ARRAY_TYPE);
4691 if (a == error_mark_node)
4694 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4700 /* Add the section attribute if the variable is in progmem. */
/* TARGET_INSERT_ATTRIBUTES hook: progmem variables are forced into
   the ".progmem.data" section and made read-only.  */
4703 avr_insert_attributes (tree node, tree *attributes)
4705 if (TREE_CODE (node) == VAR_DECL
4706 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4707 && avr_progmem_p (node, *attributes))
4709 static const char dsec[] = ".progmem.data";
4710 *attributes = tree_cons (get_identifier ("section"),
4711 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4714 /* ??? This seems sketchy. Why can't the user declare the
4715 thing const in the first place? */
4716 TREE_READONLY (node) = 1;
4720 /* A get_unnamed_section callback for switching to progmem_section. */
/* Emits the .section directive; non-MEGA parts mark it executable
   ("ax") so LPM-addressable data lands inside the reachable range.  */
4723 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4725 fprintf (asm_out_file,
4726 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4727 AVR_MEGA ? "a" : "ax");
4728 /* Should already be aligned, this is just to be safe if it isn't. */
4729 fprintf (asm_out_file, "\t.p2align 1\n");
4732 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Registers the unnamed progmem section and aliases the read-only
   data section to .data (AVR has no generally readable .rodata in
   flash without LPM).  */
4735 avr_asm_init_sections (void)
4737 progmem_section = get_unnamed_section (AVR_MEGA ? 0 : SECTION_CODE,
4738 avr_output_progmem_section_asm_op,
4740 readonly_data_section = data_section;
/* Implement TARGET_SECTION_TYPE_FLAGS: like the default, but sections
   named ".noinit*" get SECTION_BSS and must hold only uninitialized
   variables.  */
4744 avr_section_type_flags (tree decl, const char *name, int reloc)
4746 unsigned int flags = default_section_type_flags (decl, name, reloc);
4748 if (strncmp (name, ".noinit", 7) == 0)
4750 if (decl && TREE_CODE (decl) == VAR_DECL
4751 && DECL_INITIAL (decl) == NULL_TREE)
4752 flags |= SECTION_BSS; /* @nobits */
4754 warning (0, "only uninitialized variables can be placed in the "
4761 /* Outputs some appropriate text to go at the start of an assembler
4765 avr_file_start (void)
     /* MCUs known only to the assembler (not to the compiler) are rejected.  */
4768 error ("MCU %qs supported for assembler only", avr_mcu_name);
4770 default_file_start ();
4772 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
     /* Standard AVR symbolic register definitions: SREG/stack pointer I/O
        addresses and the fixed tmp/zero registers (r0/r1).  */
4773 fputs ("__SREG__ = 0x3f\n"
4775 "__SP_L__ = 0x3d\n", asm_out_file);
4777 fputs ("__tmp_reg__ = 0\n"
4778 "__zero_reg__ = 1\n", asm_out_file);
4780 /* FIXME: output these only if there is anything in the .data / .bss
4781 sections - some code size could be saved by not linking in the
4782 initialization code from libgcc if one or both sections are empty. */
4783 fputs ("\t.global __do_copy_data\n", asm_out_file);
4784 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4787 /* Outputs to the stdio stream FILE some
4788 appropriate text to go at the end of an assembler file. */
4795 /* Choose the order in which to allocate hard registers for
4796 pseudo-registers local to a basic block.
4798 Store the desired register order in the array `reg_alloc_order'.
4799 Element 0 should be the register to allocate first; element 1, the
4800 next register; and so on. */
4803 order_regs_for_local_alloc (void)
     /* Three alternative allocation orders, selected by -morder1/-morder2;
        order_0 is the default.  NOTE(review): the leading entries of each
        table are elided in this fragmentary listing.  */
4806 static const int order_0[] = {
4814 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4818 static const int order_1[] = {
4826 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4830 static const int order_2[] = {
4839 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4844 const int *order = (TARGET_ORDER_1 ? order_1 :
4845 TARGET_ORDER_2 ? order_2 :
     /* Copy the chosen table into the global reg_alloc_order.  */
4847 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4848 reg_alloc_order[i] = order[i];
4852 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4853 cost of an RTX operand given its context. X is the rtx of the
4854 operand, MODE is its mode, and OUTER is the rtx_code of this
4855 operand's parent operator. */
4858 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4860 enum rtx_code code = GET_CODE (x);
     /* One instruction per byte of the mode (e.g. a multi-byte load/move).  */
4871 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
     /* Fall back to the full cost function for anything more complex.  */
4878 avr_rtx_costs (x, code, outer, &total);
4882 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4883 is to be calculated. Return true if the complete cost has been
4884 computed, and false if subexpressions should be scanned. In either
4885 case, *TOTAL contains the cost result. */
4888 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4890 enum machine_mode mode = GET_MODE (x);
     /* NOTE(review): the switch statement and all of its `case` labels are
        elided in this fragmentary listing; only the per-case cost
        assignments survive.  Costs are expressed in instruction counts via
        COSTS_N_INSNS, and operand sub-costs are accumulated through the
        mutually recursive avr_operand_rtx_cost.  */
4897 /* Immediate constants are as cheap as registers. */
4905 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4913 *total = COSTS_N_INSNS (1);
4917 *total = COSTS_N_INSNS (3);
4921 *total = COSTS_N_INSNS (7);
4927 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4935 *total = COSTS_N_INSNS (1);
4941 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4945 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4946 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
     /* Extension-like cases: cost is the number of bytes added by the
        widening (destination size minus source size).  */
4950 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4951 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4952 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4956 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4957 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4958 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4965 *total = COSTS_N_INSNS (1);
4966 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4967 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
     /* Small immediates (-63..63) fit adiw/sbiw-style forms and are cheaper
        than a general constant operand.  */
4971 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4973 *total = COSTS_N_INSNS (2);
4974 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4976 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4977 *total = COSTS_N_INSNS (1);
4979 *total = COSTS_N_INSNS (2);
4983 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4985 *total = COSTS_N_INSNS (4);
4986 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4988 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4989 *total = COSTS_N_INSNS (1);
4991 *total = COSTS_N_INSNS (4);
4997 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
     /* Byte-wise binary operations: one instruction per byte of the mode.  */
5003 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5004 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5005 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5006 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5010 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5011 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5012 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
     /* Multiply-like cases: cost depends on whether the part has a hardware
        multiplier (presumably AVR_HAVE_MUL; the guarding condition line is
        elided) and on -Os.  */
5020 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5021 else if (optimize_size)
5022 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5029 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5030 else if (optimize_size)
5031 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5039 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
     /* Division/modulo presumably go through a libgcc call: call cost only
        (2 words on MEGA for [r]call, 1 otherwise).  */
5048 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5051 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5052 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
     /* Shift costs: variable shift counts are expensive loops (cost depends
        on -Os); constant counts are tabulated per count and per mode in the
        switch statements below (case labels elided in this listing).  */
5059 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5061 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5062 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5066 val = INTVAL (XEXP (x, 1));
5068 *total = COSTS_N_INSNS (3);
5069 else if (val >= 0 && val <= 7)
5070 *total = COSTS_N_INSNS (val);
5072 *total = COSTS_N_INSNS (1);
5077 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5079 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5080 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5083 switch (INTVAL (XEXP (x, 1)))
5090 *total = COSTS_N_INSNS (2);
5093 *total = COSTS_N_INSNS (3);
5099 *total = COSTS_N_INSNS (4);
5104 *total = COSTS_N_INSNS (5);
5107 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5110 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5113 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5116 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5117 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5122 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5124 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5125 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5128 switch (INTVAL (XEXP (x, 1)))
5134 *total = COSTS_N_INSNS (3);
5139 *total = COSTS_N_INSNS (4);
5142 *total = COSTS_N_INSNS (6);
5145 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5148 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5149 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5156 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5163 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5165 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5166 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5170 val = INTVAL (XEXP (x, 1));
5172 *total = COSTS_N_INSNS (4);
5174 *total = COSTS_N_INSNS (2);
5175 else if (val >= 0 && val <= 7)
5176 *total = COSTS_N_INSNS (val);
5178 *total = COSTS_N_INSNS (1);
5183 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5185 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5186 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5189 switch (INTVAL (XEXP (x, 1)))
5195 *total = COSTS_N_INSNS (2);
5198 *total = COSTS_N_INSNS (3);
5204 *total = COSTS_N_INSNS (4);
5208 *total = COSTS_N_INSNS (5);
5211 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5214 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5218 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5221 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5222 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5227 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5229 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5230 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5233 switch (INTVAL (XEXP (x, 1)))
5239 *total = COSTS_N_INSNS (4);
5244 *total = COSTS_N_INSNS (6);
5247 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5250 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5253 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5254 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5261 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5268 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5270 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5271 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5275 val = INTVAL (XEXP (x, 1));
5277 *total = COSTS_N_INSNS (3);
5278 else if (val >= 0 && val <= 7)
5279 *total = COSTS_N_INSNS (val);
5281 *total = COSTS_N_INSNS (1);
5286 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5288 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5289 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5292 switch (INTVAL (XEXP (x, 1)))
5299 *total = COSTS_N_INSNS (2);
5302 *total = COSTS_N_INSNS (3);
5307 *total = COSTS_N_INSNS (4);
5311 *total = COSTS_N_INSNS (5);
5317 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5320 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5324 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5327 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5328 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5333 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5335 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5336 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5339 switch (INTVAL (XEXP (x, 1)))
5345 *total = COSTS_N_INSNS (4);
5348 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5353 *total = COSTS_N_INSNS (4);
5356 *total = COSTS_N_INSNS (6);
5359 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5360 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5367 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
     /* Comparison costs by operand mode: 1/2/4 insns for byte/word/dword,
        with an extra instruction when comparing against a non-zero
        constant (the constant must first be loaded).  */
5371 switch (GET_MODE (XEXP (x, 0)))
5374 *total = COSTS_N_INSNS (1);
5375 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5376 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5380 *total = COSTS_N_INSNS (2);
5381 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5382 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5383 else if (INTVAL (XEXP (x, 1)) != 0)
5384 *total += COSTS_N_INSNS (1);
5388 *total = COSTS_N_INSNS (4);
5389 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5390 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5391 else if (INTVAL (XEXP (x, 1)) != 0)
5392 *total += COSTS_N_INSNS (3);
5398 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5407 /* Calculate the cost of a memory address. */
5410 avr_address_cost (rtx x)
     /* Base+displacement addresses with a displacement beyond the ldd/std
        reach are penalized; constant addresses in I/O space are cheapest
        (they can use in/out).  Returned costs are on the elided lines.  */
5412 if (GET_CODE (x) == PLUS
5413 && GET_CODE (XEXP (x,1)) == CONST_INT
5414 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5415 && INTVAL (XEXP (x,1)) >= 61)
5417 if (CONSTANT_ADDRESS_P (x))
5419 if (avr_io_address_p (x, 1))
5426 /* Test for extra memory constraint 'Q'.
5427 It's a memory address based on Y or Z pointer with valid displacement. */
5430 extra_constraint_Q (rtx x)
     /* Accept (mem (plus (reg) (const_int d))) where d fits the ldd/std
        displacement range for this mode.  */
5432 if (GET_CODE (XEXP (x,0)) == PLUS
5433 && REG_P (XEXP (XEXP (x,0), 0))
5434 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5435 && (INTVAL (XEXP (XEXP (x,0), 1))
5436 <= MAX_LD_OFFSET (GET_MODE (x))))
5438 rtx xx = XEXP (XEXP (x,0), 0);
5439 int regno = REGNO (xx);
5440 if (TARGET_ALL_DEBUG)
5442 fprintf (stderr, ("extra_constraint:\n"
5443 "reload_completed: %d\n"
5444 "reload_in_progress: %d\n"),
5445 reload_completed, reload_in_progress);
     /* Before reload any pseudo is acceptable; afterwards only the real
        Y/Z base registers (or the frame/arg pointers that will become
        them) qualify.  */
5448 if (regno >= FIRST_PSEUDO_REGISTER)
5449 return 1; /* allocate pseudos */
5450 else if (regno == REG_Z || regno == REG_Y)
5451 return 1; /* strictly check */
5452 else if (xx == frame_pointer_rtx
5453 || xx == arg_pointer_rtx)
5454 return 1; /* XXX frame & arg pointer checks */
5459 /* Convert condition code CONDITION to the valid AVR condition code. */
     /* NOTE(review): the body (a switch over CONDITION, presumably mapping
        e.g. GT->GE style codes) is elided in this fragmentary listing.  */
5462 avr_normalize_condition (RTX_CODE condition)
5479 /* This function optimizes conditional jumps. */
     /* NOTE(review): the function's signature line is elided; from its
        position this is presumably the machine reorg pass (avr_reorg).
        It walks all insns looking for cc0-setting compare insns and
        rewrites compare + conditional-branch pairs into cheaper forms.  */
5486 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5488 if (! (GET_CODE (insn) == INSN
5489 || GET_CODE (insn) == CALL_INSN
5490 || GET_CODE (insn) == JUMP_INSN)
5491 || !single_set (insn))
5494 pattern = PATTERN (insn);
5496 if (GET_CODE (pattern) == PARALLEL)
5497 pattern = XVECEXP (pattern, 0, 0);
5498 if (GET_CODE (pattern) == SET
5499 && SET_DEST (pattern) == cc0_rtx
5500 && compare_diff_p (insn))
5502 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5504 /* Now we work under compare insn. */
5506 pattern = SET_SRC (pattern);
     /* reg-reg compare: swap the operands and the condition of the
        following branch (swap_condition keeps the overall semantics).  */
5507 if (true_regnum (XEXP (pattern,0)) >= 0
5508 && true_regnum (XEXP (pattern,1)) >= 0 )
5510 rtx x = XEXP (pattern,0);
5511 rtx next = next_real_insn (insn);
5512 rtx pat = PATTERN (next);
5513 rtx src = SET_SRC (pat);
5514 rtx t = XEXP (src,0);
5515 PUT_CODE (t, swap_condition (GET_CODE (t)));
5516 XEXP (pattern,0) = XEXP (pattern,1);
5517 XEXP (pattern,1) = x;
     /* Force re-recognition of the modified branch.  */
5518 INSN_CODE (next) = -1;
     /* reg-constant compare: if bumping the constant by one lets the
        branch use a simpler AVR condition, do so.  */
5520 else if (true_regnum (XEXP (pattern,0)) >= 0
5521 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5523 rtx x = XEXP (pattern,1);
5524 rtx next = next_real_insn (insn);
5525 rtx pat = PATTERN (next);
5526 rtx src = SET_SRC (pat);
5527 rtx t = XEXP (src,0);
5528 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5530 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5532 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5533 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5534 INSN_CODE (next) = -1;
5535 INSN_CODE (insn) = -1;
     /* tst of a plain register: rewrite as compare with negated source
        and swap the branch condition accordingly.  */
5539 else if (true_regnum (SET_SRC (pattern)) >= 0)
5541 /* This is a tst insn */
5542 rtx next = next_real_insn (insn);
5543 rtx pat = PATTERN (next);
5544 rtx src = SET_SRC (pat);
5545 rtx t = XEXP (src,0);
5547 PUT_CODE (t, swap_condition (GET_CODE (t)));
5548 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5550 INSN_CODE (next) = -1;
5551 INSN_CODE (insn) = -1;
5557 /* Returns register number for function return value.*/
     /* NOTE(review): body elided; presumably returns RET_REGISTER (r24).  */
5560 avr_ret_register (void)
5565 /* Create an RTX representing the place where a
5566 library function returns a value of mode MODE. */
5569 avr_libcall_value (enum machine_mode mode)
5571 int offs = GET_MODE_SIZE (mode);
     /* Values end at RET_REGISTER+1, so an N-byte value starts N-2 bytes
        below that (AVR return values grow downward from r25:r24).  */
5574 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5577 /* Create an RTX representing the place where a
5578 function returns a value of data type VALTYPE. */
5581 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
     /* Scalar-mode values use the libcall convention directly.  */
5585 if (TYPE_MODE (type) != BLKmode)
5586 return avr_libcall_value (TYPE_MODE (type));
5588 offs = int_size_in_bytes (type);
     /* Round odd BLKmode sizes up to the next power-of-two register
        grouping (4 or 8 bytes) before computing the start register.  */
5591 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5592 offs = GET_MODE_SIZE (SImode);
5593 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5594 offs = GET_MODE_SIZE (DImode);
5596 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5599 /* Places additional restrictions on the register class to
5600 use when it is necessary to copy value X into a register
     /* NOTE(review): body elided; presumably returns CLASS unchanged.  */
5604 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
     /* Nonzero if X is a hard register belonging to register class CLASS
        (used by insn conditions).  Return statements are elided here.  */
5610 test_hard_reg_class (enum reg_class class, rtx x)
5612 int regno = true_regnum (x);
5616 if (TEST_HARD_REG_CLASS (class, regno))
     /* True if the jump INSN skips exactly one insn to reach DEST, i.e.
        the branch distance equals the insn's own length plus one word --
        the case where a skip instruction (sbrc/sbrs/...) can be used.  */
5624 jump_over_one_insn_p (rtx insn, rtx dest)
5626 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5629 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5630 int dest_addr = INSN_ADDRESSES (uid);
5631 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5634 /* Returns 1 if a value of mode MODE can be stored starting with hard
5635 register number REGNO. On the enhanced core, anything larger than
5636 1 byte must start in even numbered register for "movw" to work
5637 (this way we don't have to check for odd registers everywhere). */
5640 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5642 /* Disallow QImode in stack pointer regs. */
5643 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5646 /* The only thing that can go into registers r28:r29 is a Pmode. */
5647 if (regno == REG_Y && mode == Pmode)
5650 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5651 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5657 /* Modes larger than QImode occupy consecutive registers. */
5658 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5661 /* All modes larger than QImode should start in an even register. */
5662 return !(regno & 1);
5665 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5666 (1 or 2). Used for lds/sts -> in/out optimization. Add 0x20 to SIZE
5667 to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
5670 avr_io_address_p (rtx x, int size)
     /* Only when optimizing: constant address inside the memory-mapped
        I/O window [0x20, 0x60), with room for SIZE bytes.  */
5672 return (optimize > 0 && GET_CODE (x) == CONST_INT
5673 && INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size);
     /* Emit assembler to reload a 16-bit value (operands[1]) into
        operands[0] using scratch register %2; returns the template
        string.  *LEN, when non-NULL, presumably receives the insn count
        (assignments elided in this listing).  */
5677 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5683 if (GET_CODE (operands[1]) == CONST_INT)
5685 int val = INTVAL (operands[1]);
     /* Low byte zero: clear %A0 from __zero_reg__, load only the high byte.  */
5686 if ((val & 0xff) == 0)
5689 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5690 AS2 (ldi,%2,hi8(%1)) CR_TAB
     /* High byte zero: load only the low byte, clear %B0.  */
5693 else if ((val & 0xff00) == 0)
5696 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5697 AS2 (mov,%A0,%2) CR_TAB
5698 AS2 (mov,%B0,__zero_reg__));
     /* Both bytes equal: one ldi serves both halves.  */
5700 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5703 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5704 AS2 (mov,%A0,%2) CR_TAB
     /* General case: ldi/mov per byte through the scratch register.  */
5709 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5710 AS2 (mov,%A0,%2) CR_TAB
5711 AS2 (ldi,%2,hi8(%1)) CR_TAB
     /* Emit assembler to reload a 32-bit (SImode/SFmode) value into
        operands[0] via scratch %2.  For CONST_INT sources, zero bytes are
        copied from __zero_reg__ instead of being loaded with ldi.  */
5717 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5719 rtx src = operands[1];
5720 int cnst = (GET_CODE (src) == CONST_INT);
     /* Length: 4 movs plus one ldi per non-zero byte of the constant.  */
5725 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5726 + ((INTVAL (src) & 0xff00) != 0)
5727 + ((INTVAL (src) & 0xff0000) != 0)
5728 + ((INTVAL (src) & 0xff000000) != 0);
     /* Byte 0 (%A0): zero-copy or ldi lo8 through the scratch reg.  */
5735 if (cnst && ((INTVAL (src) & 0xff) == 0))
5736 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5739 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5740 output_asm_insn (AS2 (mov, %A0, %2), operands);
     /* Byte 1 (%B0).  */
5742 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5743 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5746 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5747 output_asm_insn (AS2 (mov, %B0, %2), operands);
     /* Byte 2 (%C0).  */
5749 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5750 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5753 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5754 output_asm_insn (AS2 (mov, %C0, %2), operands);
     /* Byte 3 (%D0).  */
5756 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5757 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5760 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5761 output_asm_insn (AS2 (mov, %D0, %2), operands);
     /* Output a "bld %<byte>0,<bit>" insn for bit number BIT_NR of the
        (possibly multi-byte) operand: byte letter A..D from the high bits,
        bit digit 0..7 from the low three bits.  */
5767 avr_output_bld (rtx operands[], int bit_nr)
5769 static char s[] = "bld %A0,0";
5771 s[5] = 'A' + (bit_nr >> 3);
5772 s[8] = '0' + (bit_nr & 7);
5773 output_asm_insn (s, operands);
     /* Output one jump-table element for label number VALUE into the
        progmem section: a pm() word on devices that index the table, or a
        direct rjmp otherwise (the selecting condition line is elided).  */
5777 avr_output_addr_vec_elt (FILE *stream, int value)
5779 switch_to_section (progmem_section);
5781 fprintf (stream, "\t.word pm(.L%d)\n", value);
5783 fprintf (stream, "\trjmp .L%d\n", value);
5786 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5787 registers (for a define_peephole2) in the current function. */
5790 avr_peep2_scratch_safe (rtx scratch)
     /* In a leaf interrupt/signal handler, only registers already saved by
        the prologue (i.e. live somewhere) may be clobbered as scratch;
        every register covered by SCRATCH's mode is checked.  */
5792 if ((interrupt_function_p (current_function_decl)
5793 || signal_function_p (current_function_decl))
5794 && leaf_function_p ())
5796 int first_reg = true_regnum (scratch);
5797 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5800 for (reg = first_reg; reg <= last_reg; reg++)
5802 if (!df_regs_ever_live_p (reg))
5809 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5810 or memory location in the I/O space (QImode only).
5812 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5813 Operand 1: register operand to test, or CONST_INT memory address.
5814 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5815 Operand 3: label to jump to if the test is true. */
5818 avr_out_sbxx_branch (rtx insn, rtx operands[])
5820 enum rtx_code comp = GET_CODE (operands[0]);
5821 int long_jump = (get_attr_length (insn) >= 4);
     /* Long branches (and jumps over a single insn) use the inverted skip
        form so a short skip instruction suffices.  */
5822 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5826 else if (comp == LT)
5830 comp = reverse_condition (comp);
     /* I/O address operand: use sbis/sbic on the low I/O space, otherwise
        read through __tmp_reg__ and test with sbrs/sbrc.  */
5832 if (GET_CODE (operands[1]) == CONST_INT)
5834 if (INTVAL (operands[1]) < 0x40)
5837 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5839 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5843 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5845 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5847 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5850 else /* GET_CODE (operands[1]) == REG */
5852 if (GET_MODE (operands[1]) == QImode)
5855 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5857 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5859 else /* HImode or SImode */
     /* Multi-byte register: locate the bit from the mask in operands[2]
        and patch the byte letter / bit digit into the template.  */
5861 static char buf[] = "sbrc %A1,0";
5862 int bit_nr = exact_log2 (INTVAL (operands[2])
5863 & GET_MODE_MASK (GET_MODE (operands[1])));
5865 buf[3] = (comp == EQ) ? 's' : 'c';
5866 buf[6] = 'A' + (bit_nr >> 3);
5867 buf[9] = '0' + (bit_nr & 7);
5868 output_asm_insn (buf, operands);
     /* Long form: skip over an rjmp to the target; short form: rjmp.  */
5873 return (AS1 (rjmp,.+4) CR_TAB
5876 return AS1 (rjmp,%3);
5880 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5883 avr_asm_out_ctor (rtx symbol, int priority)
     /* Pull in libgcc's constructor-running startup code.  */
5885 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5886 default_ctor_section_asm_out_constructor (symbol, priority);
5889 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5892 avr_asm_out_dtor (rtx symbol, int priority)
     /* Pull in libgcc's destructor-running shutdown code.  */
5894 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5895 default_dtor_section_asm_out_destructor (symbol, priority);
5898 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5901 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5903 if (TYPE_MODE (type) == BLKmode)
5905 HOST_WIDE_INT size = int_size_in_bytes (type);
5906 return (size == -1 || size > 8);