1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
/* NOTE(review): the expression 64 - GET_MODE_SIZE keeps the offset of the
   operand's LAST byte inside the displacement range — presumably the 6-bit
   LDD displacement (0..63) on AVR; confirm against the AVR ISA manual.  */
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for the static helpers and target-hook
   implementations defined later in this file.  The non-static
   avr_attribute_table is declared here so TARGET_ATTRIBUTE_TABLE below
   can reference it before its definition.  */
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_regs_to_save (HARD_REG_SET *);
56 static int sequent_regs_live (void);
57 static const char *ptrreg_to_str (int);
58 static const char *cond_string (enum rtx_code);
59 static int avr_num_arg_regs (enum machine_mode, tree);
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
65 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
66 const struct attribute_spec avr_attribute_table[];
67 static bool avr_assemble_integer (rtx, unsigned int, int);
68 static void avr_file_start (void);
69 static void avr_file_end (void);
70 static void avr_asm_function_end_prologue (FILE *);
71 static void avr_asm_function_begin_epilogue (FILE *);
72 static void avr_insert_attributes (tree, tree *);
73 static void avr_asm_init_sections (void);
74 static unsigned int avr_section_type_flags (tree, const char *, int);
76 static void avr_reorg (void);
77 static void avr_asm_out_ctor (rtx, int);
78 static void avr_asm_out_dtor (rtx, int);
79 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
80 static bool avr_rtx_costs (rtx, int, int, int *);
81 static int avr_address_cost (rtx);
82 static bool avr_return_in_memory (const_tree, const_tree);
83 static struct machine_function * avr_init_machine_status (void);
/* File-scope state shared by the backend.  The GTY(()) rtx caches are
   initialized in avr_override_options; the avr_*_p feature flags are set
   there from the selected architecture entry.  */
84 /* Allocate registers from r25 to r8 for parameters for function calls. */
85 #define FIRST_CUM_REG 26
/* NOTE(review): FIRST_CUM_REG is 26 although the comment says r25 —
   presumably the cumulative counter starts one past the first argument
   register and counts downward; confirm against function_arg handling.  */
87 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
88 static GTY(()) rtx tmp_reg_rtx;
90 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
91 static GTY(()) rtx zero_reg_rtx;
93 /* AVR register names {"r0", "r1", ..., "r31"} */
94 static const char *const avr_regnames[] = REGISTER_NAMES;
96 /* This holds the last insn address. */
97 static int last_insn_address = 0;
99 /* Preprocessor macros to define depending on MCU type. */
100 const char *avr_base_arch_macro;
101 const char *avr_extra_arch_macro;
103 /* Current architecture. */
104 const struct base_arch_s *avr_current_arch;
106 section *progmem_section;
108 /* More than 8K of program memory: use "call" and "jmp". */
/* NOTE(review): the variable for the comment above (avr_mega_p, assigned
   in avr_override_options) appears to be elided in this excerpt.  */
111 /* Core have 'MUL*' instructions. */
112 int avr_have_mul_p = 0;
114 /* Assembler only. */
115 int avr_asm_only_p = 0;
117 /* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
118 int avr_have_movw_lpmx_p = 0;
/* Per-architecture capability table, indexed by the ARCH_* value stored in
   each mcu_type_s entry.  avr_override_options reads the fields asm_only,
   have_mul, have_jmp_call and have_movw_lpmx from this table, plus the
   trailing __AVR_ARCH__ macro string; the exact field order matches
   struct base_arch_s (declared elsewhere — not visible in this excerpt).  */
120 static const struct base_arch_s avr_arch_types[] = {
121 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
122 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
123 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
124 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
125 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
126 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
127 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
128 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
129 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
130 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" }
/* Per-MCU record: device name as given to -mmcu=, index into
   avr_arch_types[], and the device-specific preprocessor macro
   (NULL for the generic per-architecture entries).  */
133 /* These names are used as the index into the avr_arch_types[] table
151 const char *const name;
152 int arch; /* index in avr_arch_types[] */
153 /* Must lie outside user's namespace. NULL == no macro. */
154 const char *const macro;
/* Device table terminated by a { NULL, ARCH_UNKNOWN, NULL } sentinel;
   avr_override_options scans it linearly by name.  Keep grouped by
   architecture as the section comments indicate.  */
157 /* List of all known AVR MCU types - if updated, it has to be kept
158 in sync in several places (FIXME: is there a better way?):
160 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
161 - t-avr (MULTILIB_MATCHES)
162 - gas/config/tc-avr.c
165 static const struct mcu_type_s avr_mcu_types[] = {
166 /* Classic, <= 8K. */
167 { "avr2", ARCH_AVR2, NULL },
168 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
169 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
170 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
171 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
172 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
173 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
174 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
175 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
176 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
177 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
178 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
179 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
180 /* Classic + MOVW, <= 8K. */
181 { "avr25", ARCH_AVR25, NULL },
182 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
183 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
184 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
185 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
186 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
187 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
188 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
189 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
190 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
191 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
192 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
193 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
194 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
195 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
196 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
197 /* Classic, > 8K, <= 64K. */
198 { "avr3", ARCH_AVR3, NULL },
199 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
200 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
201 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
202 /* Classic, == 128K. */
203 { "avr31", ARCH_AVR31, NULL },
/* NOTE(review): atmega103 sits under the "== 128K" (avr31) heading yet is
   tagged ARCH_AVR3 — verify whether this is intentional or a table bug.  */
204 { "atmega103", ARCH_AVR3, "__AVR_ATmega103__" },
205 /* Classic + MOVW + JMP/CALL. */
206 { "avr35", ARCH_AVR35, NULL },
207 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
208 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
209 /* Enhanced, <= 8K. */
210 { "avr4", ARCH_AVR4, NULL },
211 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
212 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
213 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
214 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
215 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
216 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
217 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
218 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
219 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
220 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
221 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
222 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
223 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
224 /* Enhanced, > 8K, <= 64K. */
225 { "avr5", ARCH_AVR5, NULL },
226 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
227 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
228 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
229 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
230 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
231 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
232 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
233 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
234 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
235 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
236 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
237 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
238 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
239 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
240 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
241 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
242 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
243 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
244 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
245 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
246 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
247 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
248 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
249 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
250 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
251 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
252 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
253 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
254 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
255 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
256 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
257 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
258 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
259 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
260 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
261 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
262 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
263 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
264 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
265 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
266 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
267 /* Enhanced, == 128K. */
268 { "avr51", ARCH_AVR51, NULL },
269 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
270 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
271 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
272 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
273 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
274 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
275 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
276 /* Assembler only. */
277 { "avr1", ARCH_AVR1, NULL },
278 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
279 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
280 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
281 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
282 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
283 { NULL, ARCH_UNKNOWN, NULL }
286 int avr_case_values_threshold = 30000;
/* Target-hook wiring: each #undef/#define pair overrides a default hook
   before TARGET_INITIALIZER expands into the targetm definition at the
   bottom of this block.  */
288 /* Initialize the GCC target structure. */
289 #undef TARGET_ASM_ALIGNED_HI_OP
290 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
291 #undef TARGET_ASM_ALIGNED_SI_OP
292 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
293 #undef TARGET_ASM_UNALIGNED_HI_OP
294 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
295 #undef TARGET_ASM_UNALIGNED_SI_OP
296 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
297 #undef TARGET_ASM_INTEGER
298 #define TARGET_ASM_INTEGER avr_assemble_integer
299 #undef TARGET_ASM_FILE_START
300 #define TARGET_ASM_FILE_START avr_file_start
301 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
302 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
303 #undef TARGET_ASM_FILE_END
304 #define TARGET_ASM_FILE_END avr_file_end
306 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
307 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
308 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
309 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
310 #undef TARGET_ATTRIBUTE_TABLE
311 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
312 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
313 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
314 #undef TARGET_INSERT_ATTRIBUTES
315 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
316 #undef TARGET_SECTION_TYPE_FLAGS
317 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
318 #undef TARGET_RTX_COSTS
319 #define TARGET_RTX_COSTS avr_rtx_costs
320 #undef TARGET_ADDRESS_COST
321 #define TARGET_ADDRESS_COST avr_address_cost
322 #undef TARGET_MACHINE_DEPENDENT_REORG
323 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
325 #undef TARGET_RETURN_IN_MEMORY
326 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
328 #undef TARGET_STRICT_ARGUMENT_NAMING
329 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
331 struct gcc_target targetm = TARGET_INITIALIZER;
/* OVERRIDE_OPTIONS hook: look up -mmcu= in avr_mcu_types, publish the
   architecture's capability flags and macros into the file-scope globals,
   and create the cached tmp/zero register RTXes.  On an unknown MCU name
   the known names are listed on stderr (the error/exit path between the
   lookup loop and the fprintf is elided in this excerpt).  */
334 avr_override_options (void)
336 const struct mcu_type_s *t;
337 const struct base_arch_s *base;
/* Null-pointer check deletion is unsafe here: address 0 is valid RAM on
   AVR.  */
339 flag_delete_null_pointer_checks = 0;
341 for (t = avr_mcu_types; t->name; t++)
342 if (strcmp (t->name, avr_mcu_name) == 0)
347 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
349 for (t = avr_mcu_types; t->name; t++)
350 fprintf (stderr," %s\n", t->name);
/* Copy the selected architecture's capabilities into globals.  */
353 avr_current_arch = &avr_arch_types[t->arch];
354 base = &avr_arch_types[t->arch];
355 avr_asm_only_p = base->asm_only;
356 avr_have_mul_p = base->have_mul;
357 avr_mega_p = base->have_jmp_call;
358 avr_have_movw_lpmx_p = base->have_movw_lpmx;
359 avr_base_arch_macro = base->macro;
360 avr_extra_arch_macro = t->macro;
/* Enable tablejumps: threshold 8 normally, 17 on plain MEGA cores.  */
362 if (optimize && !TARGET_NO_TABLEJUMP)
363 avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;
365 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
366 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
368 init_machine_status = avr_init_machine_status;
/* Hard-register-number -> register-class map, indexed by regno 0..33
   (32 GPRs plus the two stack-pointer bytes); consumed by
   avr_regno_reg_class below.  */
371 /* return register class from register number. */
373 static const int reg_class_tab[]={
374 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
375 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
376 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
377 GENERAL_REGS, /* r0 - r15 */
378 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
379 LD_REGS, /* r16 - 23 */
380 ADDW_REGS,ADDW_REGS, /* r24,r25 */
381 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
382 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
383 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
384 STACK_REG,STACK_REG /* SPL,SPH */
/* init_machine_status callback (installed by avr_override_options):
   allocate a zero-initialized, GC-managed machine_function for the
   current function.  */
387 /* Function to set up the backend function structure. */
389 static struct machine_function *
390 avr_init_machine_status (void)
392 return ((struct machine_function *)
393 ggc_alloc_cleared (sizeof (struct machine_function)));
/* Map hard register number R to its register class via reg_class_tab.
   NOTE(review): no visible bounds check on R — presumably callers only
   pass valid hard regnos; the out-of-range fallback is elided here.  */
396 /* Return register class for register R. */
399 avr_regno_reg_class (int r)
402 return reg_class_tab[r];
/* Nonzero iff FUNC (must be a FUNCTION_DECL) carries the "naked"
   attribute.  Note the lookup is on the function TYPE's attributes,
   unlike interrupt/signal below which use DECL_ATTRIBUTES.  */
406 /* Return nonzero if FUNC is a naked function. */
409 avr_naked_function_p (tree func)
413 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
415 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
416 return a != NULL_TREE;
/* Nonzero iff FUNC is a FUNCTION_DECL with the "interrupt" attribute
   on the decl; non-FUNCTION_DECL input returns zero (return statement
   elided in this excerpt).  */
419 /* Return nonzero if FUNC is an interrupt function as specified
420 by the "interrupt" attribute. */
423 interrupt_function_p (tree func)
427 if (TREE_CODE (func) != FUNCTION_DECL)
430 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
431 return a != NULL_TREE;
/* Nonzero iff FUNC is a FUNCTION_DECL with the "signal" attribute on the
   decl — same shape as interrupt_function_p above.  */
434 /* Return nonzero if FUNC is a signal function as specified
435 by the "signal" attribute. */
438 signal_function_p (tree func)
442 if (TREE_CODE (func) != FUNCTION_DECL)
445 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
446 return a != NULL_TREE;
/* Nonzero iff FUNC (must be a FUNCTION_DECL) carries the "OS_task"
   attribute on its type — same lookup shape as avr_naked_function_p.  */
449 /* Return nonzero if FUNC is a OS_task function. */
452 avr_OS_task_function_p (tree func)
456 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
458 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
459 return a != NULL_TREE;
/* Count (and, when SET is non-NULL, record in SET) the hard registers
   the prologue must push: call-used regs in non-leaf interrupt/signal
   functions, plus any live call-saved regs except the frame pointer pair
   when a frame pointer is in use.  Returns 0 for noreturn/OS_task
   functions.  NOTE(review): the count accumulation and the SET==NULL
   guard lines are elided from this excerpt.  */
462 /* Return the number of hard registers to push/pop in the prologue/epilogue
463 of the current function, and optionally store these registers in SET. */
466 avr_regs_to_save (HARD_REG_SET *set)
469 int int_or_sig_p = (interrupt_function_p (current_function_decl)
470 || signal_function_p (current_function_decl));
471 int leaf_func_p = leaf_function_p ();
474 CLEAR_HARD_REG_SET (*set);
477 /* No need to save any registers if the function never returns or
478 is have "OS_task" attribute. */
479 if (TREE_THIS_VOLATILE (current_function_decl)
480 || cfun->machine->is_OS_task)
483 for (reg = 0; reg < 32; reg++)
485 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
486 any global register variables. */
490 if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
491 || (df_regs_ever_live_p (reg)
492 && (int_or_sig_p || !call_used_regs[reg])
493 && !(frame_pointer_needed
494 && (reg == REG_Y || reg == (REG_Y+1)))))
497 SET_HARD_REG_BIT (*set, reg);
/* INITIAL_ELIMINATION_OFFSET: distance from arg pointer to FROM/TO pair.
   For the frame->stack case (shown) the offset is frame size + saved
   regs + return address (2) + 1, plus 2 more when the old frame pointer
   was pushed; the other elimination cases are elided here.  */
504 /* Compute offset between arg_pointer and frame_pointer. */
507 initial_elimination_offset (int from, int to)
509 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
513 int offset = frame_pointer_needed ? 2 : 0;
515 offset += avr_regs_to_save (NULL);
516 return get_frame_size () + 2 + 1 + offset;
/* True when the epilogue needs nothing but "ret": no frame, no frame
   pointer, no saved registers, and none of the interrupt/signal/naked/
   noreturn special cases.  */
520 /* Return 1 if the function epilogue is just a single "ret". */
523 avr_simple_epilogue (void)
525 return (! frame_pointer_needed
526 && get_frame_size () == 0
527 && avr_regs_to_save (NULL) == 0
528 && ! interrupt_function_p (current_function_decl)
529 && ! signal_function_p (current_function_decl)
530 && ! avr_naked_function_p (current_function_decl)
531 && ! TREE_THIS_VOLATILE (current_function_decl));
/* Detect whether the live call-saved registers form one contiguous run
   (as required by the -mcall-prologues save/restore helpers): returns
   the run length, or 0 when the live registers are not contiguous.
   NOTE(review): the live_seq/cur_seq counter updates are elided in this
   excerpt — only the scan structure is visible.  */
534 /* This function checks sequence of live registers. */
537 sequent_regs_live (void)
543 for (reg = 0; reg < 18; ++reg)
545 if (!call_used_regs[reg])
547 if (df_regs_ever_live_p (reg))
557 if (!frame_pointer_needed)
559 if (df_regs_ever_live_p (REG_Y))
567 if (df_regs_ever_live_p (REG_Y+1))
580 return (cur_seq == live_seq) ? live_seq : 0;
/* Emit the RTL prologue: classify the function (naked/interrupt/signal/
   OS_task), optionally use the compact -mcall-prologues save sequence,
   otherwise push SREG/tmp/zero for interrupts, push the registers from
   avr_regs_to_save, set up the frame pointer, and allocate the frame by
   whichever of two methods (adjust FP then copy to SP, or adjust SP
   directly) assembles shorter.  All emitted insns are marked
   RTX_FRAME_RELATED_P for unwind info.  */
583 /* Output function prologue. */
586 expand_prologue (void)
590 HOST_WIDE_INT size = get_frame_size();
591 /* Define templates for push instructions. */
592 rtx pushbyte = gen_rtx_MEM (QImode,
593 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
594 rtx pushword = gen_rtx_MEM (HImode,
595 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
598 last_insn_address = 0;
600 /* Init cfun->machine. */
601 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
602 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
603 cfun->machine->is_signal = signal_function_p (current_function_decl);
604 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
606 /* Prologue: naked. */
607 if (cfun->machine->is_naked)
612 live_seq = sequent_regs_live ();
/* The compact call-prologues path is only usable for plain functions.  */
613 minimize = (TARGET_CALL_PROLOGUES
614 && !cfun->machine->is_interrupt
615 && !cfun->machine->is_signal
616 && !cfun->machine->is_OS_task
619 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
621 if (cfun->machine->is_interrupt)
623 /* Enable interrupts. */
624 insn = emit_insn (gen_enable_interrupt ());
625 RTX_FRAME_RELATED_P (insn) = 1;
/* Save __zero_reg__, __tmp_reg__ and SREG (via tmp) on entry to an
   interrupt/signal handler.  */
629 insn = emit_move_insn (pushbyte, zero_reg_rtx);
630 RTX_FRAME_RELATED_P (insn) = 1;
633 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
634 RTX_FRAME_RELATED_P (insn) = 1;
637 insn = emit_move_insn (tmp_reg_rtx,
638 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
639 RTX_FRAME_RELATED_P (insn) = 1;
640 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
641 RTX_FRAME_RELATED_P (insn) = 1;
643 /* Clear zero reg. */
644 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
645 RTX_FRAME_RELATED_P (insn) = 1;
647 /* Prevent any attempt to delete the setting of ZERO_REG! */
648 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
650 if (minimize && (frame_pointer_needed || live_seq > 6))
652 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
653 gen_int_mode (size, HImode))
654 RTX_FRAME_RELATED_P (insn) = 1;
657 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
658 gen_int_mode (size + live_seq, HImode)));
659 RTX_FRAME_RELATED_P (insn) = 1;
/* Generic path: push each register selected by avr_regs_to_save.  */
664 avr_regs_to_save (&set);
666 for (reg = 0; reg < 32; ++reg)
668 if (TEST_HARD_REG_BIT (set, reg))
670 /* Emit push of register to save. */
671 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
672 RTX_FRAME_RELATED_P (insn) = 1;
675 if (frame_pointer_needed)
677 if(!cfun->machine->is_OS_task)
679 /* Push frame pointer. */
680 insn = emit_move_insn (pushword, frame_pointer_rtx);
681 RTX_FRAME_RELATED_P (insn) = 1;
686 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
687 RTX_FRAME_RELATED_P (insn) = 1;
691 /* Creating a frame can be done by direct manipulation of the
692 stack or via the frame pointer. These two methods are:
699 the optimum method depends on function type, stack and frame size.
700 To avoid a complex logic, both methods are tested and shortest
704 if (TARGET_TINY_STACK)
706 if (size < -63 || size > 63)
707 warning (0, "large frame pointer change (%d) with -mtiny-stack", size);
709 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
710 over 'sbiw' (2 cycles, same size). */
711 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
715 /* Normal sized addition. */
716 myfp = frame_pointer_rtx;
/* Measure both frame-allocation sequences and pick the shorter.  */
718 /* Calculate length. */
721 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
723 get_attr_length (gen_move_insn (myfp,
724 gen_rtx_PLUS (GET_MODE(myfp), myfp,
728 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
730 /* Method 2-Adjust Stack pointer. */
731 int sp_plus_length = 0;
735 get_attr_length (gen_move_insn (stack_pointer_rtx,
736 gen_rtx_PLUS (HImode, stack_pointer_rtx,
740 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
742 /* Use shortest method. */
743 if (size <= 6 && (sp_plus_length < method1_length))
745 insn = emit_move_insn (stack_pointer_rtx,
746 gen_rtx_PLUS (HImode, stack_pointer_rtx,
747 gen_int_mode (-size, HImode)));
748 RTX_FRAME_RELATED_P (insn) = 1;
749 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
750 RTX_FRAME_RELATED_P (insn) = 1;
754 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
755 RTX_FRAME_RELATED_P (insn) = 1;
756 insn = emit_move_insn (myfp,
757 gen_rtx_PLUS (GET_MODE(myfp), myfp,
758 gen_int_mode (-size, GET_MODE(myfp))));
759 RTX_FRAME_RELATED_P (insn) = 1;
760 insn = emit_move_insn ( stack_pointer_rtx, frame_pointer_rtx);
761 RTX_FRAME_RELATED_P (insn) = 1;
/* TARGET_ASM_FUNCTION_END_PROLOGUE hook: emit a human-readable comment
   into the assembly describing the prologue kind and frame size.  */
768 /* Output summary at end of function prologue. */
771 avr_asm_function_end_prologue (FILE *file)
773 if (cfun->machine->is_naked)
775 fputs ("/* prologue: naked */\n", file);
779 if (cfun->machine->is_interrupt)
781 fputs ("/* prologue: Interrupt */\n", file);
783 else if (cfun->machine->is_signal)
785 fputs ("/* prologue: Signal */\n", file);
788 fputs ("/* prologue: function */\n", file);
790 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
/* EPILOGUE_USES: interrupt/signal epilogues implicitly use registers
   (they restore SREG/tmp/zero), so report uses there.  The condition's
   first operand and the return statements are elided in this excerpt.  */
795 /* Implement EPILOGUE_USES. */
798 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
802 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
/* Emit the RTL epilogue — the mirror of expand_prologue: either the
   compact epilogue_restores sequence, or frame deallocation (again
   choosing the shorter of FP-adjust vs SP-adjust), frame-pointer pop,
   register pops in reverse order, interrupt/signal state restore, then
   "ret".  */
807 /* Output RTL epilogue. */
810 expand_epilogue (void)
815 HOST_WIDE_INT size = get_frame_size();
817 /* epilogue: naked */
818 if (cfun->machine->is_naked)
820 emit_jump_insn (gen_return ());
824 live_seq = sequent_regs_live ();
825 minimize = (TARGET_CALL_PROLOGUES
826 && !cfun->machine->is_interrupt
827 && !cfun->machine->is_signal
828 && !cfun->machine->is_OS_task
831 if (minimize && (frame_pointer_needed || live_seq > 4))
833 if (frame_pointer_needed)
835 /* Get rid of frame. */
836 emit_move_insn(frame_pointer_rtx,
837 gen_rtx_PLUS (HImode, frame_pointer_rtx,
838 gen_int_mode (size, HImode)));
842 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
845 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
849 if (frame_pointer_needed)
853 /* Try two methods to adjust stack and select shortest. */
855 /* Method 1-Adjust frame pointer. */
857 get_attr_length (gen_move_insn (frame_pointer_rtx,
858 gen_rtx_PLUS (HImode, frame_pointer_rtx,
861 /* Copy to stack pointer. */
863 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
865 /* Method 2-Adjust Stack pointer. */
866 int sp_plus_length = 0;
870 get_attr_length (gen_move_insn (stack_pointer_rtx,
871 gen_rtx_PLUS (HImode, stack_pointer_rtx,
875 /* Use shortest method. */
876 if (size <= 5 && (sp_plus_length < fp_plus_length))
878 emit_move_insn (stack_pointer_rtx,
879 gen_rtx_PLUS (HImode, stack_pointer_rtx,
880 gen_int_mode (size, HImode)));
884 emit_move_insn (frame_pointer_rtx,
885 gen_rtx_PLUS (HImode, frame_pointer_rtx,
886 gen_int_mode (size, HImode)));
887 /* Copy to stack pointer. */
888 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
891 if(!cfun->machine->is_OS_task)
893 /* Restore previous frame_pointer. */
894 emit_insn (gen_pophi (frame_pointer_rtx));
897 /* Restore used registers. */
899 avr_regs_to_save (&set);
/* Pop in reverse order of the prologue pushes.  */
900 for (reg = 31; reg >= 0; --reg)
902 if (TEST_HARD_REG_BIT (set, reg))
903 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
905 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
908 /* Restore SREG using tmp reg as scratch. */
909 emit_insn (gen_popqi (tmp_reg_rtx));
911 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
914 /* Restore tmp REG. */
915 emit_insn (gen_popqi (tmp_reg_rtx));
917 /* Restore zero REG. */
918 emit_insn (gen_popqi (zero_reg_rtx));
921 emit_jump_insn (gen_return ());
/* TARGET_ASM_FUNCTION_BEGIN_EPILOGUE hook: emit an assembly comment
   marking where the epilogue starts.  */
925 /* Output summary messages at beginning of function epilogue. */
928 avr_asm_function_begin_epilogue (FILE *file)
930 fprintf (file, "/* epilogue start */\n");
/* GO_IF_LEGITIMATE_ADDRESS worker: classify address X for MODE and
   return the register class it is valid for (as an int), or 0 when it
   is no legal address.  Accepted forms: plain base register, constant
   address, base + small positive constant (within MAX_LD_OFFSET for
   Y/Z/frame/arg pointers), and PRE_DEC/POST_INC of a base register.
   NOTE(review): several assignments to R in the PLUS branch are elided
   in this excerpt.  */
933 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
934 machine for a memory operand of mode MODE. */
937 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
939 enum reg_class r = NO_REGS;
941 if (TARGET_ALL_DEBUG)
943 fprintf (stderr, "mode: (%s) %s %s %s %s:",
945 strict ? "(strict)": "",
946 reload_completed ? "(reload_completed)": "",
947 reload_in_progress ? "(reload_in_progress)": "",
948 reg_renumber ? "(reg_renumber)" : "");
949 if (GET_CODE (x) == PLUS
950 && REG_P (XEXP (x, 0))
951 && GET_CODE (XEXP (x, 1)) == CONST_INT
952 && INTVAL (XEXP (x, 1)) >= 0
953 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
956 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
957 true_regnum (XEXP (x, 0)));
/* Classification proper starts here.  */
960 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
961 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
963 else if (CONSTANT_ADDRESS_P (x))
965 else if (GET_CODE (x) == PLUS
966 && REG_P (XEXP (x, 0))
967 && GET_CODE (XEXP (x, 1)) == CONST_INT
968 && INTVAL (XEXP (x, 1)) >= 0)
970 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
974 || REGNO (XEXP (x,0)) == REG_Y
975 || REGNO (XEXP (x,0)) == REG_Z)
976 r = BASE_POINTER_REGS;
977 if (XEXP (x,0) == frame_pointer_rtx
978 || XEXP (x,0) == arg_pointer_rtx)
979 r = BASE_POINTER_REGS;
981 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
984 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
985 && REG_P (XEXP (x, 0))
986 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
987 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
991 if (TARGET_ALL_DEBUG)
993 fprintf (stderr, " ret = %c\n", r + '0');
995 return r == NO_REGS ? 0 : (int)r;
/* LEGITIMIZE_ADDRESS worker: force (reg + reg) and (reg + big-offset)
   addresses into a register; other forms are returned unchanged (final
   return elided in this excerpt).  */
998 /* Attempts to replace X with a valid
999 memory address for an operand of mode MODE */
1002 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1005 if (TARGET_ALL_DEBUG)
1007 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1011 if (GET_CODE (oldx) == PLUS
1012 && REG_P (XEXP (oldx,0)))
1014 if (REG_P (XEXP (oldx,1)))
1015 x = force_reg (GET_MODE (oldx), oldx);
1016 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1018 int offs = INTVAL (XEXP (oldx,1));
1019 if (frame_pointer_rtx != XEXP (oldx,0))
1020 if (offs > MAX_LD_OFFSET (mode))
1022 if (TARGET_ALL_DEBUG)
1023 fprintf (stderr, "force_reg (big offset)\n");
1024 x = force_reg (GET_MODE (oldx), oldx);
/* Map a pointer-register number (REG_X/REG_Y/REG_Z) to its assembler
   name; any other regno reports an operand error.  */
1032 /* Return a pointer register name as a string. */
1035 ptrreg_to_str (int regno)
1039 case REG_X: return "X";
1040 case REG_Y: return "Y";
1041 case REG_Z: return "Z";
1043 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
/* Map an rtx comparison CODE to the AVR branch-condition mnemonic
   suffix; when the previous CC setter left V unusable, signed tests
   fall back to N-flag based conditions.  The switch arms and return
   strings are largely elided in this excerpt.  */
1048 /* Return the condition name as a string.
1049 Used in conditional jump constructing */
1052 cond_string (enum rtx_code code)
1061 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1066 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
/* PRINT_OPERAND_ADDRESS: emit ADDR — a pointer register, "-REG" for
   PRE_DEC, "REG+" for POST_INC, or a constant; function/label addresses
   are wrapped in pm() so the linker divides by 2 (word addressing).  */
1079 /* Output ADDR to FILE as address. */
1082 print_operand_address (FILE *file, rtx addr)
1084 switch (GET_CODE (addr))
1087 fprintf (file, ptrreg_to_str (REGNO (addr)));
1091 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1095 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1099 if (CONSTANT_ADDRESS_P (addr)
1100 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1101 || GET_CODE (addr) == LABEL_REF))
1103 fprintf (file, "pm(");
1104 output_addr_const (file,addr);
1105 fprintf (file ,")");
1108 output_addr_const (file, addr);
/* PRINT_OPERAND: emit operand X with modifier CODE.  'A'..'D' select
   byte 0..3 of a multi-byte operand (via the abcd offset); 'o' prints a
   (reg+disp) displacement; 'p'/'r' print the pointer-register name or
   number for post-inc/pre-dec addresses; 'j'/'k' print the (reversed)
   condition string; CONST_DOUBLE is supported for SFmode only.  */
1113 /* Output X as assembler operand to file FILE. */
1116 print_operand (FILE *file, rtx x, int code)
1120 if (code >= 'A' && code <= 'D')
1130 if (x == zero_reg_rtx)
1131 fprintf (file, "__zero_reg__");
1133 fprintf (file, reg_names[true_regnum (x) + abcd]);
1135 else if (GET_CODE (x) == CONST_INT)
1136 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1137 else if (GET_CODE (x) == MEM)
1139 rtx addr = XEXP (x,0);
1141 if (CONSTANT_P (addr) && abcd)
1144 output_address (addr);
1145 fprintf (file, ")+%d", abcd);
1147 else if (code == 'o')
1149 if (GET_CODE (addr) != PLUS)
1150 fatal_insn ("bad address, not (reg+disp):", addr);
1152 print_operand (file, XEXP (addr, 1), 0);
1154 else if (code == 'p' || code == 'r')
1156 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1157 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1160 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1162 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1164 else if (GET_CODE (addr) == PLUS)
1166 print_operand_address (file, XEXP (addr,0));
/* X (r26/r27) has no LDD displacement form, hence the hard error.  */
1167 if (REGNO (XEXP (addr, 0)) == REG_X)
1168 fatal_insn ("internal compiler error. Bad address:"
1171 print_operand (file, XEXP (addr,1), code);
1174 print_operand_address (file, addr);
1176 else if (GET_CODE (x) == CONST_DOUBLE)
1180 if (GET_MODE (x) != SFmode)
1181 fatal_insn ("internal compiler error. Unknown mode:", x);
1182 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1183 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1184 fprintf (file, "0x%lx", val);
1186 else if (code == 'j')
1187 fputs (cond_string (GET_CODE (x)), file);
1188 else if (code == 'k')
1189 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1191 print_operand_address (file, x);
/* NOTICE_UPDATE_CC: track how INSN affects the condition-code status
   based on its "cc" insn attribute: none (no change), set to a value
   with overflow clear / overflow unusable, compare, or clobber — with
   a special correction for QImode ASHIFTRT by a constant shift count.
   Several case labels and guard lines are elided in this excerpt.  */
1194 /* Update the condition code in the INSN. */
1197 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1201 switch (get_attr_cc (insn))
1204 /* Insn does not affect CC at all. */
1212 set = single_set (insn);
1216 cc_status.flags |= CC_NO_OVERFLOW;
1217 cc_status.value1 = SET_DEST (set);
1222 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1223 The V flag may or may not be known but that's ok because
1224 alter_cond will change tests to use EQ/NE. */
1225 set = single_set (insn);
1229 cc_status.value1 = SET_DEST (set);
1230 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1235 set = single_set (insn);
1238 cc_status.value1 = SET_SRC (set);
1242 /* Insn doesn't leave CC in a usable state. */
1245 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1246 set = single_set (insn);
1249 rtx src = SET_SRC (set);
1251 if (GET_CODE (src) == ASHIFTRT
1252 && GET_MODE (src) == QImode)
1254 rtx x = XEXP (src, 1);
1256 if (GET_CODE (x) == CONST_INT
1260 cc_status.value1 = SET_DEST (set);
1261 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* CLASS_MAX_NREGS: number of hard registers needed for MODE — a simple
   ceiling division of the mode size by the word size; the class is
   ignored on AVR.  */
1269 /* Return maximum number of consecutive registers of
1270 class CLASS needed to hold a value of mode MODE. */
1273 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1275 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1278 /* Choose mode for jump insn:
1279 1 - relative jump in range -63 <= x <= 62 ;
1280 2 - relative jump in range -2046 <= x <= 2045 ;
1281 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (a LABEL_REF or an insn); INSN is the jump
   itself.  Distances are measured in words via INSN_ADDRESSES.
   NOTE(review): the "return 3" fall-through is outside this view.  */
1284 avr_jump_mode (rtx x, rtx insn)
1286 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1287 ? XEXP (x, 0) : x));
1288 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
/* Positive distance == backward jump (current minus destination).  */
1289 int jump_distance = cur_addr - dest_addr;
1291 if (-63 <= jump_distance && jump_distance <= 62)
1293 else if (-2046 <= jump_distance && jump_distance <= 2045)
1301 /* return an AVR condition jump commands.
1302 X is a comparison RTX.
1303 LEN is a number returned by avr_jump_mode function.
1304 if REVERSE nonzero then condition code in X must be reversed. */
/* For conditions AVR has no single branch for (e.g. GT, GTU), a
   breq/br<cond> pair is synthesized; longer LEN values insert rjmp/jmp
   trampolines.  NOTE(review): the switch cases and the trailing
   default paths are partially elided in this fragment.  */
1307 ret_cond_branch (rtx x, int len, int reverse)
1309 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* When V is unusable, signed GT must be tested as Z clear and N clear
   (breq skip + brmi skip) instead of brlt.  */
1314 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1315 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1317 len == 2 ? (AS1 (breq,.+4) CR_TAB
1318 AS1 (brmi,.+2) CR_TAB
1320 (AS1 (breq,.+6) CR_TAB
1321 AS1 (brmi,.+4) CR_TAB
/* Signed GT with usable V: skip on equal, then branch unless less.  */
1325 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1327 len == 2 ? (AS1 (breq,.+4) CR_TAB
1328 AS1 (brlt,.+2) CR_TAB
1330 (AS1 (breq,.+6) CR_TAB
1331 AS1 (brlt,.+4) CR_TAB
/* Unsigned GTU: breq skip + brlo skip.  */
1334 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1336 len == 2 ? (AS1 (breq,.+4) CR_TAB
1337 AS1 (brlo,.+2) CR_TAB
1339 (AS1 (breq,.+6) CR_TAB
1340 AS1 (brlo,.+4) CR_TAB
/* Signed LE: equal branches directly to the target (%0).  */
1343 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1344 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1346 len == 2 ? (AS1 (breq,.+2) CR_TAB
1347 AS1 (brpl,.+2) CR_TAB
1349 (AS1 (breq,.+2) CR_TAB
1350 AS1 (brpl,.+4) CR_TAB
1353 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1355 len == 2 ? (AS1 (breq,.+2) CR_TAB
1356 AS1 (brge,.+2) CR_TAB
1358 (AS1 (breq,.+2) CR_TAB
1359 AS1 (brge,.+4) CR_TAB
/* Unsigned LEU: breq target, then brsh to skip the taken jump.  */
1362 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1364 len == 2 ? (AS1 (breq,.+2) CR_TAB
1365 AS1 (brsh,.+2) CR_TAB
1367 (AS1 (breq,.+2) CR_TAB
1368 AS1 (brsh,.+4) CR_TAB
/* Directly supported conditions: emit br%k1/br%j1, with rjmp/jmp
   trampolines for the longer jump modes.  */
1376 return AS1 (br%k1,%0);
1378 return (AS1 (br%j1,.+2) CR_TAB
1381 return (AS1 (br%j1,.+4) CR_TAB
1390 return AS1 (br%j1,%0);
1392 return (AS1 (br%k1,.+2) CR_TAB
1395 return (AS1 (br%k1,.+4) CR_TAB
1403 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True iff OP is a CONST_INT in [0, 255]; MODE is not consulted.  */
1406 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1408 return (GET_CODE (op) == CONST_INT
1409 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1412 /* Output all insn addresses and their sizes into the assembly language
1413 output file. This is helpful for debugging whether the length attributes
1414 in the md file are correct.
1415 Output insn cost for next insn. */
/* Debug-only hook: emits a comment per insn with its address, the size
   delta from the previously seen insn, and its rtx_cost.  Updates the
   file-scope last_insn_address (declared outside this fragment).  */
1418 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1419 int num_operands ATTRIBUTE_UNUSED)
1421 int uid = INSN_UID (insn);
1423 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1425 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1426 INSN_ADDRESSES (uid),
1427 INSN_ADDRESSES (uid) - last_insn_address,
1428 rtx_cost (PATTERN (insn), INSN));
1430 last_insn_address = INSN_ADDRESSES (uid);
1433 /* Return 0 if undefined, 1 if always true or always false. */
/* Detects unsigned comparisons against a CONST_INT that can never vary
   for the given mode width (e.g. x <= 0xff in QImode).  MAX is the
   mode's all-ones value; 0 disables the check for unknown modes.
   NOTE(review): the returns between the visible ifs are elided.  */
1436 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1438 unsigned int max = (mode == QImode ? 0xff :
1439 mode == HImode ? 0xffff :
1440 mode == SImode ? 0xffffffff : 0);
1441 if (max && operator && GET_CODE (x) == CONST_INT)
/* Only unsigned conditions are candidates for simplification here.  */
1443 if (unsigned_condition (operator) != operator)
1446 if (max != (INTVAL (x) & max)
1447 && INTVAL (x) != 0xff)
1454 /* Returns nonzero if REGNO is the number of a hard
1455 register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8..r25.  */
1458 function_arg_regno_p(int r)
1460 return (r >= 8 && r <= 25);
1463 /* Initializing the variable cum for the state at the beginning
1464 of the argument list. */
/* Starts register allocation at FIRST_CUM_REG.  For a prototyped,
   non-library call, detects varargs (last parameter type is not
   void_type_node); the use of that flag is outside this fragment.  */
1467 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1468 tree fndecl ATTRIBUTE_UNUSED)
1471 cum->regno = FIRST_CUM_REG;
1472 if (!libname && fntype)
1474 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1475 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1476 != void_type_node));
1482 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode arguments use the type's size; otherwise the mode's size.
   The result is rounded up to an even byte count so arguments start
   in even-numbered registers.  */
1485 avr_num_arg_regs (enum machine_mode mode, tree type)
1489 if (mode == BLKmode)
1490 size = int_size_in_bytes (type);
1492 size = GET_MODE_SIZE (mode);
1494 /* Align all function arguments to start in even-numbered registers.
1495 Odd-sized arguments leave holes above them. */
1497 return (size + 1) & ~1;
1500 /* Controls whether a function argument is passed
1501 in a register, and which register. */
/* Registers are allocated downward from cum->regno; an argument that
   does not fit in the remaining registers goes on the stack (the
   NULL_RTX return path is outside this fragment).  */
1504 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1505 int named ATTRIBUTE_UNUSED)
1507 int bytes = avr_num_arg_regs (mode, type);
1509 if (cum->nregs && bytes <= cum->nregs)
1510 return gen_rtx_REG (mode, cum->regno - bytes);
1515 /* Update the summarizer variable CUM to advance past an argument
1516 in the argument list. */
/* Consumes BYTES registers moving downward; once exhausted, resets
   regno to FIRST_CUM_REG (the nregs reset is outside this fragment).  */
1519 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1520 int named ATTRIBUTE_UNUSED)
1522 int bytes = avr_num_arg_regs (mode, type);
1524 cum->nregs -= bytes;
1525 cum->regno -= bytes;
1527 if (cum->nregs <= 0)
1530 cum->regno = FIRST_CUM_REG;
1534 /***********************************************************************
1535 Functions for outputting various mov's for a various modes
1536 ************************************************************************/
/* Output assembler for a QImode move.  Returns the template string;
   *L (through real_l, whose setup is outside this fragment) receives
   the instruction count.  Handles reg<->reg (including stack pointer
   via in/out), immediates, and defers memory forms to
   out_movqi_r_mr / out_movqi_mr_r.  */
1538 output_movqi (rtx insn, rtx operands[], int *l)
1541 rtx dest = operands[0];
1542 rtx src = operands[1];
1550 if (register_operand (dest, QImode))
1552 if (register_operand (src, QImode)) /* mov r,r */
1554 if (test_hard_reg_class (STACK_REG, dest))
1555 return AS2 (out,%0,%1);
1556 else if (test_hard_reg_class (STACK_REG, src))
1557 return AS2 (in,%0,%1);
1559 return AS2 (mov,%0,%1);
1561 else if (CONSTANT_P (src))
/* ldi works only on the upper registers (LD_REGS, r16-r31).  */
1563 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1564 return AS2 (ldi,%0,lo8(%1));
1566 if (GET_CODE (src) == CONST_INT)
1568 if (src == const0_rtx) /* mov r,L */
1569 return AS1 (clr,%0);
1570 else if (src == const1_rtx)
1573 return (AS1 (clr,%0) CR_TAB
1576 else if (src == constm1_rtx)
1578 /* Immediate constants -1 to any register */
1580 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear, set T, then bld the one bit.  */
1585 int bit_nr = exact_log2 (INTVAL (src));
1591 output_asm_insn ((AS1 (clr,%0) CR_TAB
1594 avr_output_bld (operands, bit_nr);
1601 /* Last resort, larger than loading from memory. */
/* Bounce the constant through r31 (an LD_REGS member), preserving
   r31 in __tmp_reg__.  */
1603 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1604 AS2 (ldi,r31,lo8(%1)) CR_TAB
1605 AS2 (mov,%0,r31) CR_TAB
1606 AS2 (mov,r31,__tmp_reg__));
1608 else if (GET_CODE (src) == MEM)
1609 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1611 else if (GET_CODE (dest) == MEM)
1613 const char *template;
/* Storing zero uses the fixed zero register instead of a constant.  */
1615 if (src == const0_rtx)
1616 operands[1] = zero_reg_rtx;
1618 template = out_movqi_mr_r (insn, operands, real_l);
1621 output_asm_insn (template, operands);
/* Output assembler for a HImode move.  Mirrors output_movqi but must
   additionally keep 16-bit stack-pointer writes atomic with respect to
   interrupts, and uses movw when AVR_HAVE_MOVW (the condition itself is
   outside this fragment -- TODO confirm).  */
1630 output_movhi (rtx insn, rtx operands[], int *l)
1633 rtx dest = operands[0];
1634 rtx src = operands[1];
1640 if (register_operand (dest, HImode))
1642 if (register_operand (src, HImode)) /* mov r,r */
1644 if (test_hard_reg_class (STACK_REG, dest))
/* TINY_STACK parts have only SPL; one out suffices.  */
1646 if (TARGET_TINY_STACK)
1649 return AS2 (out,__SP_L__,%A1);
1651 /* Use simple load of stack pointer if no interrupts are used
1652 or inside main or signal function prologue where they disabled. */
1653 else if (TARGET_NO_INTERRUPTS
1654 || (reload_completed
1655 && cfun->machine->is_signal
1656 && prologue_epilogue_contains (insn)))
/* High byte first so an interrupt between the two outs cannot see a
   torn SP -- interrupts are known disabled on this path.  */
1659 return (AS2 (out,__SP_H__,%B1) CR_TAB
1660 AS2 (out,__SP_L__,%A1));
1662 /* In interrupt prolog we know interrupts are enabled. */
1663 else if (reload_completed
1664 && cfun->machine->is_interrupt
1665 && prologue_epilogue_contains (insn))
1668 return ("cli" CR_TAB
1669 AS2 (out,__SP_H__,%B1) CR_TAB
1671 AS2 (out,__SP_L__,%A1));
/* General case: save SREG, disable interrupts (elided line),
   write SP, restore SREG/interrupt state.  */
1674 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1676 AS2 (out,__SP_H__,%B1) CR_TAB
1677 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1678 AS2 (out,__SP_L__,%A1));
1680 else if (test_hard_reg_class (STACK_REG, src))
1683 return (AS2 (in,%A0,__SP_L__) CR_TAB
1684 AS2 (in,%B0,__SP_H__));
1690 return (AS2 (movw,%0,%1));
1695 return (AS2 (mov,%A0,%A1) CR_TAB
1699 else if (CONSTANT_P (src))
1701 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1704 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1705 AS2 (ldi,%B0,hi8(%1)));
1708 if (GET_CODE (src) == CONST_INT)
1710 if (src == const0_rtx) /* mov r,L */
1713 return (AS1 (clr,%A0) CR_TAB
1716 else if (src == const1_rtx)
1719 return (AS1 (clr,%A0) CR_TAB
1720 AS1 (clr,%B0) CR_TAB
1723 else if (src == constm1_rtx)
1725 /* Immediate constants -1 to any register */
1727 return (AS1 (clr,%0) CR_TAB
1728 AS1 (dec,%A0) CR_TAB
/* Single-bit constants via set + bld, as in output_movqi.  */
1733 int bit_nr = exact_log2 (INTVAL (src));
1739 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1740 AS1 (clr,%B0) CR_TAB
1743 avr_output_bld (operands, bit_nr);
/* Constants with a zero byte: load only the nonzero half via r31.  */
1749 if ((INTVAL (src) & 0xff) == 0)
1752 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1753 AS1 (clr,%A0) CR_TAB
1754 AS2 (ldi,r31,hi8(%1)) CR_TAB
1755 AS2 (mov,%B0,r31) CR_TAB
1756 AS2 (mov,r31,__tmp_reg__));
1758 else if ((INTVAL (src) & 0xff00) == 0)
1761 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1762 AS2 (ldi,r31,lo8(%1)) CR_TAB
1763 AS2 (mov,%A0,r31) CR_TAB
1764 AS1 (clr,%B0) CR_TAB
1765 AS2 (mov,r31,__tmp_reg__));
1769 /* Last resort, equal to loading from memory. */
1771 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1772 AS2 (ldi,r31,lo8(%1)) CR_TAB
1773 AS2 (mov,%A0,r31) CR_TAB
1774 AS2 (ldi,r31,hi8(%1)) CR_TAB
1775 AS2 (mov,%B0,r31) CR_TAB
1776 AS2 (mov,r31,__tmp_reg__));
1778 else if (GET_CODE (src) == MEM)
1779 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1781 else if (GET_CODE (dest) == MEM)
1783 const char *template;
1785 if (src == const0_rtx)
1786 operands[1] = zero_reg_rtx;
1788 template = out_movhi_mr_r (insn, operands, real_l);
1791 output_asm_insn (template, operands);
1796 fatal_insn ("invalid insn:", insn);
/* Output assembler to load a QImode value from memory into a register.
   OP[0] = dest reg, OP[1] = memory source; *L receives the length.
   Special-cases SREG and I/O addresses (in), constant addresses (lds),
   and reg+displacement forms.  */
1801 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1805 rtx x = XEXP (src, 0);
1811 if (CONSTANT_ADDRESS_P (x))
1813 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1816 return AS2 (in,%0,__SREG__);
/* I/O space is memory-mapped 0x20 higher than the in/out port
   numbers, hence the -0x20 adjustment.  */
1818 if (avr_io_address_p (x, 1))
1821 return AS2 (in,%0,%1-0x20);
1824 return AS2 (lds,%0,%1);
1826 /* memory access by reg+disp */
1827 else if (GET_CODE (x) == PLUS
1828 && REG_P (XEXP (x,0))
1829 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond ldd's reach: adjust Y temporarily.  */
1831 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1833 int disp = INTVAL (XEXP (x,1));
1834 if (REGNO (XEXP (x,0)) != REG_Y)
1835 fatal_insn ("incorrect insn:",insn);
1837 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1838 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1839 AS2 (ldd,%0,Y+63) CR_TAB
1840 AS2 (sbiw,r28,%o1-63));
1842 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1843 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1844 AS2 (ld,%0,Y) CR_TAB
1845 AS2 (subi,r28,lo8(%o1)) CR_TAB
1846 AS2 (sbci,r29,hi8(%o1)));
/* X has no displacement form; adiw/sbiw around a plain ld.  */
1848 else if (REGNO (XEXP (x,0)) == REG_X)
1850 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1851 it but I have this situation with extremal optimizing options. */
1852 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1853 || reg_unused_after (insn, XEXP (x,0)))
1854 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1857 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1858 AS2 (ld,%0,X) CR_TAB
1859 AS2 (sbiw,r26,%o1));
1862 return AS2 (ldd,%0,%1);
1865 return AS2 (ld,%0,%1);
/* Output assembler to load a HImode value from memory.  Handles plain
   register bases, X-register quirks (no displacement addressing),
   reg+disp, pre-decrement, post-increment and constant addresses.
   NOTE(review): several branch conditions and length assignments are
   elided from this fragment.  */
1869 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1873 rtx base = XEXP (src, 0);
1874 int reg_dest = true_regnum (dest);
1875 int reg_base = true_regnum (base);
1876 /* "volatile" forces reading low byte first, even if less efficient,
1877 for correct operation with 16-bit I/O registers. */
1878 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the base pointer: buffer the low byte in
   __tmp_reg__ so the second load still sees a valid pointer.  */
1886 if (reg_dest == reg_base) /* R = (R) */
1889 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1890 AS2 (ld,%B0,%1) CR_TAB
1891 AS2 (mov,%A0,__tmp_reg__));
1893 else if (reg_base == REG_X) /* (R26) */
/* If X dies here we can leave it incremented.  */
1895 if (reg_unused_after (insn, base))
1898 return (AS2 (ld,%A0,X+) CR_TAB
1902 return (AS2 (ld,%A0,X+) CR_TAB
1903 AS2 (ld,%B0,X) CR_TAB
1909 return (AS2 (ld,%A0,%1) CR_TAB
1910 AS2 (ldd,%B0,%1+1));
1913 else if (GET_CODE (base) == PLUS) /* (R + i) */
1915 int disp = INTVAL (XEXP (base, 1));
1916 int reg_base = true_regnum (XEXP (base, 0));
1918 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1920 if (REGNO (XEXP (base, 0)) != REG_Y)
1921 fatal_insn ("incorrect insn:",insn);
/* Large displacement: slide Y, use ldd at offsets 62/63, slide back.  */
1923 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1924 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1925 AS2 (ldd,%A0,Y+62) CR_TAB
1926 AS2 (ldd,%B0,Y+63) CR_TAB
1927 AS2 (sbiw,r28,%o1-62));
1929 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1930 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1931 AS2 (ld,%A0,Y) CR_TAB
1932 AS2 (ldd,%B0,Y+1) CR_TAB
1933 AS2 (subi,r28,lo8(%o1)) CR_TAB
1934 AS2 (sbci,r29,hi8(%o1)));
1936 if (reg_base == REG_X)
1938 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1939 it but I have this situation with extremal
1940 optimization options. */
1943 if (reg_base == reg_dest)
1944 return (AS2 (adiw,r26,%o1) CR_TAB
1945 AS2 (ld,__tmp_reg__,X+) CR_TAB
1946 AS2 (ld,%B0,X) CR_TAB
1947 AS2 (mov,%A0,__tmp_reg__));
1949 return (AS2 (adiw,r26,%o1) CR_TAB
1950 AS2 (ld,%A0,X+) CR_TAB
1951 AS2 (ld,%B0,X) CR_TAB
1952 AS2 (sbiw,r26,%o1+1));
1955 if (reg_base == reg_dest)
1958 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1959 AS2 (ldd,%B0,%B1) CR_TAB
1960 AS2 (mov,%A0,__tmp_reg__));
1964 return (AS2 (ldd,%A0,%A1) CR_TAB
1967 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
/* Pre-dec into an overlapping dest cannot be expressed.  */
1969 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1970 fatal_insn ("incorrect insn:", insn);
1974 if (REGNO (XEXP (base, 0)) == REG_X)
1977 return (AS2 (sbiw,r26,2) CR_TAB
1978 AS2 (ld,%A0,X+) CR_TAB
1979 AS2 (ld,%B0,X) CR_TAB
1985 return (AS2 (sbiw,%r1,2) CR_TAB
1986 AS2 (ld,%A0,%p1) CR_TAB
1987 AS2 (ldd,%B0,%p1+1));
1992 return (AS2 (ld,%B0,%1) CR_TAB
1995 else if (GET_CODE (base) == POST_INC) /* (R++) */
1997 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1998 fatal_insn ("incorrect insn:", insn);
2001 return (AS2 (ld,%A0,%1) CR_TAB
2004 else if (CONSTANT_ADDRESS_P (base))
/* 16-bit I/O registers: two in instructions (port = addr - 0x20).  */
2006 if (avr_io_address_p (base, 2))
2009 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2010 AS2 (in,%B0,%B1-0x20));
2013 return (AS2 (lds,%A0,%A1) CR_TAB
2017 fatal_insn ("unknown move insn:",insn);
/* Output assembler to load an SImode (4-byte) value from memory.
   The byte order of the loads is arranged so that a destination that
   overlaps the base pointer (fully or by two registers) is still
   filled correctly, buffering one byte in __tmp_reg__ when needed.  */
2022 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2026 rtx base = XEXP (src, 0);
2027 int reg_dest = true_regnum (dest);
2028 int reg_base = true_regnum (base);
2036 if (reg_base == REG_X) /* (R26) */
2038 if (reg_dest == REG_X)
2039 /* "ld r26,-X" is undefined */
/* Load high-to-low so r26/r27 (the pointer) are overwritten last.  */
2040 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2041 AS2 (ld,r29,X) CR_TAB
2042 AS2 (ld,r28,-X) CR_TAB
2043 AS2 (ld,__tmp_reg__,-X) CR_TAB
2044 AS2 (sbiw,r26,1) CR_TAB
2045 AS2 (ld,r26,X) CR_TAB
2046 AS2 (mov,r27,__tmp_reg__));
/* Dest is r24..r27: byte C would clobber r26 mid-sequence; buffer it.  */
2047 else if (reg_dest == REG_X - 2)
2048 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2049 AS2 (ld,%B0,X+) CR_TAB
2050 AS2 (ld,__tmp_reg__,X+) CR_TAB
2051 AS2 (ld,%D0,X) CR_TAB
2052 AS2 (mov,%C0,__tmp_reg__));
2053 else if (reg_unused_after (insn, base))
2054 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2055 AS2 (ld,%B0,X+) CR_TAB
2056 AS2 (ld,%C0,X+) CR_TAB
2059 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2060 AS2 (ld,%B0,X+) CR_TAB
2061 AS2 (ld,%C0,X+) CR_TAB
2062 AS2 (ld,%D0,X) CR_TAB
/* Y/Z base with ldd available.  */
2067 if (reg_dest == reg_base)
2068 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2069 AS2 (ldd,%C0,%1+2) CR_TAB
2070 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2071 AS2 (ld,%A0,%1) CR_TAB
2072 AS2 (mov,%B0,__tmp_reg__));
2073 else if (reg_base == reg_dest + 2)
2074 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2075 AS2 (ldd,%B0,%1+1) CR_TAB
2076 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2077 AS2 (ldd,%D0,%1+3) CR_TAB
2078 AS2 (mov,%C0,__tmp_reg__));
2080 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2081 AS2 (ldd,%B0,%1+1) CR_TAB
2082 AS2 (ldd,%C0,%1+2) CR_TAB
2083 AS2 (ldd,%D0,%1+3));
2086 else if (GET_CODE (base) == PLUS) /* (R + i) */
2088 int disp = INTVAL (XEXP (base, 1));
2090 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2092 if (REGNO (XEXP (base, 0)) != REG_Y)
2093 fatal_insn ("incorrect insn:",insn);
/* Slide Y so the four bytes land at ldd offsets 60..63.  */
2095 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2096 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2097 AS2 (ldd,%A0,Y+60) CR_TAB
2098 AS2 (ldd,%B0,Y+61) CR_TAB
2099 AS2 (ldd,%C0,Y+62) CR_TAB
2100 AS2 (ldd,%D0,Y+63) CR_TAB
2101 AS2 (sbiw,r28,%o1-60));
2103 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2104 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2105 AS2 (ld,%A0,Y) CR_TAB
2106 AS2 (ldd,%B0,Y+1) CR_TAB
2107 AS2 (ldd,%C0,Y+2) CR_TAB
2108 AS2 (ldd,%D0,Y+3) CR_TAB
2109 AS2 (subi,r28,lo8(%o1)) CR_TAB
2110 AS2 (sbci,r29,hi8(%o1)));
2113 reg_base = true_regnum (XEXP (base, 0));
2114 if (reg_base == REG_X)
2117 if (reg_dest == REG_X)
2120 /* "ld r26,-X" is undefined */
2121 return (AS2 (adiw,r26,%o1+3) CR_TAB
2122 AS2 (ld,r29,X) CR_TAB
2123 AS2 (ld,r28,-X) CR_TAB
2124 AS2 (ld,__tmp_reg__,-X) CR_TAB
2125 AS2 (sbiw,r26,1) CR_TAB
2126 AS2 (ld,r26,X) CR_TAB
2127 AS2 (mov,r27,__tmp_reg__));
2130 if (reg_dest == REG_X - 2)
2131 return (AS2 (adiw,r26,%o1) CR_TAB
2132 AS2 (ld,r24,X+) CR_TAB
2133 AS2 (ld,r25,X+) CR_TAB
2134 AS2 (ld,__tmp_reg__,X+) CR_TAB
2135 AS2 (ld,r27,X) CR_TAB
2136 AS2 (mov,r26,__tmp_reg__));
2138 return (AS2 (adiw,r26,%o1) CR_TAB
2139 AS2 (ld,%A0,X+) CR_TAB
2140 AS2 (ld,%B0,X+) CR_TAB
2141 AS2 (ld,%C0,X+) CR_TAB
2142 AS2 (ld,%D0,X) CR_TAB
2143 AS2 (sbiw,r26,%o1+3));
2145 if (reg_dest == reg_base)
2146 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2147 AS2 (ldd,%C0,%C1) CR_TAB
2148 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2149 AS2 (ldd,%A0,%A1) CR_TAB
2150 AS2 (mov,%B0,__tmp_reg__));
2151 else if (reg_dest == reg_base - 2)
2152 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2153 AS2 (ldd,%B0,%B1) CR_TAB
2154 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2155 AS2 (ldd,%D0,%D1) CR_TAB
2156 AS2 (mov,%C0,__tmp_reg__));
2157 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2158 AS2 (ldd,%B0,%B1) CR_TAB
2159 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-dec reads high byte first; post-inc reads low byte first.  */
2162 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2163 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2164 AS2 (ld,%C0,%1) CR_TAB
2165 AS2 (ld,%B0,%1) CR_TAB
2167 else if (GET_CODE (base) == POST_INC) /* (R++) */
2168 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2169 AS2 (ld,%B0,%1) CR_TAB
2170 AS2 (ld,%C0,%1) CR_TAB
2172 else if (CONSTANT_ADDRESS_P (base))
2173 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2174 AS2 (lds,%B0,%B1) CR_TAB
2175 AS2 (lds,%C0,%C1) CR_TAB
2178 fatal_insn ("unknown move insn:",insn);
/* Output assembler to store an SImode (4-byte) value to memory.
   Mirrors out_movsi_r_mr; the tricky cases are stores through X where
   the source overlaps the pointer registers, which are handled by
   parking bytes in __tmp_reg__/__zero_reg__ (and re-clearing
   __zero_reg__ afterwards).  */
2183 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2187 rtx base = XEXP (dest, 0);
2188 int reg_base = true_regnum (base);
2189 int reg_src = true_regnum (src);
2195 if (CONSTANT_ADDRESS_P (base))
2196 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2197 AS2 (sts,%B0,%B1) CR_TAB
2198 AS2 (sts,%C0,%C1) CR_TAB
2200 if (reg_base > 0) /* (r) */
2202 if (reg_base == REG_X) /* (R26) */
2204 if (reg_src == REG_X)
2206 /* "st X+,r26" is undefined */
/* Store r26 before bumping X, stash r27 in __tmp_reg__ first.  */
2207 if (reg_unused_after (insn, base))
2208 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2209 AS2 (st,X,r26) CR_TAB
2210 AS2 (adiw,r26,1) CR_TAB
2211 AS2 (st,X+,__tmp_reg__) CR_TAB
2212 AS2 (st,X+,r28) CR_TAB
2215 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2216 AS2 (st,X,r26) CR_TAB
2217 AS2 (adiw,r26,1) CR_TAB
2218 AS2 (st,X+,__tmp_reg__) CR_TAB
2219 AS2 (st,X+,r28) CR_TAB
2220 AS2 (st,X,r29) CR_TAB
/* Source is r24..r27: bytes C/D live in the pointer; park them.  */
2223 else if (reg_base == reg_src + 2)
2225 if (reg_unused_after (insn, base))
2226 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2227 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2228 AS2 (st,%0+,%A1) CR_TAB
2229 AS2 (st,%0+,%B1) CR_TAB
2230 AS2 (st,%0+,__zero_reg__) CR_TAB
2231 AS2 (st,%0,__tmp_reg__) CR_TAB
2232 AS1 (clr,__zero_reg__));
2234 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2235 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2236 AS2 (st,%0+,%A1) CR_TAB
2237 AS2 (st,%0+,%B1) CR_TAB
2238 AS2 (st,%0+,__zero_reg__) CR_TAB
2239 AS2 (st,%0,__tmp_reg__) CR_TAB
2240 AS1 (clr,__zero_reg__) CR_TAB
2243 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2244 AS2 (st,%0+,%B1) CR_TAB
2245 AS2 (st,%0+,%C1) CR_TAB
2246 AS2 (st,%0,%D1) CR_TAB
2250 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2251 AS2 (std,%0+1,%B1) CR_TAB
2252 AS2 (std,%0+2,%C1) CR_TAB
2253 AS2 (std,%0+3,%D1));
2255 else if (GET_CODE (base) == PLUS) /* (R + i) */
2257 int disp = INTVAL (XEXP (base, 1));
2258 reg_base = REGNO (XEXP (base, 0));
2259 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2261 if (reg_base != REG_Y)
2262 fatal_insn ("incorrect insn:",insn);
/* Slide Y so stores land at std offsets 60..63, then slide back.  */
2264 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2265 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2266 AS2 (std,Y+60,%A1) CR_TAB
2267 AS2 (std,Y+61,%B1) CR_TAB
2268 AS2 (std,Y+62,%C1) CR_TAB
2269 AS2 (std,Y+63,%D1) CR_TAB
2270 AS2 (sbiw,r28,%o0-60));
2272 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2273 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2274 AS2 (st,Y,%A1) CR_TAB
2275 AS2 (std,Y+1,%B1) CR_TAB
2276 AS2 (std,Y+2,%C1) CR_TAB
2277 AS2 (std,Y+3,%D1) CR_TAB
2278 AS2 (subi,r28,lo8(%o0)) CR_TAB
2279 AS2 (sbci,r29,hi8(%o0)));
2281 if (reg_base == REG_X)
2284 if (reg_src == REG_X)
/* X is both source bytes A/B and the pointer: copy it out first.  */
2287 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2288 AS2 (mov,__zero_reg__,r27) CR_TAB
2289 AS2 (adiw,r26,%o0) CR_TAB
2290 AS2 (st,X+,__tmp_reg__) CR_TAB
2291 AS2 (st,X+,__zero_reg__) CR_TAB
2292 AS2 (st,X+,r28) CR_TAB
2293 AS2 (st,X,r29) CR_TAB
2294 AS1 (clr,__zero_reg__) CR_TAB
2295 AS2 (sbiw,r26,%o0+3));
2297 else if (reg_src == REG_X - 2)
2300 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2301 AS2 (mov,__zero_reg__,r27) CR_TAB
2302 AS2 (adiw,r26,%o0) CR_TAB
2303 AS2 (st,X+,r24) CR_TAB
2304 AS2 (st,X+,r25) CR_TAB
2305 AS2 (st,X+,__tmp_reg__) CR_TAB
2306 AS2 (st,X,__zero_reg__) CR_TAB
2307 AS1 (clr,__zero_reg__) CR_TAB
2308 AS2 (sbiw,r26,%o0+3));
2311 return (AS2 (adiw,r26,%o0) CR_TAB
2312 AS2 (st,X+,%A1) CR_TAB
2313 AS2 (st,X+,%B1) CR_TAB
2314 AS2 (st,X+,%C1) CR_TAB
2315 AS2 (st,X,%D1) CR_TAB
2316 AS2 (sbiw,r26,%o0+3));
2318 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2319 AS2 (std,%B0,%B1) CR_TAB
2320 AS2 (std,%C0,%C1) CR_TAB
/* Pre-dec stores high-to-low; post-inc stores low-to-high.  */
2323 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2324 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2325 AS2 (st,%0,%C1) CR_TAB
2326 AS2 (st,%0,%B1) CR_TAB
2328 else if (GET_CODE (base) == POST_INC) /* (R++) */
2329 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2330 AS2 (st,%0,%B1) CR_TAB
2331 AS2 (st,%0,%C1) CR_TAB
2333 fatal_insn ("unknown move insn:",insn);
/* Output assembler for a 4-byte move (SImode or SFmode).  Chooses the
   copy direction by comparing register numbers so overlapping
   reg-to-reg moves never clobber a byte before it is read; uses movw
   pairs when available (the AVR_HAVE_MOVW tests are elided here).  */
2338 output_movsisf(rtx insn, rtx operands[], int *l)
2341 rtx dest = operands[0];
2342 rtx src = operands[1];
2348 if (register_operand (dest, VOIDmode))
2350 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Dest above src: copy high bytes first to avoid clobbering.  */
2352 if (true_regnum (dest) > true_regnum (src))
2357 return (AS2 (movw,%C0,%C1) CR_TAB
2358 AS2 (movw,%A0,%A1));
2361 return (AS2 (mov,%D0,%D1) CR_TAB
2362 AS2 (mov,%C0,%C1) CR_TAB
2363 AS2 (mov,%B0,%B1) CR_TAB
2371 return (AS2 (movw,%A0,%A1) CR_TAB
2372 AS2 (movw,%C0,%C1));
2375 return (AS2 (mov,%A0,%A1) CR_TAB
2376 AS2 (mov,%B0,%B1) CR_TAB
2377 AS2 (mov,%C0,%C1) CR_TAB
2381 else if (CONSTANT_P (src))
2383 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2386 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2387 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2388 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2389 AS2 (ldi,%D0,hhi8(%1)));
2392 if (GET_CODE (src) == CONST_INT)
/* Shared "clear all four bytes" sequence, movw-aware.  */
2394 const char *const clr_op0 =
2395 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2396 AS1 (clr,%B0) CR_TAB
2398 : (AS1 (clr,%A0) CR_TAB
2399 AS1 (clr,%B0) CR_TAB
2400 AS1 (clr,%C0) CR_TAB
2403 if (src == const0_rtx) /* mov r,L */
2405 *l = AVR_HAVE_MOVW ? 3 : 4;
2408 else if (src == const1_rtx)
2411 output_asm_insn (clr_op0, operands);
2412 *l = AVR_HAVE_MOVW ? 4 : 5;
2413 return AS1 (inc,%A0);
2415 else if (src == constm1_rtx)
2417 /* Immediate constants -1 to any register */
/* clr+dec makes 0xff in %A0, then it is fanned out to B/C/D.  */
2421 return (AS1 (clr,%A0) CR_TAB
2422 AS1 (dec,%A0) CR_TAB
2423 AS2 (mov,%B0,%A0) CR_TAB
2424 AS2 (movw,%C0,%A0));
2427 return (AS1 (clr,%A0) CR_TAB
2428 AS1 (dec,%A0) CR_TAB
2429 AS2 (mov,%B0,%A0) CR_TAB
2430 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants: clear all, set T, bld the one bit.  */
2435 int bit_nr = exact_log2 (INTVAL (src));
2439 *l = AVR_HAVE_MOVW ? 5 : 6;
2442 output_asm_insn (clr_op0, operands);
2443 output_asm_insn ("set", operands);
2446 avr_output_bld (operands, bit_nr);
2453 /* Last resort, better than loading from memory. */
/* Bounce each byte of the constant through r31, preserving r31.  */
2455 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2456 AS2 (ldi,r31,lo8(%1)) CR_TAB
2457 AS2 (mov,%A0,r31) CR_TAB
2458 AS2 (ldi,r31,hi8(%1)) CR_TAB
2459 AS2 (mov,%B0,r31) CR_TAB
2460 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2461 AS2 (mov,%C0,r31) CR_TAB
2462 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2463 AS2 (mov,%D0,r31) CR_TAB
2464 AS2 (mov,r31,__tmp_reg__));
2466 else if (GET_CODE (src) == MEM)
2467 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2469 else if (GET_CODE (dest) == MEM)
2471 const char *template;
2473 if (src == const0_rtx)
2474 operands[1] = zero_reg_rtx;
2476 template = out_movsi_mr_r (insn, operands, real_l);
2479 output_asm_insn (template, operands);
2484 fatal_insn ("invalid insn:", insn);
/* Output assembler to store a QImode register to memory.
   Dual of out_movqi_r_mr: SREG and I/O addresses use out, constant
   addresses use sts, and reg+disp forms adjust Y or X around a plain
   st when the displacement exceeds std's range.  */
2489 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2493 rtx x = XEXP (dest, 0);
2499 if (CONSTANT_ADDRESS_P (x))
2501 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2504 return AS2 (out,__SREG__,%1);
2506 if (avr_io_address_p (x, 1))
/* I/O space is memory-mapped 0x20 above the port numbers.  */
2509 return AS2 (out,%0-0x20,%1);
2512 return AS2 (sts,%0,%1);
2514 /* memory access by reg+disp */
2515 else if (GET_CODE (x) == PLUS
2516 && REG_P (XEXP (x,0))
2517 && GET_CODE (XEXP (x,1)) == CONST_INT)
2519 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2521 int disp = INTVAL (XEXP (x,1));
2522 if (REGNO (XEXP (x,0)) != REG_Y)
2523 fatal_insn ("incorrect insn:",insn);
2525 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2526 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2527 AS2 (std,Y+63,%1) CR_TAB
2528 AS2 (sbiw,r28,%o0-63));
2530 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2531 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2532 AS2 (st,Y,%1) CR_TAB
2533 AS2 (subi,r28,lo8(%o0)) CR_TAB
2534 AS2 (sbci,r29,hi8(%o0)));
2536 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source overlaps X: copy it to __tmp_reg__ before adjusting X.  */
2538 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2540 if (reg_unused_after (insn, XEXP (x,0)))
2541 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2542 AS2 (adiw,r26,%o0) CR_TAB
2543 AS2 (st,X,__tmp_reg__));
2545 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2546 AS2 (adiw,r26,%o0) CR_TAB
2547 AS2 (st,X,__tmp_reg__) CR_TAB
2548 AS2 (sbiw,r26,%o0));
2552 if (reg_unused_after (insn, XEXP (x,0)))
2553 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2556 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2557 AS2 (st,X,%1) CR_TAB
2558 AS2 (sbiw,r26,%o0));
2562 return AS2 (std,%0,%1);
2565 return AS2 (st,%0,%1);
/* Output assembler to store a HImode register to memory.
   For volatile destinations the HIGH byte is written first, which is
   the required order for 16-bit AVR I/O registers; several branch
   conditions in this fragment are elided.  */
2569 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2573 rtx base = XEXP (dest, 0);
2574 int reg_base = true_regnum (base);
2575 int reg_src = true_regnum (src);
2576 /* "volatile" forces writing high byte first, even if less efficient,
2577 for correct operation with 16-bit I/O registers. */
2578 int mem_volatile_p = MEM_VOLATILE_P (dest);
2583 if (CONSTANT_ADDRESS_P (base))
2585 if (avr_io_address_p (base, 2))
2588 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2589 AS2 (out,%A0-0x20,%A1));
2591 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2596 if (reg_base == REG_X)
2598 if (reg_src == REG_X)
2600 /* "st X+,r26" and "st -X,r26" are undefined. */
/* Park r27 in __tmp_reg__ and store r26 before X moves.  */
2601 if (!mem_volatile_p && reg_unused_after (insn, src))
2602 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2603 AS2 (st,X,r26) CR_TAB
2604 AS2 (adiw,r26,1) CR_TAB
2605 AS2 (st,X,__tmp_reg__));
2607 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2608 AS2 (adiw,r26,1) CR_TAB
2609 AS2 (st,X,__tmp_reg__) CR_TAB
2610 AS2 (sbiw,r26,1) CR_TAB
2615 if (!mem_volatile_p && reg_unused_after (insn, base))
2616 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2619 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2620 AS2 (st,X,%B1) CR_TAB
2625 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2628 else if (GET_CODE (base) == PLUS)
2630 int disp = INTVAL (XEXP (base, 1));
2631 reg_base = REGNO (XEXP (base, 0));
2632 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2634 if (reg_base != REG_Y)
2635 fatal_insn ("incorrect insn:",insn);
/* Slide Y so the pair lands at std offsets 62/63 (high first).  */
2637 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2638 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2639 AS2 (std,Y+63,%B1) CR_TAB
2640 AS2 (std,Y+62,%A1) CR_TAB
2641 AS2 (sbiw,r28,%o0-62));
2643 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2644 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2645 AS2 (std,Y+1,%B1) CR_TAB
2646 AS2 (st,Y,%A1) CR_TAB
2647 AS2 (subi,r28,lo8(%o0)) CR_TAB
2648 AS2 (sbci,r29,hi8(%o0)));
2650 if (reg_base == REG_X)
2653 if (reg_src == REG_X)
/* X is both the data and the pointer: copy to temp regs first.  */
2656 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2657 AS2 (mov,__zero_reg__,r27) CR_TAB
2658 AS2 (adiw,r26,%o0+1) CR_TAB
2659 AS2 (st,X,__zero_reg__) CR_TAB
2660 AS2 (st,-X,__tmp_reg__) CR_TAB
2661 AS1 (clr,__zero_reg__) CR_TAB
2662 AS2 (sbiw,r26,%o0));
2665 return (AS2 (adiw,r26,%o0+1) CR_TAB
2666 AS2 (st,X,%B1) CR_TAB
2667 AS2 (st,-X,%A1) CR_TAB
2668 AS2 (sbiw,r26,%o0));
2670 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2673 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2674 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2676 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Presumably this is the volatile post-inc path (high byte forced
   first) -- the guarding condition is elided; TODO confirm.  */
2680 if (REGNO (XEXP (base, 0)) == REG_X)
2683 return (AS2 (adiw,r26,1) CR_TAB
2684 AS2 (st,X,%B1) CR_TAB
2685 AS2 (st,-X,%A1) CR_TAB
2691 return (AS2 (std,%p0+1,%B1) CR_TAB
2692 AS2 (st,%p0,%A1) CR_TAB
2698 return (AS2 (st,%0,%A1) CR_TAB
2701 fatal_insn ("unknown move insn:",insn);
2705 /* Return 1 if frame pointer for current function required. */
/* True when alloca is used, when no arguments arrived in registers
   (so some live on the stack), or when there is any local frame.  */
2708 frame_pointer_required_p (void)
2710 return (current_function_calls_alloca
2711 || current_function_args_info.nregs == 0
2712 || get_frame_size () > 0);
2715 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Peeks at the next real insn; if it is a conditional jump, extracts
   the comparison code from the IF_THEN_ELSE condition.
   NOTE(review): assumes PATTERN of the jump is a SET whose source's
   operand 0 is the condition -- holds for AVR jump patterns.  */
2718 compare_condition (rtx insn)
2720 rtx next = next_real_insn (insn);
2721 RTX_CODE cond = UNKNOWN;
2722 if (next && GET_CODE (next) == JUMP_INSN)
2724 rtx pat = PATTERN (next);
2725 rtx src = SET_SRC (pat);
2726 rtx t = XEXP (src, 0);
2727 cond = GET_CODE (t);
2732 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT against zero depend only on the N (sign) flag.  */
2735 compare_sign_p (rtx insn)
2737 RTX_CODE cond = compare_condition (insn);
2738 return (cond == GE || cond == LT);
2741 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2742 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (nonzero) rather than 1,
   so callers can reuse it; 0 otherwise.  */
2745 compare_diff_p (rtx insn)
2747 RTX_CODE cond = compare_condition (insn);
2748 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2751 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
/* EQ/NE depend only on the Z flag, enabling cheaper test sequences.  */
2754 compare_eq_p (rtx insn)
2756 RTX_CODE cond = compare_condition (insn);
2757 return (cond == EQ || cond == NE);
2761 /* Output test instruction for HImode. */
/* Picks the cheapest HImode compare-with-zero for the following
   conditional jump: tst of the high byte for sign-only tests,
   a destructive "or" when the operand dies and only Z matters,
   sbiw for ADDW_REGS, else cp/cpc against __zero_reg__.  */
2764 out_tsthi (rtx insn, int *l)
2766 if (compare_sign_p (insn))
2769 return AS1 (tst,%B0);
2771 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2772 && compare_eq_p (insn))
2774 /* Faster than sbiw if we can clobber the operand. */
2776 return AS2 (or,%A0,%B0);
2778 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2781 return AS2 (sbiw,%0,0);
2784 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2785 AS2 (cpc,%B0,__zero_reg__));
2789 /* Output test instruction for SImode. */
/* SImode analogue of out_tsthi: tst %D0 for sign-only tests,
   sbiw-based sequence for ADDW_REGS, else a cp/cpc chain over all
   four bytes against __zero_reg__.  */
2792 out_tstsi (rtx insn, int *l)
2794 if (compare_sign_p (insn))
2797 return AS1 (tst,%D0);
2799 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2802 return (AS2 (sbiw,%A0,0) CR_TAB
2803 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2804 AS2 (cpc,%D0,__zero_reg__));
2807 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2808 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2809 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2810 AS2 (cpc,%D0,__zero_reg__));
2814 /* Generate asm equivalent for various shifts.
2815 Shift count is a CONST_INT, MEM or REG.
2816 This only handles cases that are not already
2817 carefully hand-optimized in ?sh??i3_out. */
/* TEMPLATE is the single-step shift sequence (T_LEN insns); this
   routine either unrolls it COUNT times or wraps it in a counted loop
   using a scratch register, __zero_reg__, or a borrowed LD_REGS
   register saved through __tmp_reg__.  The emitted string is built in
   a buffer (str, declared outside this fragment).  */
2820 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2821 int *len, int t_len)
2825 int second_label = 1;
2826 int saved_in_tmp = 0;
2827 int use_zero_reg = 0;
2829 op[0] = operands[0];
2830 op[1] = operands[1];
2831 op[2] = operands[2];
2832 op[3] = operands[3];
2838 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern means a scratch register was provided.  */
2840 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2841 int count = INTVAL (operands[2]);
2842 int max_len = 10; /* If larger than this, always use a loop. */
2851 if (count < 8 && !scratch)
/* Loop overhead: 3 insns with scratch, 4 with zero-reg trick, 5 with
   a borrowed register.  */
2855 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2857 if (t_len * count <= max_len)
2859 /* Output shifts inline with no loop - faster. */
2861 *len = t_len * count;
2865 output_asm_insn (template, op);
/* Loop path: load the count into the chosen counter register.  */
2874 strcat (str, AS2 (ldi,%3,%2));
2876 else if (use_zero_reg)
2878 /* Hack to save one word: use __zero_reg__ as loop counter.
2879 Set one bit, then shift in a loop until it is 0 again. */
2881 op[3] = zero_reg_rtx;
2885 strcat (str, ("set" CR_TAB
2886 AS2 (bld,%3,%2-1)));
2890 /* No scratch register available, use one from LD_REGS (saved in
2891 __tmp_reg__) that doesn't overlap with registers to shift. */
2893 op[3] = gen_rtx_REG (QImode,
2894 ((true_regnum (operands[0]) - 1) & 15) + 16);
2895 op[4] = tmp_reg_rtx;
2899 *len = 3; /* Includes "mov %3,%4" after the loop. */
2901 strcat (str, (AS2 (mov,%4,%3) CR_TAB
/* Count in memory: load it into __tmp_reg__ via out_movqi_r_mr.  */
2907 else if (GET_CODE (operands[2]) == MEM)
2911 op[3] = op_mov[0] = tmp_reg_rtx;
2915 out_movqi_r_mr (insn, op_mov, len);
2917 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2919 else if (register_operand (operands[2], QImode))
2921 if (reg_unused_after (insn, operands[2]))
2925 op[3] = tmp_reg_rtx;
2927 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2931 fatal_insn ("bad shift insn:", insn);
/* second_label guards against a zero count: jump straight to the
   loop test so zero shifts execute the template zero times.  */
2938 strcat (str, AS1 (rjmp,2f));
2942 *len += t_len + 2; /* template + dec + brXX */
2945 strcat (str, "\n1:\t");
2946 strcat (str, template);
2947 strcat (str, second_label ? "\n2:\t" : "\n\t");
2948 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2949 strcat (str, CR_TAB);
2950 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
/* Restore the borrowed LD_REGS counter from __tmp_reg__.  */
2952 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2953 output_asm_insn (str, op);
2958 /* 8bit shift left ((char)x << i)
   Hand-optimized sequences for each constant shift count; other counts
   fall through to the generic out_shift_with_cnt loop below.  */
2961 ashlqi3_out (rtx insn, rtx operands[], int *len)
2963 if (GET_CODE (operands[2]) == CONST_INT)
2970 switch (INTVAL (operands[2]))
2973 if (INTVAL (operands[2]) < 8)
/* Shift by >= 8: result is zero.  */
2977 return AS1 (clr,%0);
2981 return AS1 (lsl,%0);
2985 return (AS1 (lsl,%0) CR_TAB
2990 return (AS1 (lsl,%0) CR_TAB
/* << 4: swap nibbles then mask, if an immediate-capable register.  */
2995 if (test_hard_reg_class (LD_REGS, operands[0]))
2998 return (AS1 (swap,%0) CR_TAB
2999 AS2 (andi,%0,0xf0));
3002 return (AS1 (lsl,%0) CR_TAB
3008 if (test_hard_reg_class (LD_REGS, operands[0]))
3011 return (AS1 (swap,%0) CR_TAB
3013 AS2 (andi,%0,0xe0));
3016 return (AS1 (lsl,%0) CR_TAB
3023 if (test_hard_reg_class (LD_REGS, operands[0]))
3026 return (AS1 (swap,%0) CR_TAB
3029 AS2 (andi,%0,0xc0));
3032 return (AS1 (lsl,%0) CR_TAB
/* << 7: rotate the lsb-to-be into place via carry.  */
3041 return (AS1 (ror,%0) CR_TAB
3046 else if (CONSTANT_P (operands[2]))
3047 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3049 out_shift_with_cnt (AS1 (lsl,%0),
3050 insn, operands, len, 1);
3055 /* 16bit shift left ((short)x << i)
   Per-count hand-optimized sequences.  `scratch' means the pattern is a
   PARALLEL carrying a scratch register (%3); `ldi_ok' means the
   destination is in LD_REGS so andi/ldi immediates are usable.
   NOTE(review): some source lines are elided in this extract.  */
3058 ashlhi3_out (rtx insn, rtx operands[], int *len)
3060 if (GET_CODE (operands[2]) == CONST_INT)
3062 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL)
3063 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3070 switch (INTVAL (operands[2]))
3073 if (INTVAL (operands[2]) < 16)
/* Shift by >= 16: clear both bytes.  */
3077 return (AS1 (clr,%B0) CR_TAB
/* << 4: nibble-swap both bytes, then mask/merge.  */
3081 if (optimize_size && scratch)
3086 return (AS1 (swap,%A0) CR_TAB
3087 AS1 (swap,%B0) CR_TAB
3088 AS2 (andi,%B0,0xf0) CR_TAB
3089 AS2 (eor,%B0,%A0) CR_TAB
3090 AS2 (andi,%A0,0xf0) CR_TAB
3096 return (AS1 (swap,%A0) CR_TAB
3097 AS1 (swap,%B0) CR_TAB
3098 AS2 (ldi,%3,0xf0) CR_TAB
3099 AS2 (and,%B0,%3) CR_TAB
3100 AS2 (eor,%B0,%A0) CR_TAB
3101 AS2 (and,%A0,%3) CR_TAB
3104 break; /* optimize_size ? 6 : 8 */
3108 break; /* scratch ? 5 : 6 */
/* << 5: one lsl/rol step then the nibble-swap trick.  */
3112 return (AS1 (lsl,%A0) CR_TAB
3113 AS1 (rol,%B0) CR_TAB
3114 AS1 (swap,%A0) CR_TAB
3115 AS1 (swap,%B0) CR_TAB
3116 AS2 (andi,%B0,0xf0) CR_TAB
3117 AS2 (eor,%B0,%A0) CR_TAB
3118 AS2 (andi,%A0,0xf0) CR_TAB
3124 return (AS1 (lsl,%A0) CR_TAB
3125 AS1 (rol,%B0) CR_TAB
3126 AS1 (swap,%A0) CR_TAB
3127 AS1 (swap,%B0) CR_TAB
3128 AS2 (ldi,%3,0xf0) CR_TAB
3129 AS2 (and,%B0,%3) CR_TAB
3130 AS2 (eor,%B0,%A0) CR_TAB
3131 AS2 (and,%A0,%3) CR_TAB
3138 break; /* scratch ? 5 : 6 */
/* << 6 via two right-shifts through __tmp_reg__ and a byte move.  */
3140 return (AS1 (clr,__tmp_reg__) CR_TAB
3141 AS1 (lsr,%B0) CR_TAB
3142 AS1 (ror,%A0) CR_TAB
3143 AS1 (ror,__tmp_reg__) CR_TAB
3144 AS1 (lsr,%B0) CR_TAB
3145 AS1 (ror,%A0) CR_TAB
3146 AS1 (ror,__tmp_reg__) CR_TAB
3147 AS2 (mov,%B0,%A0) CR_TAB
3148 AS2 (mov,%A0,__tmp_reg__));
3152 return (AS1 (lsr,%B0) CR_TAB
3153 AS2 (mov,%B0,%A0) CR_TAB
3154 AS1 (clr,%A0) CR_TAB
3155 AS1 (ror,%B0) CR_TAB
/* << 8: plain byte move, low byte cleared.  */
3159 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3164 return (AS2 (mov,%B0,%A0) CR_TAB
3165 AS1 (clr,%A0) CR_TAB
3170 return (AS2 (mov,%B0,%A0) CR_TAB
3171 AS1 (clr,%A0) CR_TAB
3172 AS1 (lsl,%B0) CR_TAB
3177 return (AS2 (mov,%B0,%A0) CR_TAB
3178 AS1 (clr,%A0) CR_TAB
3179 AS1 (lsl,%B0) CR_TAB
3180 AS1 (lsl,%B0) CR_TAB
3187 return (AS2 (mov,%B0,%A0) CR_TAB
3188 AS1 (clr,%A0) CR_TAB
3189 AS1 (swap,%B0) CR_TAB
3190 AS2 (andi,%B0,0xf0));
3195 return (AS2 (mov,%B0,%A0) CR_TAB
3196 AS1 (clr,%A0) CR_TAB
3197 AS1 (swap,%B0) CR_TAB
3198 AS2 (ldi,%3,0xf0) CR_TAB
3202 return (AS2 (mov,%B0,%A0) CR_TAB
3203 AS1 (clr,%A0) CR_TAB
3204 AS1 (lsl,%B0) CR_TAB
3205 AS1 (lsl,%B0) CR_TAB
3206 AS1 (lsl,%B0) CR_TAB
3213 return (AS2 (mov,%B0,%A0) CR_TAB
3214 AS1 (clr,%A0) CR_TAB
3215 AS1 (swap,%B0) CR_TAB
3216 AS1 (lsl,%B0) CR_TAB
3217 AS2 (andi,%B0,0xe0));
/* << 13: on MUL-capable parts, multiply by 0x20 instead of shifting.  */
3219 if (AVR_HAVE_MUL && scratch)
3222 return (AS2 (ldi,%3,0x20) CR_TAB
3223 AS2 (mul,%A0,%3) CR_TAB
3224 AS2 (mov,%B0,r0) CR_TAB
3225 AS1 (clr,%A0) CR_TAB
3226 AS1 (clr,__zero_reg__));
3228 if (optimize_size && scratch)
3233 return (AS2 (mov,%B0,%A0) CR_TAB
3234 AS1 (clr,%A0) CR_TAB
3235 AS1 (swap,%B0) CR_TAB
3236 AS1 (lsl,%B0) CR_TAB
3237 AS2 (ldi,%3,0xe0) CR_TAB
/* Use r1 (__zero_reg__) as the multiplier, then restore it to zero.  */
3243 return ("set" CR_TAB
3244 AS2 (bld,r1,5) CR_TAB
3245 AS2 (mul,%A0,r1) CR_TAB
3246 AS2 (mov,%B0,r0) CR_TAB
3247 AS1 (clr,%A0) CR_TAB
3248 AS1 (clr,__zero_reg__));
3251 return (AS2 (mov,%B0,%A0) CR_TAB
3252 AS1 (clr,%A0) CR_TAB
3253 AS1 (lsl,%B0) CR_TAB
3254 AS1 (lsl,%B0) CR_TAB
3255 AS1 (lsl,%B0) CR_TAB
3256 AS1 (lsl,%B0) CR_TAB
3260 if (AVR_HAVE_MUL && ldi_ok)
3263 return (AS2 (ldi,%B0,0x40) CR_TAB
3264 AS2 (mul,%A0,%B0) CR_TAB
3265 AS2 (mov,%B0,r0) CR_TAB
3266 AS1 (clr,%A0) CR_TAB
3267 AS1 (clr,__zero_reg__));
3269 if (AVR_HAVE_MUL && scratch)
3272 return (AS2 (ldi,%3,0x40) CR_TAB
3273 AS2 (mul,%A0,%3) CR_TAB
3274 AS2 (mov,%B0,r0) CR_TAB
3275 AS1 (clr,%A0) CR_TAB
3276 AS1 (clr,__zero_reg__));
3278 if (optimize_size && ldi_ok)
3281 return (AS2 (mov,%B0,%A0) CR_TAB
3282 AS2 (ldi,%A0,6) "\n1:\t"
3283 AS1 (lsl,%B0) CR_TAB
3284 AS1 (dec,%A0) CR_TAB
3287 if (optimize_size && scratch)
/* << 15: compute via right-rotation of the single surviving bit.  */
3290 return (AS1 (clr,%B0) CR_TAB
3291 AS1 (lsr,%A0) CR_TAB
3292 AS1 (ror,%B0) CR_TAB
3293 AS1 (lsr,%A0) CR_TAB
3294 AS1 (ror,%B0) CR_TAB
3299 return (AS1 (clr,%B0) CR_TAB
3300 AS1 (lsr,%A0) CR_TAB
3301 AS1 (ror,%B0) CR_TAB
/* Generic fallback: loop over lsl/rol, two words per iteration.  */
3306 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3308 insn, operands, len, 2);
3313 /* 32bit shift left ((long)x << i)
   NOTE(review): some source lines are elided in this extract.  */
3316 ashlsi3_out (rtx insn, rtx operands[], int *len)
3318 if (GET_CODE (operands[2]) == CONST_INT)
3326 switch (INTVAL (operands[2]))
3329 if (INTVAL (operands[2]) < 32)
/* Shift by >= 32: clear all four bytes (movw pairs on capable parts).  */
3333 return *len = 3, (AS1 (clr,%D0) CR_TAB
3334 AS1 (clr,%C0) CR_TAB
3335 AS2 (movw,%A0,%C0));
3337 return (AS1 (clr,%D0) CR_TAB
3338 AS1 (clr,%C0) CR_TAB
3339 AS1 (clr,%B0) CR_TAB
/* << 8: move bytes up one position; order depends on reg overlap.  */
3344 int reg0 = true_regnum (operands[0]);
3345 int reg1 = true_regnum (operands[1]);
3348 return (AS2 (mov,%D0,%C1) CR_TAB
3349 AS2 (mov,%C0,%B1) CR_TAB
3350 AS2 (mov,%B0,%A1) CR_TAB
3353 return (AS1 (clr,%A0) CR_TAB
3354 AS2 (mov,%B0,%A1) CR_TAB
3355 AS2 (mov,%C0,%B1) CR_TAB
/* << 16: move the low word into the high word, clear the low word.  */
3361 int reg0 = true_regnum (operands[0]);
3362 int reg1 = true_regnum (operands[1]);
3363 if (reg0 + 2 == reg1)
3364 return *len = 2, (AS1 (clr,%B0) CR_TAB
3367 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3368 AS1 (clr,%B0) CR_TAB
3371 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3372 AS2 (mov,%D0,%B1) CR_TAB
3373 AS1 (clr,%B0) CR_TAB
/* << 24: only the low byte survives, in the top position.  */
3379 return (AS2 (mov,%D0,%A1) CR_TAB
3380 AS1 (clr,%C0) CR_TAB
3381 AS1 (clr,%B0) CR_TAB
/* << 31: rotate the lsb into bit 31 via carry.  */
3386 return (AS1 (clr,%D0) CR_TAB
3387 AS1 (lsr,%A0) CR_TAB
3388 AS1 (ror,%D0) CR_TAB
3389 AS1 (clr,%C0) CR_TAB
3390 AS1 (clr,%B0) CR_TAB
/* Generic fallback: 4-word lsl/rol chain in a loop.  */
3395 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3396 AS1 (rol,%B0) CR_TAB
3397 AS1 (rol,%C0) CR_TAB
3399 insn, operands, len, 4);
3403 /* 8bit arithmetic shift right ((signed char)x >> i)
   Small constant counts unroll `asr'; count 6 uses the bst/sbc bit
   trick; others fall through to the generic loop.  */
3406 ashrqi3_out (rtx insn, rtx operands[], int *len)
3408 if (GET_CODE (operands[2]) == CONST_INT)
3415 switch (INTVAL (operands[2]))
3419 return AS1 (asr,%0);
3423 return (AS1 (asr,%0) CR_TAB
3428 return (AS1 (asr,%0) CR_TAB
3434 return (AS1 (asr,%0) CR_TAB
3441 return (AS1 (asr,%0) CR_TAB
/* >> 6: capture bit 6 in T, smear the sign with sbc, restore bit.  */
3449 return (AS2 (bst,%0,6) CR_TAB
3451 AS2 (sbc,%0,%0) CR_TAB
3455 if (INTVAL (operands[2]) < 8)
/* >> 7: result is all sign bits.  */
3462 return (AS1 (lsl,%0) CR_TAB
3466 else if (CONSTANT_P (operands[2]))
3467 fatal_insn ("internal compiler error. Incorrect shift:", insn)
3469 out_shift_with_cnt (AS1 (asr,%0),
3470 insn, operands, len, 1);
3475 /* 16bit arithmetic shift right ((signed short)x >> i)
   NOTE(review): some source lines are elided in this extract.  */
3478 ashrhi3_out (rtx insn, rtx operands[], int *len)
3480 if (GET_CODE (operands[2]) == CONST_INT)
3482 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3483 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3490 switch (INTVAL (operands[2]))
3494 /* XXX try to optimize this too? */
3499 break; /* scratch ? 5 : 6 */
/* >> 6: shift left twice through __tmp_reg__, sign-extend with sbc.  */
3501 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3502 AS2 (mov,%A0,%B0) CR_TAB
3503 AS1 (lsl,__tmp_reg__) CR_TAB
3504 AS1 (rol,%A0) CR_TAB
3505 AS2 (sbc,%B0,%B0) CR_TAB
3506 AS1 (lsl,__tmp_reg__) CR_TAB
3507 AS1 (rol,%A0) CR_TAB
3512 return (AS1 (lsl,%A0) CR_TAB
3513 AS2 (mov,%A0,%B0) CR_TAB
3514 AS1 (rol,%A0) CR_TAB
/* >> 8: move the high byte down, then replicate its sign bit.  */
3519 int reg0 = true_regnum (operands[0]);
3520 int reg1 = true_regnum (operands[1]);
3523 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3524 AS1 (lsl,%B0) CR_TAB
3527 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3528 AS1 (clr,%B0) CR_TAB
3529 AS2 (sbrc,%A0,7) CR_TAB
3535 return (AS2 (mov,%A0,%B0) CR_TAB
3536 AS1 (lsl,%B0) CR_TAB
3537 AS2 (sbc,%B0,%B0) CR_TAB
3542 return (AS2 (mov,%A0,%B0) CR_TAB
3543 AS1 (lsl,%B0) CR_TAB
3544 AS2 (sbc,%B0,%B0) CR_TAB
3545 AS1 (asr,%A0) CR_TAB
/* >> 11..13: on MUL-capable parts use a signed multiply (muls)
   by a power of two; r1 is restored to zero afterwards.  */
3549 if (AVR_HAVE_MUL && ldi_ok)
3552 return (AS2 (ldi,%A0,0x20) CR_TAB
3553 AS2 (muls,%B0,%A0) CR_TAB
3554 AS2 (mov,%A0,r1) CR_TAB
3555 AS2 (sbc,%B0,%B0) CR_TAB
3556 AS1 (clr,__zero_reg__));
3558 if (optimize_size && scratch)
3561 return (AS2 (mov,%A0,%B0) CR_TAB
3562 AS1 (lsl,%B0) CR_TAB
3563 AS2 (sbc,%B0,%B0) CR_TAB
3564 AS1 (asr,%A0) CR_TAB
3565 AS1 (asr,%A0) CR_TAB
3569 if (AVR_HAVE_MUL && ldi_ok)
3572 return (AS2 (ldi,%A0,0x10) CR_TAB
3573 AS2 (muls,%B0,%A0) CR_TAB
3574 AS2 (mov,%A0,r1) CR_TAB
3575 AS2 (sbc,%B0,%B0) CR_TAB
3576 AS1 (clr,__zero_reg__));
3578 if (optimize_size && scratch)
3581 return (AS2 (mov,%A0,%B0) CR_TAB
3582 AS1 (lsl,%B0) CR_TAB
3583 AS2 (sbc,%B0,%B0) CR_TAB
3584 AS1 (asr,%A0) CR_TAB
3585 AS1 (asr,%A0) CR_TAB
3586 AS1 (asr,%A0) CR_TAB
3590 if (AVR_HAVE_MUL && ldi_ok)
3593 return (AS2 (ldi,%A0,0x08) CR_TAB
3594 AS2 (muls,%B0,%A0) CR_TAB
3595 AS2 (mov,%A0,r1) CR_TAB
3596 AS2 (sbc,%B0,%B0) CR_TAB
3597 AS1 (clr,__zero_reg__));
3600 break; /* scratch ? 5 : 7 */
3602 return (AS2 (mov,%A0,%B0) CR_TAB
3603 AS1 (lsl,%B0) CR_TAB
3604 AS2 (sbc,%B0,%B0) CR_TAB
3605 AS1 (asr,%A0) CR_TAB
3606 AS1 (asr,%A0) CR_TAB
3607 AS1 (asr,%A0) CR_TAB
3608 AS1 (asr,%A0) CR_TAB
/* >> 14: extract bit 15 (sign) via two lsl/sbc steps.  */
3613 return (AS1 (lsl,%B0) CR_TAB
3614 AS2 (sbc,%A0,%A0) CR_TAB
3615 AS1 (lsl,%B0) CR_TAB
3616 AS2 (mov,%B0,%A0) CR_TAB
3620 if (INTVAL (operands[2]) < 16)
/* >> 15 (and beyond): both bytes become copies of the sign.  */
3626 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3627 AS2 (sbc,%A0,%A0) CR_TAB
/* Generic fallback: asr/ror loop, two words per iteration.  */
3632 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3634 insn, operands, len, 2);
3639 /* 32bit arithmetic shift right ((signed long)x >> i)
   NOTE(review): some source lines are elided in this extract.  */
3642 ashrsi3_out (rtx insn, rtx operands[], int *len)
3644 if (GET_CODE (operands[2]) == CONST_INT)
3652 switch (INTVAL (operands[2]))
/* >> 8: shift bytes down one position, sign-extend the top byte
   with sbrc/dec; order depends on source/dest register overlap.  */
3656 int reg0 = true_regnum (operands[0]);
3657 int reg1 = true_regnum (operands[1]);
3660 return (AS2 (mov,%A0,%B1) CR_TAB
3661 AS2 (mov,%B0,%C1) CR_TAB
3662 AS2 (mov,%C0,%D1) CR_TAB
3663 AS1 (clr,%D0) CR_TAB
3664 AS2 (sbrc,%C0,7) CR_TAB
3667 return (AS1 (clr,%D0) CR_TAB
3668 AS2 (sbrc,%D1,7) CR_TAB
3669 AS1 (dec,%D0) CR_TAB
3670 AS2 (mov,%C0,%D1) CR_TAB
3671 AS2 (mov,%B0,%C1) CR_TAB
/* >> 16: high word moves down; com/mov smears the sign into C/D.  */
3677 int reg0 = true_regnum (operands[0]);
3678 int reg1 = true_regnum (operands[1]);
3680 if (reg0 == reg1 + 2)
3681 return *len = 4, (AS1 (clr,%D0) CR_TAB
3682 AS2 (sbrc,%B0,7) CR_TAB
3683 AS1 (com,%D0) CR_TAB
3686 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3687 AS1 (clr,%D0) CR_TAB
3688 AS2 (sbrc,%B0,7) CR_TAB
3689 AS1 (com,%D0) CR_TAB
3692 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3693 AS2 (mov,%A0,%C1) CR_TAB
3694 AS1 (clr,%D0) CR_TAB
3695 AS2 (sbrc,%B0,7) CR_TAB
3696 AS1 (com,%D0) CR_TAB
/* >> 24: only the top byte survives in the low position.  */
3701 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3702 AS1 (clr,%D0) CR_TAB
3703 AS2 (sbrc,%A0,7) CR_TAB
3704 AS1 (com,%D0) CR_TAB
3705 AS2 (mov,%B0,%D0) CR_TAB
3709 if (INTVAL (operands[2]) < 32)
/* >> 31: all four bytes become the sign.  */
3716 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3717 AS2 (sbc,%A0,%A0) CR_TAB
3718 AS2 (mov,%B0,%A0) CR_TAB
3719 AS2 (movw,%C0,%A0));
3721 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3722 AS2 (sbc,%A0,%A0) CR_TAB
3723 AS2 (mov,%B0,%A0) CR_TAB
3724 AS2 (mov,%C0,%A0) CR_TAB
/* Generic fallback: asr/ror chain over four bytes in a loop.  */
3729 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3730 AS1 (ror,%C0) CR_TAB
3731 AS1 (ror,%B0) CR_TAB
3733 insn, operands, len, 4);
3737 /* 8bit logic shift right ((unsigned char)x >> i)
   Mirror image of ashlqi3_out: lsr unrolled for small counts,
   swap+andi nibble tricks for 4..6, rol trick for 7.  */
3740 lshrqi3_out (rtx insn, rtx operands[], int *len)
3742 if (GET_CODE (operands[2]) == CONST_INT)
3749 switch (INTVAL (operands[2]))
3752 if (INTVAL (operands[2]) < 8)
/* Shift by >= 8: result is zero.  */
3756 return AS1 (clr,%0);
3760 return AS1 (lsr,%0);
3764 return (AS1 (lsr,%0) CR_TAB
3768 return (AS1 (lsr,%0) CR_TAB
3773 if (test_hard_reg_class (LD_REGS, operands[0]))
3776 return (AS1 (swap,%0) CR_TAB
3777 AS2 (andi,%0,0x0f));
3780 return (AS1 (lsr,%0) CR_TAB
3786 if (test_hard_reg_class (LD_REGS, operands[0]))
3789 return (AS1 (swap,%0) CR_TAB
3794 return (AS1 (lsr,%0) CR_TAB
3801 if (test_hard_reg_class (LD_REGS, operands[0]))
3804 return (AS1 (swap,%0) CR_TAB
3810 return (AS1 (lsr,%0) CR_TAB
/* >> 7: rotate the msb into bit 0 via carry.  */
3819 return (AS1 (rol,%0) CR_TAB
3824 else if (CONSTANT_P (operands[2]))
3825 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3827 out_shift_with_cnt (AS1 (lsr,%0),
3828 insn, operands, len, 1);
3832 /* 16bit logic shift right ((unsigned short)x >> i)
   Mirror image of ashlhi3_out with zero-fill instead of sign-fill.
   NOTE(review): some source lines are elided in this extract.  */
3835 lshrhi3_out (rtx insn, rtx operands[], int *len)
3837 if (GET_CODE (operands[2]) == CONST_INT)
3839 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3840 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3847 switch (INTVAL (operands[2]))
3850 if (INTVAL (operands[2]) < 16)
/* Shift by >= 16: clear both bytes.  */
3854 return (AS1 (clr,%B0) CR_TAB
/* >> 4: nibble-swap both bytes, then mask/merge.  */
3858 if (optimize_size && scratch)
3863 return (AS1 (swap,%B0) CR_TAB
3864 AS1 (swap,%A0) CR_TAB
3865 AS2 (andi,%A0,0x0f) CR_TAB
3866 AS2 (eor,%A0,%B0) CR_TAB
3867 AS2 (andi,%B0,0x0f) CR_TAB
3873 return (AS1 (swap,%B0) CR_TAB
3874 AS1 (swap,%A0) CR_TAB
3875 AS2 (ldi,%3,0x0f) CR_TAB
3876 AS2 (and,%A0,%3) CR_TAB
3877 AS2 (eor,%A0,%B0) CR_TAB
3878 AS2 (and,%B0,%3) CR_TAB
3881 break; /* optimize_size ? 6 : 8 */
3885 break; /* scratch ? 5 : 6 */
3889 return (AS1 (lsr,%B0) CR_TAB
3890 AS1 (ror,%A0) CR_TAB
3891 AS1 (swap,%B0) CR_TAB
3892 AS1 (swap,%A0) CR_TAB
3893 AS2 (andi,%A0,0x0f) CR_TAB
3894 AS2 (eor,%A0,%B0) CR_TAB
3895 AS2 (andi,%B0,0x0f) CR_TAB
3901 return (AS1 (lsr,%B0) CR_TAB
3902 AS1 (ror,%A0) CR_TAB
3903 AS1 (swap,%B0) CR_TAB
3904 AS1 (swap,%A0) CR_TAB
3905 AS2 (ldi,%3,0x0f) CR_TAB
3906 AS2 (and,%A0,%3) CR_TAB
3907 AS2 (eor,%A0,%B0) CR_TAB
3908 AS2 (and,%B0,%3) CR_TAB
3915 break; /* scratch ? 5 : 6 */
/* >> 6 via left-shifts through __tmp_reg__ and byte moves.  */
3917 return (AS1 (clr,__tmp_reg__) CR_TAB
3918 AS1 (lsl,%A0) CR_TAB
3919 AS1 (rol,%B0) CR_TAB
3920 AS1 (rol,__tmp_reg__) CR_TAB
3921 AS1 (lsl,%A0) CR_TAB
3922 AS1 (rol,%B0) CR_TAB
3923 AS1 (rol,__tmp_reg__) CR_TAB
3924 AS2 (mov,%A0,%B0) CR_TAB
3925 AS2 (mov,%B0,__tmp_reg__));
3929 return (AS1 (lsl,%A0) CR_TAB
3930 AS2 (mov,%A0,%B0) CR_TAB
3931 AS1 (rol,%A0) CR_TAB
3932 AS2 (sbc,%B0,%B0) CR_TAB
/* >> 8: plain byte move, high byte cleared.  */
3936 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3941 return (AS2 (mov,%A0,%B0) CR_TAB
3942 AS1 (clr,%B0) CR_TAB
3947 return (AS2 (mov,%A0,%B0) CR_TAB
3948 AS1 (clr,%B0) CR_TAB
3949 AS1 (lsr,%A0) CR_TAB
3954 return (AS2 (mov,%A0,%B0) CR_TAB
3955 AS1 (clr,%B0) CR_TAB
3956 AS1 (lsr,%A0) CR_TAB
3957 AS1 (lsr,%A0) CR_TAB
3964 return (AS2 (mov,%A0,%B0) CR_TAB
3965 AS1 (clr,%B0) CR_TAB
3966 AS1 (swap,%A0) CR_TAB
3967 AS2 (andi,%A0,0x0f));
3972 return (AS2 (mov,%A0,%B0) CR_TAB
3973 AS1 (clr,%B0) CR_TAB
3974 AS1 (swap,%A0) CR_TAB
3975 AS2 (ldi,%3,0x0f) CR_TAB
3979 return (AS2 (mov,%A0,%B0) CR_TAB
3980 AS1 (clr,%B0) CR_TAB
3981 AS1 (lsr,%A0) CR_TAB
3982 AS1 (lsr,%A0) CR_TAB
3983 AS1 (lsr,%A0) CR_TAB
3990 return (AS2 (mov,%A0,%B0) CR_TAB
3991 AS1 (clr,%B0) CR_TAB
3992 AS1 (swap,%A0) CR_TAB
3993 AS1 (lsr,%A0) CR_TAB
3994 AS2 (andi,%A0,0x07));
/* >> 13: on MUL-capable parts, multiply high byte by 0x08.  */
3996 if (AVR_HAVE_MUL && scratch)
3999 return (AS2 (ldi,%3,0x08) CR_TAB
4000 AS2 (mul,%B0,%3) CR_TAB
4001 AS2 (mov,%A0,r1) CR_TAB
4002 AS1 (clr,%B0) CR_TAB
4003 AS1 (clr,__zero_reg__));
4005 if (optimize_size && scratch)
4010 return (AS2 (mov,%A0,%B0) CR_TAB
4011 AS1 (clr,%B0) CR_TAB
4012 AS1 (swap,%A0) CR_TAB
4013 AS1 (lsr,%A0) CR_TAB
4014 AS2 (ldi,%3,0x07) CR_TAB
/* Use r1 (__zero_reg__) as the multiplier, restore it afterwards.  */
4020 return ("set" CR_TAB
4021 AS2 (bld,r1,3) CR_TAB
4022 AS2 (mul,%B0,r1) CR_TAB
4023 AS2 (mov,%A0,r1) CR_TAB
4024 AS1 (clr,%B0) CR_TAB
4025 AS1 (clr,__zero_reg__));
4028 return (AS2 (mov,%A0,%B0) CR_TAB
4029 AS1 (clr,%B0) CR_TAB
4030 AS1 (lsr,%A0) CR_TAB
4031 AS1 (lsr,%A0) CR_TAB
4032 AS1 (lsr,%A0) CR_TAB
4033 AS1 (lsr,%A0) CR_TAB
4037 if (AVR_HAVE_MUL && ldi_ok)
4040 return (AS2 (ldi,%A0,0x04) CR_TAB
4041 AS2 (mul,%B0,%A0) CR_TAB
4042 AS2 (mov,%A0,r1) CR_TAB
4043 AS1 (clr,%B0) CR_TAB
4044 AS1 (clr,__zero_reg__));
4046 if (AVR_HAVE_MUL && scratch)
4049 return (AS2 (ldi,%3,0x04) CR_TAB
4050 AS2 (mul,%B0,%3) CR_TAB
4051 AS2 (mov,%A0,r1) CR_TAB
4052 AS1 (clr,%B0) CR_TAB
4053 AS1 (clr,__zero_reg__));
4055 if (optimize_size && ldi_ok)
4058 return (AS2 (mov,%A0,%B0) CR_TAB
4059 AS2 (ldi,%B0,6) "\n1:\t"
4060 AS1 (lsr,%A0) CR_TAB
4061 AS1 (dec,%B0) CR_TAB
4064 if (optimize_size && scratch)
/* >> 15: rotate bit 15 down into bit 0 via carry.  */
4067 return (AS1 (clr,%A0) CR_TAB
4068 AS1 (lsl,%B0) CR_TAB
4069 AS1 (rol,%A0) CR_TAB
4070 AS1 (lsl,%B0) CR_TAB
4071 AS1 (rol,%A0) CR_TAB
4076 return (AS1 (clr,%A0) CR_TAB
4077 AS1 (lsl,%B0) CR_TAB
4078 AS1 (rol,%A0) CR_TAB
/* Generic fallback: lsr/ror loop, two words per iteration.  */
4083 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4085 insn, operands, len, 2);
4089 /* 32bit logic shift right ((unsigned int)x >> i)
   NOTE(review): some source lines are elided in this extract.  */
4092 lshrsi3_out (rtx insn, rtx operands[], int *len)
4094 if (GET_CODE (operands[2]) == CONST_INT)
4102 switch (INTVAL (operands[2]))
4105 if (INTVAL (operands[2]) < 32)
/* Shift by >= 32: clear all four bytes.  */
4109 return *len = 3, (AS1 (clr,%D0) CR_TAB
4110 AS1 (clr,%C0) CR_TAB
4111 AS2 (movw,%A0,%C0));
4113 return (AS1 (clr,%D0) CR_TAB
4114 AS1 (clr,%C0) CR_TAB
4115 AS1 (clr,%B0) CR_TAB
/* >> 8: move each byte down one position; order depends on overlap.  */
4120 int reg0 = true_regnum (operands[0]);
4121 int reg1 = true_regnum (operands[1]);
4124 return (AS2 (mov,%A0,%B1) CR_TAB
4125 AS2 (mov,%B0,%C1) CR_TAB
4126 AS2 (mov,%C0,%D1) CR_TAB
4129 return (AS1 (clr,%D0) CR_TAB
4130 AS2 (mov,%C0,%D1) CR_TAB
4131 AS2 (mov,%B0,%C1) CR_TAB
/* >> 16: high word moves to the low word, top cleared.  */
4137 int reg0 = true_regnum (operands[0]);
4138 int reg1 = true_regnum (operands[1]);
4140 if (reg0 == reg1 + 2)
4141 return *len = 2, (AS1 (clr,%C0) CR_TAB
4144 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4145 AS1 (clr,%C0) CR_TAB
4148 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4149 AS2 (mov,%A0,%C1) CR_TAB
4150 AS1 (clr,%C0) CR_TAB
/* >> 24: only the top byte survives, in the low position.  */
4155 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4156 AS1 (clr,%B0) CR_TAB
4157 AS1 (clr,%C0) CR_TAB
/* >> 31: result is just bit 31, tested with sbrc.  */
4162 return (AS1 (clr,%A0) CR_TAB
4163 AS2 (sbrc,%D0,7) CR_TAB
4164 AS1 (inc,%A0) CR_TAB
4165 AS1 (clr,%B0) CR_TAB
4166 AS1 (clr,%C0) CR_TAB
/* Generic fallback: lsr/ror chain over four bytes in a loop.  */
4171 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4172 AS1 (ror,%C0) CR_TAB
4173 AS1 (ror,%B0) CR_TAB
4175 insn, operands, len, 4);
4179 /* Modifies the length assigned to instruction INSN
4180 LEN is the initially computed length of the insn.
   Re-runs the relevant output routine with a length pointer (no code
   emitted) to get the exact word count for moves, tests, and/or/shift
   patterns, or computes it directly for AND/IOR with constant masks.
   NOTE(review): some source lines are elided in this extract.  */
4183 adjust_insn_length (rtx insn, int len)
4185 rtx patt = PATTERN (insn);
/* Simple SET: defer to the mov/test output routines for the length.  */
4188 if (GET_CODE (patt) == SET)
4191 op[1] = SET_SRC (patt);
4192 op[0] = SET_DEST (patt);
4193 if (general_operand (op[1], VOIDmode)
4194 && general_operand (op[0], VOIDmode))
4196 switch (GET_MODE (op[0]))
4199 output_movqi (insn, op, &len);
4202 output_movhi (insn, op, &len);
4206 output_movsisf (insn, op, &len);
4212 else if (op[0] == cc0_rtx && REG_P (op[1]))
4214 switch (GET_MODE (op[1]))
4216 case HImode: out_tsthi (insn,&len); break;
4217 case SImode: out_tstsi (insn,&len); break;
/* AND with constant: one instruction per byte whose mask isn't 0xff.  */
4221 else if (GET_CODE (op[1]) == AND)
4223 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4225 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4226 if (GET_MODE (op[1]) == SImode)
4227 len = (((mask & 0xff) != 0xff)
4228 + ((mask & 0xff00) != 0xff00)
4229 + ((mask & 0xff0000L) != 0xff0000L)
4230 + ((mask & 0xff000000L) != 0xff000000L));
4231 else if (GET_MODE (op[1]) == HImode)
4232 len = (((mask & 0xff) != 0xff)
4233 + ((mask & 0xff00) != 0xff00));
/* IOR with constant: one instruction per byte whose mask isn't 0.  */
4236 else if (GET_CODE (op[1]) == IOR)
4238 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4240 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4241 if (GET_MODE (op[1]) == SImode)
4242 len = (((mask & 0xff) != 0)
4243 + ((mask & 0xff00) != 0)
4244 + ((mask & 0xff0000L) != 0)
4245 + ((mask & 0xff000000L) != 0));
4246 else if (GET_MODE (op[1]) == HImode)
4247 len = (((mask & 0xff) != 0)
4248 + ((mask & 0xff00) != 0));
/* PARALLEL patterns: reload constants and shift insns.  */
4252 set = single_set (insn);
4257 op[1] = SET_SRC (set);
4258 op[0] = SET_DEST (set);
4260 if (GET_CODE (patt) == PARALLEL
4261 && general_operand (op[1], VOIDmode)
4262 && general_operand (op[0], VOIDmode))
4264 if (XVECLEN (patt, 0) == 2)
4265 op[2] = XVECEXP (patt, 0, 1);
4267 switch (GET_MODE (op[0]))
4273 output_reload_inhi (insn, op, &len);
4277 output_reload_insisf (insn, op, &len);
/* Shift insns: dispatch on shift kind and mode to the _out routines,
   which fill in the length without emitting code.  */
4283 else if (GET_CODE (op[1]) == ASHIFT
4284 || GET_CODE (op[1]) == ASHIFTRT
4285 || GET_CODE (op[1]) == LSHIFTRT)
4289 ops[1] = XEXP (op[1],0);
4290 ops[2] = XEXP (op[1],1);
4291 switch (GET_CODE (op[1]))
4294 switch (GET_MODE (op[0]))
4296 case QImode: ashlqi3_out (insn,ops,&len); break;
4297 case HImode: ashlhi3_out (insn,ops,&len); break;
4298 case SImode: ashlsi3_out (insn,ops,&len); break;
4303 switch (GET_MODE (op[0]))
4305 case QImode: ashrqi3_out (insn,ops,&len); break;
4306 case HImode: ashrhi3_out (insn,ops,&len); break;
4307 case SImode: ashrsi3_out (insn,ops,&len); break;
4312 switch (GET_MODE (op[0]))
4314 case QImode: lshrqi3_out (insn,ops,&len); break;
4315 case HImode: lshrhi3_out (insn,ops,&len); break;
4316 case SImode: lshrsi3_out (insn,ops,&len); break;
4328 /* Return nonzero if register REG dead after INSN.
   Quick check via the insn's own death/set notes first, then the
   forward scan in _reg_unused_after for the general case.  */
4331 reg_unused_after (rtx insn, rtx reg)
4333 return (dead_or_set_p (insn, reg)
4334 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4337 /* Return nonzero if REG is not used after INSN.
4338 We assume REG is a reload reg, and therefore does
4339 not live past labels. It may live past calls or jumps though.
   Scans forward from INSN; conservative (returns 0) wherever a use
   cannot be ruled out.  NOTE(review): some source lines are elided
   in this extract.  */
4342 _reg_unused_after (rtx insn, rtx reg)
4347 /* If the reg is set by this instruction, then it is safe for our
4348 case. Disregard the case where this is a store to memory, since
4349 we are checking a register used in the store address. */
4350 set = single_set (insn);
4351 if (set && GET_CODE (SET_DEST (set)) != MEM
4352 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
/* Walk the following insns looking for a use or a kill of REG.  */
4355 while ((insn = NEXT_INSN (insn)))
4358 code = GET_CODE (insn);
4361 /* If this is a label that existed before reload, then the register
4362 if dead here. However, if this is a label added by reorg, then
4363 the register may still be live here. We can't tell the difference,
4364 so we just ignore labels completely. */
4365 if (code == CODE_LABEL)
4373 if (code == JUMP_INSN)
4376 /* If this is a sequence, we must handle them all at once.
4377 We could have for instance a call that sets the target register,
4378 and an insn in a delay slot that uses the register. In this case,
4379 we must return 0. */
4380 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4385 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4387 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4388 rtx set = single_set (this_insn);
4390 if (GET_CODE (this_insn) == CALL_INSN)
4392 else if (GET_CODE (this_insn) == JUMP_INSN)
4394 if (INSN_ANNULLED_BRANCH_P (this_insn))
4399 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4401 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4403 if (GET_CODE (SET_DEST (set)) != MEM)
4409 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4414 else if (code == JUMP_INSN)
/* Calls: REG is dead if it's call-clobbered and not in the call's USEs.  */
4418 if (code == CALL_INSN)
4421 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4422 if (GET_CODE (XEXP (tem, 0)) == USE
4423 && REG_P (XEXP (XEXP (tem, 0), 0))
4424 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4426 if (call_used_regs[REGNO (reg)])
/* Ordinary insn: a use in the source kills the answer; a full set
   of REG (not a store) means it's dead beyond this point.  */
4430 set = single_set (insn);
4432 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4434 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4435 return GET_CODE (SET_DEST (set)) != MEM;
4436 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4442 /* Target hook for assembling integer objects. The AVR version needs
4443 special handling for references to certain labels.
   Function addresses are emitted as `.word pm(sym)' so the linker
   produces a word (program-memory) address rather than a byte
   address; everything else uses the default hook.  */
4446 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4448 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4449 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4450 || GET_CODE (x) == LABEL_REF))
4452 fputs ("\t.word\tpm(", asm_out_file);
4453 output_addr_const (asm_out_file, x);
4454 fputs (")\n", asm_out_file);
4457 return default_assemble_integer (x, size, aligned_p);
4460 /* The routine used to output NUL terminated strings. We use a special
4461 version of this for most svr4 targets because doing so makes the
4462 generated assembly code more compact (and thus faster to assemble)
4463 as well as more readable, especially for targets like the i386
4464 (where the only alternative is to output character sequences as
4465 comma separated lists of numbers). */
4468 gas_output_limited_string(FILE *file, const char *str)
4470 const unsigned char *_limited_str = (const unsigned char *) str;
4472 fprintf (file, "%s\"", STRING_ASM_OP);
4473 for (; (ch = *_limited_str); _limited_str++)
/* ESCAPES maps each byte to: print as-is, print as \NNN octal,
   or print as a short backslash escape.  */
4476 switch (escape = ESCAPES[ch])
4482 fprintf (file, "\\%03o", ch);
4486 putc (escape, file);
4490 fprintf (file, "\"\n");
4493 /* The routine used to output sequences of byte values. We use a special
4494 version of this for most svr4 targets because doing so makes the
4495 generated assembly code more compact (and thus faster to assemble)
4496 as well as more readable. Note that if we find subparts of the
4497 character sequence which end with NUL (and which are shorter than
4498 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4501 gas_output_ascii(FILE *file, const char *str, size_t length)
4503 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4504 const unsigned char *limit = _ascii_bytes + length;
4505 unsigned bytes_in_chunk = 0;
4506 for (; _ascii_bytes < limit; _ascii_bytes++)
4508 const unsigned char *p;
/* Close overly long .ascii chunks to keep assembler lines short.  */
4509 if (bytes_in_chunk >= 60)
4511 fprintf (file, "\"\n");
/* Look ahead for a NUL-terminated run short enough for .string.  */
4514 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4516 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4518 if (bytes_in_chunk > 0)
4520 fprintf (file, "\"\n");
4523 gas_output_limited_string (file, (const char*)_ascii_bytes);
4530 if (bytes_in_chunk == 0)
4531 fprintf (file, "\t.ascii\t\"");
4532 switch (escape = ESCAPES[ch = *_ascii_bytes])
4539 fprintf (file, "\\%03o", ch);
4540 bytes_in_chunk += 4;
4544 putc (escape, file);
4545 bytes_in_chunk += 2;
4550 if (bytes_in_chunk > 0)
4551 fprintf (file, "\"\n");
4554 /* Return value is nonzero if pseudos that have been
4555 assigned to registers of class CLASS would likely be spilled
4556 because registers of CLASS are needed for spill registers.
   Only the two large classes (ALL_REGS, ADDW_REGS) are considered
   safe; every smaller class is reported as likely to spill.  */
4559 class_likely_spilled_p (int c)
4561 return (c != ALL_REGS && c != ADDW_REGS);
4564 /* Valid attributes:
4565 progmem - put data to program memory;
4566 signal - make a function to be hardware interrupt. After function
4567 prologue interrupts are disabled;
4568 interrupt - make a function to be hardware interrupt. After function
4569 prologue interrupts are enabled;
4570 naked - don't generate function prologue/epilogue and `ret' command.
4572 Only `progmem' attribute valid for type. */
4574 const struct attribute_spec avr_attribute_table[] =
4576 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4577 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4578 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4579 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4580 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4581 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
/* Table is terminated by a NULL name entry, as the hook requires.  */
4582 { NULL, 0, 0, false, false, false, NULL }
4585 /* Handle a "progmem" attribute; arguments as in
4586 struct attribute_spec.handler.
   Accepted on initialized static/external variables; on a TYPE_DECL
   the attribute is migrated onto the type for GCC 3.0 compatibility.
   Everything else gets a warning and the attribute dropped.  */
4588 avr_handle_progmem_attribute (tree *node, tree name,
4589 tree args ATTRIBUTE_UNUSED,
4590 int flags ATTRIBUTE_UNUSED,
4595 if (TREE_CODE (*node) == TYPE_DECL)
4597 /* This is really a decl attribute, not a type attribute,
4598 but try to handle it for GCC 3.0 backwards compatibility. */
4600 tree type = TREE_TYPE (*node);
4601 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4602 tree newtype = build_type_attribute_variant (type, attr);
4604 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4605 TREE_TYPE (*node) = newtype;
4606 *no_add_attrs = true;
4608 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
/* progmem data is copied from flash at link-set addresses, so an
   uninitialized object in program memory makes no sense.  */
4610 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4612 warning (0, "only initialized variables can be placed into "
4613 "program memory area");
4614 *no_add_attrs = true;
4619 warning (OPT_Wattributes, "%qs attribute ignored",
4620 IDENTIFIER_POINTER (name));
4621 *no_add_attrs = true;
4628 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4629 struct attribute_spec.handler.
   Rejects non-function decls; additionally warns when a handler
   carrying "signal"/"interrupt" is not named __vector_NN, since a
   misspelled vector name silently fails to hook the interrupt.  */
4632 avr_handle_fndecl_attribute (tree *node, tree name,
4633 tree args ATTRIBUTE_UNUSED,
4634 int flags ATTRIBUTE_UNUSED,
4637 if (TREE_CODE (*node) != FUNCTION_DECL)
4639 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4640 IDENTIFIER_POINTER (name));
4641 *no_add_attrs = true;
4645 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4646 const char *attr = IDENTIFIER_POINTER (name);
4648 /* If the function has the 'signal' or 'interrupt' attribute, test to
4649 make sure that the name of the function is "__vector_NN" so as to
4650 catch when the user misspells the interrupt vector name. */
4652 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4654 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4656 warning (0, "%qs appears to be a misspelled interrupt handler",
4660 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4662 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4664 warning (0, "%qs appears to be a misspelled signal handler",
/* Handle an attribute requiring a FUNCTION_TYPE ("naked"/"OS_task");
   arguments as in struct attribute_spec.handler.  Warn and drop the
   attribute if applied to anything that is not a function type.  */
4674 avr_handle_fntype_attribute (tree *node, tree name,
4675 tree args ATTRIBUTE_UNUSED,
4676 int flags ATTRIBUTE_UNUSED,
4679 if (TREE_CODE (*node) != FUNCTION_TYPE)
4681 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4682 IDENTIFIER_POINTER (name));
4683 *no_add_attrs = true;
4689 /* Look for attribute `progmem' in DECL
4690 if found return 1, otherwise 0.
   Checks the decl's own attribute list, then strips array dimensions
   and checks the element type's attributes, so arrays of progmem
   types are detected too.  NOTE(review): some source lines are elided
   in this extract.  */
4693 avr_progmem_p (tree decl, tree attributes)
4697 if (TREE_CODE (decl) != VAR_DECL)
4701 != lookup_attribute ("progmem", attributes))
4707 while (TREE_CODE (a) == ARRAY_TYPE);
4709 if (a == error_mark_node)
4712 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4718 /* Add the section attribute if the variable is in progmem.
   Implements the insert_attributes hook: progmem variables get an
   implicit section(".progmem.data") attribute and are forced
   read-only so they land in flash.  */
4721 avr_insert_attributes (tree node, tree *attributes)
4723 if (TREE_CODE (node) == VAR_DECL
4724 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4725 && avr_progmem_p (node, *attributes))
4727 static const char dsec[] = ".progmem.data";
4728 *attributes = tree_cons (get_identifier ("section"),
4729 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4732 /* ??? This seems sketchy. Why can't the user declare the
4733 thing const in the first place? */
4734 TREE_READONLY (node) = 1;
4738 /* A get_unnamed_section callback for switching to progmem_section. */
4741 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
/* Emit the .section directive.  On non-MEGA parts the section is marked
   executable ("ax") because jump-table entries are rjmp instructions;
   on MEGA parts plain word entries suffice ("a").  */
4743 fprintf (asm_out_file,
4744 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4745 AVR_MEGA ? "a" : "ax");
4746 /* Should already be aligned, this is just to be safe if it isn't. */
4747 fprintf (asm_out_file, "\t.p2align 1\n");
4750 /* Implement TARGET_ASM_INIT_SECTIONS. */
4753 avr_asm_init_sections (void)
/* Create the progmem section; on non-MEGA parts it is CODE because the
   jump tables stored there contain rjmp instructions.  */
4755 progmem_section = get_unnamed_section (AVR_MEGA ? 0 : SECTION_CODE,
4756 avr_output_progmem_section_asm_op,
/* AVR has no separate read-only data segment in SRAM; constants live
   in .data.  */
4758 readonly_data_section = data_section;
/* Compute section flags for DECL placed in section NAME; extends the
   default flags with .noinit handling.  NOTE(review): return type,
   braces and the final return are missing from this extract.  */
4762 avr_section_type_flags (tree decl, const char *name, int reloc)
4764 unsigned int flags = default_section_type_flags (decl, name, reloc);
/* Variables in .noinit* sections must be uninitialized; mark them BSS
   (@nobits) so no image data is emitted for them.  */
4766 if (strncmp (name, ".noinit", 7) == 0)
4768 if (decl && TREE_CODE (decl) == VAR_DECL
4769 && DECL_INITIAL (decl) == NULL_TREE)
4770 flags |= SECTION_BSS; /* @nobits */
/* An initialized variable in .noinit is a user error -- warn.  */
4772 warning (0, "only uninitialized variables can be placed in the "
4779 /* Outputs some appropriate text to go at the start of an assembler
/* Implements the asm-file-start hook: emits common symbol definitions
   (__SREG__, __SP_H__/__SP_L__, __tmp_reg__, __zero_reg__) used by the
   rest of the generated assembly.  */
4783 avr_file_start (void)
/* Some MCUs have assembler support only; reject them for compilation.  */
4786 error ("MCU %qs supported for assembler only", avr_mcu_name);
4788 default_file_start ();
4790 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4791 fputs ("__SREG__ = 0x3f\n"
4793 "__SP_L__ = 0x3d\n", asm_out_file);
4795 fputs ("__tmp_reg__ = 0\n"
4796 "__zero_reg__ = 1\n", asm_out_file);
4798 /* FIXME: output these only if there is anything in the .data / .bss
4799 sections - some code size could be saved by not linking in the
4800 initialization code from libgcc if one or both sections are empty. */
4801 fputs ("\t.global __do_copy_data\n", asm_out_file);
4802 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4805 /* Outputs to the stdio stream FILE some
4806 appropriate text to go at the end of an assembler file. */
4813 /* Choose the order in which to allocate hard registers for
4814 pseudo-registers local to a basic block.
4816 Store the desired register order in the array `reg_alloc_order'.
4817 Element 0 should be the register to allocate first; element 1, the
4818 next register; and so on. */
4821 order_regs_for_local_alloc (void)
/* Three alternative allocation orders, selected by the -morder1/-morder2
   target flags; order_0 is the default.  NOTE(review): most initializer
   lines of each table are missing from this extract.  */
4824 static const int order_0[] = {
4832 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4836 static const int order_1[] = {
4844 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4848 static const int order_2[] = {
4857 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Pick the table matching the target flags and copy it out.  */
4862 const int *order = (TARGET_ORDER_1 ? order_1 :
4863 TARGET_ORDER_2 ? order_2 :
4865 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4866 reg_alloc_order[i] = order[i];
4870 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4871 cost of an RTX operand given its context. X is the rtx of the
4872 operand, MODE is its mode, and OUTER is the rtx_code of this
4873 operand's parent operator. */
4876 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4878 enum rtx_code code = GET_CODE (x);
/* Non-register operands of MODE bytes cost one insn per byte to load.  */
4889 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* NOTE(review): the line below belongs to the wrapper that recurses into
   avr_rtx_costs; its enclosing definition is missing from this extract.  */
4896 avr_rtx_costs (x, code, outer, &total);
4900 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4901 is to be calculated. Return true if the complete cost has been
4902 computed, and false if subexpressions should be scanned. In either
4903 case, *TOTAL contains the cost result. */
/* NOTE(review): this extract is missing most structural lines of the big
   switch (case labels, braces, break/return statements).  Only comments
   have been added; every surviving code line is byte-identical.  */
4906 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4908 enum machine_mode mode = GET_MODE (x);
4915 /* Immediate constants are as cheap as registers. */
/* Generic cost: one insn per byte of the mode.  */
4923 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4931 *total = COSTS_N_INSNS (1);
4935 *total = COSTS_N_INSNS (3);
4939 *total = COSTS_N_INSNS (7);
4945 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4953 *total = COSTS_N_INSNS (1);
4959 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Unary ops sized by the full mode, plus the operand's own cost.  */
4963 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4964 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Extensions cost one insn per byte of growth from source to result.  */
4968 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4969 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4970 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4974 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4975 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4976 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Addition: a constant in -63..63 is cheaper -- presumably the
   adiw/sbiw immediate range; confirm against avr.md.  */
4983 *total = COSTS_N_INSNS (1);
4984 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4985 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4989 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4991 *total = COSTS_N_INSNS (2);
4992 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4994 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4995 *total = COSTS_N_INSNS (1);
4997 *total = COSTS_N_INSNS (2);
5001 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5003 *total = COSTS_N_INSNS (4);
5004 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5006 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5007 *total = COSTS_N_INSNS (1);
5009 *total = COSTS_N_INSNS (4);
5015 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Two-operand logical/arithmetic ops: one insn per byte plus operand
   costs (constant second operands are folded into the insn).  */
5021 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5022 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5023 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5024 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5028 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5029 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5030 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Multiplication: cost depends on hardware MUL availability; the
   guarding conditions are among the missing lines.  */
5038 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5039 else if (optimize_size)
5040 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5047 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5048 else if (optimize_size)
5049 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5057 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Division/modulo are library calls; cost the call overhead.  */
5066 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5069 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5070 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Shifts: constant counts have tabulated per-count costs; variable
   counts use large worst-case loop costs (17/41/113 by mode).  */
5077 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5079 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5080 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5084 val = INTVAL (XEXP (x, 1));
5086 *total = COSTS_N_INSNS (3);
5087 else if (val >= 0 && val <= 7)
5088 *total = COSTS_N_INSNS (val);
5090 *total = COSTS_N_INSNS (1);
5095 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5097 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5098 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5101 switch (INTVAL (XEXP (x, 1)))
5108 *total = COSTS_N_INSNS (2);
5111 *total = COSTS_N_INSNS (3);
5117 *total = COSTS_N_INSNS (4);
5122 *total = COSTS_N_INSNS (5);
5125 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5128 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5131 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5134 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5135 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5140 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5142 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5143 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5146 switch (INTVAL (XEXP (x, 1)))
5152 *total = COSTS_N_INSNS (3);
5157 *total = COSTS_N_INSNS (4);
5160 *total = COSTS_N_INSNS (6);
5163 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5166 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5167 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Arithmetic right shift section (same structure as above).  */
5174 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5181 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5183 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5184 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5188 val = INTVAL (XEXP (x, 1));
5190 *total = COSTS_N_INSNS (4);
5192 *total = COSTS_N_INSNS (2);
5193 else if (val >= 0 && val <= 7)
5194 *total = COSTS_N_INSNS (val);
5196 *total = COSTS_N_INSNS (1);
5201 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5203 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5204 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5207 switch (INTVAL (XEXP (x, 1)))
5213 *total = COSTS_N_INSNS (2);
5216 *total = COSTS_N_INSNS (3);
5222 *total = COSTS_N_INSNS (4);
5226 *total = COSTS_N_INSNS (5);
5229 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5232 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5236 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5239 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5240 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5245 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5247 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5248 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5251 switch (INTVAL (XEXP (x, 1)))
5257 *total = COSTS_N_INSNS (4);
5262 *total = COSTS_N_INSNS (6);
5265 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5268 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5271 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5272 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Logical right shift section (same structure as above).  */
5279 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5286 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5288 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5289 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5293 val = INTVAL (XEXP (x, 1));
5295 *total = COSTS_N_INSNS (3);
5296 else if (val >= 0 && val <= 7)
5297 *total = COSTS_N_INSNS (val);
5299 *total = COSTS_N_INSNS (1);
5304 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5306 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5307 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5310 switch (INTVAL (XEXP (x, 1)))
5317 *total = COSTS_N_INSNS (2);
5320 *total = COSTS_N_INSNS (3);
5325 *total = COSTS_N_INSNS (4);
5329 *total = COSTS_N_INSNS (5);
5335 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5338 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5342 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5345 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5346 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5351 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5353 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5354 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5357 switch (INTVAL (XEXP (x, 1)))
5363 *total = COSTS_N_INSNS (4);
5366 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5371 *total = COSTS_N_INSNS (4);
5374 *total = COSTS_N_INSNS (6);
5377 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5378 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Comparison costs, keyed by the mode of the first operand; a nonzero
   constant second operand adds insns for the wider modes.  */
5385 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5389 switch (GET_MODE (XEXP (x, 0)))
5392 *total = COSTS_N_INSNS (1);
5393 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5394 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5398 *total = COSTS_N_INSNS (2);
5399 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5400 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5401 else if (INTVAL (XEXP (x, 1)) != 0)
5402 *total += COSTS_N_INSNS (1);
5406 *total = COSTS_N_INSNS (4);
5407 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5408 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5409 else if (INTVAL (XEXP (x, 1)) != 0)
5410 *total += COSTS_N_INSNS (3);
5416 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5425 /* Calculate the cost of a memory address. */
5428 avr_address_cost (rtx x)
/* Base-plus-large-displacement addresses (offset >= 61) are expensive:
   they exceed the reach of displacement addressing.  */
5430 if (GET_CODE (x) == PLUS
5431 && GET_CODE (XEXP (x,1)) == CONST_INT
5432 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5433 && INTVAL (XEXP (x,1)) >= 61)
/* Constant addresses: I/O-space addresses are cheaper (in/out insns).  */
5435 if (CONSTANT_ADDRESS_P (x))
5437 if (avr_io_address_p (x, 1))
5444 /* Test for extra memory constraint 'Q'.
5445 It's a memory address based on Y or Z pointer with valid displacement. */
5448 extra_constraint_Q (rtx x)
/* Accept (mem (plus (reg) (const_int d))) with d within the ld/ldd
   displacement range for the access mode.  */
5450 if (GET_CODE (XEXP (x,0)) == PLUS
5451 && REG_P (XEXP (XEXP (x,0), 0))
5452 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5453 && (INTVAL (XEXP (XEXP (x,0), 1))
5454 <= MAX_LD_OFFSET (GET_MODE (x))))
5456 rtx xx = XEXP (XEXP (x,0), 0);
5457 int regno = REGNO (xx);
/* Diagnostic dump under -mdeb-style debugging flag.  */
5458 if (TARGET_ALL_DEBUG)
5460 fprintf (stderr, ("extra_constraint:\n"
5461 "reload_completed: %d\n"
5462 "reload_in_progress: %d\n"),
5463 reload_completed, reload_in_progress);
5466 if (regno >= FIRST_PSEUDO_REGISTER)
5467 return 1; /* allocate pseudos */
5468 else if (regno == REG_Z || regno == REG_Y)
5469 return 1; /* strictly check */
5470 else if (xx == frame_pointer_rtx
5471 || xx == arg_pointer_rtx)
5472 return 1; /* XXX frame & arg pointer checks */
5477 /* Convert condition code CONDITION to the valid AVR condition code. */
/* NOTE(review): the body of this function is entirely missing from this
   extract; only the signature survives.  */
5480 avr_normalize_condition (RTX_CODE condition)
5497 /* This function optimizes conditional jumps. */
/* Machine-dependent reorg pass: walks all insns looking for cc0 compare
   insns and canonicalizes them (swapping operands / adjusting constants)
   so the following conditional branch can use a cheaper condition.
   NOTE(review): the function header, braces and several statements are
   missing from this extract; code left byte-identical.  */
5504 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Only single-set INSN/CALL_INSN/JUMP_INSNs are candidates.  */
5506 if (! (GET_CODE (insn) == INSN
5507 || GET_CODE (insn) == CALL_INSN
5508 || GET_CODE (insn) == JUMP_INSN)
5509 || !single_set (insn))
5512 pattern = PATTERN (insn);
5514 if (GET_CODE (pattern) == PARALLEL)
5515 pattern = XVECEXP (pattern, 0, 0);
5516 if (GET_CODE (pattern) == SET
5517 && SET_DEST (pattern) == cc0_rtx
5518 && compare_diff_p (insn))
5520 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5522 /* Now we work under compare insn. */
5524 pattern = SET_SRC (pattern);
/* Case 1: reg-reg compare -- swap operands and invert the condition
   on the following branch.  */
5525 if (true_regnum (XEXP (pattern,0)) >= 0
5526 && true_regnum (XEXP (pattern,1)) >= 0 )
5528 rtx x = XEXP (pattern,0);
5529 rtx next = next_real_insn (insn);
5530 rtx pat = PATTERN (next);
5531 rtx src = SET_SRC (pat);
5532 rtx t = XEXP (src,0);
5533 PUT_CODE (t, swap_condition (GET_CODE (t)));
5534 XEXP (pattern,0) = XEXP (pattern,1);
5535 XEXP (pattern,1) = x;
/* Force re-recognition after rewriting the pattern.  */
5536 INSN_CODE (next) = -1;
/* Case 2: reg-const compare -- bump the constant by one when that
   lets the condition be normalized to a cheaper AVR condition.  */
5538 else if (true_regnum (XEXP (pattern,0)) >= 0
5539 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5541 rtx x = XEXP (pattern,1);
5542 rtx next = next_real_insn (insn);
5543 rtx pat = PATTERN (next);
5544 rtx src = SET_SRC (pat);
5545 rtx t = XEXP (src,0);
5546 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5548 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5550 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5551 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5552 INSN_CODE (next) = -1;
5553 INSN_CODE (insn) = -1;
/* Case 3: a tst insn -- negate the tested value and swap the branch
   condition accordingly.  */
5557 else if (true_regnum (SET_SRC (pattern)) >= 0)
5559 /* This is a tst insn */
5560 rtx next = next_real_insn (insn);
5561 rtx pat = PATTERN (next);
5562 rtx src = SET_SRC (pat);
5563 rtx t = XEXP (src,0);
5565 PUT_CODE (t, swap_condition (GET_CODE (t)));
5566 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5568 INSN_CODE (next) = -1;
5569 INSN_CODE (insn) = -1;
5575 /* Returns register number for function return value.*/
/* NOTE(review): the body (presumably a single return of RET_REGISTER)
   is missing from this extract.  */
5578 avr_ret_register (void)
5583 /* Create an RTX representing the place where a
5584 library function returns a value of mode MODE. */
5587 avr_libcall_value (enum machine_mode mode)
5589 int offs = GET_MODE_SIZE (mode);
/* Values are returned with their last byte in RET_REGISTER+1, so the
   start register is RET_REGISTER + 2 - size.  */
5592 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5595 /* Create an RTX representing the place where a
5596 function returns a value of data type VALTYPE. */
5599 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
/* Non-BLKmode types are handled like libcall return values.  */
5603 if (TYPE_MODE (type) != BLKmode)
5604 return avr_libcall_value (TYPE_MODE (type));
5606 offs = int_size_in_bytes (type);
/* Round odd BLKmode sizes up to the next power-of-two register count
   (4 or 8 bytes) to pick the starting register.  */
5609 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5610 offs = GET_MODE_SIZE (SImode);
5611 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5612 offs = GET_MODE_SIZE (DImode);
5614 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5617 /* Places additional restrictions on the register class to
5618 use when it is necessary to copy value X into a register
/* NOTE(review): the body is missing from this extract; presumably it
   just returns CLASS.  (`class` is a C++ keyword -- this file predates
   the C++-compatible GCC sources.)  */
5622 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
/* Return nonzero if X is a hard register (after resolving subregs)
   belonging to CLASS.  NOTE(review): return type and the pseudo-register
   early-out lines are missing from this extract.  */
5628 test_hard_reg_class (enum reg_class class, rtx x)
5630 int regno = true_regnum (x);
5634 if (TEST_HARD_REG_CLASS (class, regno))
/* Return nonzero if the jump INSN to DEST skips over exactly one insn,
   i.e. the branch distance equals the insn's own length plus one word.
   Used to decide whether a short skip instruction suffices.  */
5642 jump_over_one_insn_p (rtx insn, rtx dest)
/* DEST may be a LABEL_REF or a bare label; get its insn uid.  */
5644 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5647 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5648 int dest_addr = INSN_ADDRESSES (uid);
5649 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5652 /* Returns 1 if a value of mode MODE can be stored starting with hard
5653 register number REGNO. On the enhanced core, anything larger than
5654 1 byte must start in even numbered register for "movw" to work
5655 (this way we don't have to check for odd registers everywhere). */
5658 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5660 /* Disallow QImode in stack pointer regs. */
5661 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5664 /* The only thing that can go into registers r28:r29 is a Pmode. */
5665 if (regno == REG_Y && mode == Pmode)
5668 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5669 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5675 /* Modes larger than QImode occupy consecutive registers. */
5676 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5679 /* All modes larger than QImode should start in an even register. */
5680 return !(regno & 1);
5683 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5684 (1 or 2). Used for lds/sts -> in/out optimization. Add 0x20 to SIZE
5685 to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
5688 avr_io_address_p (rtx x, int size)
/* I/O space is memory-mapped at 0x20..0x5f; only optimize when -O.  */
5690 return (optimize > 0 && GET_CODE (x) == CONST_INT
5691 && INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size);
/* Output reload of an HImode constant into a register pair using the
   scratch register %2.  Returns the assembler template; special-cases
   constants with a zero low/high byte or equal bytes to save an ldi.
   NOTE(review): the *len bookkeeping lines are missing from this
   extract.  */
5695 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5701 if (GET_CODE (operands[1]) == CONST_INT)
5703 int val = INTVAL (operands[1]);
/* Low byte zero: clear low byte from __zero_reg__, load only hi8.  */
5704 if ((val & 0xff) == 0)
5707 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5708 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: load only lo8, clear high byte.  */
5711 else if ((val & 0xff00) == 0)
5714 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5715 AS2 (mov,%A0,%2) CR_TAB
5716 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one ldi serves both halves.  */
5718 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5721 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5722 AS2 (mov,%A0,%2) CR_TAB
/* General case: two ldi/mov pairs through the scratch register.  */
5727 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5728 AS2 (mov,%A0,%2) CR_TAB
5729 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Output reload of an SImode/SFmode value into registers, one byte at a
   time through scratch register %2, skipping the ldi for any constant
   byte that is zero (moved from __zero_reg__ instead).
   NOTE(review): several lines (length-only early exit, non-constant
   paths) are missing from this extract.  */
5735 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5737 rtx src = operands[1];
5738 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: 4 mov insns plus one ldi per nonzero byte of the constant.  */
5743 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5744 + ((INTVAL (src) & 0xff00) != 0)
5745 + ((INTVAL (src) & 0xff0000) != 0)
5746 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte 0 (A).  */
5753 if (cnst && ((INTVAL (src) & 0xff) == 0))
5754 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5757 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5758 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte 1 (B).  */
5760 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5761 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5764 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5765 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte 2 (C).  */
5767 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5768 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5771 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5772 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte 3 (D).  */
5774 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5775 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5778 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5779 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emit a "bld %<reg-byte>,<bit>" insn for bit number BIT_NR within the
   multi-byte operand 0, by patching the byte letter (A..) and bit digit
   into a static template.  */
5785 avr_output_bld (rtx operands[], int bit_nr)
5787 static char s[] = "bld %A0,0";
/* s[5] is the byte selector, s[8] the bit within that byte.  */
5789 s[5] = 'A' + (bit_nr >> 3);
5790 s[8] = '0' + (bit_nr & 7);
5791 output_asm_insn (s, operands);
/* Output one jump-table element for label number VALUE into the progmem
   section: a pm() word on devices that index tables by word, otherwise
   an rjmp (the selecting condition lines are missing from this extract).  */
5795 avr_output_addr_vec_elt (FILE *stream, int value)
5797 switch_to_section (progmem_section);
5799 fprintf (stream, "\t.word pm(.L%d)\n", value);
5801 fprintf (stream, "\trjmp .L%d\n", value);
5804 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5805 registers (for a define_peephole2) in the current function. */
5808 avr_peep2_scratch_safe (rtx scratch)
/* In a leaf interrupt/signal handler only registers that are already
   saved (live somewhere in the function) may be clobbered as scratch.  */
5810 if ((interrupt_function_p (current_function_decl)
5811 || signal_function_p (current_function_decl))
5812 && leaf_function_p ())
5814 int first_reg = true_regnum (scratch);
5815 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
/* Every register covered by SCRATCH must be live somewhere.  */
5818 for (reg = first_reg; reg <= last_reg; reg++)
5820 if (!df_regs_ever_live_p (reg))
5827 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5828 or memory location in the I/O space (QImode only).
5830 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5831 Operand 1: register operand to test, or CONST_INT memory address.
5832 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5833 Operand 3: label to jump to if the test is true. */
5836 avr_out_sbxx_branch (rtx insn, rtx operands[])
5838 enum rtx_code comp = GET_CODE (operands[0]);
/* A long jump cannot be skipped by sbic/sbis, so reverse the sense and
   skip over the jump instead.  */
5839 int long_jump = (get_attr_length (insn) >= 4);
5840 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5844 else if (comp == LT)
5848 comp = reverse_condition (comp);
/* I/O address operand.  */
5850 if (GET_CODE (operands[1]) == CONST_INT)
/* Addresses below 0x40 (lower I/O half) support sbis/sbic directly.  */
5852 if (INTVAL (operands[1]) < 0x40)
5855 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5857 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
/* Upper I/O half: read into __tmp_reg__ and use sbrs/sbrc.  */
5861 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5863 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5865 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5868 else /* GET_CODE (operands[1]) == REG */
5870 if (GET_MODE (operands[1]) == QImode)
5873 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5875 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5877 else /* HImode or SImode */
/* Patch byte letter and bit digit into the sbrc/sbrs template; operand
   2 is a single-bit mask here, converted back to a bit number.  */
5879 static char buf[] = "sbrc %A1,0";
5880 int bit_nr = exact_log2 (INTVAL (operands[2])
5881 & GET_MODE_MASK (GET_MODE (operands[1])));
5883 buf[3] = (comp == EQ) ? 's' : 'c';
5884 buf[6] = 'A' + (bit_nr >> 3);
5885 buf[9] = '0' + (bit_nr & 7);
5886 output_asm_insn (buf, operands);
/* Long branch: skip an rjmp over the real jump; otherwise rjmp direct.  */
5891 return (AS1 (rjmp,.+4) CR_TAB
5894 return AS1 (rjmp,%3);
5898 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5901 avr_asm_out_ctor (rtx symbol, int priority)
/* Pull in libgcc's constructor-running code, then emit the entry.  */
5903 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5904 default_ctor_section_asm_out_constructor (symbol, priority);
5907 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5910 avr_asm_out_dtor (rtx symbol, int priority)
/* Pull in libgcc's destructor-running code, then emit the entry.  */
5912 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5913 default_dtor_section_asm_out_destructor (symbol, priority);
5916 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5919 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5921 if (TYPE_MODE (type) == BLKmode)
5923 HOST_WIDE_INT size = int_size_in_bytes (type);
5924 return (size == -1 || size > 8);