1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_regs_to_save (HARD_REG_SET *);
56 static int sequent_regs_live (void);
57 static const char *ptrreg_to_str (int);
58 static const char *cond_string (enum rtx_code);
59 static int avr_num_arg_regs (enum machine_mode, tree);
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
65 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
66 const struct attribute_spec avr_attribute_table[];
67 static bool avr_assemble_integer (rtx, unsigned int, int);
68 static void avr_file_start (void);
69 static void avr_file_end (void);
70 static void avr_asm_function_end_prologue (FILE *);
71 static void avr_asm_function_begin_epilogue (FILE *);
72 static void avr_insert_attributes (tree, tree *);
73 static void avr_asm_init_sections (void);
74 static unsigned int avr_section_type_flags (tree, const char *, int);
76 static void avr_reorg (void);
77 static void avr_asm_out_ctor (rtx, int);
78 static void avr_asm_out_dtor (rtx, int);
79 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
80 static bool avr_rtx_costs (rtx, int, int, int *);
81 static int avr_address_cost (rtx);
82 static bool avr_return_in_memory (const_tree, const_tree);
83 static struct machine_function * avr_init_machine_status (void);
84 /* Allocate registers from r25 to r8 for parameters for function calls. */
85 #define FIRST_CUM_REG 26
87 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
88 static GTY(()) rtx tmp_reg_rtx;
90 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
91 static GTY(()) rtx zero_reg_rtx;
93 /* AVR register names {"r0", "r1", ..., "r31"} */
94 static const char *const avr_regnames[] = REGISTER_NAMES;
96 /* This holds the last insn address. */
97 static int last_insn_address = 0;
99 /* Preprocessor macros to define depending on MCU type. */
100 const char *avr_base_arch_macro;
101 const char *avr_extra_arch_macro;
103 /* Current architecture. */
104 const struct base_arch_s *avr_current_arch;
106 section *progmem_section;
108 /* Core have 'MUL*' instructions. */
109 int avr_have_mul_p = 0;
111 /* Assembler only. */
112 int avr_asm_only_p = 0;
114 /* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
115 int avr_have_movw_lpmx_p = 0;
117 static const struct base_arch_s avr_arch_types[] = {
118 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
119 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
120 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
121 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
122 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
123 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
124 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
125 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
126 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
127 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
128 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
131 /* These names are used as the index into the avr_arch_types[] table
150 const char *const name;
151 int arch; /* index in avr_arch_types[] */
152 /* Must lie outside user's namespace. NULL == no macro. */
153 const char *const macro;
156 /* List of all known AVR MCU types - if updated, it has to be kept
157 in sync in several places (FIXME: is there a better way?):
159 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
160 - t-avr (MULTILIB_MATCHES)
161 - gas/config/tc-avr.c
164 static const struct mcu_type_s avr_mcu_types[] = {
165 /* Classic, <= 8K. */
166 { "avr2", ARCH_AVR2, NULL },
167 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
168 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
169 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
170 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
171 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
172 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
173 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
174 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
175 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
176 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
177 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
178 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
179 /* Classic + MOVW, <= 8K. */
180 { "avr25", ARCH_AVR25, NULL },
181 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
182 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
183 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
184 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
185 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
186 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
187 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
188 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
189 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
190 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
191 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
192 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
193 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
194 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
195 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
196 /* Classic, > 8K, <= 64K. */
197 { "avr3", ARCH_AVR3, NULL },
198 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
199 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
200 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
201 /* Classic, == 128K. */
202 { "avr31", ARCH_AVR31, NULL },
203 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
204 /* Classic + MOVW + JMP/CALL. */
205 { "avr35", ARCH_AVR35, NULL },
206 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
207 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
208 /* Enhanced, <= 8K. */
209 { "avr4", ARCH_AVR4, NULL },
210 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
211 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
212 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
213 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
214 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
215 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
216 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
217 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
218 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
219 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
220 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
221 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
222 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
223 /* Enhanced, > 8K, <= 64K. */
224 { "avr5", ARCH_AVR5, NULL },
225 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
226 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
227 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
228 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
229 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
230 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
231 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
232 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
233 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
234 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
235 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
236 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
237 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
238 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
239 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
240 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
241 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
242 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
243 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
244 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
245 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
246 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
247 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
248 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
249 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
250 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
251 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
252 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
253 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
254 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
255 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
256 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
257 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
258 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
259 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
260 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
261 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
262 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
263 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
264 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
265 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
266 /* Enhanced, == 128K. */
267 { "avr51", ARCH_AVR51, NULL },
268 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
269 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
270 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
271 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
272 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
273 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
274 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
276 { "avr6", ARCH_AVR6, NULL },
277 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
278 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
279 /* Assembler only. */
280 { "avr1", ARCH_AVR1, NULL },
281 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
282 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
283 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
284 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
285 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
286 { NULL, ARCH_UNKNOWN, NULL }
289 int avr_case_values_threshold = 30000;
291 /* Initialize the GCC target structure. */
292 #undef TARGET_ASM_ALIGNED_HI_OP
293 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
294 #undef TARGET_ASM_ALIGNED_SI_OP
295 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
296 #undef TARGET_ASM_UNALIGNED_HI_OP
297 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
298 #undef TARGET_ASM_UNALIGNED_SI_OP
299 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
300 #undef TARGET_ASM_INTEGER
301 #define TARGET_ASM_INTEGER avr_assemble_integer
302 #undef TARGET_ASM_FILE_START
303 #define TARGET_ASM_FILE_START avr_file_start
304 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
305 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
306 #undef TARGET_ASM_FILE_END
307 #define TARGET_ASM_FILE_END avr_file_end
309 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
310 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
311 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
312 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
313 #undef TARGET_ATTRIBUTE_TABLE
314 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
315 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
316 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
317 #undef TARGET_INSERT_ATTRIBUTES
318 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
319 #undef TARGET_SECTION_TYPE_FLAGS
320 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
321 #undef TARGET_RTX_COSTS
322 #define TARGET_RTX_COSTS avr_rtx_costs
323 #undef TARGET_ADDRESS_COST
324 #define TARGET_ADDRESS_COST avr_address_cost
325 #undef TARGET_MACHINE_DEPENDENT_REORG
326 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
328 #undef TARGET_RETURN_IN_MEMORY
329 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
331 #undef TARGET_STRICT_ARGUMENT_NAMING
332 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
334 struct gcc_target targetm = TARGET_INITIALIZER;
337 avr_override_options (void)
339 const struct mcu_type_s *t;
340 const struct base_arch_s *base;
342 flag_delete_null_pointer_checks = 0;
344 for (t = avr_mcu_types; t->name; t++)
345 if (strcmp (t->name, avr_mcu_name) == 0)
350 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
352 for (t = avr_mcu_types; t->name; t++)
353 fprintf (stderr," %s\n", t->name);
356 avr_current_arch = &avr_arch_types[t->arch];
357 base = &avr_arch_types[t->arch];
358 avr_asm_only_p = base->asm_only;
359 avr_have_mul_p = base->have_mul;
360 avr_have_movw_lpmx_p = base->have_movw_lpmx;
361 avr_base_arch_macro = base->macro;
362 avr_extra_arch_macro = t->macro;
364 if (optimize && !TARGET_NO_TABLEJUMP)
365 avr_case_values_threshold =
366 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
368 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
369 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
371 init_machine_status = avr_init_machine_status;
374 /* return register class from register number. */
376 static const int reg_class_tab[]={
377 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
378 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
379 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
380 GENERAL_REGS, /* r0 - r15 */
381 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
382 LD_REGS, /* r16 - 23 */
383 ADDW_REGS,ADDW_REGS, /* r24,r25 */
384 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
385 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
386 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
387 STACK_REG,STACK_REG /* SPL,SPH */
390 /* Function to set up the backend function structure. */
392 static struct machine_function *
393 avr_init_machine_status (void)
395 return ((struct machine_function *)
396 ggc_alloc_cleared (sizeof (struct machine_function)));
399 /* Return register class for register R. */
402 avr_regno_reg_class (int r)
405 return reg_class_tab[r];
409 /* Return nonzero if FUNC is a naked function. */
412 avr_naked_function_p (tree func)
416 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
418 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
419 return a != NULL_TREE;
422 /* Return nonzero if FUNC is an interrupt function as specified
423 by the "interrupt" attribute. */
426 interrupt_function_p (tree func)
430 if (TREE_CODE (func) != FUNCTION_DECL)
433 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
434 return a != NULL_TREE;
437 /* Return nonzero if FUNC is a signal function as specified
438 by the "signal" attribute. */
441 signal_function_p (tree func)
445 if (TREE_CODE (func) != FUNCTION_DECL)
448 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
449 return a != NULL_TREE;
452 /* Return nonzero if FUNC is a OS_task function. */
455 avr_OS_task_function_p (tree func)
459 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
461 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
462 return a != NULL_TREE;
465 /* Return the number of hard registers to push/pop in the prologue/epilogue
466 of the current function, and optionally store these registers in SET. */
469 avr_regs_to_save (HARD_REG_SET *set)
472 int int_or_sig_p = (interrupt_function_p (current_function_decl)
473 || signal_function_p (current_function_decl));
474 int leaf_func_p = leaf_function_p ();
477 CLEAR_HARD_REG_SET (*set);
480 /* No need to save any registers if the function never returns or
481 is have "OS_task" attribute. */
482 if (TREE_THIS_VOLATILE (current_function_decl)
483 || cfun->machine->is_OS_task)
486 for (reg = 0; reg < 32; reg++)
488 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
489 any global register variables. */
493 if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
494 || (df_regs_ever_live_p (reg)
495 && (int_or_sig_p || !call_used_regs[reg])
496 && !(frame_pointer_needed
497 && (reg == REG_Y || reg == (REG_Y+1)))))
500 SET_HARD_REG_BIT (*set, reg);
507 /* Compute offset between arg_pointer and frame_pointer. */
510 initial_elimination_offset (int from, int to)
512 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
516 int offset = frame_pointer_needed ? 2 : 0;
517 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
519 offset += avr_regs_to_save (NULL);
520 return get_frame_size () + (avr_pc_size) + 1 + offset;
524 /* Return 1 if the function epilogue is just a single "ret". */
527 avr_simple_epilogue (void)
529 return (! frame_pointer_needed
530 && get_frame_size () == 0
531 && avr_regs_to_save (NULL) == 0
532 && ! interrupt_function_p (current_function_decl)
533 && ! signal_function_p (current_function_decl)
534 && ! avr_naked_function_p (current_function_decl)
535 && ! TREE_THIS_VOLATILE (current_function_decl));
538 /* This function checks sequence of live registers. */
541 sequent_regs_live (void)
547 for (reg = 0; reg < 18; ++reg)
549 if (!call_used_regs[reg])
551 if (df_regs_ever_live_p (reg))
561 if (!frame_pointer_needed)
563 if (df_regs_ever_live_p (REG_Y))
571 if (df_regs_ever_live_p (REG_Y+1))
584 return (cur_seq == live_seq) ? live_seq : 0;
587 /* Output function prologue. */
590 expand_prologue (void)
595 HOST_WIDE_INT size = get_frame_size();
596 /* Define templates for push instructions. */
597 rtx pushbyte = gen_rtx_MEM (QImode,
598 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
599 rtx pushword = gen_rtx_MEM (HImode,
600 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
603 last_insn_address = 0;
605 /* Init cfun->machine. */
606 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
607 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
608 cfun->machine->is_signal = signal_function_p (current_function_decl);
609 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
611 /* Prologue: naked. */
612 if (cfun->machine->is_naked)
617 avr_regs_to_save (&set);
618 live_seq = sequent_regs_live ();
619 minimize = (TARGET_CALL_PROLOGUES
620 && !cfun->machine->is_interrupt
621 && !cfun->machine->is_signal
622 && !cfun->machine->is_OS_task
625 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
627 if (cfun->machine->is_interrupt)
629 /* Enable interrupts. */
630 insn = emit_insn (gen_enable_interrupt ());
631 RTX_FRAME_RELATED_P (insn) = 1;
635 insn = emit_move_insn (pushbyte, zero_reg_rtx);
636 RTX_FRAME_RELATED_P (insn) = 1;
639 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
640 RTX_FRAME_RELATED_P (insn) = 1;
643 insn = emit_move_insn (tmp_reg_rtx,
644 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
645 RTX_FRAME_RELATED_P (insn) = 1;
646 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
647 RTX_FRAME_RELATED_P (insn) = 1;
651 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
653 insn = emit_move_insn (tmp_reg_rtx,
654 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
655 RTX_FRAME_RELATED_P (insn) = 1;
656 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
657 RTX_FRAME_RELATED_P (insn) = 1;
660 /* Clear zero reg. */
661 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
662 RTX_FRAME_RELATED_P (insn) = 1;
664 /* Prevent any attempt to delete the setting of ZERO_REG! */
665 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
667 if (minimize && (frame_pointer_needed || live_seq > 6))
669 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
670 gen_int_mode (size, HImode));
671 RTX_FRAME_RELATED_P (insn) = 1;
674 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
675 gen_int_mode (size + live_seq, HImode)));
676 RTX_FRAME_RELATED_P (insn) = 1;
681 for (reg = 0; reg < 32; ++reg)
683 if (TEST_HARD_REG_BIT (set, reg))
685 /* Emit push of register to save. */
686 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
687 RTX_FRAME_RELATED_P (insn) = 1;
690 if (frame_pointer_needed)
692 if(!cfun->machine->is_OS_task)
694 /* Push frame pointer. */
695 insn = emit_move_insn (pushword, frame_pointer_rtx);
696 RTX_FRAME_RELATED_P (insn) = 1;
701 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
702 RTX_FRAME_RELATED_P (insn) = 1;
706 /* Creating a frame can be done by direct manipulation of the
707 stack or via the frame pointer. These two methods are:
714 the optimum method depends on function type, stack and frame size.
715 To avoid a complex logic, both methods are tested and shortest
719 if (TARGET_TINY_STACK)
721 if (size < -63 || size > 63)
722 warning (0, "large frame pointer change (%d) with -mtiny-stack", size);
724 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
725 over 'sbiw' (2 cycles, same size). */
726 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
730 /* Normal sized addition. */
731 myfp = frame_pointer_rtx;
733 /* Calculate length. */
736 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
738 get_attr_length (gen_move_insn (myfp,
739 gen_rtx_PLUS (GET_MODE(myfp), myfp,
743 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
745 /* Method 2-Adjust Stack pointer. */
746 int sp_plus_length = 0;
750 get_attr_length (gen_move_insn (stack_pointer_rtx,
751 gen_rtx_PLUS (HImode, stack_pointer_rtx,
755 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
757 /* Use shortest method. */
758 if (size <= 6 && (sp_plus_length < method1_length))
760 insn = emit_move_insn (stack_pointer_rtx,
761 gen_rtx_PLUS (HImode, stack_pointer_rtx,
762 gen_int_mode (-size, HImode)));
763 RTX_FRAME_RELATED_P (insn) = 1;
764 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
765 RTX_FRAME_RELATED_P (insn) = 1;
769 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
770 RTX_FRAME_RELATED_P (insn) = 1;
771 insn = emit_move_insn (myfp,
772 gen_rtx_PLUS (GET_MODE(myfp), myfp,
773 gen_int_mode (-size, GET_MODE(myfp))));
774 RTX_FRAME_RELATED_P (insn) = 1;
775 insn = emit_move_insn ( stack_pointer_rtx, frame_pointer_rtx);
776 RTX_FRAME_RELATED_P (insn) = 1;
783 /* Output summary at end of function prologue. */
786 avr_asm_function_end_prologue (FILE *file)
788 if (cfun->machine->is_naked)
790 fputs ("/* prologue: naked */\n", file);
794 if (cfun->machine->is_interrupt)
796 fputs ("/* prologue: Interrupt */\n", file);
798 else if (cfun->machine->is_signal)
800 fputs ("/* prologue: Signal */\n", file);
803 fputs ("/* prologue: function */\n", file);
805 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
810 /* Implement EPILOGUE_USES. */
813 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
817 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
822 /* Output RTL epilogue. */
825 expand_epilogue (void)
831 HOST_WIDE_INT size = get_frame_size();
833 /* epilogue: naked */
834 if (cfun->machine->is_naked)
836 emit_jump_insn (gen_return ());
840 avr_regs_to_save (&set);
841 live_seq = sequent_regs_live ();
842 minimize = (TARGET_CALL_PROLOGUES
843 && !cfun->machine->is_interrupt
844 && !cfun->machine->is_signal
845 && !cfun->machine->is_OS_task
848 if (minimize && (frame_pointer_needed || live_seq > 4))
850 if (frame_pointer_needed)
852 /* Get rid of frame. */
853 emit_move_insn(frame_pointer_rtx,
854 gen_rtx_PLUS (HImode, frame_pointer_rtx,
855 gen_int_mode (size, HImode)));
859 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
862 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
866 if (frame_pointer_needed)
870 /* Try two methods to adjust stack and select shortest. */
872 /* Method 1-Adjust frame pointer. */
874 get_attr_length (gen_move_insn (frame_pointer_rtx,
875 gen_rtx_PLUS (HImode, frame_pointer_rtx,
878 /* Copy to stack pointer. */
880 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
882 /* Method 2-Adjust Stack pointer. */
883 int sp_plus_length = 0;
887 get_attr_length (gen_move_insn (stack_pointer_rtx,
888 gen_rtx_PLUS (HImode, stack_pointer_rtx,
892 /* Use shortest method. */
893 if (size <= 5 && (sp_plus_length < fp_plus_length))
895 emit_move_insn (stack_pointer_rtx,
896 gen_rtx_PLUS (HImode, stack_pointer_rtx,
897 gen_int_mode (size, HImode)));
901 emit_move_insn (frame_pointer_rtx,
902 gen_rtx_PLUS (HImode, frame_pointer_rtx,
903 gen_int_mode (size, HImode)));
904 /* Copy to stack pointer. */
905 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
908 if(!cfun->machine->is_OS_task)
910 /* Restore previous frame_pointer. */
911 emit_insn (gen_pophi (frame_pointer_rtx));
914 /* Restore used registers. */
915 for (reg = 31; reg >= 0; --reg)
917 if (TEST_HARD_REG_BIT (set, reg))
918 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
920 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
922 /* Restore RAMPZ using tmp reg as scratch. */
924 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
926 emit_insn (gen_popqi (tmp_reg_rtx));
927 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
931 /* Restore SREG using tmp reg as scratch. */
932 emit_insn (gen_popqi (tmp_reg_rtx));
934 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
937 /* Restore tmp REG. */
938 emit_insn (gen_popqi (tmp_reg_rtx));
940 /* Restore zero REG. */
941 emit_insn (gen_popqi (zero_reg_rtx));
944 emit_jump_insn (gen_return ());
948 /* Output summary messages at beginning of function epilogue. */
951 avr_asm_function_begin_epilogue (FILE *file)
953 fprintf (file, "/* epilogue start */\n");
956 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
957 machine for a memory operand of mode MODE. */
960 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
962 enum reg_class r = NO_REGS;
964 if (TARGET_ALL_DEBUG)
966 fprintf (stderr, "mode: (%s) %s %s %s %s:",
968 strict ? "(strict)": "",
969 reload_completed ? "(reload_completed)": "",
970 reload_in_progress ? "(reload_in_progress)": "",
971 reg_renumber ? "(reg_renumber)" : "");
972 if (GET_CODE (x) == PLUS
973 && REG_P (XEXP (x, 0))
974 && GET_CODE (XEXP (x, 1)) == CONST_INT
975 && INTVAL (XEXP (x, 1)) >= 0
976 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
979 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
980 true_regnum (XEXP (x, 0)));
983 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
984 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
986 else if (CONSTANT_ADDRESS_P (x))
988 else if (GET_CODE (x) == PLUS
989 && REG_P (XEXP (x, 0))
990 && GET_CODE (XEXP (x, 1)) == CONST_INT
991 && INTVAL (XEXP (x, 1)) >= 0)
993 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
997 || REGNO (XEXP (x,0)) == REG_Y
998 || REGNO (XEXP (x,0)) == REG_Z)
999 r = BASE_POINTER_REGS;
1000 if (XEXP (x,0) == frame_pointer_rtx
1001 || XEXP (x,0) == arg_pointer_rtx)
1002 r = BASE_POINTER_REGS;
1004 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1007 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1008 && REG_P (XEXP (x, 0))
1009 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1010 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1014 if (TARGET_ALL_DEBUG)
1016 fprintf (stderr, " ret = %c\n", r + '0');
1018 return r == NO_REGS ? 0 : (int)r;
1021 /* Attempts to replace X with a valid
1022 memory address for an operand of mode MODE */
1025 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1028 if (TARGET_ALL_DEBUG)
1030 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1034 if (GET_CODE (oldx) == PLUS
1035 && REG_P (XEXP (oldx,0)))
1037 if (REG_P (XEXP (oldx,1)))
1038 x = force_reg (GET_MODE (oldx), oldx);
1039 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1041 int offs = INTVAL (XEXP (oldx,1));
1042 if (frame_pointer_rtx != XEXP (oldx,0))
1043 if (offs > MAX_LD_OFFSET (mode))
1045 if (TARGET_ALL_DEBUG)
1046 fprintf (stderr, "force_reg (big offset)\n");
1047 x = force_reg (GET_MODE (oldx), oldx);
1055 /* Return a pointer register name as a string. */
1058 ptrreg_to_str (int regno)
1062 case REG_X: return "X";
1063 case REG_Y: return "Y";
1064 case REG_Z: return "Z";
1066 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1071 /* Return the condition name as a string.
1072 Used in conditional jump constructing */
1075 cond_string (enum rtx_code code)
1084 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1089 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1102 /* Output ADDR to FILE as address. */
1105 print_operand_address (FILE *file, rtx addr)
1107 switch (GET_CODE (addr))
1110 fprintf (file, ptrreg_to_str (REGNO (addr)));
1114 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1118 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1122 if (CONSTANT_ADDRESS_P (addr)
1123 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1124 || GET_CODE (addr) == LABEL_REF))
1126 fprintf (file, "gs(");
1127 output_addr_const (file,addr);
1128 fprintf (file ,")");
1131 output_addr_const (file, addr);
1136 /* Output X as assembler operand to file FILE. */
1139 print_operand (FILE *file, rtx x, int code)
1143 if (code >= 'A' && code <= 'D')
1148 if (!AVR_HAVE_JMP_CALL)
1151 else if (code == '!')
1153 if (AVR_HAVE_EIJMP_EICALL)
1158 if (x == zero_reg_rtx)
1159 fprintf (file, "__zero_reg__");
1161 fprintf (file, reg_names[true_regnum (x) + abcd]);
1163 else if (GET_CODE (x) == CONST_INT)
1164 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1165 else if (GET_CODE (x) == MEM)
1167 rtx addr = XEXP (x,0);
1169 if (CONSTANT_P (addr) && abcd)
1172 output_address (addr);
1173 fprintf (file, ")+%d", abcd);
1175 else if (code == 'o')
1177 if (GET_CODE (addr) != PLUS)
1178 fatal_insn ("bad address, not (reg+disp):", addr);
1180 print_operand (file, XEXP (addr, 1), 0);
1182 else if (code == 'p' || code == 'r')
1184 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1185 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1188 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1190 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1192 else if (GET_CODE (addr) == PLUS)
1194 print_operand_address (file, XEXP (addr,0));
1195 if (REGNO (XEXP (addr, 0)) == REG_X)
1196 fatal_insn ("internal compiler error. Bad address:"
1199 print_operand (file, XEXP (addr,1), code);
1202 print_operand_address (file, addr);
1204 else if (GET_CODE (x) == CONST_DOUBLE)
1208 if (GET_MODE (x) != SFmode)
1209 fatal_insn ("internal compiler error. Unknown mode:", x);
1210 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1211 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1212 fprintf (file, "0x%lx", val);
1214 else if (code == 'j')
1215 fputs (cond_string (GET_CODE (x)), file);
1216 else if (code == 'k')
1217 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1219 print_operand_address (file, x);
1222 /* Update the condition code in the INSN. */
1225 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1229 switch (get_attr_cc (insn))
1232 /* Insn does not affect CC at all. */
1240 set = single_set (insn);
1244 cc_status.flags |= CC_NO_OVERFLOW;
1245 cc_status.value1 = SET_DEST (set);
1250 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1251 The V flag may or may not be known but that's ok because
1252 alter_cond will change tests to use EQ/NE. */
1253 set = single_set (insn);
1257 cc_status.value1 = SET_DEST (set);
1258 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1263 set = single_set (insn);
1266 cc_status.value1 = SET_SRC (set);
1270 /* Insn doesn't leave CC in a usable state. */
1273 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1274 set = single_set (insn);
1277 rtx src = SET_SRC (set);
1279 if (GET_CODE (src) == ASHIFTRT
1280 && GET_MODE (src) == QImode)
1282 rtx x = XEXP (src, 1);
1284 if (GET_CODE (x) == CONST_INT
1288 cc_status.value1 = SET_DEST (set);
1289 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1297 /* Return maximum number of consecutive registers of
1298 class CLASS needed to hold a value of mode MODE. */
1301 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1303 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1306 /* Choose mode for jump insn:
1307 1 - relative jump in range -63 <= x <= 62 ;
1308 2 - relative jump in range -2046 <= x <= 2045 ;
1309 3 - absolute jump (only for ATmega[16]03). */
1312 avr_jump_mode (rtx x, rtx insn)
1314 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1315 ? XEXP (x, 0) : x));
1316 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1317 int jump_distance = cur_addr - dest_addr;
1319 if (-63 <= jump_distance && jump_distance <= 62)
1321 else if (-2046 <= jump_distance && jump_distance <= 2045)
1323 else if (AVR_HAVE_JMP_CALL)
1329 /* return an AVR condition jump commands.
1330 X is a comparison RTX.
1331 LEN is a number returned by avr_jump_mode function.
1332 if REVERSE nonzero then condition code in X must be reversed. */
1335 ret_cond_branch (rtx x, int len, int reverse)
1337 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1342 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1343 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1345 len == 2 ? (AS1 (breq,.+4) CR_TAB
1346 AS1 (brmi,.+2) CR_TAB
1348 (AS1 (breq,.+6) CR_TAB
1349 AS1 (brmi,.+4) CR_TAB
1353 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1355 len == 2 ? (AS1 (breq,.+4) CR_TAB
1356 AS1 (brlt,.+2) CR_TAB
1358 (AS1 (breq,.+6) CR_TAB
1359 AS1 (brlt,.+4) CR_TAB
1362 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1364 len == 2 ? (AS1 (breq,.+4) CR_TAB
1365 AS1 (brlo,.+2) CR_TAB
1367 (AS1 (breq,.+6) CR_TAB
1368 AS1 (brlo,.+4) CR_TAB
1371 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1372 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1374 len == 2 ? (AS1 (breq,.+2) CR_TAB
1375 AS1 (brpl,.+2) CR_TAB
1377 (AS1 (breq,.+2) CR_TAB
1378 AS1 (brpl,.+4) CR_TAB
1381 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1383 len == 2 ? (AS1 (breq,.+2) CR_TAB
1384 AS1 (brge,.+2) CR_TAB
1386 (AS1 (breq,.+2) CR_TAB
1387 AS1 (brge,.+4) CR_TAB
1390 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1392 len == 2 ? (AS1 (breq,.+2) CR_TAB
1393 AS1 (brsh,.+2) CR_TAB
1395 (AS1 (breq,.+2) CR_TAB
1396 AS1 (brsh,.+4) CR_TAB
1404 return AS1 (br%k1,%0);
1406 return (AS1 (br%j1,.+2) CR_TAB
1409 return (AS1 (br%j1,.+4) CR_TAB
1418 return AS1 (br%j1,%0);
1420 return (AS1 (br%k1,.+2) CR_TAB
1423 return (AS1 (br%k1,.+4) CR_TAB
1431 /* Predicate function for immediate operand which fits to byte (8bit) */
1434 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1436 return (GET_CODE (op) == CONST_INT
1437 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1440 /* Output all insn addresses and their sizes into the assembly language
1441 output file. This is helpful for debugging whether the length attributes
1442 in the md file are correct.
1443 Output insn cost for next insn. */
1446 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1447 int num_operands ATTRIBUTE_UNUSED)
1449 int uid = INSN_UID (insn);
1451 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1453 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1454 INSN_ADDRESSES (uid),
1455 INSN_ADDRESSES (uid) - last_insn_address,
1456 rtx_cost (PATTERN (insn), INSN));
1458 last_insn_address = INSN_ADDRESSES (uid);
1461 /* Return 0 if undefined, 1 if always true or always false. */
1464 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1466 unsigned int max = (mode == QImode ? 0xff :
1467 mode == HImode ? 0xffff :
1468 mode == SImode ? 0xffffffff : 0);
1469 if (max && operator && GET_CODE (x) == CONST_INT)
1471 if (unsigned_condition (operator) != operator)
1474 if (max != (INTVAL (x) & max)
1475 && INTVAL (x) != 0xff)
1482 /* Returns nonzero if REGNO is the number of a hard
1483 register in which function arguments are sometimes passed. */
1486 function_arg_regno_p(int r)
1488 return (r >= 8 && r <= 25);
1491 /* Initializing the variable cum for the state at the beginning
1492 of the argument list. */
1495 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1496 tree fndecl ATTRIBUTE_UNUSED)
1499 cum->regno = FIRST_CUM_REG;
1500 if (!libname && fntype)
1502 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1503 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1504 != void_type_node));
1510 /* Returns the number of registers to allocate for a function argument. */
1513 avr_num_arg_regs (enum machine_mode mode, tree type)
1517 if (mode == BLKmode)
1518 size = int_size_in_bytes (type);
1520 size = GET_MODE_SIZE (mode);
1522 /* Align all function arguments to start in even-numbered registers.
1523 Odd-sized arguments leave holes above them. */
1525 return (size + 1) & ~1;
1528 /* Controls whether a function argument is passed
1529 in a register, and which register. */
1532 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1533 int named ATTRIBUTE_UNUSED)
1535 int bytes = avr_num_arg_regs (mode, type);
1537 if (cum->nregs && bytes <= cum->nregs)
1538 return gen_rtx_REG (mode, cum->regno - bytes);
1543 /* Update the summarizer variable CUM to advance past an argument
1544 in the argument list. */
1547 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1548 int named ATTRIBUTE_UNUSED)
1550 int bytes = avr_num_arg_regs (mode, type);
1552 cum->nregs -= bytes;
1553 cum->regno -= bytes;
1555 if (cum->nregs <= 0)
1558 cum->regno = FIRST_CUM_REG;
1562 /***********************************************************************
1563 Functions for outputting various mov's for a various modes
1564 ************************************************************************/
1566 output_movqi (rtx insn, rtx operands[], int *l)
1569 rtx dest = operands[0];
1570 rtx src = operands[1];
1578 if (register_operand (dest, QImode))
1580 if (register_operand (src, QImode)) /* mov r,r */
1582 if (test_hard_reg_class (STACK_REG, dest))
1583 return AS2 (out,%0,%1);
1584 else if (test_hard_reg_class (STACK_REG, src))
1585 return AS2 (in,%0,%1);
1587 return AS2 (mov,%0,%1);
1589 else if (CONSTANT_P (src))
1591 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1592 return AS2 (ldi,%0,lo8(%1));
1594 if (GET_CODE (src) == CONST_INT)
1596 if (src == const0_rtx) /* mov r,L */
1597 return AS1 (clr,%0);
1598 else if (src == const1_rtx)
1601 return (AS1 (clr,%0) CR_TAB
1604 else if (src == constm1_rtx)
1606 /* Immediate constants -1 to any register */
1608 return (AS1 (clr,%0) CR_TAB
1613 int bit_nr = exact_log2 (INTVAL (src));
1619 output_asm_insn ((AS1 (clr,%0) CR_TAB
1622 avr_output_bld (operands, bit_nr);
1629 /* Last resort, larger than loading from memory. */
1631 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1632 AS2 (ldi,r31,lo8(%1)) CR_TAB
1633 AS2 (mov,%0,r31) CR_TAB
1634 AS2 (mov,r31,__tmp_reg__));
1636 else if (GET_CODE (src) == MEM)
1637 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1639 else if (GET_CODE (dest) == MEM)
1641 const char *template;
1643 if (src == const0_rtx)
1644 operands[1] = zero_reg_rtx;
1646 template = out_movqi_mr_r (insn, operands, real_l);
1649 output_asm_insn (template, operands);
1658 output_movhi (rtx insn, rtx operands[], int *l)
1661 rtx dest = operands[0];
1662 rtx src = operands[1];
1668 if (register_operand (dest, HImode))
1670 if (register_operand (src, HImode)) /* mov r,r */
1672 if (test_hard_reg_class (STACK_REG, dest))
1674 if (TARGET_TINY_STACK)
1677 return AS2 (out,__SP_L__,%A1);
1679 /* Use simple load of stack pointer if no interrupts are used
1680 or inside main or signal function prologue where they disabled. */
1681 else if (TARGET_NO_INTERRUPTS
1682 || (reload_completed
1683 && cfun->machine->is_signal
1684 && prologue_epilogue_contains (insn)))
1687 return (AS2 (out,__SP_H__,%B1) CR_TAB
1688 AS2 (out,__SP_L__,%A1));
1690 /* In interrupt prolog we know interrupts are enabled. */
1691 else if (reload_completed
1692 && cfun->machine->is_interrupt
1693 && prologue_epilogue_contains (insn))
1696 return ("cli" CR_TAB
1697 AS2 (out,__SP_H__,%B1) CR_TAB
1699 AS2 (out,__SP_L__,%A1));
1702 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1704 AS2 (out,__SP_H__,%B1) CR_TAB
1705 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1706 AS2 (out,__SP_L__,%A1));
1708 else if (test_hard_reg_class (STACK_REG, src))
1711 return (AS2 (in,%A0,__SP_L__) CR_TAB
1712 AS2 (in,%B0,__SP_H__));
1718 return (AS2 (movw,%0,%1));
1723 return (AS2 (mov,%A0,%A1) CR_TAB
1727 else if (CONSTANT_P (src))
1729 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1732 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1733 AS2 (ldi,%B0,hi8(%1)));
1736 if (GET_CODE (src) == CONST_INT)
1738 if (src == const0_rtx) /* mov r,L */
1741 return (AS1 (clr,%A0) CR_TAB
1744 else if (src == const1_rtx)
1747 return (AS1 (clr,%A0) CR_TAB
1748 AS1 (clr,%B0) CR_TAB
1751 else if (src == constm1_rtx)
1753 /* Immediate constants -1 to any register */
1755 return (AS1 (clr,%0) CR_TAB
1756 AS1 (dec,%A0) CR_TAB
1761 int bit_nr = exact_log2 (INTVAL (src));
1767 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1768 AS1 (clr,%B0) CR_TAB
1771 avr_output_bld (operands, bit_nr);
1777 if ((INTVAL (src) & 0xff) == 0)
1780 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1781 AS1 (clr,%A0) CR_TAB
1782 AS2 (ldi,r31,hi8(%1)) CR_TAB
1783 AS2 (mov,%B0,r31) CR_TAB
1784 AS2 (mov,r31,__tmp_reg__));
1786 else if ((INTVAL (src) & 0xff00) == 0)
1789 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1790 AS2 (ldi,r31,lo8(%1)) CR_TAB
1791 AS2 (mov,%A0,r31) CR_TAB
1792 AS1 (clr,%B0) CR_TAB
1793 AS2 (mov,r31,__tmp_reg__));
1797 /* Last resort, equal to loading from memory. */
1799 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1800 AS2 (ldi,r31,lo8(%1)) CR_TAB
1801 AS2 (mov,%A0,r31) CR_TAB
1802 AS2 (ldi,r31,hi8(%1)) CR_TAB
1803 AS2 (mov,%B0,r31) CR_TAB
1804 AS2 (mov,r31,__tmp_reg__));
1806 else if (GET_CODE (src) == MEM)
1807 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1809 else if (GET_CODE (dest) == MEM)
1811 const char *template;
1813 if (src == const0_rtx)
1814 operands[1] = zero_reg_rtx;
1816 template = out_movhi_mr_r (insn, operands, real_l);
1819 output_asm_insn (template, operands);
1824 fatal_insn ("invalid insn:", insn);
1829 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1833 rtx x = XEXP (src, 0);
1839 if (CONSTANT_ADDRESS_P (x))
1841 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1844 return AS2 (in,%0,__SREG__);
1846 if (optimize > 0 && io_address_operand (x, QImode))
1849 return AS2 (in,%0,%1-0x20);
1852 return AS2 (lds,%0,%1);
1854 /* memory access by reg+disp */
1855 else if (GET_CODE (x) == PLUS
1856 && REG_P (XEXP (x,0))
1857 && GET_CODE (XEXP (x,1)) == CONST_INT)
1859 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1861 int disp = INTVAL (XEXP (x,1));
1862 if (REGNO (XEXP (x,0)) != REG_Y)
1863 fatal_insn ("incorrect insn:",insn);
1865 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1866 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1867 AS2 (ldd,%0,Y+63) CR_TAB
1868 AS2 (sbiw,r28,%o1-63));
1870 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1871 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1872 AS2 (ld,%0,Y) CR_TAB
1873 AS2 (subi,r28,lo8(%o1)) CR_TAB
1874 AS2 (sbci,r29,hi8(%o1)));
1876 else if (REGNO (XEXP (x,0)) == REG_X)
1878 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1879 it but I have this situation with extremal optimizing options. */
1880 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1881 || reg_unused_after (insn, XEXP (x,0)))
1882 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1885 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1886 AS2 (ld,%0,X) CR_TAB
1887 AS2 (sbiw,r26,%o1));
1890 return AS2 (ldd,%0,%1);
1893 return AS2 (ld,%0,%1);
1897 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1901 rtx base = XEXP (src, 0);
1902 int reg_dest = true_regnum (dest);
1903 int reg_base = true_regnum (base);
1904 /* "volatile" forces reading low byte first, even if less efficient,
1905 for correct operation with 16-bit I/O registers. */
1906 int mem_volatile_p = MEM_VOLATILE_P (src);
1914 if (reg_dest == reg_base) /* R = (R) */
1917 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1918 AS2 (ld,%B0,%1) CR_TAB
1919 AS2 (mov,%A0,__tmp_reg__));
1921 else if (reg_base == REG_X) /* (R26) */
1923 if (reg_unused_after (insn, base))
1926 return (AS2 (ld,%A0,X+) CR_TAB
1930 return (AS2 (ld,%A0,X+) CR_TAB
1931 AS2 (ld,%B0,X) CR_TAB
1937 return (AS2 (ld,%A0,%1) CR_TAB
1938 AS2 (ldd,%B0,%1+1));
1941 else if (GET_CODE (base) == PLUS) /* (R + i) */
1943 int disp = INTVAL (XEXP (base, 1));
1944 int reg_base = true_regnum (XEXP (base, 0));
1946 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1948 if (REGNO (XEXP (base, 0)) != REG_Y)
1949 fatal_insn ("incorrect insn:",insn);
1951 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1952 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1953 AS2 (ldd,%A0,Y+62) CR_TAB
1954 AS2 (ldd,%B0,Y+63) CR_TAB
1955 AS2 (sbiw,r28,%o1-62));
1957 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1958 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1959 AS2 (ld,%A0,Y) CR_TAB
1960 AS2 (ldd,%B0,Y+1) CR_TAB
1961 AS2 (subi,r28,lo8(%o1)) CR_TAB
1962 AS2 (sbci,r29,hi8(%o1)));
1964 if (reg_base == REG_X)
1966 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1967 it but I have this situation with extremal
1968 optimization options. */
1971 if (reg_base == reg_dest)
1972 return (AS2 (adiw,r26,%o1) CR_TAB
1973 AS2 (ld,__tmp_reg__,X+) CR_TAB
1974 AS2 (ld,%B0,X) CR_TAB
1975 AS2 (mov,%A0,__tmp_reg__));
1977 return (AS2 (adiw,r26,%o1) CR_TAB
1978 AS2 (ld,%A0,X+) CR_TAB
1979 AS2 (ld,%B0,X) CR_TAB
1980 AS2 (sbiw,r26,%o1+1));
1983 if (reg_base == reg_dest)
1986 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1987 AS2 (ldd,%B0,%B1) CR_TAB
1988 AS2 (mov,%A0,__tmp_reg__));
1992 return (AS2 (ldd,%A0,%A1) CR_TAB
1995 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1997 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1998 fatal_insn ("incorrect insn:", insn);
2002 if (REGNO (XEXP (base, 0)) == REG_X)
2005 return (AS2 (sbiw,r26,2) CR_TAB
2006 AS2 (ld,%A0,X+) CR_TAB
2007 AS2 (ld,%B0,X) CR_TAB
2013 return (AS2 (sbiw,%r1,2) CR_TAB
2014 AS2 (ld,%A0,%p1) CR_TAB
2015 AS2 (ldd,%B0,%p1+1));
2020 return (AS2 (ld,%B0,%1) CR_TAB
2023 else if (GET_CODE (base) == POST_INC) /* (R++) */
2025 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2026 fatal_insn ("incorrect insn:", insn);
2029 return (AS2 (ld,%A0,%1) CR_TAB
2032 else if (CONSTANT_ADDRESS_P (base))
2034 if (optimize > 0 && io_address_operand (base, HImode))
2037 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2038 AS2 (in,%B0,%B1-0x20));
2041 return (AS2 (lds,%A0,%A1) CR_TAB
2045 fatal_insn ("unknown move insn:",insn);
2050 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2054 rtx base = XEXP (src, 0);
2055 int reg_dest = true_regnum (dest);
2056 int reg_base = true_regnum (base);
2064 if (reg_base == REG_X) /* (R26) */
2066 if (reg_dest == REG_X)
2067 /* "ld r26,-X" is undefined */
2068 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2069 AS2 (ld,r29,X) CR_TAB
2070 AS2 (ld,r28,-X) CR_TAB
2071 AS2 (ld,__tmp_reg__,-X) CR_TAB
2072 AS2 (sbiw,r26,1) CR_TAB
2073 AS2 (ld,r26,X) CR_TAB
2074 AS2 (mov,r27,__tmp_reg__));
2075 else if (reg_dest == REG_X - 2)
2076 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2077 AS2 (ld,%B0,X+) CR_TAB
2078 AS2 (ld,__tmp_reg__,X+) CR_TAB
2079 AS2 (ld,%D0,X) CR_TAB
2080 AS2 (mov,%C0,__tmp_reg__));
2081 else if (reg_unused_after (insn, base))
2082 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2083 AS2 (ld,%B0,X+) CR_TAB
2084 AS2 (ld,%C0,X+) CR_TAB
2087 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2088 AS2 (ld,%B0,X+) CR_TAB
2089 AS2 (ld,%C0,X+) CR_TAB
2090 AS2 (ld,%D0,X) CR_TAB
2095 if (reg_dest == reg_base)
2096 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2097 AS2 (ldd,%C0,%1+2) CR_TAB
2098 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2099 AS2 (ld,%A0,%1) CR_TAB
2100 AS2 (mov,%B0,__tmp_reg__));
2101 else if (reg_base == reg_dest + 2)
2102 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2103 AS2 (ldd,%B0,%1+1) CR_TAB
2104 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2105 AS2 (ldd,%D0,%1+3) CR_TAB
2106 AS2 (mov,%C0,__tmp_reg__));
2108 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2109 AS2 (ldd,%B0,%1+1) CR_TAB
2110 AS2 (ldd,%C0,%1+2) CR_TAB
2111 AS2 (ldd,%D0,%1+3));
2114 else if (GET_CODE (base) == PLUS) /* (R + i) */
2116 int disp = INTVAL (XEXP (base, 1));
2118 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2120 if (REGNO (XEXP (base, 0)) != REG_Y)
2121 fatal_insn ("incorrect insn:",insn);
2123 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2124 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2125 AS2 (ldd,%A0,Y+60) CR_TAB
2126 AS2 (ldd,%B0,Y+61) CR_TAB
2127 AS2 (ldd,%C0,Y+62) CR_TAB
2128 AS2 (ldd,%D0,Y+63) CR_TAB
2129 AS2 (sbiw,r28,%o1-60));
2131 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2132 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2133 AS2 (ld,%A0,Y) CR_TAB
2134 AS2 (ldd,%B0,Y+1) CR_TAB
2135 AS2 (ldd,%C0,Y+2) CR_TAB
2136 AS2 (ldd,%D0,Y+3) CR_TAB
2137 AS2 (subi,r28,lo8(%o1)) CR_TAB
2138 AS2 (sbci,r29,hi8(%o1)));
2141 reg_base = true_regnum (XEXP (base, 0));
2142 if (reg_base == REG_X)
2145 if (reg_dest == REG_X)
2148 /* "ld r26,-X" is undefined */
2149 return (AS2 (adiw,r26,%o1+3) CR_TAB
2150 AS2 (ld,r29,X) CR_TAB
2151 AS2 (ld,r28,-X) CR_TAB
2152 AS2 (ld,__tmp_reg__,-X) CR_TAB
2153 AS2 (sbiw,r26,1) CR_TAB
2154 AS2 (ld,r26,X) CR_TAB
2155 AS2 (mov,r27,__tmp_reg__));
2158 if (reg_dest == REG_X - 2)
2159 return (AS2 (adiw,r26,%o1) CR_TAB
2160 AS2 (ld,r24,X+) CR_TAB
2161 AS2 (ld,r25,X+) CR_TAB
2162 AS2 (ld,__tmp_reg__,X+) CR_TAB
2163 AS2 (ld,r27,X) CR_TAB
2164 AS2 (mov,r26,__tmp_reg__));
2166 return (AS2 (adiw,r26,%o1) CR_TAB
2167 AS2 (ld,%A0,X+) CR_TAB
2168 AS2 (ld,%B0,X+) CR_TAB
2169 AS2 (ld,%C0,X+) CR_TAB
2170 AS2 (ld,%D0,X) CR_TAB
2171 AS2 (sbiw,r26,%o1+3));
2173 if (reg_dest == reg_base)
2174 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2175 AS2 (ldd,%C0,%C1) CR_TAB
2176 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2177 AS2 (ldd,%A0,%A1) CR_TAB
2178 AS2 (mov,%B0,__tmp_reg__));
2179 else if (reg_dest == reg_base - 2)
2180 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2181 AS2 (ldd,%B0,%B1) CR_TAB
2182 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2183 AS2 (ldd,%D0,%D1) CR_TAB
2184 AS2 (mov,%C0,__tmp_reg__));
2185 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2186 AS2 (ldd,%B0,%B1) CR_TAB
2187 AS2 (ldd,%C0,%C1) CR_TAB
2190 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2191 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2192 AS2 (ld,%C0,%1) CR_TAB
2193 AS2 (ld,%B0,%1) CR_TAB
2195 else if (GET_CODE (base) == POST_INC) /* (R++) */
2196 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2197 AS2 (ld,%B0,%1) CR_TAB
2198 AS2 (ld,%C0,%1) CR_TAB
2200 else if (CONSTANT_ADDRESS_P (base))
2201 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2202 AS2 (lds,%B0,%B1) CR_TAB
2203 AS2 (lds,%C0,%C1) CR_TAB
2206 fatal_insn ("unknown move insn:",insn);
2211 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2215 rtx base = XEXP (dest, 0);
2216 int reg_base = true_regnum (base);
2217 int reg_src = true_regnum (src);
2223 if (CONSTANT_ADDRESS_P (base))
2224 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2225 AS2 (sts,%B0,%B1) CR_TAB
2226 AS2 (sts,%C0,%C1) CR_TAB
2228 if (reg_base > 0) /* (r) */
2230 if (reg_base == REG_X) /* (R26) */
2232 if (reg_src == REG_X)
2234 /* "st X+,r26" is undefined */
2235 if (reg_unused_after (insn, base))
2236 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2237 AS2 (st,X,r26) CR_TAB
2238 AS2 (adiw,r26,1) CR_TAB
2239 AS2 (st,X+,__tmp_reg__) CR_TAB
2240 AS2 (st,X+,r28) CR_TAB
2243 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2244 AS2 (st,X,r26) CR_TAB
2245 AS2 (adiw,r26,1) CR_TAB
2246 AS2 (st,X+,__tmp_reg__) CR_TAB
2247 AS2 (st,X+,r28) CR_TAB
2248 AS2 (st,X,r29) CR_TAB
2251 else if (reg_base == reg_src + 2)
2253 if (reg_unused_after (insn, base))
2254 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2255 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2256 AS2 (st,%0+,%A1) CR_TAB
2257 AS2 (st,%0+,%B1) CR_TAB
2258 AS2 (st,%0+,__zero_reg__) CR_TAB
2259 AS2 (st,%0,__tmp_reg__) CR_TAB
2260 AS1 (clr,__zero_reg__));
2262 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2263 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2264 AS2 (st,%0+,%A1) CR_TAB
2265 AS2 (st,%0+,%B1) CR_TAB
2266 AS2 (st,%0+,__zero_reg__) CR_TAB
2267 AS2 (st,%0,__tmp_reg__) CR_TAB
2268 AS1 (clr,__zero_reg__) CR_TAB
2271 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2272 AS2 (st,%0+,%B1) CR_TAB
2273 AS2 (st,%0+,%C1) CR_TAB
2274 AS2 (st,%0,%D1) CR_TAB
2278 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2279 AS2 (std,%0+1,%B1) CR_TAB
2280 AS2 (std,%0+2,%C1) CR_TAB
2281 AS2 (std,%0+3,%D1));
2283 else if (GET_CODE (base) == PLUS) /* (R + i) */
2285 int disp = INTVAL (XEXP (base, 1));
2286 reg_base = REGNO (XEXP (base, 0));
2287 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2289 if (reg_base != REG_Y)
2290 fatal_insn ("incorrect insn:",insn);
2292 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2293 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2294 AS2 (std,Y+60,%A1) CR_TAB
2295 AS2 (std,Y+61,%B1) CR_TAB
2296 AS2 (std,Y+62,%C1) CR_TAB
2297 AS2 (std,Y+63,%D1) CR_TAB
2298 AS2 (sbiw,r28,%o0-60));
2300 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2301 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2302 AS2 (st,Y,%A1) CR_TAB
2303 AS2 (std,Y+1,%B1) CR_TAB
2304 AS2 (std,Y+2,%C1) CR_TAB
2305 AS2 (std,Y+3,%D1) CR_TAB
2306 AS2 (subi,r28,lo8(%o0)) CR_TAB
2307 AS2 (sbci,r29,hi8(%o0)));
2309 if (reg_base == REG_X)
2312 if (reg_src == REG_X)
2315 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2316 AS2 (mov,__zero_reg__,r27) CR_TAB
2317 AS2 (adiw,r26,%o0) CR_TAB
2318 AS2 (st,X+,__tmp_reg__) CR_TAB
2319 AS2 (st,X+,__zero_reg__) CR_TAB
2320 AS2 (st,X+,r28) CR_TAB
2321 AS2 (st,X,r29) CR_TAB
2322 AS1 (clr,__zero_reg__) CR_TAB
2323 AS2 (sbiw,r26,%o0+3));
2325 else if (reg_src == REG_X - 2)
2328 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2329 AS2 (mov,__zero_reg__,r27) CR_TAB
2330 AS2 (adiw,r26,%o0) CR_TAB
2331 AS2 (st,X+,r24) CR_TAB
2332 AS2 (st,X+,r25) CR_TAB
2333 AS2 (st,X+,__tmp_reg__) CR_TAB
2334 AS2 (st,X,__zero_reg__) CR_TAB
2335 AS1 (clr,__zero_reg__) CR_TAB
2336 AS2 (sbiw,r26,%o0+3));
2339 return (AS2 (adiw,r26,%o0) CR_TAB
2340 AS2 (st,X+,%A1) CR_TAB
2341 AS2 (st,X+,%B1) CR_TAB
2342 AS2 (st,X+,%C1) CR_TAB
2343 AS2 (st,X,%D1) CR_TAB
2344 AS2 (sbiw,r26,%o0+3));
2346 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2347 AS2 (std,%B0,%B1) CR_TAB
2348 AS2 (std,%C0,%C1) CR_TAB
2351 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2352 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2353 AS2 (st,%0,%C1) CR_TAB
2354 AS2 (st,%0,%B1) CR_TAB
2356 else if (GET_CODE (base) == POST_INC) /* (R++) */
2357 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2358 AS2 (st,%0,%B1) CR_TAB
2359 AS2 (st,%0,%C1) CR_TAB
2361 fatal_insn ("unknown move insn:",insn);
2366 output_movsisf(rtx insn, rtx operands[], int *l)
2369 rtx dest = operands[0];
2370 rtx src = operands[1];
2376 if (register_operand (dest, VOIDmode))
2378 if (register_operand (src, VOIDmode)) /* mov r,r */
2380 if (true_regnum (dest) > true_regnum (src))
2385 return (AS2 (movw,%C0,%C1) CR_TAB
2386 AS2 (movw,%A0,%A1));
2389 return (AS2 (mov,%D0,%D1) CR_TAB
2390 AS2 (mov,%C0,%C1) CR_TAB
2391 AS2 (mov,%B0,%B1) CR_TAB
2399 return (AS2 (movw,%A0,%A1) CR_TAB
2400 AS2 (movw,%C0,%C1));
2403 return (AS2 (mov,%A0,%A1) CR_TAB
2404 AS2 (mov,%B0,%B1) CR_TAB
2405 AS2 (mov,%C0,%C1) CR_TAB
2409 else if (CONSTANT_P (src))
2411 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2414 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2415 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2416 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2417 AS2 (ldi,%D0,hhi8(%1)));
2420 if (GET_CODE (src) == CONST_INT)
2422 const char *const clr_op0 =
2423 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2424 AS1 (clr,%B0) CR_TAB
2426 : (AS1 (clr,%A0) CR_TAB
2427 AS1 (clr,%B0) CR_TAB
2428 AS1 (clr,%C0) CR_TAB
2431 if (src == const0_rtx) /* mov r,L */
2433 *l = AVR_HAVE_MOVW ? 3 : 4;
2436 else if (src == const1_rtx)
2439 output_asm_insn (clr_op0, operands);
2440 *l = AVR_HAVE_MOVW ? 4 : 5;
2441 return AS1 (inc,%A0);
2443 else if (src == constm1_rtx)
2445 /* Immediate constants -1 to any register */
2449 return (AS1 (clr,%A0) CR_TAB
2450 AS1 (dec,%A0) CR_TAB
2451 AS2 (mov,%B0,%A0) CR_TAB
2452 AS2 (movw,%C0,%A0));
2455 return (AS1 (clr,%A0) CR_TAB
2456 AS1 (dec,%A0) CR_TAB
2457 AS2 (mov,%B0,%A0) CR_TAB
2458 AS2 (mov,%C0,%A0) CR_TAB
2463 int bit_nr = exact_log2 (INTVAL (src));
2467 *l = AVR_HAVE_MOVW ? 5 : 6;
2470 output_asm_insn (clr_op0, operands);
2471 output_asm_insn ("set", operands);
2474 avr_output_bld (operands, bit_nr);
2481 /* Last resort, better than loading from memory. */
2483 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2484 AS2 (ldi,r31,lo8(%1)) CR_TAB
2485 AS2 (mov,%A0,r31) CR_TAB
2486 AS2 (ldi,r31,hi8(%1)) CR_TAB
2487 AS2 (mov,%B0,r31) CR_TAB
2488 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2489 AS2 (mov,%C0,r31) CR_TAB
2490 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2491 AS2 (mov,%D0,r31) CR_TAB
2492 AS2 (mov,r31,__tmp_reg__));
2494 else if (GET_CODE (src) == MEM)
2495 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2497 else if (GET_CODE (dest) == MEM)
2499 const char *template;
2501 if (src == const0_rtx)
2502 operands[1] = zero_reg_rtx;
2504 template = out_movsi_mr_r (insn, operands, real_l);
2507 output_asm_insn (template, operands);
2512 fatal_insn ("invalid insn:", insn);
2517 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2521 rtx x = XEXP (dest, 0);
2527 if (CONSTANT_ADDRESS_P (x))
2529 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2532 return AS2 (out,__SREG__,%1);
2534 if (optimize > 0 && io_address_operand (x, QImode))
2537 return AS2 (out,%0-0x20,%1);
2540 return AS2 (sts,%0,%1);
2542 /* memory access by reg+disp */
2543 else if (GET_CODE (x) == PLUS
2544 && REG_P (XEXP (x,0))
2545 && GET_CODE (XEXP (x,1)) == CONST_INT)
2547 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2549 int disp = INTVAL (XEXP (x,1));
2550 if (REGNO (XEXP (x,0)) != REG_Y)
2551 fatal_insn ("incorrect insn:",insn);
2553 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2554 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2555 AS2 (std,Y+63,%1) CR_TAB
2556 AS2 (sbiw,r28,%o0-63));
2558 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2559 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2560 AS2 (st,Y,%1) CR_TAB
2561 AS2 (subi,r28,lo8(%o0)) CR_TAB
2562 AS2 (sbci,r29,hi8(%o0)));
2564 else if (REGNO (XEXP (x,0)) == REG_X)
2566 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2568 if (reg_unused_after (insn, XEXP (x,0)))
2569 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2570 AS2 (adiw,r26,%o0) CR_TAB
2571 AS2 (st,X,__tmp_reg__));
2573 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2574 AS2 (adiw,r26,%o0) CR_TAB
2575 AS2 (st,X,__tmp_reg__) CR_TAB
2576 AS2 (sbiw,r26,%o0));
2580 if (reg_unused_after (insn, XEXP (x,0)))
2581 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2584 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2585 AS2 (st,X,%1) CR_TAB
2586 AS2 (sbiw,r26,%o0));
2590 return AS2 (std,%0,%1);
2593 return AS2 (st,%0,%1);
2597 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2601 rtx base = XEXP (dest, 0);
2602 int reg_base = true_regnum (base);
2603 int reg_src = true_regnum (src);
2604 /* "volatile" forces writing high byte first, even if less efficient,
2605 for correct operation with 16-bit I/O registers. */
2606 int mem_volatile_p = MEM_VOLATILE_P (dest);
2611 if (CONSTANT_ADDRESS_P (base))
2613 if (optimize > 0 && io_address_operand (base, HImode))
2616 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2617 AS2 (out,%A0-0x20,%A1));
2619 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2624 if (reg_base == REG_X)
2626 if (reg_src == REG_X)
2628 /* "st X+,r26" and "st -X,r26" are undefined. */
2629 if (!mem_volatile_p && reg_unused_after (insn, src))
2630 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2631 AS2 (st,X,r26) CR_TAB
2632 AS2 (adiw,r26,1) CR_TAB
2633 AS2 (st,X,__tmp_reg__));
2635 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2636 AS2 (adiw,r26,1) CR_TAB
2637 AS2 (st,X,__tmp_reg__) CR_TAB
2638 AS2 (sbiw,r26,1) CR_TAB
2643 if (!mem_volatile_p && reg_unused_after (insn, base))
2644 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2647 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2648 AS2 (st,X,%B1) CR_TAB
2653 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2656 else if (GET_CODE (base) == PLUS)
2658 int disp = INTVAL (XEXP (base, 1));
2659 reg_base = REGNO (XEXP (base, 0));
2660 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2662 if (reg_base != REG_Y)
2663 fatal_insn ("incorrect insn:",insn);
2665 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2666 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2667 AS2 (std,Y+63,%B1) CR_TAB
2668 AS2 (std,Y+62,%A1) CR_TAB
2669 AS2 (sbiw,r28,%o0-62));
2671 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2672 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2673 AS2 (std,Y+1,%B1) CR_TAB
2674 AS2 (st,Y,%A1) CR_TAB
2675 AS2 (subi,r28,lo8(%o0)) CR_TAB
2676 AS2 (sbci,r29,hi8(%o0)));
2678 if (reg_base == REG_X)
2681 if (reg_src == REG_X)
2684 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2685 AS2 (mov,__zero_reg__,r27) CR_TAB
2686 AS2 (adiw,r26,%o0+1) CR_TAB
2687 AS2 (st,X,__zero_reg__) CR_TAB
2688 AS2 (st,-X,__tmp_reg__) CR_TAB
2689 AS1 (clr,__zero_reg__) CR_TAB
2690 AS2 (sbiw,r26,%o0));
2693 return (AS2 (adiw,r26,%o0+1) CR_TAB
2694 AS2 (st,X,%B1) CR_TAB
2695 AS2 (st,-X,%A1) CR_TAB
2696 AS2 (sbiw,r26,%o0));
2698 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2701 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2702 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2704 else if (GET_CODE (base) == POST_INC) /* (R++) */
2708 if (REGNO (XEXP (base, 0)) == REG_X)
2711 return (AS2 (adiw,r26,1) CR_TAB
2712 AS2 (st,X,%B1) CR_TAB
2713 AS2 (st,-X,%A1) CR_TAB
2719 return (AS2 (std,%p0+1,%B1) CR_TAB
2720 AS2 (st,%p0,%A1) CR_TAB
2726 return (AS2 (st,%0,%A1) CR_TAB
2729 fatal_insn ("unknown move insn:",insn);
2733 /* Return 1 if frame pointer for current function required. */
2736 frame_pointer_required_p (void)
2738 return (cfun->calls_alloca
2739 || crtl->args.info.nregs == 0
2740 || get_frame_size () > 0);
2743 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2746 compare_condition (rtx insn)
2748 rtx next = next_real_insn (insn);
2749 RTX_CODE cond = UNKNOWN;
2750 if (next && GET_CODE (next) == JUMP_INSN)
2752 rtx pat = PATTERN (next);
2753 rtx src = SET_SRC (pat);
2754 rtx t = XEXP (src, 0);
2755 cond = GET_CODE (t);
2760 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2763 compare_sign_p (rtx insn)
2765 RTX_CODE cond = compare_condition (insn);
2766 return (cond == GE || cond == LT);
2769 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2770 that needs to be swapped (GT, GTU, LE, LEU). */
2773 compare_diff_p (rtx insn)
2775 RTX_CODE cond = compare_condition (insn);
2776 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2779 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2782 compare_eq_p (rtx insn)
2784 RTX_CODE cond = compare_condition (insn);
2785 return (cond == EQ || cond == NE);
2789 /* Output test instruction for HImode. */
2792 out_tsthi (rtx insn, int *l)
2794 if (compare_sign_p (insn))
2797 return AS1 (tst,%B0);
2799 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2800 && compare_eq_p (insn))
2802 /* Faster than sbiw if we can clobber the operand. */
2804 return AS2 (or,%A0,%B0);
2806 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2809 return AS2 (sbiw,%0,0);
2812 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2813 AS2 (cpc,%B0,__zero_reg__));
2817 /* Output test instruction for SImode. */
2820 out_tstsi (rtx insn, int *l)
2822 if (compare_sign_p (insn))
2825 return AS1 (tst,%D0);
2827 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2830 return (AS2 (sbiw,%A0,0) CR_TAB
2831 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2832 AS2 (cpc,%D0,__zero_reg__));
2835 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2836 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2837 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2838 AS2 (cpc,%D0,__zero_reg__));
2842 /* Generate asm equivalent for various shifts.
2843 Shift count is a CONST_INT, MEM or REG.
2844 This only handles cases that are not already
2845 carefully hand-optimized in ?sh??i3_out. */
2848 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2849 int *len, int t_len)
2853 int second_label = 1;
2854 int saved_in_tmp = 0;
2855 int use_zero_reg = 0;
2857 op[0] = operands[0];
2858 op[1] = operands[1];
2859 op[2] = operands[2];
2860 op[3] = operands[3];
2866 if (GET_CODE (operands[2]) == CONST_INT)
2868 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2869 int count = INTVAL (operands[2]);
2870 int max_len = 10; /* If larger than this, always use a loop. */
2879 if (count < 8 && !scratch)
2883 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2885 if (t_len * count <= max_len)
2887 /* Output shifts inline with no loop - faster. */
2889 *len = t_len * count;
2893 output_asm_insn (template, op);
2902 strcat (str, AS2 (ldi,%3,%2));
2904 else if (use_zero_reg)
2906 /* Hack to save one word: use __zero_reg__ as loop counter.
2907 Set one bit, then shift in a loop until it is 0 again. */
2909 op[3] = zero_reg_rtx;
2913 strcat (str, ("set" CR_TAB
2914 AS2 (bld,%3,%2-1)));
2918 /* No scratch register available, use one from LD_REGS (saved in
2919 __tmp_reg__) that doesn't overlap with registers to shift. */
2921 op[3] = gen_rtx_REG (QImode,
2922 ((true_regnum (operands[0]) - 1) & 15) + 16);
2923 op[4] = tmp_reg_rtx;
2927 *len = 3; /* Includes "mov %3,%4" after the loop. */
2929 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2935 else if (GET_CODE (operands[2]) == MEM)
2939 op[3] = op_mov[0] = tmp_reg_rtx;
2943 out_movqi_r_mr (insn, op_mov, len);
2945 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2947 else if (register_operand (operands[2], QImode))
2949 if (reg_unused_after (insn, operands[2]))
2953 op[3] = tmp_reg_rtx;
2955 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2959 fatal_insn ("bad shift insn:", insn);
2966 strcat (str, AS1 (rjmp,2f));
2970 *len += t_len + 2; /* template + dec + brXX */
2973 strcat (str, "\n1:\t");
2974 strcat (str, template);
2975 strcat (str, second_label ? "\n2:\t" : "\n\t");
2976 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2977 strcat (str, CR_TAB);
2978 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2980 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2981 output_asm_insn (str, op);
2986 /* 8bit shift left ((char)x << i) */
2989 ashlqi3_out (rtx insn, rtx operands[], int *len)
2991 if (GET_CODE (operands[2]) == CONST_INT)
2998 switch (INTVAL (operands[2]))
3001 if (INTVAL (operands[2]) < 8)
3005 return AS1 (clr,%0);
3009 return AS1 (lsl,%0);
3013 return (AS1 (lsl,%0) CR_TAB
3018 return (AS1 (lsl,%0) CR_TAB
3023 if (test_hard_reg_class (LD_REGS, operands[0]))
3026 return (AS1 (swap,%0) CR_TAB
3027 AS2 (andi,%0,0xf0));
3030 return (AS1 (lsl,%0) CR_TAB
3036 if (test_hard_reg_class (LD_REGS, operands[0]))
3039 return (AS1 (swap,%0) CR_TAB
3041 AS2 (andi,%0,0xe0));
3044 return (AS1 (lsl,%0) CR_TAB
3051 if (test_hard_reg_class (LD_REGS, operands[0]))
3054 return (AS1 (swap,%0) CR_TAB
3057 AS2 (andi,%0,0xc0));
3060 return (AS1 (lsl,%0) CR_TAB
3069 return (AS1 (ror,%0) CR_TAB
3074 else if (CONSTANT_P (operands[2]))
3075 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3077 out_shift_with_cnt (AS1 (lsl,%0),
3078 insn, operands, len, 1);
3083 /* 16bit shift left ((short)x << i) */
3086 ashlhi3_out (rtx insn, rtx operands[], int *len)
3088 if (GET_CODE (operands[2]) == CONST_INT)
3090 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3091 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3098 switch (INTVAL (operands[2]))
3101 if (INTVAL (operands[2]) < 16)
3105 return (AS1 (clr,%B0) CR_TAB
3109 if (optimize_size && scratch)
3114 return (AS1 (swap,%A0) CR_TAB
3115 AS1 (swap,%B0) CR_TAB
3116 AS2 (andi,%B0,0xf0) CR_TAB
3117 AS2 (eor,%B0,%A0) CR_TAB
3118 AS2 (andi,%A0,0xf0) CR_TAB
3124 return (AS1 (swap,%A0) CR_TAB
3125 AS1 (swap,%B0) CR_TAB
3126 AS2 (ldi,%3,0xf0) CR_TAB
3127 AS2 (and,%B0,%3) CR_TAB
3128 AS2 (eor,%B0,%A0) CR_TAB
3129 AS2 (and,%A0,%3) CR_TAB
3132 break; /* optimize_size ? 6 : 8 */
3136 break; /* scratch ? 5 : 6 */
3140 return (AS1 (lsl,%A0) CR_TAB
3141 AS1 (rol,%B0) CR_TAB
3142 AS1 (swap,%A0) CR_TAB
3143 AS1 (swap,%B0) CR_TAB
3144 AS2 (andi,%B0,0xf0) CR_TAB
3145 AS2 (eor,%B0,%A0) CR_TAB
3146 AS2 (andi,%A0,0xf0) CR_TAB
3152 return (AS1 (lsl,%A0) CR_TAB
3153 AS1 (rol,%B0) CR_TAB
3154 AS1 (swap,%A0) CR_TAB
3155 AS1 (swap,%B0) CR_TAB
3156 AS2 (ldi,%3,0xf0) CR_TAB
3157 AS2 (and,%B0,%3) CR_TAB
3158 AS2 (eor,%B0,%A0) CR_TAB
3159 AS2 (and,%A0,%3) CR_TAB
3166 break; /* scratch ? 5 : 6 */
3168 return (AS1 (clr,__tmp_reg__) CR_TAB
3169 AS1 (lsr,%B0) CR_TAB
3170 AS1 (ror,%A0) CR_TAB
3171 AS1 (ror,__tmp_reg__) CR_TAB
3172 AS1 (lsr,%B0) CR_TAB
3173 AS1 (ror,%A0) CR_TAB
3174 AS1 (ror,__tmp_reg__) CR_TAB
3175 AS2 (mov,%B0,%A0) CR_TAB
3176 AS2 (mov,%A0,__tmp_reg__));
3180 return (AS1 (lsr,%B0) CR_TAB
3181 AS2 (mov,%B0,%A0) CR_TAB
3182 AS1 (clr,%A0) CR_TAB
3183 AS1 (ror,%B0) CR_TAB
3187 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3192 return (AS2 (mov,%B0,%A0) CR_TAB
3193 AS1 (clr,%A0) CR_TAB
3198 return (AS2 (mov,%B0,%A0) CR_TAB
3199 AS1 (clr,%A0) CR_TAB
3200 AS1 (lsl,%B0) CR_TAB
3205 return (AS2 (mov,%B0,%A0) CR_TAB
3206 AS1 (clr,%A0) CR_TAB
3207 AS1 (lsl,%B0) CR_TAB
3208 AS1 (lsl,%B0) CR_TAB
3215 return (AS2 (mov,%B0,%A0) CR_TAB
3216 AS1 (clr,%A0) CR_TAB
3217 AS1 (swap,%B0) CR_TAB
3218 AS2 (andi,%B0,0xf0));
3223 return (AS2 (mov,%B0,%A0) CR_TAB
3224 AS1 (clr,%A0) CR_TAB
3225 AS1 (swap,%B0) CR_TAB
3226 AS2 (ldi,%3,0xf0) CR_TAB
3230 return (AS2 (mov,%B0,%A0) CR_TAB
3231 AS1 (clr,%A0) CR_TAB
3232 AS1 (lsl,%B0) CR_TAB
3233 AS1 (lsl,%B0) CR_TAB
3234 AS1 (lsl,%B0) CR_TAB
3241 return (AS2 (mov,%B0,%A0) CR_TAB
3242 AS1 (clr,%A0) CR_TAB
3243 AS1 (swap,%B0) CR_TAB
3244 AS1 (lsl,%B0) CR_TAB
3245 AS2 (andi,%B0,0xe0));
3247 if (AVR_HAVE_MUL && scratch)
3250 return (AS2 (ldi,%3,0x20) CR_TAB
3251 AS2 (mul,%A0,%3) CR_TAB
3252 AS2 (mov,%B0,r0) CR_TAB
3253 AS1 (clr,%A0) CR_TAB
3254 AS1 (clr,__zero_reg__));
3256 if (optimize_size && scratch)
3261 return (AS2 (mov,%B0,%A0) CR_TAB
3262 AS1 (clr,%A0) CR_TAB
3263 AS1 (swap,%B0) CR_TAB
3264 AS1 (lsl,%B0) CR_TAB
3265 AS2 (ldi,%3,0xe0) CR_TAB
3271 return ("set" CR_TAB
3272 AS2 (bld,r1,5) CR_TAB
3273 AS2 (mul,%A0,r1) CR_TAB
3274 AS2 (mov,%B0,r0) CR_TAB
3275 AS1 (clr,%A0) CR_TAB
3276 AS1 (clr,__zero_reg__));
3279 return (AS2 (mov,%B0,%A0) CR_TAB
3280 AS1 (clr,%A0) CR_TAB
3281 AS1 (lsl,%B0) CR_TAB
3282 AS1 (lsl,%B0) CR_TAB
3283 AS1 (lsl,%B0) CR_TAB
3284 AS1 (lsl,%B0) CR_TAB
3288 if (AVR_HAVE_MUL && ldi_ok)
3291 return (AS2 (ldi,%B0,0x40) CR_TAB
3292 AS2 (mul,%A0,%B0) CR_TAB
3293 AS2 (mov,%B0,r0) CR_TAB
3294 AS1 (clr,%A0) CR_TAB
3295 AS1 (clr,__zero_reg__));
3297 if (AVR_HAVE_MUL && scratch)
3300 return (AS2 (ldi,%3,0x40) CR_TAB
3301 AS2 (mul,%A0,%3) CR_TAB
3302 AS2 (mov,%B0,r0) CR_TAB
3303 AS1 (clr,%A0) CR_TAB
3304 AS1 (clr,__zero_reg__));
3306 if (optimize_size && ldi_ok)
3309 return (AS2 (mov,%B0,%A0) CR_TAB
3310 AS2 (ldi,%A0,6) "\n1:\t"
3311 AS1 (lsl,%B0) CR_TAB
3312 AS1 (dec,%A0) CR_TAB
3315 if (optimize_size && scratch)
3318 return (AS1 (clr,%B0) CR_TAB
3319 AS1 (lsr,%A0) CR_TAB
3320 AS1 (ror,%B0) CR_TAB
3321 AS1 (lsr,%A0) CR_TAB
3322 AS1 (ror,%B0) CR_TAB
3327 return (AS1 (clr,%B0) CR_TAB
3328 AS1 (lsr,%A0) CR_TAB
3329 AS1 (ror,%B0) CR_TAB
3334 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3336 insn, operands, len, 2);
3341 /* 32bit shift left ((long)x << i) */
3344 ashlsi3_out (rtx insn, rtx operands[], int *len)
3346 if (GET_CODE (operands[2]) == CONST_INT)
3354 switch (INTVAL (operands[2]))
3357 if (INTVAL (operands[2]) < 32)
3361 return *len = 3, (AS1 (clr,%D0) CR_TAB
3362 AS1 (clr,%C0) CR_TAB
3363 AS2 (movw,%A0,%C0));
3365 return (AS1 (clr,%D0) CR_TAB
3366 AS1 (clr,%C0) CR_TAB
3367 AS1 (clr,%B0) CR_TAB
3372 int reg0 = true_regnum (operands[0]);
3373 int reg1 = true_regnum (operands[1]);
3376 return (AS2 (mov,%D0,%C1) CR_TAB
3377 AS2 (mov,%C0,%B1) CR_TAB
3378 AS2 (mov,%B0,%A1) CR_TAB
3381 return (AS1 (clr,%A0) CR_TAB
3382 AS2 (mov,%B0,%A1) CR_TAB
3383 AS2 (mov,%C0,%B1) CR_TAB
3389 int reg0 = true_regnum (operands[0]);
3390 int reg1 = true_regnum (operands[1]);
3391 if (reg0 + 2 == reg1)
3392 return *len = 2, (AS1 (clr,%B0) CR_TAB
3395 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3396 AS1 (clr,%B0) CR_TAB
3399 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3400 AS2 (mov,%D0,%B1) CR_TAB
3401 AS1 (clr,%B0) CR_TAB
3407 return (AS2 (mov,%D0,%A1) CR_TAB
3408 AS1 (clr,%C0) CR_TAB
3409 AS1 (clr,%B0) CR_TAB
3414 return (AS1 (clr,%D0) CR_TAB
3415 AS1 (lsr,%A0) CR_TAB
3416 AS1 (ror,%D0) CR_TAB
3417 AS1 (clr,%C0) CR_TAB
3418 AS1 (clr,%B0) CR_TAB
3423 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3424 AS1 (rol,%B0) CR_TAB
3425 AS1 (rol,%C0) CR_TAB
3427 insn, operands, len, 4);
3431 /* 8bit arithmetic shift right ((signed char)x >> i) */
3434 ashrqi3_out (rtx insn, rtx operands[], int *len)
3436 if (GET_CODE (operands[2]) == CONST_INT)
3443 switch (INTVAL (operands[2]))
3447 return AS1 (asr,%0);
3451 return (AS1 (asr,%0) CR_TAB
3456 return (AS1 (asr,%0) CR_TAB
3462 return (AS1 (asr,%0) CR_TAB
3469 return (AS1 (asr,%0) CR_TAB
3477 return (AS2 (bst,%0,6) CR_TAB
3479 AS2 (sbc,%0,%0) CR_TAB
3483 if (INTVAL (operands[2]) < 8)
3490 return (AS1 (lsl,%0) CR_TAB
3494 else if (CONSTANT_P (operands[2]))
3495 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3497 out_shift_with_cnt (AS1 (asr,%0),
3498 insn, operands, len, 1);
3503 /* 16bit arithmetic shift right ((signed short)x >> i) */
3506 ashrhi3_out (rtx insn, rtx operands[], int *len)
3508 if (GET_CODE (operands[2]) == CONST_INT)
3510 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3511 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3518 switch (INTVAL (operands[2]))
3522 /* XXX try to optimize this too? */
3527 break; /* scratch ? 5 : 6 */
3529 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3530 AS2 (mov,%A0,%B0) CR_TAB
3531 AS1 (lsl,__tmp_reg__) CR_TAB
3532 AS1 (rol,%A0) CR_TAB
3533 AS2 (sbc,%B0,%B0) CR_TAB
3534 AS1 (lsl,__tmp_reg__) CR_TAB
3535 AS1 (rol,%A0) CR_TAB
3540 return (AS1 (lsl,%A0) CR_TAB
3541 AS2 (mov,%A0,%B0) CR_TAB
3542 AS1 (rol,%A0) CR_TAB
3547 int reg0 = true_regnum (operands[0]);
3548 int reg1 = true_regnum (operands[1]);
3551 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3552 AS1 (lsl,%B0) CR_TAB
3555 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3556 AS1 (clr,%B0) CR_TAB
3557 AS2 (sbrc,%A0,7) CR_TAB
3563 return (AS2 (mov,%A0,%B0) CR_TAB
3564 AS1 (lsl,%B0) CR_TAB
3565 AS2 (sbc,%B0,%B0) CR_TAB
3570 return (AS2 (mov,%A0,%B0) CR_TAB
3571 AS1 (lsl,%B0) CR_TAB
3572 AS2 (sbc,%B0,%B0) CR_TAB
3573 AS1 (asr,%A0) CR_TAB
3577 if (AVR_HAVE_MUL && ldi_ok)
3580 return (AS2 (ldi,%A0,0x20) CR_TAB
3581 AS2 (muls,%B0,%A0) CR_TAB
3582 AS2 (mov,%A0,r1) CR_TAB
3583 AS2 (sbc,%B0,%B0) CR_TAB
3584 AS1 (clr,__zero_reg__));
3586 if (optimize_size && scratch)
3589 return (AS2 (mov,%A0,%B0) CR_TAB
3590 AS1 (lsl,%B0) CR_TAB
3591 AS2 (sbc,%B0,%B0) CR_TAB
3592 AS1 (asr,%A0) CR_TAB
3593 AS1 (asr,%A0) CR_TAB
3597 if (AVR_HAVE_MUL && ldi_ok)
3600 return (AS2 (ldi,%A0,0x10) CR_TAB
3601 AS2 (muls,%B0,%A0) CR_TAB
3602 AS2 (mov,%A0,r1) CR_TAB
3603 AS2 (sbc,%B0,%B0) CR_TAB
3604 AS1 (clr,__zero_reg__));
3606 if (optimize_size && scratch)
3609 return (AS2 (mov,%A0,%B0) CR_TAB
3610 AS1 (lsl,%B0) CR_TAB
3611 AS2 (sbc,%B0,%B0) CR_TAB
3612 AS1 (asr,%A0) CR_TAB
3613 AS1 (asr,%A0) CR_TAB
3614 AS1 (asr,%A0) CR_TAB
3618 if (AVR_HAVE_MUL && ldi_ok)
3621 return (AS2 (ldi,%A0,0x08) CR_TAB
3622 AS2 (muls,%B0,%A0) CR_TAB
3623 AS2 (mov,%A0,r1) CR_TAB
3624 AS2 (sbc,%B0,%B0) CR_TAB
3625 AS1 (clr,__zero_reg__));
3628 break; /* scratch ? 5 : 7 */
3630 return (AS2 (mov,%A0,%B0) CR_TAB
3631 AS1 (lsl,%B0) CR_TAB
3632 AS2 (sbc,%B0,%B0) CR_TAB
3633 AS1 (asr,%A0) CR_TAB
3634 AS1 (asr,%A0) CR_TAB
3635 AS1 (asr,%A0) CR_TAB
3636 AS1 (asr,%A0) CR_TAB
3641 return (AS1 (lsl,%B0) CR_TAB
3642 AS2 (sbc,%A0,%A0) CR_TAB
3643 AS1 (lsl,%B0) CR_TAB
3644 AS2 (mov,%B0,%A0) CR_TAB
3648 if (INTVAL (operands[2]) < 16)
3654 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3655 AS2 (sbc,%A0,%A0) CR_TAB
3660 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3662 insn, operands, len, 2);
3667 /* 32bit arithmetic shift right ((signed long)x >> i) */
3670 ashrsi3_out (rtx insn, rtx operands[], int *len)
3672 if (GET_CODE (operands[2]) == CONST_INT)
3680 switch (INTVAL (operands[2]))
3684 int reg0 = true_regnum (operands[0]);
3685 int reg1 = true_regnum (operands[1]);
3688 return (AS2 (mov,%A0,%B1) CR_TAB
3689 AS2 (mov,%B0,%C1) CR_TAB
3690 AS2 (mov,%C0,%D1) CR_TAB
3691 AS1 (clr,%D0) CR_TAB
3692 AS2 (sbrc,%C0,7) CR_TAB
3695 return (AS1 (clr,%D0) CR_TAB
3696 AS2 (sbrc,%D1,7) CR_TAB
3697 AS1 (dec,%D0) CR_TAB
3698 AS2 (mov,%C0,%D1) CR_TAB
3699 AS2 (mov,%B0,%C1) CR_TAB
3705 int reg0 = true_regnum (operands[0]);
3706 int reg1 = true_regnum (operands[1]);
3708 if (reg0 == reg1 + 2)
3709 return *len = 4, (AS1 (clr,%D0) CR_TAB
3710 AS2 (sbrc,%B0,7) CR_TAB
3711 AS1 (com,%D0) CR_TAB
3714 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3715 AS1 (clr,%D0) CR_TAB
3716 AS2 (sbrc,%B0,7) CR_TAB
3717 AS1 (com,%D0) CR_TAB
3720 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3721 AS2 (mov,%A0,%C1) CR_TAB
3722 AS1 (clr,%D0) CR_TAB
3723 AS2 (sbrc,%B0,7) CR_TAB
3724 AS1 (com,%D0) CR_TAB
3729 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3730 AS1 (clr,%D0) CR_TAB
3731 AS2 (sbrc,%A0,7) CR_TAB
3732 AS1 (com,%D0) CR_TAB
3733 AS2 (mov,%B0,%D0) CR_TAB
3737 if (INTVAL (operands[2]) < 32)
3744 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3745 AS2 (sbc,%A0,%A0) CR_TAB
3746 AS2 (mov,%B0,%A0) CR_TAB
3747 AS2 (movw,%C0,%A0));
3749 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3750 AS2 (sbc,%A0,%A0) CR_TAB
3751 AS2 (mov,%B0,%A0) CR_TAB
3752 AS2 (mov,%C0,%A0) CR_TAB
3757 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3758 AS1 (ror,%C0) CR_TAB
3759 AS1 (ror,%B0) CR_TAB
3761 insn, operands, len, 4);
3765 /* 8bit logic shift right ((unsigned char)x >> i) */
3768 lshrqi3_out (rtx insn, rtx operands[], int *len)
3770 if (GET_CODE (operands[2]) == CONST_INT)
3777 switch (INTVAL (operands[2]))
3780 if (INTVAL (operands[2]) < 8)
3784 return AS1 (clr,%0);
3788 return AS1 (lsr,%0);
3792 return (AS1 (lsr,%0) CR_TAB
3796 return (AS1 (lsr,%0) CR_TAB
3801 if (test_hard_reg_class (LD_REGS, operands[0]))
3804 return (AS1 (swap,%0) CR_TAB
3805 AS2 (andi,%0,0x0f));
3808 return (AS1 (lsr,%0) CR_TAB
3814 if (test_hard_reg_class (LD_REGS, operands[0]))
3817 return (AS1 (swap,%0) CR_TAB
3822 return (AS1 (lsr,%0) CR_TAB
3829 if (test_hard_reg_class (LD_REGS, operands[0]))
3832 return (AS1 (swap,%0) CR_TAB
3838 return (AS1 (lsr,%0) CR_TAB
3847 return (AS1 (rol,%0) CR_TAB
3852 else if (CONSTANT_P (operands[2]))
3853 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3855 out_shift_with_cnt (AS1 (lsr,%0),
3856 insn, operands, len, 1);
3860 /* 16bit logic shift right ((unsigned short)x >> i) */
3863 lshrhi3_out (rtx insn, rtx operands[], int *len)
3865 if (GET_CODE (operands[2]) == CONST_INT)
3867 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3868 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3875 switch (INTVAL (operands[2]))
3878 if (INTVAL (operands[2]) < 16)
3882 return (AS1 (clr,%B0) CR_TAB
3886 if (optimize_size && scratch)
3891 return (AS1 (swap,%B0) CR_TAB
3892 AS1 (swap,%A0) CR_TAB
3893 AS2 (andi,%A0,0x0f) CR_TAB
3894 AS2 (eor,%A0,%B0) CR_TAB
3895 AS2 (andi,%B0,0x0f) CR_TAB
3901 return (AS1 (swap,%B0) CR_TAB
3902 AS1 (swap,%A0) CR_TAB
3903 AS2 (ldi,%3,0x0f) CR_TAB
3904 AS2 (and,%A0,%3) CR_TAB
3905 AS2 (eor,%A0,%B0) CR_TAB
3906 AS2 (and,%B0,%3) CR_TAB
3909 break; /* optimize_size ? 6 : 8 */
3913 break; /* scratch ? 5 : 6 */
3917 return (AS1 (lsr,%B0) CR_TAB
3918 AS1 (ror,%A0) CR_TAB
3919 AS1 (swap,%B0) CR_TAB
3920 AS1 (swap,%A0) CR_TAB
3921 AS2 (andi,%A0,0x0f) CR_TAB
3922 AS2 (eor,%A0,%B0) CR_TAB
3923 AS2 (andi,%B0,0x0f) CR_TAB
3929 return (AS1 (lsr,%B0) CR_TAB
3930 AS1 (ror,%A0) CR_TAB
3931 AS1 (swap,%B0) CR_TAB
3932 AS1 (swap,%A0) CR_TAB
3933 AS2 (ldi,%3,0x0f) CR_TAB
3934 AS2 (and,%A0,%3) CR_TAB
3935 AS2 (eor,%A0,%B0) CR_TAB
3936 AS2 (and,%B0,%3) CR_TAB
3943 break; /* scratch ? 5 : 6 */
3945 return (AS1 (clr,__tmp_reg__) CR_TAB
3946 AS1 (lsl,%A0) CR_TAB
3947 AS1 (rol,%B0) CR_TAB
3948 AS1 (rol,__tmp_reg__) CR_TAB
3949 AS1 (lsl,%A0) CR_TAB
3950 AS1 (rol,%B0) CR_TAB
3951 AS1 (rol,__tmp_reg__) CR_TAB
3952 AS2 (mov,%A0,%B0) CR_TAB
3953 AS2 (mov,%B0,__tmp_reg__));
3957 return (AS1 (lsl,%A0) CR_TAB
3958 AS2 (mov,%A0,%B0) CR_TAB
3959 AS1 (rol,%A0) CR_TAB
3960 AS2 (sbc,%B0,%B0) CR_TAB
3964 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3969 return (AS2 (mov,%A0,%B0) CR_TAB
3970 AS1 (clr,%B0) CR_TAB
3975 return (AS2 (mov,%A0,%B0) CR_TAB
3976 AS1 (clr,%B0) CR_TAB
3977 AS1 (lsr,%A0) CR_TAB
3982 return (AS2 (mov,%A0,%B0) CR_TAB
3983 AS1 (clr,%B0) CR_TAB
3984 AS1 (lsr,%A0) CR_TAB
3985 AS1 (lsr,%A0) CR_TAB
3992 return (AS2 (mov,%A0,%B0) CR_TAB
3993 AS1 (clr,%B0) CR_TAB
3994 AS1 (swap,%A0) CR_TAB
3995 AS2 (andi,%A0,0x0f));
4000 return (AS2 (mov,%A0,%B0) CR_TAB
4001 AS1 (clr,%B0) CR_TAB
4002 AS1 (swap,%A0) CR_TAB
4003 AS2 (ldi,%3,0x0f) CR_TAB
4007 return (AS2 (mov,%A0,%B0) CR_TAB
4008 AS1 (clr,%B0) CR_TAB
4009 AS1 (lsr,%A0) CR_TAB
4010 AS1 (lsr,%A0) CR_TAB
4011 AS1 (lsr,%A0) CR_TAB
4018 return (AS2 (mov,%A0,%B0) CR_TAB
4019 AS1 (clr,%B0) CR_TAB
4020 AS1 (swap,%A0) CR_TAB
4021 AS1 (lsr,%A0) CR_TAB
4022 AS2 (andi,%A0,0x07));
4024 if (AVR_HAVE_MUL && scratch)
4027 return (AS2 (ldi,%3,0x08) CR_TAB
4028 AS2 (mul,%B0,%3) CR_TAB
4029 AS2 (mov,%A0,r1) CR_TAB
4030 AS1 (clr,%B0) CR_TAB
4031 AS1 (clr,__zero_reg__));
4033 if (optimize_size && scratch)
4038 return (AS2 (mov,%A0,%B0) CR_TAB
4039 AS1 (clr,%B0) CR_TAB
4040 AS1 (swap,%A0) CR_TAB
4041 AS1 (lsr,%A0) CR_TAB
4042 AS2 (ldi,%3,0x07) CR_TAB
4048 return ("set" CR_TAB
4049 AS2 (bld,r1,3) CR_TAB
4050 AS2 (mul,%B0,r1) CR_TAB
4051 AS2 (mov,%A0,r1) CR_TAB
4052 AS1 (clr,%B0) CR_TAB
4053 AS1 (clr,__zero_reg__));
4056 return (AS2 (mov,%A0,%B0) CR_TAB
4057 AS1 (clr,%B0) CR_TAB
4058 AS1 (lsr,%A0) CR_TAB
4059 AS1 (lsr,%A0) CR_TAB
4060 AS1 (lsr,%A0) CR_TAB
4061 AS1 (lsr,%A0) CR_TAB
4065 if (AVR_HAVE_MUL && ldi_ok)
4068 return (AS2 (ldi,%A0,0x04) CR_TAB
4069 AS2 (mul,%B0,%A0) CR_TAB
4070 AS2 (mov,%A0,r1) CR_TAB
4071 AS1 (clr,%B0) CR_TAB
4072 AS1 (clr,__zero_reg__));
4074 if (AVR_HAVE_MUL && scratch)
4077 return (AS2 (ldi,%3,0x04) CR_TAB
4078 AS2 (mul,%B0,%3) CR_TAB
4079 AS2 (mov,%A0,r1) CR_TAB
4080 AS1 (clr,%B0) CR_TAB
4081 AS1 (clr,__zero_reg__));
4083 if (optimize_size && ldi_ok)
4086 return (AS2 (mov,%A0,%B0) CR_TAB
4087 AS2 (ldi,%B0,6) "\n1:\t"
4088 AS1 (lsr,%A0) CR_TAB
4089 AS1 (dec,%B0) CR_TAB
4092 if (optimize_size && scratch)
4095 return (AS1 (clr,%A0) CR_TAB
4096 AS1 (lsl,%B0) CR_TAB
4097 AS1 (rol,%A0) CR_TAB
4098 AS1 (lsl,%B0) CR_TAB
4099 AS1 (rol,%A0) CR_TAB
4104 return (AS1 (clr,%A0) CR_TAB
4105 AS1 (lsl,%B0) CR_TAB
4106 AS1 (rol,%A0) CR_TAB
4111 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4113 insn, operands, len, 2);
4117 /* 32bit logic shift right ((unsigned int)x >> i) */
4120 lshrsi3_out (rtx insn, rtx operands[], int *len)
4122 if (GET_CODE (operands[2]) == CONST_INT)
4130 switch (INTVAL (operands[2]))
4133 if (INTVAL (operands[2]) < 32)
4137 return *len = 3, (AS1 (clr,%D0) CR_TAB
4138 AS1 (clr,%C0) CR_TAB
4139 AS2 (movw,%A0,%C0));
4141 return (AS1 (clr,%D0) CR_TAB
4142 AS1 (clr,%C0) CR_TAB
4143 AS1 (clr,%B0) CR_TAB
4148 int reg0 = true_regnum (operands[0]);
4149 int reg1 = true_regnum (operands[1]);
4152 return (AS2 (mov,%A0,%B1) CR_TAB
4153 AS2 (mov,%B0,%C1) CR_TAB
4154 AS2 (mov,%C0,%D1) CR_TAB
4157 return (AS1 (clr,%D0) CR_TAB
4158 AS2 (mov,%C0,%D1) CR_TAB
4159 AS2 (mov,%B0,%C1) CR_TAB
4165 int reg0 = true_regnum (operands[0]);
4166 int reg1 = true_regnum (operands[1]);
4168 if (reg0 == reg1 + 2)
4169 return *len = 2, (AS1 (clr,%C0) CR_TAB
4172 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4173 AS1 (clr,%C0) CR_TAB
4176 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4177 AS2 (mov,%A0,%C1) CR_TAB
4178 AS1 (clr,%C0) CR_TAB
4183 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4184 AS1 (clr,%B0) CR_TAB
4185 AS1 (clr,%C0) CR_TAB
4190 return (AS1 (clr,%A0) CR_TAB
4191 AS2 (sbrc,%D0,7) CR_TAB
4192 AS1 (inc,%A0) CR_TAB
4193 AS1 (clr,%B0) CR_TAB
4194 AS1 (clr,%C0) CR_TAB
4199 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4200 AS1 (ror,%C0) CR_TAB
4201 AS1 (ror,%B0) CR_TAB
4203 insn, operands, len, 4);
4207 /* Modifies the length assigned to instruction INSN
4208 LEN is the initially computed length of the insn. */
4211 adjust_insn_length (rtx insn, int len)
4213 rtx patt = PATTERN (insn);
4216 if (GET_CODE (patt) == SET)
4219 op[1] = SET_SRC (patt);
4220 op[0] = SET_DEST (patt);
4221 if (general_operand (op[1], VOIDmode)
4222 && general_operand (op[0], VOIDmode))
4224 switch (GET_MODE (op[0]))
4227 output_movqi (insn, op, &len);
4230 output_movhi (insn, op, &len);
4234 output_movsisf (insn, op, &len);
4240 else if (op[0] == cc0_rtx && REG_P (op[1]))
4242 switch (GET_MODE (op[1]))
4244 case HImode: out_tsthi (insn,&len); break;
4245 case SImode: out_tstsi (insn,&len); break;
4249 else if (GET_CODE (op[1]) == AND)
4251 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4253 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4254 if (GET_MODE (op[1]) == SImode)
4255 len = (((mask & 0xff) != 0xff)
4256 + ((mask & 0xff00) != 0xff00)
4257 + ((mask & 0xff0000L) != 0xff0000L)
4258 + ((mask & 0xff000000L) != 0xff000000L));
4259 else if (GET_MODE (op[1]) == HImode)
4260 len = (((mask & 0xff) != 0xff)
4261 + ((mask & 0xff00) != 0xff00));
4264 else if (GET_CODE (op[1]) == IOR)
4266 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4268 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4269 if (GET_MODE (op[1]) == SImode)
4270 len = (((mask & 0xff) != 0)
4271 + ((mask & 0xff00) != 0)
4272 + ((mask & 0xff0000L) != 0)
4273 + ((mask & 0xff000000L) != 0));
4274 else if (GET_MODE (op[1]) == HImode)
4275 len = (((mask & 0xff) != 0)
4276 + ((mask & 0xff00) != 0));
4280 set = single_set (insn);
4285 op[1] = SET_SRC (set);
4286 op[0] = SET_DEST (set);
4288 if (GET_CODE (patt) == PARALLEL
4289 && general_operand (op[1], VOIDmode)
4290 && general_operand (op[0], VOIDmode))
4292 if (XVECLEN (patt, 0) == 2)
4293 op[2] = XVECEXP (patt, 0, 1);
4295 switch (GET_MODE (op[0]))
4301 output_reload_inhi (insn, op, &len);
4305 output_reload_insisf (insn, op, &len);
4311 else if (GET_CODE (op[1]) == ASHIFT
4312 || GET_CODE (op[1]) == ASHIFTRT
4313 || GET_CODE (op[1]) == LSHIFTRT)
4317 ops[1] = XEXP (op[1],0);
4318 ops[2] = XEXP (op[1],1);
4319 switch (GET_CODE (op[1]))
4322 switch (GET_MODE (op[0]))
4324 case QImode: ashlqi3_out (insn,ops,&len); break;
4325 case HImode: ashlhi3_out (insn,ops,&len); break;
4326 case SImode: ashlsi3_out (insn,ops,&len); break;
4331 switch (GET_MODE (op[0]))
4333 case QImode: ashrqi3_out (insn,ops,&len); break;
4334 case HImode: ashrhi3_out (insn,ops,&len); break;
4335 case SImode: ashrsi3_out (insn,ops,&len); break;
4340 switch (GET_MODE (op[0]))
4342 case QImode: lshrqi3_out (insn,ops,&len); break;
4343 case HImode: lshrhi3_out (insn,ops,&len); break;
4344 case SImode: lshrsi3_out (insn,ops,&len); break;
4356 /* Return nonzero if register REG dead after INSN. */
4359 reg_unused_after (rtx insn, rtx reg)
4361 return (dead_or_set_p (insn, reg)
4362 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4365 /* Return nonzero if REG is not used after INSN.
4366 We assume REG is a reload reg, and therefore does
4367 not live past labels. It may live past calls or jumps though. */
4370 _reg_unused_after (rtx insn, rtx reg)
4375 /* If the reg is set by this instruction, then it is safe for our
4376 case. Disregard the case where this is a store to memory, since
4377 we are checking a register used in the store address. */
4378 set = single_set (insn);
4379 if (set && GET_CODE (SET_DEST (set)) != MEM
4380 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4383 while ((insn = NEXT_INSN (insn)))
4386 code = GET_CODE (insn);
4389 /* If this is a label that existed before reload, then the register
4390 if dead here. However, if this is a label added by reorg, then
4391 the register may still be live here. We can't tell the difference,
4392 so we just ignore labels completely. */
4393 if (code == CODE_LABEL)
4401 if (code == JUMP_INSN)
4404 /* If this is a sequence, we must handle them all at once.
4405 We could have for instance a call that sets the target register,
4406 and an insn in a delay slot that uses the register. In this case,
4407 we must return 0. */
4408 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4413 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4415 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4416 rtx set = single_set (this_insn);
4418 if (GET_CODE (this_insn) == CALL_INSN)
4420 else if (GET_CODE (this_insn) == JUMP_INSN)
4422 if (INSN_ANNULLED_BRANCH_P (this_insn))
4427 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4429 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4431 if (GET_CODE (SET_DEST (set)) != MEM)
4437 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4442 else if (code == JUMP_INSN)
4446 if (code == CALL_INSN)
4449 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4450 if (GET_CODE (XEXP (tem, 0)) == USE
4451 && REG_P (XEXP (XEXP (tem, 0), 0))
4452 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4454 if (call_used_regs[REGNO (reg)])
4458 set = single_set (insn);
4460 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4462 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4463 return GET_CODE (SET_DEST (set)) != MEM;
4464 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4470 /* Target hook for assembling integer objects. The AVR version needs
4471 special handling for references to certain labels. */
4474 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4476 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4477 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4478 || GET_CODE (x) == LABEL_REF))
4480 fputs ("\t.word\tgs(", asm_out_file);
4481 output_addr_const (asm_out_file, x);
4482 fputs (")\n", asm_out_file);
4485 return default_assemble_integer (x, size, aligned_p);
4488 /* The routine used to output NUL terminated strings. We use a special
4489 version of this for most svr4 targets because doing so makes the
4490 generated assembly code more compact (and thus faster to assemble)
4491 as well as more readable, especially for targets like the i386
4492 (where the only alternative is to output character sequences as
4493 comma separated lists of numbers). */
4496 gas_output_limited_string(FILE *file, const char *str)
4498 const unsigned char *_limited_str = (const unsigned char *) str;
4500 fprintf (file, "%s\"", STRING_ASM_OP);
4501 for (; (ch = *_limited_str); _limited_str++)
4504 switch (escape = ESCAPES[ch])
4510 fprintf (file, "\\%03o", ch);
4514 putc (escape, file);
4518 fprintf (file, "\"\n");
4521 /* The routine used to output sequences of byte values. We use a special
4522 version of this for most svr4 targets because doing so makes the
4523 generated assembly code more compact (and thus faster to assemble)
4524 as well as more readable. Note that if we find subparts of the
4525 character sequence which end with NUL (and which are shorter than
4526 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4529 gas_output_ascii(FILE *file, const char *str, size_t length)
4531 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4532 const unsigned char *limit = _ascii_bytes + length;
4533 unsigned bytes_in_chunk = 0;
4534 for (; _ascii_bytes < limit; _ascii_bytes++)
4536 const unsigned char *p;
4537 if (bytes_in_chunk >= 60)
4539 fprintf (file, "\"\n");
4542 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4544 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4546 if (bytes_in_chunk > 0)
4548 fprintf (file, "\"\n");
4551 gas_output_limited_string (file, (const char*)_ascii_bytes);
4558 if (bytes_in_chunk == 0)
4559 fprintf (file, "\t.ascii\t\"");
4560 switch (escape = ESCAPES[ch = *_ascii_bytes])
4567 fprintf (file, "\\%03o", ch);
4568 bytes_in_chunk += 4;
4572 putc (escape, file);
4573 bytes_in_chunk += 2;
4578 if (bytes_in_chunk > 0)
4579 fprintf (file, "\"\n");
4582 /* Return value is nonzero if pseudos that have been
4583 assigned to registers of class CLASS would likely be spilled
4584 because registers of CLASS are needed for spill registers. */
4587 class_likely_spilled_p (int c)
4589 return (c != ALL_REGS && c != ADDW_REGS);
4592 /* Valid attributes:
4593 progmem - put data to program memory;
4594 signal - make a function to be hardware interrupt. After function
4595 prologue interrupts are disabled;
4596 interrupt - make a function to be hardware interrupt. After function
4597 prologue interrupts are enabled;
4598 naked - don't generate function prologue/epilogue and `ret' command.
4600 Only `progmem' attribute valid for type. */
4602 const struct attribute_spec avr_attribute_table[] =
4604 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4605 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4606 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4607 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4608 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4609 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4610 { NULL, 0, 0, false, false, false, NULL }
4613 /* Handle a "progmem" attribute; arguments as in
4614 struct attribute_spec.handler. */
4616 avr_handle_progmem_attribute (tree *node, tree name,
4617 tree args ATTRIBUTE_UNUSED,
4618 int flags ATTRIBUTE_UNUSED,
4623 if (TREE_CODE (*node) == TYPE_DECL)
4625 /* This is really a decl attribute, not a type attribute,
4626 but try to handle it for GCC 3.0 backwards compatibility. */
4628 tree type = TREE_TYPE (*node);
4629 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4630 tree newtype = build_type_attribute_variant (type, attr);
4632 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4633 TREE_TYPE (*node) = newtype;
4634 *no_add_attrs = true;
4636 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4638 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4640 warning (0, "only initialized variables can be placed into "
4641 "program memory area");
4642 *no_add_attrs = true;
4647 warning (OPT_Wattributes, "%qs attribute ignored",
4648 IDENTIFIER_POINTER (name));
4649 *no_add_attrs = true;
4656 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4657 struct attribute_spec.handler. */
4660 avr_handle_fndecl_attribute (tree *node, tree name,
4661 tree args ATTRIBUTE_UNUSED,
4662 int flags ATTRIBUTE_UNUSED,
4665 if (TREE_CODE (*node) != FUNCTION_DECL)
4667 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4668 IDENTIFIER_POINTER (name));
4669 *no_add_attrs = true;
4673 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4674 const char *attr = IDENTIFIER_POINTER (name);
4676 /* If the function has the 'signal' or 'interrupt' attribute, test to
4677 make sure that the name of the function is "__vector_NN" so as to
4678 catch when the user misspells the interrupt vector name. */
4680 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4682 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4684 warning (0, "%qs appears to be a misspelled interrupt handler",
4688 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4690 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4692 warning (0, "%qs appears to be a misspelled signal handler",
4702 avr_handle_fntype_attribute (tree *node, tree name,
4703 tree args ATTRIBUTE_UNUSED,
4704 int flags ATTRIBUTE_UNUSED,
4707 if (TREE_CODE (*node) != FUNCTION_TYPE)
4709 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4710 IDENTIFIER_POINTER (name));
4711 *no_add_attrs = true;
4717 /* Look for attribute `progmem' in DECL
4718 if found return 1, otherwise 0. */
4721 avr_progmem_p (tree decl, tree attributes)
4725 if (TREE_CODE (decl) != VAR_DECL)
4729 != lookup_attribute ("progmem", attributes))
4735 while (TREE_CODE (a) == ARRAY_TYPE);
4737 if (a == error_mark_node)
4740 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4746 /* Add the section attribute if the variable is in progmem. */
4749 avr_insert_attributes (tree node, tree *attributes)
4751 if (TREE_CODE (node) == VAR_DECL
4752 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4753 && avr_progmem_p (node, *attributes))
4755 static const char dsec[] = ".progmem.data";
4756 *attributes = tree_cons (get_identifier ("section"),
4757 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4760 /* ??? This seems sketchy. Why can't the user declare the
4761 thing const in the first place? */
4762 TREE_READONLY (node) = 1;
4766 /* A get_unnamed_section callback for switching to progmem_section. */
4769 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4771 fprintf (asm_out_file,
4772 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4773 AVR_HAVE_JMP_CALL ? "a" : "ax");
4774 /* Should already be aligned, this is just to be safe if it isn't. */
4775 fprintf (asm_out_file, "\t.p2align 1\n");
4778 /* Implement TARGET_ASM_INIT_SECTIONS. */
4781 avr_asm_init_sections (void)
4783 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4784 avr_output_progmem_section_asm_op,
4786 readonly_data_section = data_section;
4790 avr_section_type_flags (tree decl, const char *name, int reloc)
4792 unsigned int flags = default_section_type_flags (decl, name, reloc);
4794 if (strncmp (name, ".noinit", 7) == 0)
4796 if (decl && TREE_CODE (decl) == VAR_DECL
4797 && DECL_INITIAL (decl) == NULL_TREE)
4798 flags |= SECTION_BSS; /* @nobits */
4800 warning (0, "only uninitialized variables can be placed in the "
4807 /* Outputs some appropriate text to go at the start of an assembler
4811 avr_file_start (void)
4814 error ("MCU %qs supported for assembler only", avr_mcu_name);
4816 default_file_start ();
4818 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4819 fputs ("__SREG__ = 0x3f\n"
4821 "__SP_L__ = 0x3d\n", asm_out_file);
4823 fputs ("__tmp_reg__ = 0\n"
4824 "__zero_reg__ = 1\n", asm_out_file);
4826 /* FIXME: output these only if there is anything in the .data / .bss
4827 sections - some code size could be saved by not linking in the
4828 initialization code from libgcc if one or both sections are empty. */
4829 fputs ("\t.global __do_copy_data\n", asm_out_file);
4830 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4833 /* Outputs to the stdio stream FILE some
4834 appropriate text to go at the end of an assembler file. */
4841 /* Choose the order in which to allocate hard registers for
4842 pseudo-registers local to a basic block.
4844 Store the desired register order in the array `reg_alloc_order'.
4845 Element 0 should be the register to allocate first; element 1, the
4846 next register; and so on. */
4849 order_regs_for_local_alloc (void)
4852 static const int order_0[] = {
4860 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4864 static const int order_1[] = {
4872 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4876 static const int order_2[] = {
4885 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4890 const int *order = (TARGET_ORDER_1 ? order_1 :
4891 TARGET_ORDER_2 ? order_2 :
4893 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4894 reg_alloc_order[i] = order[i];
4898 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4899 cost of an RTX operand given its context. X is the rtx of the
4900 operand, MODE is its mode, and OUTER is the rtx_code of this
4901 operand's parent operator. */
4904 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4906 enum rtx_code code = GET_CODE (x);
4917 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4924 avr_rtx_costs (x, code, outer, &total);
4928 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4929 is to be calculated. Return true if the complete cost has been
4930 computed, and false if subexpressions should be scanned. In either
4931 case, *TOTAL contains the cost result. */
4934 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4936 enum machine_mode mode = GET_MODE (x);
4943 /* Immediate constants are as cheap as registers. */
4951 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4959 *total = COSTS_N_INSNS (1);
4963 *total = COSTS_N_INSNS (3);
4967 *total = COSTS_N_INSNS (7);
4973 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4981 *total = COSTS_N_INSNS (1);
4987 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4991 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4992 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4996 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4997 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4998 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5002 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5003 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5004 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5011 *total = COSTS_N_INSNS (1);
5012 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5013 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5017 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5019 *total = COSTS_N_INSNS (2);
5020 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5022 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5023 *total = COSTS_N_INSNS (1);
5025 *total = COSTS_N_INSNS (2);
5029 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5031 *total = COSTS_N_INSNS (4);
5032 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5034 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5035 *total = COSTS_N_INSNS (1);
5037 *total = COSTS_N_INSNS (4);
5043 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5049 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5050 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5051 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5052 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5056 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5057 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5058 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5066 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5067 else if (optimize_size)
5068 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5075 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5076 else if (optimize_size)
5077 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5085 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5086 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5094 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5097 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5098 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5105 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5107 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5108 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5112 val = INTVAL (XEXP (x, 1));
5114 *total = COSTS_N_INSNS (3);
5115 else if (val >= 0 && val <= 7)
5116 *total = COSTS_N_INSNS (val);
5118 *total = COSTS_N_INSNS (1);
5123 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5125 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5126 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5129 switch (INTVAL (XEXP (x, 1)))
5136 *total = COSTS_N_INSNS (2);
5139 *total = COSTS_N_INSNS (3);
5145 *total = COSTS_N_INSNS (4);
5150 *total = COSTS_N_INSNS (5);
5153 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5156 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5159 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5162 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5163 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5168 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5170 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5171 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5174 switch (INTVAL (XEXP (x, 1)))
5180 *total = COSTS_N_INSNS (3);
5185 *total = COSTS_N_INSNS (4);
5188 *total = COSTS_N_INSNS (6);
5191 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5194 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5195 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5202 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5209 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5211 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5212 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5216 val = INTVAL (XEXP (x, 1));
5218 *total = COSTS_N_INSNS (4);
5220 *total = COSTS_N_INSNS (2);
5221 else if (val >= 0 && val <= 7)
5222 *total = COSTS_N_INSNS (val);
5224 *total = COSTS_N_INSNS (1);
5229 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5231 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5232 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5235 switch (INTVAL (XEXP (x, 1)))
5241 *total = COSTS_N_INSNS (2);
5244 *total = COSTS_N_INSNS (3);
5250 *total = COSTS_N_INSNS (4);
5254 *total = COSTS_N_INSNS (5);
5257 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5260 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5264 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5267 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5268 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5273 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5275 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5276 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5279 switch (INTVAL (XEXP (x, 1)))
5285 *total = COSTS_N_INSNS (4);
5290 *total = COSTS_N_INSNS (6);
5293 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5296 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5299 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5300 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5307 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5314 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5316 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5317 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5321 val = INTVAL (XEXP (x, 1));
5323 *total = COSTS_N_INSNS (3);
5324 else if (val >= 0 && val <= 7)
5325 *total = COSTS_N_INSNS (val);
5327 *total = COSTS_N_INSNS (1);
5332 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5334 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5335 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5338 switch (INTVAL (XEXP (x, 1)))
5345 *total = COSTS_N_INSNS (2);
5348 *total = COSTS_N_INSNS (3);
5353 *total = COSTS_N_INSNS (4);
5357 *total = COSTS_N_INSNS (5);
5363 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5366 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5370 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5373 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5374 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5379 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5381 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5382 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5385 switch (INTVAL (XEXP (x, 1)))
5391 *total = COSTS_N_INSNS (4);
5394 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5399 *total = COSTS_N_INSNS (4);
5402 *total = COSTS_N_INSNS (6);
5405 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5406 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5413 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5417 switch (GET_MODE (XEXP (x, 0)))
5420 *total = COSTS_N_INSNS (1);
5421 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5422 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5426 *total = COSTS_N_INSNS (2);
5427 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5428 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5429 else if (INTVAL (XEXP (x, 1)) != 0)
5430 *total += COSTS_N_INSNS (1);
5434 *total = COSTS_N_INSNS (4);
5435 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5436 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5437 else if (INTVAL (XEXP (x, 1)) != 0)
5438 *total += COSTS_N_INSNS (3);
5444 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5453 /* Calculate the cost of a memory address. */
5456 avr_address_cost (rtx x)
5458 if (GET_CODE (x) == PLUS
5459 && GET_CODE (XEXP (x,1)) == CONST_INT
5460 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5461 && INTVAL (XEXP (x,1)) >= 61)
5463 if (CONSTANT_ADDRESS_P (x))
5465 if (optimize > 0 && io_address_operand (x, QImode))
5472 /* Test for extra memory constraint 'Q'.
5473 It's a memory address based on Y or Z pointer with valid displacement. */
5476 extra_constraint_Q (rtx x)
5478 if (GET_CODE (XEXP (x,0)) == PLUS
5479 && REG_P (XEXP (XEXP (x,0), 0))
5480 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5481 && (INTVAL (XEXP (XEXP (x,0), 1))
5482 <= MAX_LD_OFFSET (GET_MODE (x))))
5484 rtx xx = XEXP (XEXP (x,0), 0);
5485 int regno = REGNO (xx);
5486 if (TARGET_ALL_DEBUG)
5488 fprintf (stderr, ("extra_constraint:\n"
5489 "reload_completed: %d\n"
5490 "reload_in_progress: %d\n"),
5491 reload_completed, reload_in_progress);
5494 if (regno >= FIRST_PSEUDO_REGISTER)
5495 return 1; /* allocate pseudos */
5496 else if (regno == REG_Z || regno == REG_Y)
5497 return 1; /* strictly check */
5498 else if (xx == frame_pointer_rtx
5499 || xx == arg_pointer_rtx)
5500 return 1; /* XXX frame & arg pointer checks */
5505 /* Convert condition code CONDITION to the valid AVR condition code. */
5508 avr_normalize_condition (RTX_CODE condition)
5525 /* This function optimizes conditional jumps. */
5532 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5534 if (! (GET_CODE (insn) == INSN
5535 || GET_CODE (insn) == CALL_INSN
5536 || GET_CODE (insn) == JUMP_INSN)
5537 || !single_set (insn))
5540 pattern = PATTERN (insn);
5542 if (GET_CODE (pattern) == PARALLEL)
5543 pattern = XVECEXP (pattern, 0, 0);
5544 if (GET_CODE (pattern) == SET
5545 && SET_DEST (pattern) == cc0_rtx
5546 && compare_diff_p (insn))
5548 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5550 /* Now we work under compare insn. */
5552 pattern = SET_SRC (pattern);
5553 if (true_regnum (XEXP (pattern,0)) >= 0
5554 && true_regnum (XEXP (pattern,1)) >= 0 )
5556 rtx x = XEXP (pattern,0);
5557 rtx next = next_real_insn (insn);
5558 rtx pat = PATTERN (next);
5559 rtx src = SET_SRC (pat);
5560 rtx t = XEXP (src,0);
5561 PUT_CODE (t, swap_condition (GET_CODE (t)));
5562 XEXP (pattern,0) = XEXP (pattern,1);
5563 XEXP (pattern,1) = x;
5564 INSN_CODE (next) = -1;
5566 else if (true_regnum (XEXP (pattern,0)) >= 0
5567 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5569 rtx x = XEXP (pattern,1);
5570 rtx next = next_real_insn (insn);
5571 rtx pat = PATTERN (next);
5572 rtx src = SET_SRC (pat);
5573 rtx t = XEXP (src,0);
5574 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5576 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5578 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5579 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5580 INSN_CODE (next) = -1;
5581 INSN_CODE (insn) = -1;
5585 else if (true_regnum (SET_SRC (pattern)) >= 0)
5587 /* This is a tst insn */
5588 rtx next = next_real_insn (insn);
5589 rtx pat = PATTERN (next);
5590 rtx src = SET_SRC (pat);
5591 rtx t = XEXP (src,0);
5593 PUT_CODE (t, swap_condition (GET_CODE (t)));
5594 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5596 INSN_CODE (next) = -1;
5597 INSN_CODE (insn) = -1;
5603 /* Returns register number for function return value.*/
5606 avr_ret_register (void)
5611 /* Create an RTX representing the place where a
5612 library function returns a value of mode MODE. */
5615 avr_libcall_value (enum machine_mode mode)
5617 int offs = GET_MODE_SIZE (mode);
5620 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5623 /* Create an RTX representing the place where a
5624 function returns a value of data type VALTYPE. */
5627 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
5631 if (TYPE_MODE (type) != BLKmode)
5632 return avr_libcall_value (TYPE_MODE (type));
5634 offs = int_size_in_bytes (type);
5637 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5638 offs = GET_MODE_SIZE (SImode);
5639 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5640 offs = GET_MODE_SIZE (DImode);
5642 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5645 /* Places additional restrictions on the register class to
5646 use when it is necessary to copy value X into a register
5650 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
5656 test_hard_reg_class (enum reg_class class, rtx x)
5658 int regno = true_regnum (x);
5662 if (TEST_HARD_REG_CLASS (class, regno))
5670 jump_over_one_insn_p (rtx insn, rtx dest)
5672 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5675 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5676 int dest_addr = INSN_ADDRESSES (uid);
5677 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5680 /* Returns 1 if a value of mode MODE can be stored starting with hard
5681 register number REGNO. On the enhanced core, anything larger than
5682 1 byte must start in even numbered register for "movw" to work
5683 (this way we don't have to check for odd registers everywhere). */
5686 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5688 /* Disallow QImode in stack pointer regs. */
5689 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5692 /* The only thing that can go into registers r28:r29 is a Pmode. */
5693 if (regno == REG_Y && mode == Pmode)
5696 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5697 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5703 /* Modes larger than QImode occupy consecutive registers. */
5704 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5707 /* All modes larger than QImode should start in an even register. */
5708 return !(regno & 1);
5712 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5718 if (GET_CODE (operands[1]) == CONST_INT)
5720 int val = INTVAL (operands[1]);
5721 if ((val & 0xff) == 0)
5724 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5725 AS2 (ldi,%2,hi8(%1)) CR_TAB
5728 else if ((val & 0xff00) == 0)
5731 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5732 AS2 (mov,%A0,%2) CR_TAB
5733 AS2 (mov,%B0,__zero_reg__));
5735 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5738 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5739 AS2 (mov,%A0,%2) CR_TAB
5744 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5745 AS2 (mov,%A0,%2) CR_TAB
5746 AS2 (ldi,%2,hi8(%1)) CR_TAB
5752 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5754 rtx src = operands[1];
5755 int cnst = (GET_CODE (src) == CONST_INT);
5760 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5761 + ((INTVAL (src) & 0xff00) != 0)
5762 + ((INTVAL (src) & 0xff0000) != 0)
5763 + ((INTVAL (src) & 0xff000000) != 0);
5770 if (cnst && ((INTVAL (src) & 0xff) == 0))
5771 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5774 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5775 output_asm_insn (AS2 (mov, %A0, %2), operands);
5777 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5778 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5781 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5782 output_asm_insn (AS2 (mov, %B0, %2), operands);
5784 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5785 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5788 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5789 output_asm_insn (AS2 (mov, %C0, %2), operands);
5791 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5792 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5795 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5796 output_asm_insn (AS2 (mov, %D0, %2), operands);
5802 avr_output_bld (rtx operands[], int bit_nr)
5804 static char s[] = "bld %A0,0";
5806 s[5] = 'A' + (bit_nr >> 3);
5807 s[8] = '0' + (bit_nr & 7);
5808 output_asm_insn (s, operands);
5812 avr_output_addr_vec_elt (FILE *stream, int value)
5814 switch_to_section (progmem_section);
5815 if (AVR_HAVE_JMP_CALL)
5816 fprintf (stream, "\t.word gs(.L%d)\n", value);
5818 fprintf (stream, "\trjmp .L%d\n", value);
5821 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5822 registers (for a define_peephole2) in the current function. */
5825 avr_peep2_scratch_safe (rtx scratch)
5827 if ((interrupt_function_p (current_function_decl)
5828 || signal_function_p (current_function_decl))
5829 && leaf_function_p ())
5831 int first_reg = true_regnum (scratch);
5832 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5835 for (reg = first_reg; reg <= last_reg; reg++)
5837 if (!df_regs_ever_live_p (reg))
5844 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5845 or memory location in the I/O space (QImode only).
5847 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5848 Operand 1: register operand to test, or CONST_INT memory address.
5849 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5850 Operand 3: label to jump to if the test is true. */
5853 avr_out_sbxx_branch (rtx insn, rtx operands[])
5855 enum rtx_code comp = GET_CODE (operands[0]);
5856 int long_jump = (get_attr_length (insn) >= 4);
5857 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5861 else if (comp == LT)
5865 comp = reverse_condition (comp);
5867 if (GET_CODE (operands[1]) == CONST_INT)
5869 if (INTVAL (operands[1]) < 0x40)
5872 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5874 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5878 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5880 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5882 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5885 else /* GET_CODE (operands[1]) == REG */
5887 if (GET_MODE (operands[1]) == QImode)
5890 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5892 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5894 else /* HImode or SImode */
5896 static char buf[] = "sbrc %A1,0";
5897 int bit_nr = exact_log2 (INTVAL (operands[2])
5898 & GET_MODE_MASK (GET_MODE (operands[1])));
5900 buf[3] = (comp == EQ) ? 's' : 'c';
5901 buf[6] = 'A' + (bit_nr >> 3);
5902 buf[9] = '0' + (bit_nr & 7);
5903 output_asm_insn (buf, operands);
5908 return (AS1 (rjmp,.+4) CR_TAB
5911 return AS1 (rjmp,%3);
5915 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5918 avr_asm_out_ctor (rtx symbol, int priority)
5920 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5921 default_ctor_section_asm_out_constructor (symbol, priority);
5924 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5927 avr_asm_out_dtor (rtx symbol, int priority)
5929 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5930 default_dtor_section_asm_out_destructor (symbol, priority);
5933 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5936 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5938 if (TYPE_MODE (type) == BLKmode)
5940 HOST_WIDE_INT size = int_size_in_bytes (type);
5941 return (size == -1 || size > 8);