1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
/* The LDD instruction accepts displacements 0..63; subtracting the
   operand size keeps the highest-addressed byte of a multi-byte
   access inside that range.  */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for file-local helpers and the target-hook
   implementations installed via the TARGET_* macros further below.  */
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_regs_to_save (HARD_REG_SET *);
56 static int sequent_regs_live (void);
57 static const char *ptrreg_to_str (int);
58 static const char *cond_string (enum rtx_code);
59 static int avr_num_arg_regs (enum machine_mode, tree);
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
65 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
66 const struct attribute_spec avr_attribute_table[];
67 static bool avr_assemble_integer (rtx, unsigned int, int);
68 static void avr_file_start (void);
69 static void avr_file_end (void);
70 static void avr_asm_function_end_prologue (FILE *);
71 static void avr_asm_function_begin_epilogue (FILE *);
72 static void avr_insert_attributes (tree, tree *);
73 static void avr_asm_init_sections (void);
74 static unsigned int avr_section_type_flags (tree, const char *, int);
76 static void avr_reorg (void);
77 static void avr_asm_out_ctor (rtx, int);
78 static void avr_asm_out_dtor (rtx, int);
79 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
80 static bool avr_rtx_costs (rtx, int, int, int *);
81 static int avr_address_cost (rtx);
82 static bool avr_return_in_memory (const_tree, const_tree);
83 static struct machine_function * avr_init_machine_status (void);
54 /* Allocate registers from r25 to r8 for parameters for function calls. */
85 #define FIRST_CUM_REG 26
84 /* Allocate registers from r25 to r8 for parameters for function calls. */
85 #define FIRST_CUM_REG 26
87 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
88 static GTY(()) rtx tmp_reg_rtx;
90 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
91 static GTY(()) rtx zero_reg_rtx;
93 /* AVR register names {"r0", "r1", ..., "r31"} */
94 static const char *const avr_regnames[] = REGISTER_NAMES;
96 /* This holds the last insn address. */
97 static int last_insn_address = 0;
99 /* Preprocessor macros to define depending on MCU type. */
100 const char *avr_base_arch_macro;
101 const char *avr_extra_arch_macro;
103 /* Current architecture. */
104 const struct base_arch_s *avr_current_arch;
106 section *progmem_section;
108 /* Core have 'MUL*' instructions. */
109 int avr_have_mul_p = 0;
111 /* Assembler only. */
112 int avr_asm_only_p = 0;
114 /* Core have 'MOVW' and 'LPM Rx,Z' instructions. */
115 int avr_have_movw_lpmx_p = 0;
/* Per-architecture capability table, indexed by the arch field of
   avr_mcu_types entries.  NOTE(review): the positional meaning of the
   flag columns is defined by struct base_arch_s (declared elsewhere,
   not visible in this listing); avr_override_options reads the named
   fields asm_only, have_mul, have_movw_lpmx and macro -- confirm the
   column order against avr.h before editing rows.  */
117 static const struct base_arch_s avr_arch_types[] = {
118 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
119 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
120 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
121 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
122 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
123 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
124 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
125 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
126 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
127 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
128 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
131 /* These names are used as the index into the avr_arch_types[] table
150 const char *const name;
151 int arch; /* index in avr_arch_types[] */
152 /* Must lie outside user's namespace. NULL == no macro. */
153 const char *const macro;
156 /* List of all known AVR MCU types - if updated, it has to be kept
157 in sync in several places (FIXME: is there a better way?):
159 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
160 - t-avr (MULTILIB_MATCHES)
161 - gas/config/tc-avr.c
/* Table of all supported MCUs, grouped by architecture family.  It is
   scanned linearly by name in avr_override_options and terminated by
   the { NULL, ARCH_UNKNOWN, NULL } sentinel -- keep the sentinel last
   when adding devices.  */
164 static const struct mcu_type_s avr_mcu_types[] = {
165 /* Classic, <= 8K. */
166 { "avr2", ARCH_AVR2, NULL },
167 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
168 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
169 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
170 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
171 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
172 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
173 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
174 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
175 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
176 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
177 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
178 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
179 /* Classic + MOVW, <= 8K. */
180 { "avr25", ARCH_AVR25, NULL },
181 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
182 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
183 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
184 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
185 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
186 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
187 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
188 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
189 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
190 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
191 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
192 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
193 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
194 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
195 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
196 /* Classic, > 8K, <= 64K. */
197 { "avr3", ARCH_AVR3, NULL },
198 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
199 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
200 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
201 /* Classic, == 128K. */
202 { "avr31", ARCH_AVR31, NULL },
203 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
204 /* Classic + MOVW + JMP/CALL. */
205 { "avr35", ARCH_AVR35, NULL },
206 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
207 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
208 /* Enhanced, <= 8K. */
209 { "avr4", ARCH_AVR4, NULL },
210 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
211 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
212 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
213 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
214 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
215 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
216 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
217 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
218 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
219 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
220 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
221 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
222 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
223 /* Enhanced, > 8K, <= 64K. */
224 { "avr5", ARCH_AVR5, NULL },
225 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
226 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
227 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
228 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
229 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
230 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
231 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
232 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
233 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
234 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
235 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
236 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
237 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
238 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
239 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
240 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
241 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
242 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
243 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
244 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
245 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
246 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
247 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
248 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
249 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
250 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
251 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
252 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
253 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
254 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
255 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
256 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
257 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
258 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
259 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
260 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
261 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
262 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
263 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
264 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
265 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
266 /* Enhanced, == 128K. */
267 { "avr51", ARCH_AVR51, NULL },
268 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
269 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
270 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
271 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
272 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
273 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
274 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
/* 3-byte PC devices (> 128K flash).  */
276 { "avr6", ARCH_AVR6, NULL },
277 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
278 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
279 /* Assembler only. */
280 { "avr1", ARCH_AVR1, NULL },
281 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
282 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
283 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
284 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
285 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
286 { NULL, ARCH_UNKNOWN, NULL }
289 int avr_case_values_threshold = 30000;
291 /* Initialize the GCC target structure. */
/* Each #undef/#define pair overrides a default hook before the
   TARGET_INITIALIZER expansion below builds targetm from them.  */
292 #undef TARGET_ASM_ALIGNED_HI_OP
293 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
294 #undef TARGET_ASM_ALIGNED_SI_OP
295 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
296 #undef TARGET_ASM_UNALIGNED_HI_OP
297 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
298 #undef TARGET_ASM_UNALIGNED_SI_OP
299 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
300 #undef TARGET_ASM_INTEGER
301 #define TARGET_ASM_INTEGER avr_assemble_integer
302 #undef TARGET_ASM_FILE_START
303 #define TARGET_ASM_FILE_START avr_file_start
304 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
305 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
306 #undef TARGET_ASM_FILE_END
307 #define TARGET_ASM_FILE_END avr_file_end
309 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
310 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
311 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
312 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
313 #undef TARGET_ATTRIBUTE_TABLE
314 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
315 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
316 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
317 #undef TARGET_INSERT_ATTRIBUTES
318 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
319 #undef TARGET_SECTION_TYPE_FLAGS
320 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
321 #undef TARGET_RTX_COSTS
322 #define TARGET_RTX_COSTS avr_rtx_costs
323 #undef TARGET_ADDRESS_COST
324 #define TARGET_ADDRESS_COST avr_address_cost
325 #undef TARGET_MACHINE_DEPENDENT_REORG
326 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
328 #undef TARGET_RETURN_IN_MEMORY
329 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
331 #undef TARGET_STRICT_ARGUMENT_NAMING
332 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
334 struct gcc_target targetm = TARGET_INITIALIZER;
/* Process -mmcu= and cache the selected architecture's capability
   flags in file-scope globals.  NOTE(review): this listing omits
   several source lines (see gaps in the embedded numbering), including
   the function's return-type line and braces.  */
337 avr_override_options (void)
339 const struct mcu_type_s *t;
340 const struct base_arch_s *base;
/* NOTE(review): presumably disabled because address 0 is valid RAM on
   AVR, so null-pointer checks must not be deleted -- confirm.  */
342 flag_delete_null_pointer_checks = 0;
344 for (t = avr_mcu_types; t->name; t++)
345 if (strcmp (t->name, avr_mcu_name) == 0)
/* Diagnostic path: the -mmcu= name was not found in the table.  */
350 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
352 for (t = avr_mcu_types; t->name; t++)
353 fprintf (stderr," %s\n", t->name);
/* Cache the per-architecture flags for cheap access elsewhere.  */
356 avr_current_arch = &avr_arch_types[t->arch];
357 base = &avr_arch_types[t->arch];
358 avr_asm_only_p = base->asm_only;
359 avr_have_mul_p = base->have_mul;
360 avr_have_movw_lpmx_p = base->have_movw_lpmx;
361 avr_base_arch_macro = base->macro;
362 avr_extra_arch_macro = t->macro;
/* Enable tablejumps by lowering the (effectively "never") default
   case-count threshold; cheaper threshold when RJMP must be used.  */
364 if (optimize && !TARGET_NO_TABLEJUMP)
365 avr_case_values_threshold =
366 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
368 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO)
369 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
371 init_machine_status = avr_init_machine_status;
374 /* return register class from register number. */
/* One entry per hard register (r0-r31 plus SPL/SPH); consumed by
   avr_regno_reg_class below.  */
376 static const int reg_class_tab[]={
377 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
378 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
379 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
380 GENERAL_REGS, /* r0 - r15 */
381 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
382 LD_REGS, /* r16 - 23 */
383 ADDW_REGS,ADDW_REGS, /* r24,r25 */
384 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
385 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
386 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
387 STACK_REG,STACK_REG /* SPL,SPH */
390 /* Function to set up the backend function structure. */
/* Installed as init_machine_status; returns a GC-allocated,
   zero-initialized machine_function for the current cfun.  */
392 static struct machine_function *
393 avr_init_machine_status (void)
395 return ((struct machine_function *)
396 ggc_alloc_cleared (sizeof (struct machine_function)));
399 /* Return register class for register R. */
/* NOTE(review): lines 403-404 are missing from this listing; the
   original may bound-check R before indexing -- confirm before
   relying on out-of-range behavior.  */
402 avr_regno_reg_class (int r)
405 return reg_class_tab[r];
409 /* Return nonzero if FUNC is a naked function. */
/* "naked" is a type attribute, hence the TYPE_ATTRIBUTES lookup
   (contrast with the DECL_ATTRIBUTES lookups for interrupt/signal).  */
412 avr_naked_function_p (tree func)
416 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
418 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
419 return a != NULL_TREE;
422 /* Return nonzero if FUNC is an interrupt function as specified
423 by the "interrupt" attribute. */
/* Unlike avr_naked_function_p, non-FUNCTION_DECL input is tolerated
   (early-return path, body omitted in this listing) rather than
   asserted.  */
426 interrupt_function_p (tree func)
430 if (TREE_CODE (func) != FUNCTION_DECL)
433 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
434 return a != NULL_TREE;
437 /* Return nonzero if FUNC is a signal function as specified
438 by the "signal" attribute. */
/* Mirrors interrupt_function_p; "signal" handlers differ in that the
   prologue does not re-enable interrupts (see expand_prologue).  */
441 signal_function_p (tree func)
445 if (TREE_CODE (func) != FUNCTION_DECL)
448 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
449 return a != NULL_TREE;
452 /* Return nonzero if FUNC is a OS_task function. */
/* "OS_task" is a type attribute (like "naked"); such functions skip
   register saves entirely -- see avr_regs_to_save.  */
455 avr_OS_task_function_p (tree func)
459 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
461 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
462 return a != NULL_TREE;
465 /* Return the number of hard registers to push/pop in the prologue/epilogue
466 of the current function, and optionally store these registers in SET. */
/* NOTE(review): the count accumulation and return statement fall in
   lines omitted from this listing; only the selection logic is
   visible here.  */
469 avr_regs_to_save (HARD_REG_SET *set)
/* Interrupt/signal handlers must also preserve call-used registers,
   since the interrupted code did not save them.  */
472 int int_or_sig_p = (interrupt_function_p (current_function_decl)
473 || signal_function_p (current_function_decl));
/* Cache leafness once; df info is only trustworthy before reload
   completes here.  */
475 if (!reload_completed)
476 cfun->machine->is_leaf = leaf_function_p ();
479 CLEAR_HARD_REG_SET (*set);
482 /* No need to save any registers if the function never returns or
483 has the "OS_task" attribute. */
484 if (TREE_THIS_VOLATILE (current_function_decl)
485 || cfun->machine->is_OS_task)
488 for (reg = 0; reg < 32; reg++)
490 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
491 any global register variables. */
/* Save REG when it is (a) call-used in a non-leaf interrupt/signal
   handler, or (b) live and callee-saved (or any live reg in a
   handler) -- except Y when it serves as the frame pointer, which
   the prologue handles separately.  */
495 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
496 || (df_regs_ever_live_p (reg)
497 && (int_or_sig_p || !call_used_regs[reg])
498 && !(frame_pointer_needed
499 && (reg == REG_Y || reg == (REG_Y+1)))))
502 SET_HARD_REG_BIT (*set, reg);
509 /* Compute offset between arg_pointer and frame_pointer. */
/* Offset = frame size + return-address size (3 bytes with
   EIJMP/EICALL, else 2) + 1 + saved frame pointer (2 if used)
   + pushed registers.  NOTE(review): the FROM/TO combination handled
   by the omitted else-branch is not visible in this listing.  */
512 initial_elimination_offset (int from, int to)
514 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
518 int offset = frame_pointer_needed ? 2 : 0;
519 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
521 offset += avr_regs_to_save (NULL);
522 return get_frame_size () + (avr_pc_size) + 1 + offset;
526 /* Return 1 if the function epilogue is just a single "ret". */
/* True only when nothing was pushed, no frame exists, and no special
   return sequence (RETI, naked) is required.  */
529 avr_simple_epilogue (void)
531 return (! frame_pointer_needed
532 && get_frame_size () == 0
533 && avr_regs_to_save (NULL) == 0
534 && ! interrupt_function_p (current_function_decl)
535 && ! signal_function_p (current_function_decl)
536 && ! avr_naked_function_p (current_function_decl)
537 && ! TREE_THIS_VOLATILE (current_function_decl));
540 /* This function checks sequence of live registers. */
/* Returns the length of the live-register run when the live registers
   form one contiguous sequence (as required by the __prologue_saves__
   / __epilogue_restores__ library helpers), otherwise 0.
   NOTE(review): the cur_seq/live_seq bookkeeping lines are omitted
   from this listing.  */
543 sequent_regs_live (void)
549 for (reg = 0; reg < 18; ++reg)
551 if (!call_used_regs[reg])
553 if (df_regs_ever_live_p (reg))
/* Y (r28/r29) participates only when it is not the frame pointer.  */
563 if (!frame_pointer_needed)
565 if (df_regs_ever_live_p (REG_Y))
573 if (df_regs_ever_live_p (REG_Y+1))
586 return (cur_seq == live_seq) ? live_seq : 0;
589 /* Output function prologue. */
/* Emits the RTL prologue: interrupt entry bookkeeping (SREG/RAMPZ and
   scratch-register pushes), callee-saved register pushes (or a call
   to the __prologue_saves__ helper when -mcall-prologues applies),
   frame-pointer setup and frame allocation.  NOTE(review): many
   control-flow lines (braces, else-branches) are omitted from this
   listing -- do not edit code here without the full source.  */
592 expand_prologue (void)
597 HOST_WIDE_INT size = get_frame_size();
598 /* Define templates for push instructions. */
/* AVR pushes via post-decrement of SP.  */
599 rtx pushbyte = gen_rtx_MEM (QImode,
600 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
601 rtx pushword = gen_rtx_MEM (HImode,
602 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
605 last_insn_address = 0;
607 /* Init cfun->machine. */
608 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
609 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
610 cfun->machine->is_signal = signal_function_p (current_function_decl);
611 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
613 /* Prologue: naked. */
614 if (cfun->machine->is_naked)
/* Decide whether the compact library-call prologue is usable.  */
619 avr_regs_to_save (&set);
620 live_seq = sequent_regs_live ();
621 minimize = (TARGET_CALL_PROLOGUES
622 && !cfun->machine->is_interrupt
623 && !cfun->machine->is_signal
624 && !cfun->machine->is_OS_task
627 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
629 if (cfun->machine->is_interrupt)
631 /* Enable interrupts. */
632 insn = emit_insn (gen_enable_interrupt ());
633 RTX_FRAME_RELATED_P (insn) = 1;
/* Push __zero_reg__ and __tmp_reg__ so the handler may clobber them.  */
637 insn = emit_move_insn (pushbyte, zero_reg_rtx);
638 RTX_FRAME_RELATED_P (insn) = 1;
641 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
642 RTX_FRAME_RELATED_P (insn) = 1;
/* Save SREG through __tmp_reg__.  */
645 insn = emit_move_insn (tmp_reg_rtx,
646 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
647 RTX_FRAME_RELATED_P (insn) = 1;
648 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
649 RTX_FRAME_RELATED_P (insn) = 1;
/* Save RAMPZ as well when Z is clobbered (guarding condition is in
   lines omitted from this listing -- presumably AVR_HAVE_RAMPZ).  */
653 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
655 insn = emit_move_insn (tmp_reg_rtx,
656 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
657 RTX_FRAME_RELATED_P (insn) = 1;
658 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
659 RTX_FRAME_RELATED_P (insn) = 1;
662 /* Clear zero reg. */
663 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
664 RTX_FRAME_RELATED_P (insn) = 1;
666 /* Prevent any attempt to delete the setting of ZERO_REG! */
667 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
/* Compact prologue: delegate saves/frame setup to library helper.  */
669 if (minimize && (frame_pointer_needed || live_seq > 6))
671 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
672 gen_int_mode (size, HImode));
673 RTX_FRAME_RELATED_P (insn) = 1;
676 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
677 gen_int_mode (size + live_seq, HImode)));
678 RTX_FRAME_RELATED_P (insn) = 1;
/* Regular prologue: push each register selected by avr_regs_to_save.  */
683 for (reg = 0; reg < 32; ++reg)
685 if (TEST_HARD_REG_BIT (set, reg))
687 /* Emit push of register to save. */
688 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg))
689 RTX_FRAME_RELATED_P (insn) = 1;
692 if (frame_pointer_needed)
/* OS_task functions own the stack; no need to preserve caller's Y.  */
694 if(!cfun->machine->is_OS_task)
696 /* Push frame pointer. */
697 insn = emit_move_insn (pushword, frame_pointer_rtx);
698 RTX_FRAME_RELATED_P (insn) = 1;
703 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
704 RTX_FRAME_RELATED_P (insn) = 1;
708 /* Creating a frame can be done by direct manipulation of the
709 stack or via the frame pointer. These two methods are:
716 the optimum method depends on function type, stack and frame size.
717 To avoid a complex logic, both methods are tested and shortest
721 if (TARGET_TINY_STACK)
723 if (size < -63 || size > 63)
724 warning (0, "large frame pointer change (%d) with -mtiny-stack", size);
726 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
727 over 'sbiw' (2 cycles, same size). */
728 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
732 /* Normal sized addition. */
733 myfp = frame_pointer_rtx;
735 /* Calculate length. */
/* Method 1 cost: copy SP->FP, adjust FP, copy FP->SP.  */
738 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
740 get_attr_length (gen_move_insn (myfp,
741 gen_rtx_PLUS (GET_MODE(myfp), myfp,
745 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
747 /* Method 2-Adjust Stack pointer. */
748 int sp_plus_length = 0;
752 get_attr_length (gen_move_insn (stack_pointer_rtx,
753 gen_rtx_PLUS (HImode, stack_pointer_rtx,
757 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
759 /* Use shortest method. */
/* Direct SP adjustment is only interrupt-safe for small sizes.  */
760 if (size <= 6 && (sp_plus_length < method1_length))
762 insn = emit_move_insn (stack_pointer_rtx,
763 gen_rtx_PLUS (HImode, stack_pointer_rtx,
764 gen_int_mode (-size, HImode)));
765 RTX_FRAME_RELATED_P (insn) = 1;
766 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
767 RTX_FRAME_RELATED_P (insn) = 1;
771 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
772 RTX_FRAME_RELATED_P (insn) = 1;
773 insn = emit_move_insn (myfp,
774 gen_rtx_PLUS (GET_MODE(myfp), myfp,
775 gen_int_mode (-size, GET_MODE(myfp))));
776 RTX_FRAME_RELATED_P (insn) = 1;
777 insn = emit_move_insn ( stack_pointer_rtx, frame_pointer_rtx);
778 RTX_FRAME_RELATED_P (insn) = 1;
785 /* Output summary at end of function prologue. */
/* TARGET_ASM_FUNCTION_END_PROLOGUE hook: emits an assembler comment
   classifying the prologue (naked / Interrupt / Signal / function)
   plus the frame size.  Comment output only; generates no code.  */
788 avr_asm_function_end_prologue (FILE *file)
790 if (cfun->machine->is_naked)
792 fputs ("/* prologue: naked */\n", file);
796 if (cfun->machine->is_interrupt)
798 fputs ("/* prologue: Interrupt */\n", file);
800 else if (cfun->machine->is_signal)
802 fputs ("/* prologue: Signal */\n", file);
805 fputs ("/* prologue: function */\n", file);
807 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
812 /* Implement EPILOGUE_USES. */
/* NOTE(review): the first conjunct of the condition and the return
   statements fall in lines omitted from this listing; only the
   interrupt/signal test is visible.  */
815 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
819 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
824 /* Output RTL epilogue. */
/* Mirror image of expand_prologue: frame teardown, register pops,
   SREG/RAMPZ restore for interrupt handlers, then "ret"/"reti".
   NOTE(review): braces and several guard lines are omitted from this
   listing -- do not edit code here without the full source.  */
827 expand_epilogue (void)
833 HOST_WIDE_INT size = get_frame_size();
835 /* epilogue: naked */
836 if (cfun->machine->is_naked)
838 emit_jump_insn (gen_return ());
/* Same eligibility computation as the prologue, so both sides agree
   on whether the library helper sequence was used.  */
842 avr_regs_to_save (&set);
843 live_seq = sequent_regs_live ();
844 minimize = (TARGET_CALL_PROLOGUES
845 && !cfun->machine->is_interrupt
846 && !cfun->machine->is_signal
847 && !cfun->machine->is_OS_task
850 if (minimize && (frame_pointer_needed || live_seq > 4))
852 if (frame_pointer_needed)
854 /* Get rid of frame. */
855 emit_move_insn(frame_pointer_rtx,
856 gen_rtx_PLUS (HImode, frame_pointer_rtx,
857 gen_int_mode (size, HImode)));
861 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
/* __epilogue_restores__ pops the register run and returns.  */
864 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
868 if (frame_pointer_needed)
872 /* Try two methods to adjust stack and select shortest. */
874 /* Method 1-Adjust frame pointer. */
876 get_attr_length (gen_move_insn (frame_pointer_rtx,
877 gen_rtx_PLUS (HImode, frame_pointer_rtx,
880 /* Copy to stack pointer. */
882 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
884 /* Method 2-Adjust Stack pointer. */
885 int sp_plus_length = 0;
889 get_attr_length (gen_move_insn (stack_pointer_rtx,
890 gen_rtx_PLUS (HImode, stack_pointer_rtx,
894 /* Use shortest method. */
/* Direct SP adjustment is only interrupt-safe for small sizes.  */
895 if (size <= 5 && (sp_plus_length < fp_plus_length))
897 emit_move_insn (stack_pointer_rtx,
898 gen_rtx_PLUS (HImode, stack_pointer_rtx,
899 gen_int_mode (size, HImode)));
903 emit_move_insn (frame_pointer_rtx,
904 gen_rtx_PLUS (HImode, frame_pointer_rtx,
905 gen_int_mode (size, HImode)));
906 /* Copy to stack pointer. */
907 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
/* OS_task functions never pushed the caller's frame pointer.  */
910 if(!cfun->machine->is_OS_task)
912 /* Restore previous frame_pointer. */
913 emit_insn (gen_pophi (frame_pointer_rtx));
916 /* Restore used registers. */
/* Pop in reverse order of the prologue's pushes.  */
917 for (reg = 31; reg >= 0; --reg)
919 if (TEST_HARD_REG_BIT (set, reg))
920 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
922 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
924 /* Restore RAMPZ using tmp reg as scratch. */
926 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
928 emit_insn (gen_popqi (tmp_reg_rtx));
929 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
933 /* Restore SREG using tmp reg as scratch. */
934 emit_insn (gen_popqi (tmp_reg_rtx));
936 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
939 /* Restore tmp REG. */
940 emit_insn (gen_popqi (tmp_reg_rtx));
942 /* Restore zero REG. */
943 emit_insn (gen_popqi (zero_reg_rtx));
946 emit_jump_insn (gen_return ());
950 /* Output summary messages at beginning of function epilogue. */
/* TARGET_ASM_FUNCTION_BEGIN_EPILOGUE hook; emits a marker comment
   only.  */
953 avr_asm_function_begin_epilogue (FILE *file)
955 fprintf (file, "/* epilogue start */\n");
958 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
959 machine for a memory operand of mode MODE. */
/* Returns 0 for an invalid address, otherwise the reg_class value
   (cast to int, see line 1020) describing which base registers can
   hold it.  NOTE(review): the assignments to R in several accept
   branches fall in lines omitted from this listing.  */
962 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
964 enum reg_class r = NO_REGS;
/* Everything under TARGET_ALL_DEBUG is diagnostic output only.  */
966 if (TARGET_ALL_DEBUG)
968 fprintf (stderr, "mode: (%s) %s %s %s %s:",
970 strict ? "(strict)": "",
971 reload_completed ? "(reload_completed)": "",
972 reload_in_progress ? "(reload_in_progress)": "",
973 reg_renumber ? "(reg_renumber)" : "");
974 if (GET_CODE (x) == PLUS
975 && REG_P (XEXP (x, 0))
976 && GET_CODE (XEXP (x, 1)) == CONST_INT
977 && INTVAL (XEXP (x, 1)) >= 0
978 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
981 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
982 true_regnum (XEXP (x, 0)));
/* Case 1: plain base register.  */
985 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
986 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
988 else if (CONSTANT_ADDRESS_P (x))
/* Case 3: base register plus non-negative constant displacement.  */
990 else if (GET_CODE (x) == PLUS
991 && REG_P (XEXP (x, 0))
992 && GET_CODE (XEXP (x, 1)) == CONST_INT
993 && INTVAL (XEXP (x, 1)) >= 0)
/* Only Y and Z support LDD displacements; X does not.  */
995 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
999 || REGNO (XEXP (x,0)) == REG_Y
1000 || REGNO (XEXP (x,0)) == REG_Z)
1001 r = BASE_POINTER_REGS;
1002 if (XEXP (x,0) == frame_pointer_rtx
1003 || XEXP (x,0) == arg_pointer_rtx)
1004 r = BASE_POINTER_REGS;
1006 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
/* Case 4: pre-decrement / post-increment addressing.  */
1009 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1010 && REG_P (XEXP (x, 0))
1011 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1012 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1016 if (TARGET_ALL_DEBUG)
/* NOTE(review): '%c' with r + '0' only prints correctly for class
   values 0-9.  */
1018 fprintf (stderr, " ret = %c\n", r + '0');
1020 return r == NO_REGS ? 0 : (int)r;
1023 /* Attempts to replace X with a valid
1024 memory address for an operand of mode MODE */
/* Forces reg+reg sums and reg+const sums with an out-of-range
   displacement into a register.  NOTE(review): the return statement
   and surrounding braces fall in lines omitted from this listing;
   presumably X is returned (possibly unchanged) -- confirm.  */
1027 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1030 if (TARGET_ALL_DEBUG)
1032 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1036 if (GET_CODE (oldx) == PLUS
1037 && REG_P (XEXP (oldx,0)))
1039 if (REG_P (XEXP (oldx,1)))
1040 x = force_reg (GET_MODE (oldx), oldx);
1041 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1043 int offs = INTVAL (XEXP (oldx,1));
/* Frame-pointer offsets are handled by elimination; leave them.  */
1044 if (frame_pointer_rtx != XEXP (oldx,0))
1045 if (offs > MAX_LD_OFFSET (mode))
1047 if (TARGET_ALL_DEBUG)
1048 fprintf (stderr, "force_reg (big offset)\n");
1049 x = force_reg (GET_MODE (oldx), oldx);
1057 /* Return a pointer register name as a string. */
/* Maps r26/r28/r30 pairs to the assembler names "X"/"Y"/"Z";
   any other regno is a constraint bug and is diagnosed.  */
1060 ptrreg_to_str (int regno)
1064 case REG_X: return "X";
1065 case REG_Y: return "Y";
1066 case REG_Z: return "Z";
1068 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1073 /* Return the condition name as a string.
1074 Used in conditional jump constructing */
/* NOTE(review): the switch skeleton and most case labels/returns are
   omitted from this listing; only the CC_OVERFLOW_UNUSABLE checks
   (which select N-flag based branches when V is unusable) are
   visible.  */
1077 cond_string (enum rtx_code code)
1086 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1091 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1104 /* Output ADDR to FILE as address. */
/* Handles REG, PRE_DEC ("-X"), POST_INC ("X+") and constant
   addresses; function/label addresses are wrapped in gs() so the
   linker emits a word (gs = "generate stubs") address.  */
1107 print_operand_address (FILE *file, rtx addr)
1109 switch (GET_CODE (addr))
/* NOTE(review): non-literal format string -- a register name
   containing '%' would misbehave; fputs would be safer here.  */
1112 fprintf (file, ptrreg_to_str (REGNO (addr)));
1116 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1120 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1124 if (CONSTANT_ADDRESS_P (addr)
1125 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1126 || GET_CODE (addr) == LABEL_REF))
1128 fprintf (file, "gs(");
1129 output_addr_const (file,addr);
1130 fprintf (file ,")");
1133 output_addr_const (file, addr);
1138 /* Output X as assembler operand to file FILE. */
/* Operand modifiers: 'A'..'D' select byte 0..3 of a multi-byte
   operand; '~' (lines omitted) and '!' adjust jump/call mnemonics;
   'o' prints a displacement; 'p'/'r' print the pointer register of a
   post-inc/pre-dec address; 'j'/'k' print a (reversed) condition.
   NOTE(review): several case braces and guards are omitted from this
   listing.  */
1141 print_operand (FILE *file, rtx x, int code)
1145 if (code >= 'A' && code <= 'D')
1150 if (!AVR_HAVE_JMP_CALL)
1153 else if (code == '!')
1155 if (AVR_HAVE_EIJMP_EICALL)
1160 if (x == zero_reg_rtx)
1161 fprintf (file, "__zero_reg__");
/* NOTE(review): non-literal format string; fputs would be safer.  */
1163 fprintf (file, reg_names[true_regnum (x) + abcd]);
1165 else if (GET_CODE (x) == CONST_INT)
1166 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd)
1167 else if (GET_CODE (x) == MEM)
1169 rtx addr = XEXP (x,0);
/* Byte N of a constant address is written "(addr)+N".  */
1171 if (CONSTANT_P (addr) && abcd)
1174 output_address (addr);
1175 fprintf (file, ")+%d", abcd);
1177 else if (code == 'o')
1179 if (GET_CODE (addr) != PLUS)
1180 fatal_insn ("bad address, not (reg+disp):", addr);
1182 print_operand (file, XEXP (addr, 1), 0);
1184 else if (code == 'p' || code == 'r')
1186 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1187 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1190 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1192 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1194 else if (GET_CODE (addr) == PLUS)
1196 print_operand_address (file, XEXP (addr,0));
/* X (r26/r27) has no displacement form; reject it.  */
1197 if (REGNO (XEXP (addr, 0)) == REG_X)
1198 fatal_insn ("internal compiler error. Bad address:"
1201 print_operand (file, XEXP (addr,1), code);
1204 print_operand_address (file, addr);
1206 else if (GET_CODE (x) == CONST_DOUBLE)
/* Only SFmode float constants are supported; emitted as raw hex.  */
1210 if (GET_MODE (x) != SFmode)
1211 fatal_insn ("internal compiler error. Unknown mode:", x);
1212 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1213 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1214 fprintf (file, "0x%lx", val);
1216 else if (code == 'j')
1217 fputs (cond_string (GET_CODE (x)), file);
1218 else if (code == 'k')
1219 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1221 print_operand_address (file, x);
1224 /* Update the condition code in the INSN. */
/* Implements NOTICE_UPDATE_CC for the cc0-style condition tracking:
   per the insn's "cc" attribute, records what the flags reflect so a
   following compare can sometimes be elided.  NOTE(review): several
   case labels and the cc_status resets are in omitted lines.  */
1227 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1231 switch (get_attr_cc (insn))
1234 /* Insn does not affect CC at all. */
1242 set = single_set (insn);
1246 cc_status.flags |= CC_NO_OVERFLOW;
1247 cc_status.value1 = SET_DEST (set);
1252 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1253 The V flag may or may not be known but that's ok because
1254 alter_cond will change tests to use EQ/NE. */
1255 set = single_set (insn);
1259 cc_status.value1 = SET_DEST (set);
1260 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1265 set = single_set (insn);
1268 cc_status.value1 = SET_SRC (set);
1272 /* Insn doesn't leave CC in a usable state. */
1275 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1276 set = single_set (insn);
1279 rtx src = SET_SRC (set);
1281 if (GET_CODE (src) == ASHIFTRT
1282 && GET_MODE (src) == QImode)
1284 rtx x = XEXP (src, 1);
1286 if (GET_CODE (x) == CONST_INT
1290 cc_status.value1 = SET_DEST (set);
1291 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1299 /* Return maximum number of consecutive registers of
1300 class CLASS needed to hold a value of mode MODE. */
1303 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
/* Mode size in bytes, rounded up to whole UNITS_PER_WORD-sized registers.  */
1305 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1308 /* Choose mode for jump insn:
1309 1 - relative jump in range -63 <= x <= 62 ;
1310 2 - relative jump in range -2046 <= x <= 2045 ;
1311 3 - absolute jump (only for ATmega[16]03). */
/* X is the jump target (a LABEL_REF or a label insn); INSN is the jump
   itself.  Distances come from INSN_ADDRESSES and are measured as
   cur_addr - dest_addr (positive for backward jumps).  */
1314 avr_jump_mode (rtx x, rtx insn)
1316 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1317 ? XEXP (x, 0) : x));
1318 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1319 int jump_distance = cur_addr - dest_addr;
1321 if (-63 <= jump_distance && jump_distance <= 62)
1323 else if (-2046 <= jump_distance && jump_distance <= 2045)
/* Fall back to an absolute jump when the device has JMP/CALL.  */
1325 else if (AVR_HAVE_JMP_CALL)
1331 /* return an AVR condition jump commands.
1332 X is a comparison RTX.
1333 LEN is a number returned by avr_jump_mode function.
1334 if REVERSE nonzero then condition code in X must be reversed. */
/* Returns an asm template string.  LEN selects short (1), medium (2) or
   long (3, via JMP) branch sequences; compound conditions (the signed /
   unsigned "greater" family) are synthesized from breq plus brmi/brlt/brlo
   or brpl/brge/brsh hops over the real jump.
   NOTE(review): the switch on COND is elided in this excerpt, so which
   condition each sequence below belongs to cannot be confirmed here.  */
1337 ret_cond_branch (rtx x, int len, int reverse)
1339 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* When V is unusable, test the sign with brmi instead of brlt.  */
1344 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1345 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1347 len == 2 ? (AS1 (breq,.+4) CR_TAB
1348 AS1 (brmi,.+2) CR_TAB
1350 (AS1 (breq,.+6) CR_TAB
1351 AS1 (brmi,.+4) CR_TAB
1355 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1357 len == 2 ? (AS1 (breq,.+4) CR_TAB
1358 AS1 (brlt,.+2) CR_TAB
1360 (AS1 (breq,.+6) CR_TAB
1361 AS1 (brlt,.+4) CR_TAB
/* Unsigned variant: brlo for "lower".  */
1364 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1366 len == 2 ? (AS1 (breq,.+4) CR_TAB
1367 AS1 (brlo,.+2) CR_TAB
1369 (AS1 (breq,.+6) CR_TAB
1370 AS1 (brlo,.+4) CR_TAB
/* "Greater-or-equal" family: the breq can go straight to the target.  */
1373 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1374 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1376 len == 2 ? (AS1 (breq,.+2) CR_TAB
1377 AS1 (brpl,.+2) CR_TAB
1379 (AS1 (breq,.+2) CR_TAB
1380 AS1 (brpl,.+4) CR_TAB
1383 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1385 len == 2 ? (AS1 (breq,.+2) CR_TAB
1386 AS1 (brge,.+2) CR_TAB
1388 (AS1 (breq,.+2) CR_TAB
1389 AS1 (brge,.+4) CR_TAB
1392 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1394 len == 2 ? (AS1 (breq,.+2) CR_TAB
1395 AS1 (brsh,.+2) CR_TAB
1397 (AS1 (breq,.+2) CR_TAB
1398 AS1 (brsh,.+4) CR_TAB
/* Simple conditions: emit br%j1 / br%k1 directly, or hop over a longer
   rjmp/jmp for medium/long distances.  */
1406 return AS1 (br%k1,%0);
1408 return (AS1 (br%j1,.+2) CR_TAB
1411 return (AS1 (br%j1,.+4) CR_TAB
1420 return AS1 (br%j1,%0);
1422 return (AS1 (br%k1,.+2) CR_TAB
1425 return (AS1 (br%k1,.+4) CR_TAB
1433 /* Predicate function for immediate operand which fits to byte (8bit) */
/* True for a CONST_INT in [0, 0xff]; MODE is ignored.  */
1436 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1438 return (GET_CODE (op) == CONST_INT
1439 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1442 /* Output all insn addresses and their sizes into the assembly language
1443 output file. This is helpful for debugging whether the length attributes
1444 in the md file are correct.
1445 Output insn cost for next insn. */
/* Called by final for each insn; only emits the debug comment when the
   -minsn-size-dump / all-debug target flags are set.  Updates the
   file-static last_insn_address so sizes are printed as deltas.  */
1448 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1449 int num_operands ATTRIBUTE_UNUSED)
1451 int uid = INSN_UID (insn);
1453 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1455 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1456 INSN_ADDRESSES (uid),
1457 INSN_ADDRESSES (uid) - last_insn_address,
1458 rtx_cost (PATTERN (insn), INSN));
1460 last_insn_address = INSN_ADDRESSES (uid);
1463 /* Return 0 if undefined, 1 if always true or always false. */
/* MODE bounds the comparison to an 8/16/32-bit unsigned maximum; only
   unsigned comparisons against the mode's max constant can be folded.
   NOTE(review): the branch bodies between these conditions are elided.  */
1466 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1468 unsigned int max = (mode == QImode ? 0xff :
1469 mode == HImode ? 0xffff :
1470 mode == SImode ? 0xffffffff : 0);
1471 if (max && operator && GET_CODE (x) == CONST_INT)
/* Only the unsigned flavor of the condition is foldable here.  */
1473 if (unsigned_condition (operator) != operator)
1476 if (max != (INTVAL (x) & max)
1477 && INTVAL (x) != 0xff)
1484 /* Returns nonzero if REGNO is the number of a hard
1485 register in which function arguments are sometimes passed. */
/* AVR passes arguments in r8..r25.  */
1488 function_arg_regno_p(int r)
1490 return (r >= 8 && r <= 25);
1493 /* Initializing the variable cum for the state at the beginning
1494 of the argument list. */
/* Argument registers are allocated downward from FIRST_CUM_REG.  For a
   named-args (non-varargs) prototype the register count is set from the
   type list; the assignment using STDARG is in an elided line.  */
1497 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1498 tree fndecl ATTRIBUTE_UNUSED)
1501 cum->regno = FIRST_CUM_REG;
1502 if (!libname && fntype)
/* stdarg is true when the last declared parameter type is not void,
   i.e. the prototype ends with "...".  */
1504 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1505 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1506 != void_type_node));
1512 /* Returns the number of registers to allocate for a function argument. */
/* BLKmode aggregates use the type's byte size, everything else the mode
   size; the result is rounded up to an even number of bytes (registers).  */
1515 avr_num_arg_regs (enum machine_mode mode, tree type)
1519 if (mode == BLKmode)
1520 size = int_size_in_bytes (type);
1522 size = GET_MODE_SIZE (mode);
1524 /* Align all function arguments to start in even-numbered registers.
1525 Odd-sized arguments leave holes above them. */
1527 return (size + 1) & ~1;
1530 /* Controls whether a function argument is passed
1531 in a register, and which register. */
/* Registers are assigned downward: the argument occupies
   [cum->regno - bytes, cum->regno).  Returns NULL_RTX (elided path) when
   the argument does not fit in the remaining registers.  */
1534 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1535 int named ATTRIBUTE_UNUSED)
1537 int bytes = avr_num_arg_regs (mode, type);
1539 if (cum->nregs && bytes <= cum->nregs)
1540 return gen_rtx_REG (mode, cum->regno - bytes);
1545 /* Update the summarizer variable CUM to advance past an argument
1546 in the argument list. */
/* Once the registers are exhausted, remaining arguments go on the stack:
   regno is reset to FIRST_CUM_REG (nregs reset is in an elided line).  */
1549 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1550 int named ATTRIBUTE_UNUSED)
1552 int bytes = avr_num_arg_regs (mode, type);
1554 cum->nregs -= bytes;
1555 cum->regno -= bytes;
1557 if (cum->nregs <= 0)
1560 cum->regno = FIRST_CUM_REG;
1564 /***********************************************************************
1565 Functions for outputting various mov's for a various modes
1566 ************************************************************************/
/* Emit (or return) the asm for a QImode move.  L, when non-NULL, receives
   the instruction-count length for the "length" attribute (the *l stores
   themselves sit on elided lines).  Handles reg<-reg, reg<-const and
   memory moves via out_movqi_r_mr / out_movqi_mr_r.  */
1568 output_movqi (rtx insn, rtx operands[], int *l)
1571 rtx dest = operands[0];
1572 rtx src = operands[1];
1580 if (register_operand (dest, QImode))
1582 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from SPL must use out/in, not mov.  */
1584 if (test_hard_reg_class (STACK_REG, dest))
1585 return AS2 (out,%0,%1);
1586 else if (test_hard_reg_class (STACK_REG, src))
1587 return AS2 (in,%0,%1);
1589 return AS2 (mov,%0,%1);
1591 else if (CONSTANT_P (src))
/* ldi only works on r16..r31 (LD_REGS).  */
1593 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1594 return AS2 (ldi,%0,lo8(%1));
1596 if (GET_CODE (src) == CONST_INT)
1598 if (src == const0_rtx) /* mov r,L */
1599 return AS1 (clr,%0);
1600 else if (src == const1_rtx)
1603 return (AS1 (clr,%0) CR_TAB
1606 else if (src == constm1_rtx)
1608 /* Immediate constants -1 to any register */
1610 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clr then set/bld the one bit.  */
1615 int bit_nr = exact_log2 (INTVAL (src));
1621 output_asm_insn ((AS1 (clr,%0) CR_TAB
1624 avr_output_bld (operands, bit_nr);
1631 /* Last resort, larger than loading from memory. */
/* Bounce the constant through r31 (an LD_REG), preserving r31 in tmp.  */
1633 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1634 AS2 (ldi,r31,lo8(%1)) CR_TAB
1635 AS2 (mov,%0,r31) CR_TAB
1636 AS2 (mov,r31,__tmp_reg__));
1638 else if (GET_CODE (src) == MEM)
1639 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1641 else if (GET_CODE (dest) == MEM)
1643 const char *template;
/* Storing zero: use the fixed zero register instead of a literal.  */
1645 if (src == const0_rtx)
1646 operands[1] = zero_reg_rtx;
1648 template = out_movqi_mr_r (insn, operands, real_l);
1651 output_asm_insn (template, operands);
/* Emit (or return) the asm for a HImode (16-bit) move.  Special care is
   taken for the stack pointer: writing SPH/SPL must be atomic w.r.t.
   interrupts unless we can prove interrupts are off (or it's a
   TARGET_TINY_STACK part with only SPL).  L receives length counts
   (the stores are on elided lines).  */
1660 output_movhi (rtx insn, rtx operands[], int *l)
1663 rtx dest = operands[0];
1664 rtx src = operands[1];
1670 if (register_operand (dest, HImode))
1672 if (register_operand (src, HImode)) /* mov r,r */
1674 if (test_hard_reg_class (STACK_REG, dest))
/* Only 8 bits of SP on tiny-stack devices.  */
1676 if (TARGET_TINY_STACK)
1679 return AS2 (out,__SP_L__,%A1);
1681 /* Use simple load of stack pointer if no interrupts are used
1682 or inside main or signal function prologue where they disabled. */
1683 else if (TARGET_NO_INTERRUPTS
1684 || (reload_completed
1685 && cfun->machine->is_signal
1686 && prologue_epilogue_contains (insn)))
1689 return (AS2 (out,__SP_H__,%B1) CR_TAB
1690 AS2 (out,__SP_L__,%A1));
1692 /* In interrupt prolog we know interrupts are enabled. */
1693 else if (reload_completed
1694 && cfun->machine->is_interrupt
1695 && prologue_epilogue_contains (insn))
/* cli now; the matching sei is on an elided line — TODO confirm.  */
1698 return ("cli" CR_TAB
1699 AS2 (out,__SP_H__,%B1) CR_TAB
1701 AS2 (out,__SP_L__,%A1));
/* General case: save SREG, disable interrupts around the SPH write,
   restore SREG (re-enabling I if it was set), then write SPL.  */
1704 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1706 AS2 (out,__SP_H__,%B1) CR_TAB
1707 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1708 AS2 (out,__SP_L__,%A1));
1710 else if (test_hard_reg_class (STACK_REG, src))
1713 return (AS2 (in,%A0,__SP_L__) CR_TAB
1714 AS2 (in,%B0,__SP_H__));
/* movw on enhanced cores, otherwise two byte moves (conditions elided).  */
1720 return (AS2 (movw,%0,%1));
1725 return (AS2 (mov,%A0,%A1) CR_TAB
1729 else if (CONSTANT_P (src))
1731 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1734 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1735 AS2 (ldi,%B0,hi8(%1)));
1738 if (GET_CODE (src) == CONST_INT)
1740 if (src == const0_rtx) /* mov r,L */
1743 return (AS1 (clr,%A0) CR_TAB
1746 else if (src == const1_rtx)
1749 return (AS1 (clr,%A0) CR_TAB
1750 AS1 (clr,%B0) CR_TAB
1753 else if (src == constm1_rtx)
1755 /* Immediate constants -1 to any register */
1757 return (AS1 (clr,%0) CR_TAB
1758 AS1 (dec,%A0) CR_TAB
/* Single-bit constants: clear both bytes, then set/bld the bit.  */
1763 int bit_nr = exact_log2 (INTVAL (src));
1769 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1770 AS1 (clr,%B0) CR_TAB
1773 avr_output_bld (operands, bit_nr);
/* Half-zero constants: only one byte needs the r31 bounce.  */
1779 if ((INTVAL (src) & 0xff) == 0)
1782 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1783 AS1 (clr,%A0) CR_TAB
1784 AS2 (ldi,r31,hi8(%1)) CR_TAB
1785 AS2 (mov,%B0,r31) CR_TAB
1786 AS2 (mov,r31,__tmp_reg__));
1788 else if ((INTVAL (src) & 0xff00) == 0)
1791 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1792 AS2 (ldi,r31,lo8(%1)) CR_TAB
1793 AS2 (mov,%A0,r31) CR_TAB
1794 AS1 (clr,%B0) CR_TAB
1795 AS2 (mov,r31,__tmp_reg__));
1799 /* Last resort, equal to loading from memory. */
1801 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1802 AS2 (ldi,r31,lo8(%1)) CR_TAB
1803 AS2 (mov,%A0,r31) CR_TAB
1804 AS2 (ldi,r31,hi8(%1)) CR_TAB
1805 AS2 (mov,%B0,r31) CR_TAB
1806 AS2 (mov,r31,__tmp_reg__));
1808 else if (GET_CODE (src) == MEM)
1809 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1811 else if (GET_CODE (dest) == MEM)
1813 const char *template;
1815 if (src == const0_rtx)
1816 operands[1] = zero_reg_rtx;
1818 template = out_movhi_mr_r (insn, operands, real_l);
1821 output_asm_insn (template, operands);
1826 fatal_insn ("invalid insn:", insn);
/* Asm for a QImode load register <- memory.  OP[0] is the dest reg,
   OP[1] the MEM; L (when non-NULL) receives the length.  Handles constant
   addresses (in/lds), Y+disp beyond the 63-byte ldd range, and the
   X register which has no displacement addressing.  */
1831 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1835 rtx x = XEXP (src, 0);
1841 if (CONSTANT_ADDRESS_P (x))
/* SREG reads use "in" directly.  */
1843 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1846 return AS2 (in,%0,__SREG__);
/* I/O addresses: "in" is shorter/faster than lds; 0x20 maps data space
   to I/O space.  */
1848 if (optimize > 0 && io_address_operand (x, QImode))
1851 return AS2 (in,%0,%1-0x20);
1854 return AS2 (lds,%0,%1);
1856 /* memory access by reg+disp */
1857 else if (GET_CODE (x) == PLUS
1858 && REG_P (XEXP (x,0))
1859 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement out of ldd range: temporarily adjust Y.  */
1861 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1863 int disp = INTVAL (XEXP (x,1));
1864 if (REGNO (XEXP (x,0)) != REG_Y)
1865 fatal_insn ("incorrect insn:",insn);
/* Close enough: adiw into range, ldd, sbiw back.  */
1867 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1868 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1869 AS2 (ldd,%0,Y+63) CR_TAB
1870 AS2 (sbiw,r28,%o1-63));
/* Otherwise add/subtract the full displacement around the ld.  */
1872 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1873 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1874 AS2 (ld,%0,Y) CR_TAB
1875 AS2 (subi,r28,lo8(%o1)) CR_TAB
1876 AS2 (sbci,r29,hi8(%o1)));
1878 else if (REGNO (XEXP (x,0)) == REG_X)
1880 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1881 it but I have this situation with extremal optimizing options. */
/* X has no ldd: adiw/ld, and sbiw back only if X is still live.  */
1882 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1883 || reg_unused_after (insn, XEXP (x,0)))
1884 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1887 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1888 AS2 (ld,%0,X) CR_TAB
1889 AS2 (sbiw,r26,%o1));
1892 return AS2 (ldd,%0,%1);
1895 return AS2 (ld,%0,%1);
/* Asm for a HImode load register-pair <- memory.  Chooses byte order and
   temporaries so the base register is not clobbered before both bytes are
   read; volatile MEMs force reading the low byte first.  */
1899 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1903 rtx base = XEXP (src, 0);
1904 int reg_dest = true_regnum (dest);
1905 int reg_base = true_regnum (base);
1906 /* "volatile" forces reading low byte first, even if less efficient,
1907 for correct operation with 16-bit I/O registers. */
1908 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Dest overlaps base: stage the low byte in __tmp_reg__.  */
1916 if (reg_dest == reg_base) /* R = (R) */
1919 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1920 AS2 (ld,%B0,%1) CR_TAB
1921 AS2 (mov,%A0,__tmp_reg__));
1923 else if (reg_base == REG_X) /* (R26) */
/* Skip restoring X if it's dead after this insn.  */
1925 if (reg_unused_after (insn, base))
1928 return (AS2 (ld,%A0,X+) CR_TAB
1932 return (AS2 (ld,%A0,X+) CR_TAB
1933 AS2 (ld,%B0,X) CR_TAB
1939 return (AS2 (ld,%A0,%1) CR_TAB
1940 AS2 (ldd,%B0,%1+1));
1943 else if (GET_CODE (base) == PLUS) /* (R + i) */
1945 int disp = INTVAL (XEXP (base, 1));
1946 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement past ldd range: adjust Y around the loads.  */
1948 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1950 if (REGNO (XEXP (base, 0)) != REG_Y)
1951 fatal_insn ("incorrect insn:",insn);
1953 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1954 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1955 AS2 (ldd,%A0,Y+62) CR_TAB
1956 AS2 (ldd,%B0,Y+63) CR_TAB
1957 AS2 (sbiw,r28,%o1-62));
1959 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1960 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1961 AS2 (ld,%A0,Y) CR_TAB
1962 AS2 (ldd,%B0,Y+1) CR_TAB
1963 AS2 (subi,r28,lo8(%o1)) CR_TAB
1964 AS2 (sbci,r29,hi8(%o1)));
1966 if (reg_base == REG_X)
1968 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1969 it but I have this situation with extremal
1970 optimization options. */
1973 if (reg_base == reg_dest)
1974 return (AS2 (adiw,r26,%o1) CR_TAB
1975 AS2 (ld,__tmp_reg__,X+) CR_TAB
1976 AS2 (ld,%B0,X) CR_TAB
1977 AS2 (mov,%A0,__tmp_reg__));
1979 return (AS2 (adiw,r26,%o1) CR_TAB
1980 AS2 (ld,%A0,X+) CR_TAB
1981 AS2 (ld,%B0,X) CR_TAB
1982 AS2 (sbiw,r26,%o1+1));
1985 if (reg_base == reg_dest)
1988 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1989 AS2 (ldd,%B0,%B1) CR_TAB
1990 AS2 (mov,%A0,__tmp_reg__));
1994 return (AS2 (ldd,%A0,%A1) CR_TAB
1997 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1999 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2000 fatal_insn ("incorrect insn:", insn);
/* X: no ldd, so step back two and read forward.  */
2004 if (REGNO (XEXP (base, 0)) == REG_X)
2007 return (AS2 (sbiw,r26,2) CR_TAB
2008 AS2 (ld,%A0,X+) CR_TAB
2009 AS2 (ld,%B0,X) CR_TAB
2015 return (AS2 (sbiw,%r1,2) CR_TAB
2016 AS2 (ld,%A0,%p1) CR_TAB
2017 AS2 (ldd,%B0,%p1+1));
2022 return (AS2 (ld,%B0,%1) CR_TAB
2025 else if (GET_CODE (base) == POST_INC) /* (R++) */
2027 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2028 fatal_insn ("incorrect insn:", insn);
2031 return (AS2 (ld,%A0,%1) CR_TAB
2034 else if (CONSTANT_ADDRESS_P (base))
/* Prefer "in" for addresses in I/O space.  */
2036 if (optimize > 0 && io_address_operand (base, HImode))
2039 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2040 AS2 (in,%B0,%B1-0x20));
2043 return (AS2 (lds,%A0,%A1) CR_TAB
2047 fatal_insn ("unknown move insn:",insn);
/* Asm for a SImode (4-byte) load register-quad <- memory.  The many cases
   deal with destination/base register overlap (staging bytes through
   __tmp_reg__), the X register's lack of displacement addressing, and
   "ld r26,-X" being undefined when dest is X itself.  */
2052 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2056 rtx base = XEXP (src, 0);
2057 int reg_dest = true_regnum (dest);
2058 int reg_base = true_regnum (base);
2066 if (reg_base == REG_X) /* (R26) */
2068 if (reg_dest == REG_X)
2069 /* "ld r26,-X" is undefined */
/* Read high bytes first (into r29/r28/tmp), finish with r26 last so the
   pointer is not clobbered until the final load.  */
2070 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2071 AS2 (ld,r29,X) CR_TAB
2072 AS2 (ld,r28,-X) CR_TAB
2073 AS2 (ld,__tmp_reg__,-X) CR_TAB
2074 AS2 (sbiw,r26,1) CR_TAB
2075 AS2 (ld,r26,X) CR_TAB
2076 AS2 (mov,r27,__tmp_reg__));
/* Dest is r24..r27: byte C would overwrite r26 mid-sequence, stage it.  */
2077 else if (reg_dest == REG_X - 2)
2078 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2079 AS2 (ld,%B0,X+) CR_TAB
2080 AS2 (ld,__tmp_reg__,X+) CR_TAB
2081 AS2 (ld,%D0,X) CR_TAB
2082 AS2 (mov,%C0,__tmp_reg__));
2083 else if (reg_unused_after (insn, base))
2084 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2085 AS2 (ld,%B0,X+) CR_TAB
2086 AS2 (ld,%C0,X+) CR_TAB
2089 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2090 AS2 (ld,%B0,X+) CR_TAB
2091 AS2 (ld,%C0,X+) CR_TAB
2092 AS2 (ld,%D0,X) CR_TAB
/* Non-X base with possible overlap: order the loads so the base register
   pair is read before being overwritten.  */
2097 if (reg_dest == reg_base)
2098 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2099 AS2 (ldd,%C0,%1+2) CR_TAB
2100 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2101 AS2 (ld,%A0,%1) CR_TAB
2102 AS2 (mov,%B0,__tmp_reg__));
2103 else if (reg_base == reg_dest + 2)
2104 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2105 AS2 (ldd,%B0,%1+1) CR_TAB
2106 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2107 AS2 (ldd,%D0,%1+3) CR_TAB
2108 AS2 (mov,%C0,__tmp_reg__));
2110 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2111 AS2 (ldd,%B0,%1+1) CR_TAB
2112 AS2 (ldd,%C0,%1+2) CR_TAB
2113 AS2 (ldd,%D0,%1+3));
2116 else if (GET_CODE (base) == PLUS) /* (R + i) */
2118 int disp = INTVAL (XEXP (base, 1));
/* Displacement past ldd range: temporarily adjust Y.  */
2120 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2122 if (REGNO (XEXP (base, 0)) != REG_Y)
2123 fatal_insn ("incorrect insn:",insn);
2125 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2126 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2127 AS2 (ldd,%A0,Y+60) CR_TAB
2128 AS2 (ldd,%B0,Y+61) CR_TAB
2129 AS2 (ldd,%C0,Y+62) CR_TAB
2130 AS2 (ldd,%D0,Y+63) CR_TAB
2131 AS2 (sbiw,r28,%o1-60));
2133 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2134 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2135 AS2 (ld,%A0,Y) CR_TAB
2136 AS2 (ldd,%B0,Y+1) CR_TAB
2137 AS2 (ldd,%C0,Y+2) CR_TAB
2138 AS2 (ldd,%D0,Y+3) CR_TAB
2139 AS2 (subi,r28,lo8(%o1)) CR_TAB
2140 AS2 (sbci,r29,hi8(%o1)));
2143 reg_base = true_regnum (XEXP (base, 0));
2144 if (reg_base == REG_X)
2147 if (reg_dest == REG_X)
2150 /* "ld r26,-X" is undefined */
2151 return (AS2 (adiw,r26,%o1+3) CR_TAB
2152 AS2 (ld,r29,X) CR_TAB
2153 AS2 (ld,r28,-X) CR_TAB
2154 AS2 (ld,__tmp_reg__,-X) CR_TAB
2155 AS2 (sbiw,r26,1) CR_TAB
2156 AS2 (ld,r26,X) CR_TAB
2157 AS2 (mov,r27,__tmp_reg__));
2160 if (reg_dest == REG_X - 2)
2161 return (AS2 (adiw,r26,%o1) CR_TAB
2162 AS2 (ld,r24,X+) CR_TAB
2163 AS2 (ld,r25,X+) CR_TAB
2164 AS2 (ld,__tmp_reg__,X+) CR_TAB
2165 AS2 (ld,r27,X) CR_TAB
2166 AS2 (mov,r26,__tmp_reg__));
2168 return (AS2 (adiw,r26,%o1) CR_TAB
2169 AS2 (ld,%A0,X+) CR_TAB
2170 AS2 (ld,%B0,X+) CR_TAB
2171 AS2 (ld,%C0,X+) CR_TAB
2172 AS2 (ld,%D0,X) CR_TAB
2173 AS2 (sbiw,r26,%o1+3));
2175 if (reg_dest == reg_base)
2176 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2177 AS2 (ldd,%C0,%C1) CR_TAB
2178 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2179 AS2 (ldd,%A0,%A1) CR_TAB
2180 AS2 (mov,%B0,__tmp_reg__));
2181 else if (reg_dest == reg_base - 2)
2182 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2183 AS2 (ldd,%B0,%B1) CR_TAB
2184 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2185 AS2 (ldd,%D0,%D1) CR_TAB
2186 AS2 (mov,%C0,__tmp_reg__));
2187 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2188 AS2 (ldd,%B0,%B1) CR_TAB
2189 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement reads high byte first; post-increment reads low first.  */
2192 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2193 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2194 AS2 (ld,%C0,%1) CR_TAB
2195 AS2 (ld,%B0,%1) CR_TAB
2197 else if (GET_CODE (base) == POST_INC) /* (R++) */
2198 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2199 AS2 (ld,%B0,%1) CR_TAB
2200 AS2 (ld,%C0,%1) CR_TAB
2202 else if (CONSTANT_ADDRESS_P (base))
2203 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2204 AS2 (lds,%B0,%B1) CR_TAB
2205 AS2 (lds,%C0,%C1) CR_TAB
2208 fatal_insn ("unknown move insn:",insn);
/* Asm for a SImode store memory <- register-quad.  Mirrors out_movsi_r_mr:
   handles constant addresses (sts), source/base overlap (staging through
   __tmp_reg__/__zero_reg__, restoring __zero_reg__ to 0 afterwards), the
   X register's limitations, and Y+disp beyond the std range.  */
2213 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2217 rtx base = XEXP (dest, 0);
2218 int reg_base = true_regnum (base);
2219 int reg_src = true_regnum (src);
2225 if (CONSTANT_ADDRESS_P (base))
2226 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2227 AS2 (sts,%B0,%B1) CR_TAB
2228 AS2 (sts,%C0,%C1) CR_TAB
2230 if (reg_base > 0) /* (r) */
2232 if (reg_base == REG_X) /* (R26) */
2234 if (reg_src == REG_X)
2236 /* "st X+,r26" is undefined */
/* Store r26 before advancing X; stage r27 in __tmp_reg__.  */
2237 if (reg_unused_after (insn, base))
2238 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2239 AS2 (st,X,r26) CR_TAB
2240 AS2 (adiw,r26,1) CR_TAB
2241 AS2 (st,X+,__tmp_reg__) CR_TAB
2242 AS2 (st,X+,r28) CR_TAB
2245 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2246 AS2 (st,X,r26) CR_TAB
2247 AS2 (adiw,r26,1) CR_TAB
2248 AS2 (st,X+,__tmp_reg__) CR_TAB
2249 AS2 (st,X+,r28) CR_TAB
2250 AS2 (st,X,r29) CR_TAB
/* Source bytes C/D live in the base pair: copy them out first, and
   restore __zero_reg__ (must remain 0) with clr afterwards.  */
2253 else if (reg_base == reg_src + 2)
2255 if (reg_unused_after (insn, base))
2256 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2257 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2258 AS2 (st,%0+,%A1) CR_TAB
2259 AS2 (st,%0+,%B1) CR_TAB
2260 AS2 (st,%0+,__zero_reg__) CR_TAB
2261 AS2 (st,%0,__tmp_reg__) CR_TAB
2262 AS1 (clr,__zero_reg__));
2264 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2265 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2266 AS2 (st,%0+,%A1) CR_TAB
2267 AS2 (st,%0+,%B1) CR_TAB
2268 AS2 (st,%0+,__zero_reg__) CR_TAB
2269 AS2 (st,%0,__tmp_reg__) CR_TAB
2270 AS1 (clr,__zero_reg__) CR_TAB
2273 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2274 AS2 (st,%0+,%B1) CR_TAB
2275 AS2 (st,%0+,%C1) CR_TAB
2276 AS2 (st,%0,%D1) CR_TAB
2280 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2281 AS2 (std,%0+1,%B1) CR_TAB
2282 AS2 (std,%0+2,%C1) CR_TAB
2283 AS2 (std,%0+3,%D1));
2285 else if (GET_CODE (base) == PLUS) /* (R + i) */
2287 int disp = INTVAL (XEXP (base, 1));
2288 reg_base = REGNO (XEXP (base, 0));
2289 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2291 if (reg_base != REG_Y)
2292 fatal_insn ("incorrect insn:",insn);
2294 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2295 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2296 AS2 (std,Y+60,%A1) CR_TAB
2297 AS2 (std,Y+61,%B1) CR_TAB
2298 AS2 (std,Y+62,%C1) CR_TAB
2299 AS2 (std,Y+63,%D1) CR_TAB
2300 AS2 (sbiw,r28,%o0-60));
2302 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2303 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2304 AS2 (st,Y,%A1) CR_TAB
2305 AS2 (std,Y+1,%B1) CR_TAB
2306 AS2 (std,Y+2,%C1) CR_TAB
2307 AS2 (std,Y+3,%D1) CR_TAB
2308 AS2 (subi,r28,lo8(%o0)) CR_TAB
2309 AS2 (sbci,r29,hi8(%o0)));
2311 if (reg_base == REG_X)
/* X+disp store with src == X: save the pointer pair in tmp/zero regs
   before adiw clobbers it; restore __zero_reg__ with clr.  */
2314 if (reg_src == REG_X)
2317 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2318 AS2 (mov,__zero_reg__,r27) CR_TAB
2319 AS2 (adiw,r26,%o0) CR_TAB
2320 AS2 (st,X+,__tmp_reg__) CR_TAB
2321 AS2 (st,X+,__zero_reg__) CR_TAB
2322 AS2 (st,X+,r28) CR_TAB
2323 AS2 (st,X,r29) CR_TAB
2324 AS1 (clr,__zero_reg__) CR_TAB
2325 AS2 (sbiw,r26,%o0+3));
2327 else if (reg_src == REG_X - 2)
2330 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2331 AS2 (mov,__zero_reg__,r27) CR_TAB
2332 AS2 (adiw,r26,%o0) CR_TAB
2333 AS2 (st,X+,r24) CR_TAB
2334 AS2 (st,X+,r25) CR_TAB
2335 AS2 (st,X+,__tmp_reg__) CR_TAB
2336 AS2 (st,X,__zero_reg__) CR_TAB
2337 AS1 (clr,__zero_reg__) CR_TAB
2338 AS2 (sbiw,r26,%o0+3));
2341 return (AS2 (adiw,r26,%o0) CR_TAB
2342 AS2 (st,X+,%A1) CR_TAB
2343 AS2 (st,X+,%B1) CR_TAB
2344 AS2 (st,X+,%C1) CR_TAB
2345 AS2 (st,X,%D1) CR_TAB
2346 AS2 (sbiw,r26,%o0+3));
2348 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2349 AS2 (std,%B0,%B1) CR_TAB
2350 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement stores high byte first; post-increment low first.  */
2353 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2354 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2355 AS2 (st,%0,%C1) CR_TAB
2356 AS2 (st,%0,%B1) CR_TAB
2358 else if (GET_CODE (base) == POST_INC) /* (R++) */
2359 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2360 AS2 (st,%0,%B1) CR_TAB
2361 AS2 (st,%0,%C1) CR_TAB
2363 fatal_insn ("unknown move insn:",insn);
/* Asm for a 4-byte move, SImode or SFmode (both are 4 bytes on AVR).
   Register-register copies are ordered by register number to survive
   overlap; constants use ldi on LD_REGS, special-case 0 / 1 / -1 /
   single-bit values, and otherwise bounce bytes through r31.  */
2368 output_movsisf(rtx insn, rtx operands[], int *l)
2371 rtx dest = operands[0];
2372 rtx src = operands[1];
2378 if (register_operand (dest, VOIDmode))
2380 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy downward (high pair first) when dest > src so an overlapping
   source is read before being overwritten; upward otherwise.  */
2382 if (true_regnum (dest) > true_regnum (src))
2387 return (AS2 (movw,%C0,%C1) CR_TAB
2388 AS2 (movw,%A0,%A1));
2391 return (AS2 (mov,%D0,%D1) CR_TAB
2392 AS2 (mov,%C0,%C1) CR_TAB
2393 AS2 (mov,%B0,%B1) CR_TAB
2401 return (AS2 (movw,%A0,%A1) CR_TAB
2402 AS2 (movw,%C0,%C1));
2405 return (AS2 (mov,%A0,%A1) CR_TAB
2406 AS2 (mov,%B0,%B1) CR_TAB
2407 AS2 (mov,%C0,%C1) CR_TAB
2411 else if (CONSTANT_P (src))
2413 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2416 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2417 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2418 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2419 AS2 (ldi,%D0,hhi8(%1)));
2422 if (GET_CODE (src) == CONST_INT)
/* Clearing sequence: movw-capable parts clear pairwise.  */
2424 const char *const clr_op0 =
2425 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2426 AS1 (clr,%B0) CR_TAB
2428 : (AS1 (clr,%A0) CR_TAB
2429 AS1 (clr,%B0) CR_TAB
2430 AS1 (clr,%C0) CR_TAB
2433 if (src == const0_rtx) /* mov r,L */
2435 *l = AVR_HAVE_MOVW ? 3 : 4;
2438 else if (src == const1_rtx)
2441 output_asm_insn (clr_op0, operands);
2442 *l = AVR_HAVE_MOVW ? 4 : 5;
2443 return AS1 (inc,%A0);
2445 else if (src == constm1_rtx)
2447 /* Immediate constants -1 to any register */
/* clr+dec makes 0xff in %A0, then propagate to the other bytes.  */
2451 return (AS1 (clr,%A0) CR_TAB
2452 AS1 (dec,%A0) CR_TAB
2453 AS2 (mov,%B0,%A0) CR_TAB
2454 AS2 (movw,%C0,%A0));
2457 return (AS1 (clr,%A0) CR_TAB
2458 AS1 (dec,%A0) CR_TAB
2459 AS2 (mov,%B0,%A0) CR_TAB
2460 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants: clear all, then set/bld the one bit.  */
2465 int bit_nr = exact_log2 (INTVAL (src));
2469 *l = AVR_HAVE_MOVW ? 5 : 6;
2472 output_asm_insn (clr_op0, operands);
2473 output_asm_insn ("set", operands);
2476 avr_output_bld (operands, bit_nr);
2483 /* Last resort, better than loading from memory. */
2485 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2486 AS2 (ldi,r31,lo8(%1)) CR_TAB
2487 AS2 (mov,%A0,r31) CR_TAB
2488 AS2 (ldi,r31,hi8(%1)) CR_TAB
2489 AS2 (mov,%B0,r31) CR_TAB
2490 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2491 AS2 (mov,%C0,r31) CR_TAB
2492 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2493 AS2 (mov,%D0,r31) CR_TAB
2494 AS2 (mov,r31,__tmp_reg__));
2496 else if (GET_CODE (src) == MEM)
2497 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2499 else if (GET_CODE (dest) == MEM)
2501 const char *template;
2503 if (src == const0_rtx)
2504 operands[1] = zero_reg_rtx;
2506 template = out_movsi_mr_r (insn, operands, real_l);
2509 output_asm_insn (template, operands);
2514 fatal_insn ("invalid insn:", insn);
/* Asm for a QImode store memory <- register.  Mirrors out_movqi_r_mr:
   out/sts for constant addresses, Y+disp adjustment past the std range,
   and the X register paranoid case (staging through __tmp_reg__ when the
   source overlaps X).  */
2519 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2523 rtx x = XEXP (dest, 0);
2529 if (CONSTANT_ADDRESS_P (x))
2531 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2534 return AS2 (out,__SREG__,%1);
/* I/O space stores use "out" (shorter than sts).  */
2536 if (optimize > 0 && io_address_operand (x, QImode))
2539 return AS2 (out,%0-0x20,%1);
2542 return AS2 (sts,%0,%1);
2544 /* memory access by reg+disp */
2545 else if (GET_CODE (x) == PLUS
2546 && REG_P (XEXP (x,0))
2547 && GET_CODE (XEXP (x,1)) == CONST_INT)
2549 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2551 int disp = INTVAL (XEXP (x,1));
2552 if (REGNO (XEXP (x,0)) != REG_Y)
2553 fatal_insn ("incorrect insn:",insn);
2555 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2556 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2557 AS2 (std,Y+63,%1) CR_TAB
2558 AS2 (sbiw,r28,%o0-63));
2560 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2561 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2562 AS2 (st,Y,%1) CR_TAB
2563 AS2 (subi,r28,lo8(%o0)) CR_TAB
2564 AS2 (sbci,r29,hi8(%o0)));
2566 else if (REGNO (XEXP (x,0)) == REG_X)
/* Source register is part of X: copy it to __tmp_reg__ before adiw
   clobbers it.  */
2568 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2570 if (reg_unused_after (insn, XEXP (x,0)))
2571 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2572 AS2 (adiw,r26,%o0) CR_TAB
2573 AS2 (st,X,__tmp_reg__));
2575 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2576 AS2 (adiw,r26,%o0) CR_TAB
2577 AS2 (st,X,__tmp_reg__) CR_TAB
2578 AS2 (sbiw,r26,%o0));
2582 if (reg_unused_after (insn, XEXP (x,0)))
2583 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2586 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2587 AS2 (st,X,%1) CR_TAB
2588 AS2 (sbiw,r26,%o0));
2592 return AS2 (std,%0,%1);
2595 return AS2 (st,%0,%1);
/* Asm for a HImode store memory <- register pair.  Volatile MEMs force
   writing the HIGH byte first (required for 16-bit I/O registers such as
   timers), which rules out the simple st X+ / st X ordering.  */
2599 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2603 rtx base = XEXP (dest, 0);
2604 int reg_base = true_regnum (base);
2605 int reg_src = true_regnum (src);
2606 /* "volatile" forces writing high byte first, even if less efficient,
2607 for correct operation with 16-bit I/O registers. */
2608 int mem_volatile_p = MEM_VOLATILE_P (dest);
2613 if (CONSTANT_ADDRESS_P (base))
2615 if (optimize > 0 && io_address_operand (base, HImode))
2618 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2619 AS2 (out,%A0-0x20,%A1));
2621 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2626 if (reg_base == REG_X)
2628 if (reg_src == REG_X)
2630 /* "st X+,r26" and "st -X,r26" are undefined. */
2631 if (!mem_volatile_p && reg_unused_after (insn, src))
2632 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2633 AS2 (st,X,r26) CR_TAB
2634 AS2 (adiw,r26,1) CR_TAB
2635 AS2 (st,X,__tmp_reg__));
/* Volatile order: write the (staged) high byte first.  */
2637 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2638 AS2 (adiw,r26,1) CR_TAB
2639 AS2 (st,X,__tmp_reg__) CR_TAB
2640 AS2 (sbiw,r26,1) CR_TAB
2645 if (!mem_volatile_p && reg_unused_after (insn, base))
2646 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2649 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2650 AS2 (st,X,%B1) CR_TAB
/* Y/Z base: std reaches byte 1 directly, high byte first.  */
2655 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2658 else if (GET_CODE (base) == PLUS)
2660 int disp = INTVAL (XEXP (base, 1));
2661 reg_base = REGNO (XEXP (base, 0));
2662 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2664 if (reg_base != REG_Y)
2665 fatal_insn ("incorrect insn:",insn);
2667 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2668 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2669 AS2 (std,Y+63,%B1) CR_TAB
2670 AS2 (std,Y+62,%A1) CR_TAB
2671 AS2 (sbiw,r28,%o0-62));
2673 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2674 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2675 AS2 (std,Y+1,%B1) CR_TAB
2676 AS2 (st,Y,%A1) CR_TAB
2677 AS2 (subi,r28,lo8(%o0)) CR_TAB
2678 AS2 (sbci,r29,hi8(%o0)));
2680 if (reg_base == REG_X)
2683 if (reg_src == REG_X)
/* Save the X pair in tmp/zero regs before adiw; restore zero reg.  */
2686 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2687 AS2 (mov,__zero_reg__,r27) CR_TAB
2688 AS2 (adiw,r26,%o0+1) CR_TAB
2689 AS2 (st,X,__zero_reg__) CR_TAB
2690 AS2 (st,-X,__tmp_reg__) CR_TAB
2691 AS1 (clr,__zero_reg__) CR_TAB
2692 AS2 (sbiw,r26,%o0));
2695 return (AS2 (adiw,r26,%o0+1) CR_TAB
2696 AS2 (st,X,%B1) CR_TAB
2697 AS2 (st,-X,%A1) CR_TAB
2698 AS2 (sbiw,r26,%o0));
2700 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2703 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2704 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2706 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Volatile post-inc must still write high byte first; emulate with
   explicit pointer arithmetic (condition lines elided here).  */
2710 if (REGNO (XEXP (base, 0)) == REG_X)
2713 return (AS2 (adiw,r26,1) CR_TAB
2714 AS2 (st,X,%B1) CR_TAB
2715 AS2 (st,-X,%A1) CR_TAB
2721 return (AS2 (std,%p0+1,%B1) CR_TAB
2722 AS2 (st,%p0,%A1) CR_TAB
2728 return (AS2 (st,%0,%A1) CR_TAB
2731 fatal_insn ("unknown move insn:",insn);
2735 /* Return 1 if frame pointer for current function required. */
/* Needed for alloca, when no args fit in registers (stack args), or when
   there are frame-allocated locals.  */
2738 frame_pointer_required_p (void)
2740 return (cfun->calls_alloca
2741 || crtl->args.info.nregs == 0
2742 || get_frame_size () > 0);
2745 /* Returns the condition of compare insn INSN, or UNKNOWN. */
/* Peeks at the next real insn: if it is a conditional jump, extract the
   comparison code from the IF_THEN_ELSE condition of its pattern.
   NOTE(review): assumes the jump's SET_SRC has the condition as operand 0
   — the guard for non-conditional jump patterns may be on elided lines.  */
2748 compare_condition (rtx insn)
2750 rtx next = next_real_insn (insn);
2751 RTX_CODE cond = UNKNOWN;
2752 if (next && GET_CODE (next) == JUMP_INSN)
2754 rtx pat = PATTERN (next);
2755 rtx src = SET_SRC (pat);
2756 rtx t = XEXP (src, 0);
2757 cond = GET_CODE (t);
2762 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
/* GE/LT against zero only need the N flag, so a sign test suffices.  */
2765 compare_sign_p (rtx insn)
2767 RTX_CODE cond = compare_condition (insn);
2768 return (cond == GE || cond == LT);
2771 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2772 that needs to be swapped (GT, GTU, LE, LEU). */
/* Returns the condition code itself (nonzero) rather than just a flag,
   so callers can see which condition needs swapping.  */
2775 compare_diff_p (rtx insn)
2777 RTX_CODE cond = compare_condition (insn);
2778 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2781 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2784 compare_eq_p (rtx insn)
2786 RTX_CODE cond = compare_condition (insn);
2787 return (cond == EQ || cond == NE);
2791 /* Output test instruction for HImode. */
/* Chooses the cheapest 16-bit test for the following branch:
   sign-only -> tst high byte; EQ/NE with a dead operand -> or the bytes
   together; ADDW-capable register pair -> sbiw 0; else cp/cpc with the
   zero register.  Length stores are on elided lines.  */
2794 out_tsthi (rtx insn, int *l)
2796 if (compare_sign_p (insn))
2799 return AS1 (tst,%B0);
2801 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2802 && compare_eq_p (insn))
2804 /* Faster than sbiw if we can clobber the operand. */
2806 return AS2 (or,%A0,%B0);
2808 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2811 return AS2 (sbiw,%0,0);
2814 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2815 AS2 (cpc,%B0,__zero_reg__));
2819 /* Output test instruction for SImode. */
/* 32-bit analogue of out_tsthi: tst the top byte for sign-only tests,
   sbiw+cpc chain for ADDW_REGS, else a full cp/cpc ripple against the
   zero register.  Length stores are on elided lines.  */
2822 out_tstsi (rtx insn, int *l)
2824 if (compare_sign_p (insn))
2827 return AS1 (tst,%D0);
2829 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2832 return (AS2 (sbiw,%A0,0) CR_TAB
2833 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2834 AS2 (cpc,%D0,__zero_reg__));
2837 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2838 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2839 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2840 AS2 (cpc,%D0,__zero_reg__));
2844 /* Generate asm equivalent for various shifts.
2845 Shift count is a CONST_INT, MEM or REG.
2846 This only handles cases that are not already
2847 carefully hand-optimized in ?sh??i3_out. */
/* TEMPLATE is one iteration of the shift (t_len words); this routine
   decides between unrolling it COUNT times and wrapping it in a loop
   with a counter register (%3).  NOTE(review): "template" is a C++
   keyword; later GCC renamed this parameter -- left as-is here.  */
2850 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2851 int *len, int t_len)
2855 int second_label = 1;
2856 int saved_in_tmp = 0;
2857 int use_zero_reg = 0;
2859 op[0] = operands[0];
2860 op[1] = operands[1];
2861 op[2] = operands[2];
2862 op[3] = operands[3];
2868 if (GET_CODE (operands[2]) == CONST_INT)
/* A PARALLEL pattern means a scratch register (%3) was provided.  */
2870 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2871 int count = INTVAL (operands[2]);
2872 int max_len = 10; /* If larger than this, always use a loop. */
2881 if (count < 8 && !scratch)
2885 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2887 if (t_len * count <= max_len)
2889 /* Output shifts inline with no loop - faster. */
2891 *len = t_len * count;
2895 output_asm_insn (template, op);
2904 strcat (str, AS2 (ldi,%3,%2));
2906 else if (use_zero_reg)
2908 /* Hack to save one word: use __zero_reg__ as loop counter.
2909 Set one bit, then shift in a loop until it is 0 again. */
2911 op[3] = zero_reg_rtx;
2915 strcat (str, ("set" CR_TAB
2916 AS2 (bld,%3,%2-1)));
2920 /* No scratch register available, use one from LD_REGS (saved in
2921 __tmp_reg__) that doesn't overlap with registers to shift. */
/* Pick an LD_REGS register (r16..r31) distinct from operand 0.  */
2923 op[3] = gen_rtx_REG (QImode,
2924 ((true_regnum (operands[0]) - 1) & 15) + 16);
2925 op[4] = tmp_reg_rtx;
2929 *len = 3; /* Includes "mov %3,%4" after the loop. */
2931 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2937 else if (GET_CODE (operands[2]) == MEM)
/* Count in memory: load it into __tmp_reg__ first.  */
2941 op[3] = op_mov[0] = tmp_reg_rtx;
2945 out_movqi_r_mr (insn, op_mov, len);
2947 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2949 else if (register_operand (operands[2], QImode))
/* Count in a register; copy it if it is still live afterwards.  */
2951 if (reg_unused_after (insn, operands[2]))
2955 op[3] = tmp_reg_rtx;
2957 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2961 fatal_insn ("bad shift insn:", insn);
/* second_label: jump into the loop so a zero count shifts nothing.  */
2968 strcat (str, AS1 (rjmp,2f));
2972 *len += t_len + 2; /* template + dec + brXX */
2975 strcat (str, "\n1:\t");
2976 strcat (str, template);
2977 strcat (str, second_label ? "\n2:\t" : "\n\t");
2978 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2979 strcat (str, CR_TAB);
2980 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2982 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2983 output_asm_insn (str, op);
2988 /* 8bit shift left ((char)x << i) */
/* Hand-optimized sequences for constant counts; anything else falls
   through to the generic loop in out_shift_with_cnt.  Counts >= 8
   clear the register outright.  */
2991 ashlqi3_out (rtx insn, rtx operands[], int *len)
2993 if (GET_CODE (operands[2]) == CONST_INT)
3000 switch (INTVAL (operands[2]))
3003 if (INTVAL (operands[2]) < 8)
3007 return AS1 (clr,%0);
3011 return AS1 (lsl,%0);
3015 return (AS1 (lsl,%0) CR_TAB
3020 return (AS1 (lsl,%0) CR_TAB
/* Shift by 4: swap nibbles, then mask -- needs andi (LD_REGS).  */
3025 if (test_hard_reg_class (LD_REGS, operands[0]))
3028 return (AS1 (swap,%0) CR_TAB
3029 AS2 (andi,%0,0xf0));
3032 return (AS1 (lsl,%0) CR_TAB
3038 if (test_hard_reg_class (LD_REGS, operands[0]))
3041 return (AS1 (swap,%0) CR_TAB
3043 AS2 (andi,%0,0xe0));
3046 return (AS1 (lsl,%0) CR_TAB
3053 if (test_hard_reg_class (LD_REGS, operands[0]))
3056 return (AS1 (swap,%0) CR_TAB
3059 AS2 (andi,%0,0xc0));
3062 return (AS1 (lsl,%0) CR_TAB
/* Shift by 7: rotate the top bit around instead of 7 shifts.  */
3071 return (AS1 (ror,%0) CR_TAB
3076 else if (CONSTANT_P (operands[2]))
3077 fatal_insn ("internal compiler error. Incorrect shift:", insn)
3079 out_shift_with_cnt (AS1 (lsl,%0),
3080 insn, operands, len, 1);
3085 /* 16bit shift left ((short)x << i) */
/* Constant-count HImode left shifts.  SCRATCH notes whether a spare
   register (%3) exists; LDI_OK whether operand 0 accepts andi/ldi
   (LD_REGS).  Counts >= 8 start by moving the low byte into the high
   byte; some counts use the hardware multiplier when available.  */
3088 ashlhi3_out (rtx insn, rtx operands[], int *len)
3090 if (GET_CODE (operands[2]) == CONST_INT)
3092 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3093 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3100 switch (INTVAL (operands[2]))
3103 if (INTVAL (operands[2]) < 16)
3107 return (AS1 (clr,%B0) CR_TAB
3111 if (optimize_size && scratch)
/* Shift by 4: swap nibbles of both bytes and recombine via eor.  */
3116 return (AS1 (swap,%A0) CR_TAB
3117 AS1 (swap,%B0) CR_TAB
3118 AS2 (andi,%B0,0xf0) CR_TAB
3119 AS2 (eor,%B0,%A0) CR_TAB
3120 AS2 (andi,%A0,0xf0) CR_TAB
3126 return (AS1 (swap,%A0) CR_TAB
3127 AS1 (swap,%B0) CR_TAB
3128 AS2 (ldi,%3,0xf0) CR_TAB
3129 AS2 (and,%B0,%3) CR_TAB
3130 AS2 (eor,%B0,%A0) CR_TAB
3131 AS2 (and,%A0,%3) CR_TAB
3134 break; /* optimize_size ? 6 : 8 */
3138 break; /* scratch ? 5 : 6 */
3142 return (AS1 (lsl,%A0) CR_TAB
3143 AS1 (rol,%B0) CR_TAB
3144 AS1 (swap,%A0) CR_TAB
3145 AS1 (swap,%B0) CR_TAB
3146 AS2 (andi,%B0,0xf0) CR_TAB
3147 AS2 (eor,%B0,%A0) CR_TAB
3148 AS2 (andi,%A0,0xf0) CR_TAB
3154 return (AS1 (lsl,%A0) CR_TAB
3155 AS1 (rol,%B0) CR_TAB
3156 AS1 (swap,%A0) CR_TAB
3157 AS1 (swap,%B0) CR_TAB
3158 AS2 (ldi,%3,0xf0) CR_TAB
3159 AS2 (and,%B0,%3) CR_TAB
3160 AS2 (eor,%B0,%A0) CR_TAB
3161 AS2 (and,%A0,%3) CR_TAB
3168 break; /* scratch ? 5 : 6 */
/* Shift by 7: shift RIGHT once through __tmp_reg__, then swap bytes.  */
3170 return (AS1 (clr,__tmp_reg__) CR_TAB
3171 AS1 (lsr,%B0) CR_TAB
3172 AS1 (ror,%A0) CR_TAB
3173 AS1 (ror,__tmp_reg__) CR_TAB
3174 AS1 (lsr,%B0) CR_TAB
3175 AS1 (ror,%A0) CR_TAB
3176 AS1 (ror,__tmp_reg__) CR_TAB
3177 AS2 (mov,%B0,%A0) CR_TAB
3178 AS2 (mov,%A0,__tmp_reg__));
3182 return (AS1 (lsr,%B0) CR_TAB
3183 AS2 (mov,%B0,%A0) CR_TAB
3184 AS1 (clr,%A0) CR_TAB
3185 AS1 (ror,%B0) CR_TAB
/* Shift by 8: plain byte move when source/dest differ.  */
3189 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3194 return (AS2 (mov,%B0,%A0) CR_TAB
3195 AS1 (clr,%A0) CR_TAB
3200 return (AS2 (mov,%B0,%A0) CR_TAB
3201 AS1 (clr,%A0) CR_TAB
3202 AS1 (lsl,%B0) CR_TAB
3207 return (AS2 (mov,%B0,%A0) CR_TAB
3208 AS1 (clr,%A0) CR_TAB
3209 AS1 (lsl,%B0) CR_TAB
3210 AS1 (lsl,%B0) CR_TAB
3217 return (AS2 (mov,%B0,%A0) CR_TAB
3218 AS1 (clr,%A0) CR_TAB
3219 AS1 (swap,%B0) CR_TAB
3220 AS2 (andi,%B0,0xf0));
3225 return (AS2 (mov,%B0,%A0) CR_TAB
3226 AS1 (clr,%A0) CR_TAB
3227 AS1 (swap,%B0) CR_TAB
3228 AS2 (ldi,%3,0xf0) CR_TAB
3232 return (AS2 (mov,%B0,%A0) CR_TAB
3233 AS1 (clr,%A0) CR_TAB
3234 AS1 (lsl,%B0) CR_TAB
3235 AS1 (lsl,%B0) CR_TAB
3236 AS1 (lsl,%B0) CR_TAB
3243 return (AS2 (mov,%B0,%A0) CR_TAB
3244 AS1 (clr,%A0) CR_TAB
3245 AS1 (swap,%B0) CR_TAB
3246 AS1 (lsl,%B0) CR_TAB
3247 AS2 (andi,%B0,0xe0));
/* Shift by 13: multiply by 0x20 on MUL-capable parts.  */
3249 if (AVR_HAVE_MUL && scratch)
3252 return (AS2 (ldi,%3,0x20) CR_TAB
3253 AS2 (mul,%A0,%3) CR_TAB
3254 AS2 (mov,%B0,r0) CR_TAB
3255 AS1 (clr,%A0) CR_TAB
3256 AS1 (clr,__zero_reg__));
3258 if (optimize_size && scratch)
3263 return (AS2 (mov,%B0,%A0) CR_TAB
3264 AS1 (clr,%A0) CR_TAB
3265 AS1 (swap,%B0) CR_TAB
3266 AS1 (lsl,%B0) CR_TAB
3267 AS2 (ldi,%3,0xe0) CR_TAB
/* Build the 0x20 multiplier in r1 via set/bld -- no ldi needed.  */
3273 return ("set" CR_TAB
3274 AS2 (bld,r1,5) CR_TAB
3275 AS2 (mul,%A0,r1) CR_TAB
3276 AS2 (mov,%B0,r0) CR_TAB
3277 AS1 (clr,%A0) CR_TAB
3278 AS1 (clr,__zero_reg__));
3281 return (AS2 (mov,%B0,%A0) CR_TAB
3282 AS1 (clr,%A0) CR_TAB
3283 AS1 (lsl,%B0) CR_TAB
3284 AS1 (lsl,%B0) CR_TAB
3285 AS1 (lsl,%B0) CR_TAB
3286 AS1 (lsl,%B0) CR_TAB
3290 if (AVR_HAVE_MUL && ldi_ok)
3293 return (AS2 (ldi,%B0,0x40) CR_TAB
3294 AS2 (mul,%A0,%B0) CR_TAB
3295 AS2 (mov,%B0,r0) CR_TAB
3296 AS1 (clr,%A0) CR_TAB
3297 AS1 (clr,__zero_reg__));
3299 if (AVR_HAVE_MUL && scratch)
3302 return (AS2 (ldi,%3,0x40) CR_TAB
3303 AS2 (mul,%A0,%3) CR_TAB
3304 AS2 (mov,%B0,r0) CR_TAB
3305 AS1 (clr,%A0) CR_TAB
3306 AS1 (clr,__zero_reg__));
3308 if (optimize_size && ldi_ok)
3311 return (AS2 (mov,%B0,%A0) CR_TAB
3312 AS2 (ldi,%A0,6) "\n1:\t"
3313 AS1 (lsl,%B0) CR_TAB
3314 AS1 (dec,%A0) CR_TAB
3317 if (optimize_size && scratch)
3320 return (AS1 (clr,%B0) CR_TAB
3321 AS1 (lsr,%A0) CR_TAB
3322 AS1 (ror,%B0) CR_TAB
3323 AS1 (lsr,%A0) CR_TAB
3324 AS1 (ror,%B0) CR_TAB
/* Shift by 15: rotate the remaining bit right into the high byte.  */
3329 return (AS1 (clr,%B0) CR_TAB
3330 AS1 (lsr,%A0) CR_TAB
3331 AS1 (ror,%B0) CR_TAB
3336 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3338 insn, operands, len, 2);
3343 /* 32bit shift left ((long)x << i) */
/* Constant-count SImode left shifts: whole-byte counts (8/16/24)
   become byte moves plus clears; movw is used where register pairing
   permits; everything else goes to out_shift_with_cnt.  */
3346 ashlsi3_out (rtx insn, rtx operands[], int *len)
3348 if (GET_CODE (operands[2]) == CONST_INT)
3356 switch (INTVAL (operands[2]))
3359 if (INTVAL (operands[2]) < 32)
3363 return *len = 3, (AS1 (clr,%D0) CR_TAB
3364 AS1 (clr,%C0) CR_TAB
3365 AS2 (movw,%A0,%C0));
3367 return (AS1 (clr,%D0) CR_TAB
3368 AS1 (clr,%C0) CR_TAB
3369 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes up one position, clear the low byte.
   Register overlap decides the copy direction.  */
3374 int reg0 = true_regnum (operands[0]);
3375 int reg1 = true_regnum (operands[1]);
3378 return (AS2 (mov,%D0,%C1) CR_TAB
3379 AS2 (mov,%C0,%B1) CR_TAB
3380 AS2 (mov,%B0,%A1) CR_TAB
3383 return (AS1 (clr,%A0) CR_TAB
3384 AS2 (mov,%B0,%A1) CR_TAB
3385 AS2 (mov,%C0,%B1) CR_TAB
3391 int reg0 = true_regnum (operands[0]);
3392 int reg1 = true_regnum (operands[1]);
3393 if (reg0 + 2 == reg1)
3394 return *len = 2, (AS1 (clr,%B0) CR_TAB
3397 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3398 AS1 (clr,%B0) CR_TAB
3401 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3402 AS2 (mov,%D0,%B1) CR_TAB
3403 AS1 (clr,%B0) CR_TAB
3409 return (AS2 (mov,%D0,%A1) CR_TAB
3410 AS1 (clr,%C0) CR_TAB
3411 AS1 (clr,%B0) CR_TAB
/* Shift by 31: only bit 0 survives, rotated into the top bit.  */
3416 return (AS1 (clr,%D0) CR_TAB
3417 AS1 (lsr,%A0) CR_TAB
3418 AS1 (ror,%D0) CR_TAB
3419 AS1 (clr,%C0) CR_TAB
3420 AS1 (clr,%B0) CR_TAB
3425 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3426 AS1 (rol,%B0) CR_TAB
3427 AS1 (rol,%C0) CR_TAB
3429 insn, operands, len, 4);
3433 /* 8bit arithmetic shift right ((signed char)x >> i) */
/* Constant counts emit 1..6 asr's; >= 6 copies the sign bit into all
   positions instead of shifting repeatedly.  */
3436 ashrqi3_out (rtx insn, rtx operands[], int *len)
3438 if (GET_CODE (operands[2]) == CONST_INT)
3445 switch (INTVAL (operands[2]))
3449 return AS1 (asr,%0);
3453 return (AS1 (asr,%0) CR_TAB
3458 return (AS1 (asr,%0) CR_TAB
3464 return (AS1 (asr,%0) CR_TAB
3471 return (AS1 (asr,%0) CR_TAB
/* Shift by 6: smear bit 7 via sbc, then patch in bit 6 with bst/bld.  */
3479 return (AS2 (bst,%0,6) CR_TAB
3481 AS2 (sbc,%0,%0) CR_TAB
3485 if (INTVAL (operands[2]) < 8)
/* Shift by 7: the result is 0 or -1 depending only on the sign.  */
3492 return (AS1 (lsl,%0) CR_TAB
3496 else if (CONSTANT_P (operands[2]))
3497 fatal_insn ("internal compiler error. Incorrect shift:", insn)
3499 out_shift_with_cnt (AS1 (asr,%0),
3500 insn, operands, len, 1);
3505 /* 16bit arithmetic shift right ((signed short)x >> i) */
/* Constant-count HImode arithmetic shifts.  The recurring trick
   "lsl %B0 / sbc %B0,%B0" copies the sign of the high byte into every
   bit of it; the muls variants use the signed hardware multiplier.  */
3508 ashrhi3_out (rtx insn, rtx operands[], int *len)
3510 if (GET_CODE (operands[2]) == CONST_INT)
3512 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3513 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3520 switch (INTVAL (operands[2]))
3524 /* XXX try to optimize this too? */
3529 break; /* scratch ? 5 : 6 */
/* Shift by 6: shift LEFT twice through __tmp_reg__, sign-extend.  */
3531 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3532 AS2 (mov,%A0,%B0) CR_TAB
3533 AS1 (lsl,__tmp_reg__) CR_TAB
3534 AS1 (rol,%A0) CR_TAB
3535 AS2 (sbc,%B0,%B0) CR_TAB
3536 AS1 (lsl,__tmp_reg__) CR_TAB
3537 AS1 (rol,%A0) CR_TAB
3542 return (AS1 (lsl,%A0) CR_TAB
3543 AS2 (mov,%A0,%B0) CR_TAB
3544 AS1 (rol,%A0) CR_TAB
/* Shift by 8: high byte becomes low byte, sign extends the high.  */
3549 int reg0 = true_regnum (operands[0]);
3550 int reg1 = true_regnum (operands[1]);
3553 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3554 AS1 (lsl,%B0) CR_TAB
3557 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3558 AS1 (clr,%B0) CR_TAB
3559 AS2 (sbrc,%A0,7) CR_TAB
3565 return (AS2 (mov,%A0,%B0) CR_TAB
3566 AS1 (lsl,%B0) CR_TAB
3567 AS2 (sbc,%B0,%B0) CR_TAB
3572 return (AS2 (mov,%A0,%B0) CR_TAB
3573 AS1 (lsl,%B0) CR_TAB
3574 AS2 (sbc,%B0,%B0) CR_TAB
3575 AS1 (asr,%A0) CR_TAB
/* Shift by 11: signed multiply by 0x20 == arithmetic >> 11.  */
3579 if (AVR_HAVE_MUL && ldi_ok)
3582 return (AS2 (ldi,%A0,0x20) CR_TAB
3583 AS2 (muls,%B0,%A0) CR_TAB
3584 AS2 (mov,%A0,r1) CR_TAB
3585 AS2 (sbc,%B0,%B0) CR_TAB
3586 AS1 (clr,__zero_reg__));
3588 if (optimize_size && scratch)
3591 return (AS2 (mov,%A0,%B0) CR_TAB
3592 AS1 (lsl,%B0) CR_TAB
3593 AS2 (sbc,%B0,%B0) CR_TAB
3594 AS1 (asr,%A0) CR_TAB
3595 AS1 (asr,%A0) CR_TAB
3599 if (AVR_HAVE_MUL && ldi_ok)
3602 return (AS2 (ldi,%A0,0x10) CR_TAB
3603 AS2 (muls,%B0,%A0) CR_TAB
3604 AS2 (mov,%A0,r1) CR_TAB
3605 AS2 (sbc,%B0,%B0) CR_TAB
3606 AS1 (clr,__zero_reg__));
3608 if (optimize_size && scratch)
3611 return (AS2 (mov,%A0,%B0) CR_TAB
3612 AS1 (lsl,%B0) CR_TAB
3613 AS2 (sbc,%B0,%B0) CR_TAB
3614 AS1 (asr,%A0) CR_TAB
3615 AS1 (asr,%A0) CR_TAB
3616 AS1 (asr,%A0) CR_TAB
3620 if (AVR_HAVE_MUL && ldi_ok)
3623 return (AS2 (ldi,%A0,0x08) CR_TAB
3624 AS2 (muls,%B0,%A0) CR_TAB
3625 AS2 (mov,%A0,r1) CR_TAB
3626 AS2 (sbc,%B0,%B0) CR_TAB
3627 AS1 (clr,__zero_reg__));
3630 break; /* scratch ? 5 : 7 */
3632 return (AS2 (mov,%A0,%B0) CR_TAB
3633 AS1 (lsl,%B0) CR_TAB
3634 AS2 (sbc,%B0,%B0) CR_TAB
3635 AS1 (asr,%A0) CR_TAB
3636 AS1 (asr,%A0) CR_TAB
3637 AS1 (asr,%A0) CR_TAB
3638 AS1 (asr,%A0) CR_TAB
/* Shift by 14: push sign and bit 14 out the top, rebuild from them.  */
3643 return (AS1 (lsl,%B0) CR_TAB
3644 AS2 (sbc,%A0,%A0) CR_TAB
3645 AS1 (lsl,%B0) CR_TAB
3646 AS2 (mov,%B0,%A0) CR_TAB
3650 if (INTVAL (operands[2]) < 16)
/* Count >= 15: result is all sign bits.  */
3656 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3657 AS2 (sbc,%A0,%A0) CR_TAB
3662 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3664 insn, operands, len, 2);
3669 /* 32bit arithmetic shift right ((signed long)x >> i) */
/* Constant-count SImode arithmetic shifts: whole-byte counts become
   byte moves plus sign extension (sbrc/com or dec builds 0 / -1 in
   the vacated bytes); other counts go to out_shift_with_cnt.  */
3672 ashrsi3_out (rtx insn, rtx operands[], int *len)
3674 if (GET_CODE (operands[2]) == CONST_INT)
3682 switch (INTVAL (operands[2]))
3686 int reg0 = true_regnum (operands[0]);
3687 int reg1 = true_regnum (operands[1]);
3690 return (AS2 (mov,%A0,%B1) CR_TAB
3691 AS2 (mov,%B0,%C1) CR_TAB
3692 AS2 (mov,%C0,%D1) CR_TAB
3693 AS1 (clr,%D0) CR_TAB
3694 AS2 (sbrc,%C0,7) CR_TAB
/* Copying downwards: extend the sign first, then move bytes.  */
3697 return (AS1 (clr,%D0) CR_TAB
3698 AS2 (sbrc,%D1,7) CR_TAB
3699 AS1 (dec,%D0) CR_TAB
3700 AS2 (mov,%C0,%D1) CR_TAB
3701 AS2 (mov,%B0,%C1) CR_TAB
3707 int reg0 = true_regnum (operands[0]);
3708 int reg1 = true_regnum (operands[1]);
3710 if (reg0 == reg1 + 2)
3711 return *len = 4, (AS1 (clr,%D0) CR_TAB
3712 AS2 (sbrc,%B0,7) CR_TAB
3713 AS1 (com,%D0) CR_TAB
3716 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3717 AS1 (clr,%D0) CR_TAB
3718 AS2 (sbrc,%B0,7) CR_TAB
3719 AS1 (com,%D0) CR_TAB
3722 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3723 AS2 (mov,%A0,%C1) CR_TAB
3724 AS1 (clr,%D0) CR_TAB
3725 AS2 (sbrc,%B0,7) CR_TAB
3726 AS1 (com,%D0) CR_TAB
3731 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3732 AS1 (clr,%D0) CR_TAB
3733 AS2 (sbrc,%A0,7) CR_TAB
3734 AS1 (com,%D0) CR_TAB
3735 AS2 (mov,%B0,%D0) CR_TAB
3739 if (INTVAL (operands[2]) < 32)
/* Count >= 31: every byte becomes the sign extension.  */
3746 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3747 AS2 (sbc,%A0,%A0) CR_TAB
3748 AS2 (mov,%B0,%A0) CR_TAB
3749 AS2 (movw,%C0,%A0));
3751 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3752 AS2 (sbc,%A0,%A0) CR_TAB
3753 AS2 (mov,%B0,%A0) CR_TAB
3754 AS2 (mov,%C0,%A0) CR_TAB
3759 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3760 AS1 (ror,%C0) CR_TAB
3761 AS1 (ror,%B0) CR_TAB
3763 insn, operands, len, 4);
3767 /* 8bit logic shift right ((unsigned char)x >> i) */
/* Mirror of ashlqi3_out for logical right shifts: swap+andi tricks
   for counts 4..6 when the register accepts immediates, rotate trick
   for 7, clr for counts >= 8.  */
3770 lshrqi3_out (rtx insn, rtx operands[], int *len)
3772 if (GET_CODE (operands[2]) == CONST_INT)
3779 switch (INTVAL (operands[2]))
3782 if (INTVAL (operands[2]) < 8)
3786 return AS1 (clr,%0);
3790 return AS1 (lsr,%0);
3794 return (AS1 (lsr,%0) CR_TAB
3798 return (AS1 (lsr,%0) CR_TAB
3803 if (test_hard_reg_class (LD_REGS, operands[0]))
3806 return (AS1 (swap,%0) CR_TAB
3807 AS2 (andi,%0,0x0f));
3810 return (AS1 (lsr,%0) CR_TAB
3816 if (test_hard_reg_class (LD_REGS, operands[0]))
3819 return (AS1 (swap,%0) CR_TAB
3824 return (AS1 (lsr,%0) CR_TAB
3831 if (test_hard_reg_class (LD_REGS, operands[0]))
3834 return (AS1 (swap,%0) CR_TAB
3840 return (AS1 (lsr,%0) CR_TAB
/* Shift by 7: rotate bit 7 around into bit 0.  */
3849 return (AS1 (rol,%0) CR_TAB
3854 else if (CONSTANT_P (operands[2]))
3855 fatal_insn ("internal compiler error. Incorrect shift:", insn)
3857 out_shift_with_cnt (AS1 (lsr,%0),
3858 insn, operands, len, 1);
3862 /* 16bit logic shift right ((unsigned short)x >> i) */
/* Mirror of ashlhi3_out for logical right shifts; masks are the low
   nibble (0x0f) instead of the high one, and the unsigned hardware
   multiplier (mul) replaces muls.  */
3865 lshrhi3_out (rtx insn, rtx operands[], int *len)
3867 if (GET_CODE (operands[2]) == CONST_INT)
3869 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3870 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3877 switch (INTVAL (operands[2]))
3880 if (INTVAL (operands[2]) < 16)
3884 return (AS1 (clr,%B0) CR_TAB
3888 if (optimize_size && scratch)
3893 return (AS1 (swap,%B0) CR_TAB
3894 AS1 (swap,%A0) CR_TAB
3895 AS2 (andi,%A0,0x0f) CR_TAB
3896 AS2 (eor,%A0,%B0) CR_TAB
3897 AS2 (andi,%B0,0x0f) CR_TAB
3903 return (AS1 (swap,%B0) CR_TAB
3904 AS1 (swap,%A0) CR_TAB
3905 AS2 (ldi,%3,0x0f) CR_TAB
3906 AS2 (and,%A0,%3) CR_TAB
3907 AS2 (eor,%A0,%B0) CR_TAB
3908 AS2 (and,%B0,%3) CR_TAB
3911 break; /* optimize_size ? 6 : 8 */
3915 break; /* scratch ? 5 : 6 */
3919 return (AS1 (lsr,%B0) CR_TAB
3920 AS1 (ror,%A0) CR_TAB
3921 AS1 (swap,%B0) CR_TAB
3922 AS1 (swap,%A0) CR_TAB
3923 AS2 (andi,%A0,0x0f) CR_TAB
3924 AS2 (eor,%A0,%B0) CR_TAB
3925 AS2 (andi,%B0,0x0f) CR_TAB
3931 return (AS1 (lsr,%B0) CR_TAB
3932 AS1 (ror,%A0) CR_TAB
3933 AS1 (swap,%B0) CR_TAB
3934 AS1 (swap,%A0) CR_TAB
3935 AS2 (ldi,%3,0x0f) CR_TAB
3936 AS2 (and,%A0,%3) CR_TAB
3937 AS2 (eor,%A0,%B0) CR_TAB
3938 AS2 (and,%B0,%3) CR_TAB
3945 break; /* scratch ? 5 : 6 */
/* Shift by 7: shift LEFT twice through __tmp_reg__, then swap bytes.  */
3947 return (AS1 (clr,__tmp_reg__) CR_TAB
3948 AS1 (lsl,%A0) CR_TAB
3949 AS1 (rol,%B0) CR_TAB
3950 AS1 (rol,__tmp_reg__) CR_TAB
3951 AS1 (lsl,%A0) CR_TAB
3952 AS1 (rol,%B0) CR_TAB
3953 AS1 (rol,__tmp_reg__) CR_TAB
3954 AS2 (mov,%A0,%B0) CR_TAB
3955 AS2 (mov,%B0,__tmp_reg__));
3959 return (AS1 (lsl,%A0) CR_TAB
3960 AS2 (mov,%A0,%B0) CR_TAB
3961 AS1 (rol,%A0) CR_TAB
3962 AS2 (sbc,%B0,%B0) CR_TAB
/* Shift by 8: plain byte move when source/dest differ.  */
3966 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3971 return (AS2 (mov,%A0,%B0) CR_TAB
3972 AS1 (clr,%B0) CR_TAB
3977 return (AS2 (mov,%A0,%B0) CR_TAB
3978 AS1 (clr,%B0) CR_TAB
3979 AS1 (lsr,%A0) CR_TAB
3984 return (AS2 (mov,%A0,%B0) CR_TAB
3985 AS1 (clr,%B0) CR_TAB
3986 AS1 (lsr,%A0) CR_TAB
3987 AS1 (lsr,%A0) CR_TAB
3994 return (AS2 (mov,%A0,%B0) CR_TAB
3995 AS1 (clr,%B0) CR_TAB
3996 AS1 (swap,%A0) CR_TAB
3997 AS2 (andi,%A0,0x0f));
4002 return (AS2 (mov,%A0,%B0) CR_TAB
4003 AS1 (clr,%B0) CR_TAB
4004 AS1 (swap,%A0) CR_TAB
4005 AS2 (ldi,%3,0x0f) CR_TAB
4009 return (AS2 (mov,%A0,%B0) CR_TAB
4010 AS1 (clr,%B0) CR_TAB
4011 AS1 (lsr,%A0) CR_TAB
4012 AS1 (lsr,%A0) CR_TAB
4013 AS1 (lsr,%A0) CR_TAB
4020 return (AS2 (mov,%A0,%B0) CR_TAB
4021 AS1 (clr,%B0) CR_TAB
4022 AS1 (swap,%A0) CR_TAB
4023 AS1 (lsr,%A0) CR_TAB
4024 AS2 (andi,%A0,0x07));
/* Shift by 13: unsigned multiply by 0x08 == logical >> 13.  */
4026 if (AVR_HAVE_MUL && scratch)
4029 return (AS2 (ldi,%3,0x08) CR_TAB
4030 AS2 (mul,%B0,%3) CR_TAB
4031 AS2 (mov,%A0,r1) CR_TAB
4032 AS1 (clr,%B0) CR_TAB
4033 AS1 (clr,__zero_reg__));
4035 if (optimize_size && scratch)
4040 return (AS2 (mov,%A0,%B0) CR_TAB
4041 AS1 (clr,%B0) CR_TAB
4042 AS1 (swap,%A0) CR_TAB
4043 AS1 (lsr,%A0) CR_TAB
4044 AS2 (ldi,%3,0x07) CR_TAB
/* Build the 0x08 multiplier in r1 via set/bld -- no ldi needed.  */
4050 return ("set" CR_TAB
4051 AS2 (bld,r1,3) CR_TAB
4052 AS2 (mul,%B0,r1) CR_TAB
4053 AS2 (mov,%A0,r1) CR_TAB
4054 AS1 (clr,%B0) CR_TAB
4055 AS1 (clr,__zero_reg__));
4058 return (AS2 (mov,%A0,%B0) CR_TAB
4059 AS1 (clr,%B0) CR_TAB
4060 AS1 (lsr,%A0) CR_TAB
4061 AS1 (lsr,%A0) CR_TAB
4062 AS1 (lsr,%A0) CR_TAB
4063 AS1 (lsr,%A0) CR_TAB
4067 if (AVR_HAVE_MUL && ldi_ok)
4070 return (AS2 (ldi,%A0,0x04) CR_TAB
4071 AS2 (mul,%B0,%A0) CR_TAB
4072 AS2 (mov,%A0,r1) CR_TAB
4073 AS1 (clr,%B0) CR_TAB
4074 AS1 (clr,__zero_reg__));
4076 if (AVR_HAVE_MUL && scratch)
4079 return (AS2 (ldi,%3,0x04) CR_TAB
4080 AS2 (mul,%B0,%3) CR_TAB
4081 AS2 (mov,%A0,r1) CR_TAB
4082 AS1 (clr,%B0) CR_TAB
4083 AS1 (clr,__zero_reg__));
4085 if (optimize_size && ldi_ok)
4088 return (AS2 (mov,%A0,%B0) CR_TAB
4089 AS2 (ldi,%B0,6) "\n1:\t"
4090 AS1 (lsr,%A0) CR_TAB
4091 AS1 (dec,%B0) CR_TAB
4094 if (optimize_size && scratch)
4097 return (AS1 (clr,%A0) CR_TAB
4098 AS1 (lsl,%B0) CR_TAB
4099 AS1 (rol,%A0) CR_TAB
4100 AS1 (lsl,%B0) CR_TAB
4101 AS1 (rol,%A0) CR_TAB
/* Shift by 15: rotate the remaining bit left into the low byte.  */
4106 return (AS1 (clr,%A0) CR_TAB
4107 AS1 (lsl,%B0) CR_TAB
4108 AS1 (rol,%A0) CR_TAB
4113 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4115 insn, operands, len, 2);
4119 /* 32bit logic shift right ((unsigned int)x >> i) */
/* Constant-count SImode logical shifts: whole-byte counts (8/16/24)
   are byte moves plus clears; count 31 isolates the top bit; all
   other counts use the generic loop.  */
4122 lshrsi3_out (rtx insn, rtx operands[], int *len)
4124 if (GET_CODE (operands[2]) == CONST_INT)
4132 switch (INTVAL (operands[2]))
4135 if (INTVAL (operands[2]) < 32)
4139 return *len = 3, (AS1 (clr,%D0) CR_TAB
4140 AS1 (clr,%C0) CR_TAB
4141 AS2 (movw,%A0,%C0));
4143 return (AS1 (clr,%D0) CR_TAB
4144 AS1 (clr,%C0) CR_TAB
4145 AS1 (clr,%B0) CR_TAB
/* Shift by 8: move bytes down one position, clear the top byte.  */
4150 int reg0 = true_regnum (operands[0]);
4151 int reg1 = true_regnum (operands[1]);
4154 return (AS2 (mov,%A0,%B1) CR_TAB
4155 AS2 (mov,%B0,%C1) CR_TAB
4156 AS2 (mov,%C0,%D1) CR_TAB
4159 return (AS1 (clr,%D0) CR_TAB
4160 AS2 (mov,%C0,%D1) CR_TAB
4161 AS2 (mov,%B0,%C1) CR_TAB
4167 int reg0 = true_regnum (operands[0]);
4168 int reg1 = true_regnum (operands[1]);
4170 if (reg0 == reg1 + 2)
4171 return *len = 2, (AS1 (clr,%C0) CR_TAB
4174 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4175 AS1 (clr,%C0) CR_TAB
4178 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4179 AS2 (mov,%A0,%C1) CR_TAB
4180 AS1 (clr,%C0) CR_TAB
4185 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4186 AS1 (clr,%B0) CR_TAB
4187 AS1 (clr,%C0) CR_TAB
/* Shift by 31: result is the former sign bit, 0 or 1.  */
4192 return (AS1 (clr,%A0) CR_TAB
4193 AS2 (sbrc,%D0,7) CR_TAB
4194 AS1 (inc,%A0) CR_TAB
4195 AS1 (clr,%B0) CR_TAB
4196 AS1 (clr,%C0) CR_TAB
4201 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4202 AS1 (ror,%C0) CR_TAB
4203 AS1 (ror,%B0) CR_TAB
4205 insn, operands, len, 4);
4209 /* Modifies the length assigned to instruction INSN
4210 LEN is the initially computed length of the insn. */
/* Backs the ADJUST_INSN_LENGTH hook: re-runs the output functions in
   length-only mode (passing &len) so the final length matches what
   will actually be emitted.  NOTE(review): the masks in the AND/IOR
   arms count how many bytes of a HI/SI constant need an actual
   instruction (a byte of all-ones / all-zeros costs nothing).  */
4213 adjust_insn_length (rtx insn, int len)
4215 rtx patt = PATTERN (insn);
4218 if (GET_CODE (patt) == SET)
4221 op[1] = SET_SRC (patt);
4222 op[0] = SET_DEST (patt);
4223 if (general_operand (op[1], VOIDmode)
4224 && general_operand (op[0], VOIDmode))
4226 switch (GET_MODE (op[0]))
4229 output_movqi (insn, op, &len);
4232 output_movhi (insn, op, &len);
4236 output_movsisf (insn, op, &len);
4242 else if (op[0] == cc0_rtx && REG_P (op[1]))
4244 switch (GET_MODE (op[1]))
4246 case HImode: out_tsthi (insn,&len); break;
4247 case SImode: out_tstsi (insn,&len); break;
4251 else if (GET_CODE (op[1]) == AND)
4253 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4255 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4256 if (GET_MODE (op[1]) == SImode)
4257 len = (((mask & 0xff) != 0xff)
4258 + ((mask & 0xff00) != 0xff00)
4259 + ((mask & 0xff0000L) != 0xff0000L)
4260 + ((mask & 0xff000000L) != 0xff000000L));
4261 else if (GET_MODE (op[1]) == HImode)
4262 len = (((mask & 0xff) != 0xff)
4263 + ((mask & 0xff00) != 0xff00));
4266 else if (GET_CODE (op[1]) == IOR)
4268 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4270 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4271 if (GET_MODE (op[1]) == SImode)
4272 len = (((mask & 0xff) != 0)
4273 + ((mask & 0xff00) != 0)
4274 + ((mask & 0xff0000L) != 0)
4275 + ((mask & 0xff000000L) != 0));
4276 else if (GET_MODE (op[1]) == HImode)
4277 len = (((mask & 0xff) != 0)
4278 + ((mask & 0xff00) != 0));
/* PARALLEL patterns (reloads, shifts with scratch) are handled via
   single_set below.  */
4282 set = single_set (insn);
4287 op[1] = SET_SRC (set);
4288 op[0] = SET_DEST (set);
4290 if (GET_CODE (patt) == PARALLEL
4291 && general_operand (op[1], VOIDmode)
4292 && general_operand (op[0], VOIDmode))
4294 if (XVECLEN (patt, 0) == 2)
4295 op[2] = XVECEXP (patt, 0, 1);
4297 switch (GET_MODE (op[0]))
4303 output_reload_inhi (insn, op, &len);
4307 output_reload_insisf (insn, op, &len);
4313 else if (GET_CODE (op[1]) == ASHIFT
4314 || GET_CODE (op[1]) == ASHIFTRT
4315 || GET_CODE (op[1]) == LSHIFTRT)
4319 ops[1] = XEXP (op[1],0);
4320 ops[2] = XEXP (op[1],1);
4321 switch (GET_CODE (op[1]))
4324 switch (GET_MODE (op[0]))
4326 case QImode: ashlqi3_out (insn,ops,&len); break;
4327 case HImode: ashlhi3_out (insn,ops,&len); break;
4328 case SImode: ashlsi3_out (insn,ops,&len); break;
4333 switch (GET_MODE (op[0]))
4335 case QImode: ashrqi3_out (insn,ops,&len); break;
4336 case HImode: ashrhi3_out (insn,ops,&len); break;
4337 case SImode: ashrsi3_out (insn,ops,&len); break;
4342 switch (GET_MODE (op[0]))
4344 case QImode: lshrqi3_out (insn,ops,&len); break;
4345 case HImode: lshrhi3_out (insn,ops,&len); break;
4346 case SImode: lshrsi3_out (insn,ops,&len); break;
4358 /* Return nonzero if register REG dead after INSN. */
/* Fast path: a REG_DEAD/REG_UNUSED note on INSN settles it; otherwise
   scan forward with _reg_unused_after.  */
4361 reg_unused_after (rtx insn, rtx reg)
4363 return (dead_or_set_p (insn, reg)
4364 || (REG_P(reg) && _reg_unused_after (insn, reg)))
4367 /* Return nonzero if REG is not used after INSN.
4368 We assume REG is a reload reg, and therefore does
4369 not live past labels. It may live past calls or jumps though. */
/* Forward scan from INSN: stop (dead) at a label, give up (live) on
   ambiguous jumps, and inspect call usage, delay-slot SEQUENCEs and
   ordinary sets/uses along the way.  */
4372 _reg_unused_after (rtx insn, rtx reg)
4377 /* If the reg is set by this instruction, then it is safe for our
4378 case. Disregard the case where this is a store to memory, since
4379 we are checking a register used in the store address. */
4380 set = single_set (insn);
4381 if (set && GET_CODE (SET_DEST (set)) != MEM
4382 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4385 while ((insn = NEXT_INSN (insn)))
4388 code = GET_CODE (insn);
4391 /* If this is a label that existed before reload, then the register
4392 if dead here. However, if this is a label added by reorg, then
4393 the register may still be live here. We can't tell the difference,
4394 so we just ignore labels completely. */
4395 if (code == CODE_LABEL)
4403 if (code == JUMP_INSN)
4406 /* If this is a sequence, we must handle them all at once.
4407 We could have for instance a call that sets the target register,
4408 and an insn in a delay slot that uses the register. In this case,
4409 we must return 0. */
4410 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4415 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4417 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4418 rtx set = single_set (this_insn);
4420 if (GET_CODE (this_insn) == CALL_INSN)
4422 else if (GET_CODE (this_insn) == JUMP_INSN)
4424 if (INSN_ANNULLED_BRANCH_P (this_insn))
4429 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4431 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4433 if (GET_CODE (SET_DEST (set)) != MEM)
4439 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4444 else if (code == JUMP_INSN)
/* Calls: REG is live if the call uses it; dead if call-clobbered.  */
4448 if (code == CALL_INSN)
4451 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4452 if (GET_CODE (XEXP (tem, 0)) == USE
4453 && REG_P (XEXP (XEXP (tem, 0), 0))
4454 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4456 if (call_used_regs[REGNO (reg)])
4460 set = single_set (insn);
4462 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4464 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4465 return GET_CODE (SET_DEST (set)) != MEM;
4466 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4472 /* Target hook for assembling integer objects. The AVR version needs
4473 special handling for references to certain labels. */
/* Pointer-sized references to functions/labels are emitted wrapped in
   gs() so the linker can route them through a jump stub when the
   target address does not fit the pointer width.  */
4476 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4478 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4479 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4480 || GET_CODE (x) == LABEL_REF))
4482 fputs ("\t.word\tgs(", asm_out_file);
4483 output_addr_const (asm_out_file, x);
4484 fputs (")\n", asm_out_file);
4487 return default_assemble_integer (x, size, aligned_p);
4490 /* The routine used to output NUL terminated strings. We use a special
4491 version of this for most svr4 targets because doing so makes the
4492 generated assembly code more compact (and thus faster to assemble)
4493 as well as more readable, especially for targets like the i386
4494 (where the only alternative is to output character sequences as
4495 comma separated lists of numbers). */
/* Emits STR as one .string directive, octal-escaping bytes that the
   ESCAPES table marks as non-printable.  */
4498 gas_output_limited_string(FILE *file, const char *str)
4500 const unsigned char *_limited_str = (const unsigned char *) str;
4502 fprintf (file, "%s\"", STRING_ASM_OP);
4503 for (; (ch = *_limited_str); _limited_str++)
4506 switch (escape = ESCAPES[ch])
4512 fprintf (file, "\\%03o", ch);
4516 putc (escape, file);
4520 fprintf (file, "\"\n");
4523 /* The routine used to output sequences of byte values. We use a special
4524 version of this for most svr4 targets because doing so makes the
4525 generated assembly code more compact (and thus faster to assemble)
4526 as well as more readable. Note that if we find subparts of the
4527 character sequence which end with NUL (and which are shorter than
4528 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
/* Walks LENGTH bytes of STR, emitting NUL-terminated short runs via
   gas_output_limited_string and the rest as chunked .ascii lines,
   breaking a chunk once it approaches 60 output characters.  */
4531 gas_output_ascii(FILE *file, const char *str, size_t length)
4533 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4534 const unsigned char *limit = _ascii_bytes + length;
4535 unsigned bytes_in_chunk = 0;
4536 for (; _ascii_bytes < limit; _ascii_bytes++)
4538 const unsigned char *p;
4539 if (bytes_in_chunk >= 60)
4541 fprintf (file, "\"\n");
/* Look ahead for a NUL: a short terminated run becomes a .string.  */
4544 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4546 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4548 if (bytes_in_chunk > 0)
4550 fprintf (file, "\"\n");
4553 gas_output_limited_string (file, (const char*)_ascii_bytes);
4560 if (bytes_in_chunk == 0)
4561 fprintf (file, "\t.ascii\t\"");
4562 switch (escape = ESCAPES[ch = *_ascii_bytes])
4569 fprintf (file, "\\%03o", ch);
4570 bytes_in_chunk += 4;
4574 putc (escape, file);
4575 bytes_in_chunk += 2;
4580 if (bytes_in_chunk > 0)
4581 fprintf (file, "\"\n");
4584 /* Return value is nonzero if pseudos that have been
4585 assigned to registers of class CLASS would likely be spilled
4586 because registers of CLASS are needed for spill registers. */
/* Every class except the two large ones is considered spill-prone.  */
4589 class_likely_spilled_p (int c)
4591 return (c != ALL_REGS && c != ADDW_REGS)
4594 /* Valid attributes:
4595 progmem - put data to program memory;
4596 signal - make a function to be hardware interrupt. After function
4597 prologue interrupts are disabled;
4598 interrupt - make a function to be hardware interrupt. After function
4599 prologue interrupts are enabled;
4600 naked - don't generate function prologue/epilogue and `ret' command.
4602 Only `progmem' attribute valid for type. */
4604 const struct attribute_spec avr_attribute_table[] =
4606 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4607 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4608 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4609 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4610 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4611 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
/* Table must stay NULL-terminated.  */
4612 { NULL, 0, 0, false, false, false, NULL }
4615 /* Handle a "progmem" attribute; arguments as in
4616 struct attribute_spec.handler. */
4618 avr_handle_progmem_attribute (tree *node, tree name,
4619 tree args ATTRIBUTE_UNUSED,
4620 int flags ATTRIBUTE_UNUSED,
4625 if (TREE_CODE (*node) == TYPE_DECL)
4627 /* This is really a decl attribute, not a type attribute,
4628 but try to handle it for GCC 3.0 backwards compatibility. */
4630 tree type = TREE_TYPE (*node);
4631 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4632 tree newtype = build_type_attribute_variant (type, attr);
4634 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4635 TREE_TYPE (*node) = newtype;
4636 *no_add_attrs = true;
/* Only static-storage variables can live in flash; they must also be
   initialized (or external) so the data actually gets emitted.  */
4638 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4640 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4642 warning (0, "only initialized variables can be placed into "
4643 "program memory area");
4644 *no_add_attrs = true;
4649 warning (OPT_Wattributes, "%qs attribute ignored",
4650 IDENTIFIER_POINTER (name));
4651 *no_add_attrs = true;
4658 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4659 struct attribute_spec.handler. */
4662 avr_handle_fndecl_attribute (tree *node, tree name,
4663 tree args ATTRIBUTE_UNUSED,
4664 int flags ATTRIBUTE_UNUSED,
4667 if (TREE_CODE (*node) != FUNCTION_DECL)
4669 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4670 IDENTIFIER_POINTER (name));
4671 *no_add_attrs = true;
4675 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4676 const char *attr = IDENTIFIER_POINTER (name);
4678 /* If the function has the 'signal' or 'interrupt' attribute, test to
4679 make sure that the name of the function is "__vector_NN" so as to
4680 catch when the user misspells the interrupt vector name. */
4682 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4684 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4686 warning (0, "%qs appears to be a misspelled interrupt handler",
4690 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4692 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4694 warning (0, "%qs appears to be a misspelled signal handler",
/* Handle an attribute requiring a FUNCTION_TYPE ("naked"/"OS_task");
   arguments as in struct attribute_spec.handler.  */
4704 avr_handle_fntype_attribute (tree *node, tree name,
4705 tree args ATTRIBUTE_UNUSED,
4706 int flags ATTRIBUTE_UNUSED,
4709 if (TREE_CODE (*node) != FUNCTION_TYPE)
4711 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4712 IDENTIFIER_POINTER (name));
4713 *no_add_attrs = true;
4719 /* Look for attribute `progmem' in DECL
4720 if found return 1, otherwise 0. */
4723 avr_progmem_p (tree decl, tree attributes)
4727 if (TREE_CODE (decl) != VAR_DECL)
4731 != lookup_attribute ("progmem", attributes))
4737 while (TREE_CODE (a) == ARRAY_TYPE);
4739 if (a == error_mark_node)
4742 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4748 /* Add the section attribute if the variable is in progmem. */
4751 avr_insert_attributes (tree node, tree *attributes)
4753 if (TREE_CODE (node) == VAR_DECL
4754 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4755 && avr_progmem_p (node, *attributes))
4757 static const char dsec[] = ".progmem.data";
4758 *attributes = tree_cons (get_identifier ("section"),
4759 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4762 /* ??? This seems sketchy. Why can't the user declare the
4763 thing const in the first place? */
4764 TREE_READONLY (node) = 1;
4768 /* A get_unnamed_section callback for switching to progmem_section. */
4771 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4773 fprintf (asm_out_file,
4774 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4775 AVR_HAVE_JMP_CALL ? "a" : "ax");
4776 /* Should already be aligned, this is just to be safe if it isn't. */
4777 fprintf (asm_out_file, "\t.p2align 1\n");
4780 /* Implement TARGET_ASM_INIT_SECTIONS. */
4783 avr_asm_init_sections (void)
4785 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4786 avr_output_progmem_section_asm_op,
4788 readonly_data_section = data_section;
4792 avr_section_type_flags (tree decl, const char *name, int reloc)
4794 unsigned int flags = default_section_type_flags (decl, name, reloc);
4796 if (strncmp (name, ".noinit", 7) == 0)
4798 if (decl && TREE_CODE (decl) == VAR_DECL
4799 && DECL_INITIAL (decl) == NULL_TREE)
4800 flags |= SECTION_BSS; /* @nobits */
4802 warning (0, "only uninitialized variables can be placed in the "
4809 /* Outputs some appropriate text to go at the start of an assembler
4813 avr_file_start (void)
4816 error ("MCU %qs supported for assembler only", avr_mcu_name);
4818 default_file_start ();
4820 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4821 fputs ("__SREG__ = 0x3f\n"
4823 "__SP_L__ = 0x3d\n", asm_out_file);
4825 fputs ("__tmp_reg__ = 0\n"
4826 "__zero_reg__ = 1\n", asm_out_file);
4828 /* FIXME: output these only if there is anything in the .data / .bss
4829 sections - some code size could be saved by not linking in the
4830 initialization code from libgcc if one or both sections are empty. */
4831 fputs ("\t.global __do_copy_data\n", asm_out_file);
4832 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4835 /* Outputs to the stdio stream FILE some
4836 appropriate text to go at the end of an assembler file. */
4843 /* Choose the order in which to allocate hard registers for
4844 pseudo-registers local to a basic block.
4846 Store the desired register order in the array `reg_alloc_order'.
4847 Element 0 should be the register to allocate first; element 1, the
4848 next register; and so on. */
/* NOTE(review): this excerpt is a sampled copy — most elements of the
   three register-order tables are missing and every kept line carries a
   stray original-line-number prefix.  Code left byte-identical; only
   comments added.  Restore the full tables from the complete avr.c
   before building.  */
4851 order_regs_for_local_alloc (void)
4854   static const int order_0[] = {
/* Default allocation order (only this one visible row survives).  */
4862     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4866   static const int order_1[] = {
4874     17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4878   static const int order_2[] = {
4887     15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Table selection is driven by the -morder1/-morder2 options; the
   fallback (presumably order_0) is on a missing line — TODO confirm.  */
4892   const int *order = (TARGET_ORDER_1 ? order_1 :
4893 		      TARGET_ORDER_2 ? order_2 :
4895   for (i=0; i < ARRAY_SIZE (order_0); ++i)
4896       reg_alloc_order[i] = order[i];
4900 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4901 cost of an RTX operand given its context. X is the rtx of the
4902 operand, MODE is its mode, and OUTER is the rtx_code of this
4903 operand's parent operator. */
/* NOTE(review): sampled excerpt — the switch over CODE (register vs
   constant vs memory operands) is missing here; only the memory-cost
   line and the recursive fallback survive.  Kept byte-identical.  */
4906 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4908   enum rtx_code code = GET_CODE (x);
/* A memory operand costs one insn per byte of MODE.  */
4919       return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Fall back to the full cost function for compound operands.  */
4926   avr_rtx_costs (x, code, outer, &total);
4930 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4931 is to be calculated. Return true if the complete cost has been
4932 computed, and false if subexpressions should be scanned. In either
4933 case, *TOTAL contains the cost result. */
/* NOTE(review): sampled excerpt — the case labels, braces and breaks of
   the big switch over CODE are missing, and every kept line carries a
   stray original-line-number prefix.  Code left byte-identical; only
   comments added.  The groupings below are inferred from the constants
   and operand shapes — TODO confirm against the complete avr.c.  */
4936 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4938   enum machine_mode mode = GET_MODE (x);
4945       /* Immediate constants are as cheap as registers. */
4953       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Presumably the MEM / NEG / ABS family: per-mode fixed costs.  */
4961 	  *total = COSTS_N_INSNS (1);
4965 	  *total = COSTS_N_INSNS (3);
4969 	  *total = COSTS_N_INSNS (7);
4975       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4983       *total = COSTS_N_INSNS (1);
4989       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4993       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4994       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Extension costs: number of bytes grown (sign-extend pays 2 extra).  */
4998       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4999 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5000       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5004       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5005 			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5006       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* PLUS: per-mode costs; small literal addends (adiw/sbiw range) cheap.  */
5013 	  *total = COSTS_N_INSNS (1);
5014 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5015 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5019 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5021 	      *total = COSTS_N_INSNS (2);
5022 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5024 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5025 	    *total = COSTS_N_INSNS (1);
5027 	    *total = COSTS_N_INSNS (2);
5031 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5033 	      *total = COSTS_N_INSNS (4);
5034 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5036 	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5037 	    *total = COSTS_N_INSNS (1);
5039 	    *total = COSTS_N_INSNS (4);
5045       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* MINUS / AND / IOR family: one insn per byte of MODE.  */
5051       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5052       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5053       if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5054 	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5058       *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5059       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5060       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* MULT: cheap with hardware MUL, libcall otherwise.  */
5068 	  *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5069 	else if (optimize_size)
5070 	  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5077 	  *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5078 	else if (optimize_size)
5079 	  *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5087       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5088       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* DIV/MOD: always a libcall.  */
5096       *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5099       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5100       *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* ASHIFT: per-mode tables keyed by the (constant) shift count.  */
5107 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5109 	      *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5110 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5114 	      val = INTVAL (XEXP (x, 1));
5116 		*total = COSTS_N_INSNS (3);
5117 	      else if (val >= 0 && val <= 7)
5118 		*total = COSTS_N_INSNS (val);
5120 		*total = COSTS_N_INSNS (1);
5125 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5127 	      *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5128 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5131 	    switch (INTVAL (XEXP (x, 1)))
5138 		*total = COSTS_N_INSNS (2);
5141 		*total = COSTS_N_INSNS (3);
5147 		*total = COSTS_N_INSNS (4);
5152 		*total = COSTS_N_INSNS (5);
5155 		*total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5158 		*total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5161 		*total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5164 		*total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5165 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5170 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5172 	      *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5173 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5176 	    switch (INTVAL (XEXP (x, 1)))
5182 		*total = COSTS_N_INSNS (3);
5187 		*total = COSTS_N_INSNS (4);
5190 		*total = COSTS_N_INSNS (6);
5193 		*total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5196 		*total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5197 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5204       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* ASHIFTRT: same structure as ASHIFT with arithmetic-shift costs.  */
5211 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5213 	      *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5214 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5218 	      val = INTVAL (XEXP (x, 1));
5220 		*total = COSTS_N_INSNS (4);
5222 		*total = COSTS_N_INSNS (2);
5223 	      else if (val >= 0 && val <= 7)
5224 		*total = COSTS_N_INSNS (val);
5226 		*total = COSTS_N_INSNS (1);
5231 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5233 	      *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5234 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5237 	    switch (INTVAL (XEXP (x, 1)))
5243 		*total = COSTS_N_INSNS (2);
5246 		*total = COSTS_N_INSNS (3);
5252 		*total = COSTS_N_INSNS (4);
5256 		*total = COSTS_N_INSNS (5);
5259 		*total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5262 		*total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5266 		*total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5269 		*total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5270 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5275 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5277 	      *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5278 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5281 	    switch (INTVAL (XEXP (x, 1)))
5287 		*total = COSTS_N_INSNS (4);
5292 		*total = COSTS_N_INSNS (6);
5295 		*total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5298 		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5301 		*total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5302 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5309       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* LSHIFTRT: logical right shift cost tables.  */
5316 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5318 	      *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5319 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5323 	      val = INTVAL (XEXP (x, 1));
5325 		*total = COSTS_N_INSNS (3);
5326 	      else if (val >= 0 && val <= 7)
5327 		*total = COSTS_N_INSNS (val);
5329 		*total = COSTS_N_INSNS (1);
5334 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5336 	      *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5337 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5340 	    switch (INTVAL (XEXP (x, 1)))
5347 		*total = COSTS_N_INSNS (2);
5350 		*total = COSTS_N_INSNS (3);
5355 		*total = COSTS_N_INSNS (4);
5359 		*total = COSTS_N_INSNS (5);
5365 		*total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5368 		*total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5372 		*total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5375 		*total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5376 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5381 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5383 	      *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5384 	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5387 	    switch (INTVAL (XEXP (x, 1)))
5393 		*total = COSTS_N_INSNS (4);
5396 		*total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5401 		*total = COSTS_N_INSNS (4);
5404 		*total = COSTS_N_INSNS (6);
5407 		*total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5408 		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5415       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* COMPARE: cost keyed on the mode of operand 0; nonzero constant
   comparands need the constant loaded first.  */
5419       switch (GET_MODE (XEXP (x, 0)))
5422 	  *total = COSTS_N_INSNS (1);
5423 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5424 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5428 	  *total = COSTS_N_INSNS (2);
5429 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5430 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5431 	  else if (INTVAL (XEXP (x, 1)) != 0)
5432 	    *total += COSTS_N_INSNS (1);
5436 	  *total = COSTS_N_INSNS (4);
5437 	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5438 	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5439 	  else if (INTVAL (XEXP (x, 1)) != 0)
5440 	    *total += COSTS_N_INSNS (3);
5446       *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5455 /* Calculate the cost of a memory address. */
5458 avr_address_cost (rtx x)
5460 if (GET_CODE (x) == PLUS
5461 && GET_CODE (XEXP (x,1)) == CONST_INT
5462 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5463 && INTVAL (XEXP (x,1)) >= 61)
5465 if (CONSTANT_ADDRESS_P (x))
5467 if (optimize > 0 && io_address_operand (x, QImode))
5474 /* Test for extra memory constraint 'Q'.
5475 It's a memory address based on Y or Z pointer with valid displacement. */
5478 extra_constraint_Q (rtx x)
5480 if (GET_CODE (XEXP (x,0)) == PLUS
5481 && REG_P (XEXP (XEXP (x,0), 0))
5482 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5483 && (INTVAL (XEXP (XEXP (x,0), 1))
5484 <= MAX_LD_OFFSET (GET_MODE (x))))
5486 rtx xx = XEXP (XEXP (x,0), 0);
5487 int regno = REGNO (xx);
5488 if (TARGET_ALL_DEBUG)
5490 fprintf (stderr, ("extra_constraint:\n"
5491 "reload_completed: %d\n"
5492 "reload_in_progress: %d\n"),
5493 reload_completed, reload_in_progress);
5496 if (regno >= FIRST_PSEUDO_REGISTER)
5497 return 1; /* allocate pseudos */
5498 else if (regno == REG_Z || regno == REG_Y)
5499 return 1; /* strictly check */
5500 else if (xx == frame_pointer_rtx
5501 || xx == arg_pointer_rtx)
5502 return 1; /* XXX frame & arg pointer checks */
5507 /* Convert condition code CONDITION to the valid AVR condition code. */
5510 avr_normalize_condition (RTX_CODE condition)
5527 /* This function optimizes conditional jumps. */
/* NOTE(review): sampled excerpt — the function signature (presumably
   the machine-dependent reorg pass, avr_reorg) and its local variable
   declarations are on missing lines, as are most braces.  Code left
   byte-identical; only comments added.  The pass walks every insn,
   finds cc0-setting compare/tst patterns, and rewrites them together
   with the following conditional branch.  */
5534   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5536       if (! (GET_CODE (insn) == INSN
5537 	     || GET_CODE (insn) == CALL_INSN
5538 	     || GET_CODE (insn) == JUMP_INSN)
5539 	  || !single_set (insn))
5542       pattern = PATTERN (insn);
5544       if (GET_CODE (pattern) == PARALLEL)
5545 	pattern = XVECEXP (pattern, 0, 0);
5546       if (GET_CODE (pattern) == SET
5547 	  && SET_DEST (pattern) == cc0_rtx
5548 	  && compare_diff_p (insn))
5550 	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5552 	      /* Now we work under compare insn. */
5554 	      pattern = SET_SRC (pattern);
/* reg-reg compare: swap the operands and the branch condition.  */
5555 	      if (true_regnum (XEXP (pattern,0)) >= 0
5556 		  && true_regnum (XEXP (pattern,1)) >= 0 )
5558 		  rtx x = XEXP (pattern,0);
5559 		  rtx next = next_real_insn (insn);
5560 		  rtx pat = PATTERN (next);
5561 		  rtx src = SET_SRC (pat);
5562 		  rtx t = XEXP (src,0);
5563 		  PUT_CODE (t, swap_condition (GET_CODE (t)));
5564 		  XEXP (pattern,0) = XEXP (pattern,1);
5565 		  XEXP (pattern,1) = x;
5566 		  INSN_CODE (next) = -1;
/* reg-const compare: bump the constant by one and normalize the
   condition (GT -> GE etc.) when that yields a cheaper compare.  */
5568 	      else if (true_regnum (XEXP (pattern,0)) >= 0
5569 		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5571 		  rtx x = XEXP (pattern,1);
5572 		  rtx next = next_real_insn (insn);
5573 		  rtx pat = PATTERN (next);
5574 		  rtx src = SET_SRC (pat);
5575 		  rtx t = XEXP (src,0);
5576 		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5578 		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5580 		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5581 		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5582 		      INSN_CODE (next) = -1;
5583 		      INSN_CODE (insn) = -1;
/* tst insn: negate the tested value and swap the branch condition.  */
5587 	  else if (true_regnum (SET_SRC (pattern)) >= 0)
5589 	      /* This is a tst insn */
5590 	      rtx next = next_real_insn (insn);
5591 	      rtx pat = PATTERN (next);
5592 	      rtx src = SET_SRC (pat);
5593 	      rtx t = XEXP (src,0);
5595 	      PUT_CODE (t, swap_condition (GET_CODE (t)));
5596 	      SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5598 	      INSN_CODE (next) = -1;
5599 	      INSN_CODE (insn) = -1;
/* Returns register number for function return value.
   The AVR ABI returns values ending in r25:r24; 24 is the anchor the
   RET_REGISTER users offset from.  */
int
avr_ret_register (void)
{
  return 24;
}
5613 /* Create an RTX representing the place where a
5614 library function returns a value of mode MODE. */
5617 avr_libcall_value (enum machine_mode mode)
5619 int offs = GET_MODE_SIZE (mode);
5622 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5625 /* Create an RTX representing the place where a
5626 function returns a value of data type VALTYPE. */
5629 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
5633 if (TYPE_MODE (type) != BLKmode)
5634 return avr_libcall_value (TYPE_MODE (type));
5636 offs = int_size_in_bytes (type);
5639 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5640 offs = GET_MODE_SIZE (SImode);
5641 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5642 offs = GET_MODE_SIZE (DImode);
5644 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5647 /* Places additional restrictions on the register class to
5648 use when it is necessary to copy value X into a register
5652 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
5658 test_hard_reg_class (enum reg_class class, rtx x)
5660 int regno = true_regnum (x);
5664 if (TEST_HARD_REG_CLASS (class, regno))
5672 jump_over_one_insn_p (rtx insn, rtx dest)
5674 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5677 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5678 int dest_addr = INSN_ADDRESSES (uid);
5679 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5682 /* Returns 1 if a value of mode MODE can be stored starting with hard
5683 register number REGNO. On the enhanced core, anything larger than
5684 1 byte must start in even numbered register for "movw" to work
5685 (this way we don't have to check for odd registers everywhere). */
5688 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5690 /* Disallow QImode in stack pointer regs. */
5691 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5694 /* The only thing that can go into registers r28:r29 is a Pmode. */
5695 if (regno == REG_Y && mode == Pmode)
5698 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5699 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5705 /* Modes larger than QImode occupy consecutive registers. */
5706 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5709 /* All modes larger than QImode should start in an even register. */
5710 return !(regno & 1);
5714 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5720 if (GET_CODE (operands[1]) == CONST_INT)
5722 int val = INTVAL (operands[1]);
5723 if ((val & 0xff) == 0)
5726 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5727 AS2 (ldi,%2,hi8(%1)) CR_TAB
5730 else if ((val & 0xff00) == 0)
5733 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5734 AS2 (mov,%A0,%2) CR_TAB
5735 AS2 (mov,%B0,__zero_reg__));
5737 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5740 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5741 AS2 (mov,%A0,%2) CR_TAB
5746 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5747 AS2 (mov,%A0,%2) CR_TAB
5748 AS2 (ldi,%2,hi8(%1)) CR_TAB
5754 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5756 rtx src = operands[1];
5757 int cnst = (GET_CODE (src) == CONST_INT);
5762 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5763 + ((INTVAL (src) & 0xff00) != 0)
5764 + ((INTVAL (src) & 0xff0000) != 0)
5765 + ((INTVAL (src) & 0xff000000) != 0);
5772 if (cnst && ((INTVAL (src) & 0xff) == 0))
5773 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5776 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5777 output_asm_insn (AS2 (mov, %A0, %2), operands);
5779 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5780 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5783 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5784 output_asm_insn (AS2 (mov, %B0, %2), operands);
5786 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5787 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5790 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5791 output_asm_insn (AS2 (mov, %C0, %2), operands);
5793 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5794 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5797 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5798 output_asm_insn (AS2 (mov, %D0, %2), operands);
5804 avr_output_bld (rtx operands[], int bit_nr)
5806 static char s[] = "bld %A0,0";
5808 s[5] = 'A' + (bit_nr >> 3);
5809 s[8] = '0' + (bit_nr & 7);
5810 output_asm_insn (s, operands);
5814 avr_output_addr_vec_elt (FILE *stream, int value)
5816 switch_to_section (progmem_section);
5817 if (AVR_HAVE_JMP_CALL)
5818 fprintf (stream, "\t.word gs(.L%d)\n", value);
5820 fprintf (stream, "\trjmp .L%d\n", value);
5823 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5824 registers (for a define_peephole2) in the current function. */
5827 avr_peep2_scratch_safe (rtx scratch)
5829 if ((interrupt_function_p (current_function_decl)
5830 || signal_function_p (current_function_decl))
5831 && leaf_function_p ())
5833 int first_reg = true_regnum (scratch);
5834 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5837 for (reg = first_reg; reg <= last_reg; reg++)
5839 if (!df_regs_ever_live_p (reg))
5846 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5847 or memory location in the I/O space (QImode only).
5849 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5850 Operand 1: register operand to test, or CONST_INT memory address.
5851 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5852 Operand 3: label to jump to if the test is true. */
5855 avr_out_sbxx_branch (rtx insn, rtx operands[])
5857 enum rtx_code comp = GET_CODE (operands[0]);
5858 int long_jump = (get_attr_length (insn) >= 4);
5859 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5863 else if (comp == LT)
5867 comp = reverse_condition (comp);
5869 if (GET_CODE (operands[1]) == CONST_INT)
5871 if (INTVAL (operands[1]) < 0x40)
5874 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5876 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5880 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5882 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5884 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5887 else /* GET_CODE (operands[1]) == REG */
5889 if (GET_MODE (operands[1]) == QImode)
5892 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5894 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5896 else /* HImode or SImode */
5898 static char buf[] = "sbrc %A1,0";
5899 int bit_nr = exact_log2 (INTVAL (operands[2])
5900 & GET_MODE_MASK (GET_MODE (operands[1])));
5902 buf[3] = (comp == EQ) ? 's' : 'c';
5903 buf[6] = 'A' + (bit_nr >> 3);
5904 buf[9] = '0' + (bit_nr & 7);
5905 output_asm_insn (buf, operands);
5910 return (AS1 (rjmp,.+4) CR_TAB
5913 return AS1 (rjmp,%3);
5917 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5920 avr_asm_out_ctor (rtx symbol, int priority)
5922 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5923 default_ctor_section_asm_out_constructor (symbol, priority);
5926 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5929 avr_asm_out_dtor (rtx symbol, int priority)
5931 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5932 default_dtor_section_asm_out_destructor (symbol, priority);
5935 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5938 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5940 if (TYPE_MODE (type) == BLKmode)
5942 HOST_WIDE_INT size = int_size_in_bytes (type);
5943 return (size == -1 || size > 8);