1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
/* The LDD displacement field reaches at most 63; subtracting the access
   size of MODE keeps the last byte of a multi-byte access in range.  */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
/* Forward declarations for the static helpers and target hooks defined
   later in this file.  */
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_OS_task_function_p (tree);
55 static int avr_OS_main_function_p (tree);
56 static int avr_regs_to_save (HARD_REG_SET *);
57 static int get_sequence_length (rtx insns);
58 static int sequent_regs_live (void);
59 static const char *ptrreg_to_str (int);
60 static const char *cond_string (enum rtx_code);
61 static int avr_num_arg_regs (enum machine_mode, tree);
63 static RTX_CODE compare_condition (rtx insn);
64 static int compare_sign_p (rtx insn);
65 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
66 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
68 const struct attribute_spec avr_attribute_table[];
69 static bool avr_assemble_integer (rtx, unsigned int, int);
70 static void avr_file_start (void);
71 static void avr_file_end (void);
72 static void avr_asm_function_end_prologue (FILE *);
73 static void avr_asm_function_begin_epilogue (FILE *);
74 static rtx avr_function_value (const_tree, const_tree, bool);
75 static void avr_insert_attributes (tree, tree *);
76 static void avr_asm_init_sections (void);
77 static unsigned int avr_section_type_flags (tree, const char *, int);
79 static void avr_reorg (void);
80 static void avr_asm_out_ctor (rtx, int);
81 static void avr_asm_out_dtor (rtx, int);
82 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
83 static bool avr_rtx_costs (rtx, int, int, int *);
84 static int avr_address_cost (rtx);
85 static bool avr_return_in_memory (const_tree, const_tree);
86 static struct machine_function * avr_init_machine_status (void);
87 static rtx avr_builtin_setjmp_frame_value (void);
88 static bool avr_hard_regno_scratch_ok (unsigned int);
90 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): FIRST_CUM_REG is 26 while the comment names r25 as the
   first argument register; the cumulative counter presumably starts just
   above the first usable register and counts downward — confirm against
   avr_num_arg_regs / function_arg.  */
91 #define FIRST_CUM_REG 26
93 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
94 static GTY(()) rtx tmp_reg_rtx;
96 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
97 static GTY(()) rtx zero_reg_rtx;
99 /* AVR register names {"r0", "r1", ..., "r31"} */
100 static const char *const avr_regnames[] = REGISTER_NAMES;
102 /* This holds the last insn address. */
103 static int last_insn_address = 0;
105 /* Preprocessor macros to define depending on MCU type. */
106 const char *avr_extra_arch_macro;
108 /* Current architecture. */
109 const struct base_arch_s *avr_current_arch;
/* Output section used for data placed in program memory (flash).  */
111 section *progmem_section;
/* One entry per AVR architecture level; the flag fields correspond to the
   members of struct base_arch_s and the final string is the __AVR_ARCH__
   macro to predefine (NULL for the "unknown" placeholder).  */
113 static const struct base_arch_s avr_arch_types[] = {
114 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
115 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
116 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
117 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
118 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
119 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
120 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
121 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
122 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
123 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
124 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
127 /* These names are used as the index into the avr_arch_types[] table
146 const char *const name;
147 int arch; /* index in avr_arch_types[] */
148 /* Must lie outside user's namespace. NULL == no macro. */
149 const char *const macro;
152 /* List of all known AVR MCU types - if updated, it has to be kept
153 in sync in several places (FIXME: is there a better way?):
155 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
156 - t-avr (MULTILIB_MATCHES)
157 - gas/config/tc-avr.c
/* Device table: maps each -mmcu= name to its architecture index
   (ARCH_AVR*) and the device-specific CPP macro to predefine (NULL for
   the generic per-architecture entries).  Terminated by a NULL name.  */
160 static const struct mcu_type_s avr_mcu_types[] = {
161 /* Classic, <= 8K. */
162 { "avr2", ARCH_AVR2, NULL },
163 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
164 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
165 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
166 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
167 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
168 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
169 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
170 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
171 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
172 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
173 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
174 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
175 /* Classic + MOVW, <= 8K. */
176 { "avr25", ARCH_AVR25, NULL },
177 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
178 { "attiny13a", ARCH_AVR25, "__AVR_ATtiny13A__" },
179 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
180 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
181 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
182 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
183 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
184 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
185 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
186 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
187 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
188 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
189 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
190 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
191 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
192 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
193 /* Classic, > 8K, <= 64K. */
194 { "avr3", ARCH_AVR3, NULL },
195 { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
196 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
197 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
198 /* Classic, == 128K. */
199 { "avr31", ARCH_AVR31, NULL },
200 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
201 /* Classic + MOVW + JMP/CALL. */
202 { "avr35", ARCH_AVR35, NULL },
203 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
204 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
205 { "attiny167", ARCH_AVR35, "__AVR_ATtiny167__" },
206 /* Enhanced, <= 8K. */
207 { "avr4", ARCH_AVR4, NULL },
208 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
209 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
210 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
211 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
212 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
213 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
214 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
215 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
216 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
217 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
218 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
219 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
220 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
221 /* Enhanced, > 8K, <= 64K. */
222 { "avr5", ARCH_AVR5, NULL },
223 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
224 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
225 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
226 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
227 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
228 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
229 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
230 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
231 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
232 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
233 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
234 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
235 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
236 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
237 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
238 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
239 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
240 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
241 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
242 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
243 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
244 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
245 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
246 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
247 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
248 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
249 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
250 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
251 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
252 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
253 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
254 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
255 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
256 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
257 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
258 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
259 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
260 { "atmega32m1", ARCH_AVR5, "__AVR_ATmega32M1__" },
261 { "atmega32c1", ARCH_AVR5, "__AVR_ATmega32C1__" },
262 { "atmega32u4", ARCH_AVR5, "__AVR_ATmega32U4__" },
263 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
264 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
265 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
266 /* Enhanced, == 128K. */
267 { "avr51", ARCH_AVR51, NULL },
268 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
269 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
270 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
271 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
272 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
273 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
274 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
/* 3-byte PC devices (> 128K flash).  */
276 { "avr6", ARCH_AVR6, NULL },
277 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
278 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
279 /* Assembler only. */
280 { "avr1", ARCH_AVR1, NULL },
281 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
282 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
283 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
284 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
285 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
286 { NULL, ARCH_UNKNOWN, NULL }
/* Minimum number of switch cases before a tablejump is considered.
   The huge default effectively disables tablejumps; avr_override_options
   lowers it to 8 or 17 when tablejumps are enabled.  */
289 int avr_case_values_threshold = 30000;
291 /* Initialize the GCC target structure. */
/* Each #undef/#define pair overrides a default target hook; the
   TARGET_INITIALIZER expansion at the end collects them into targetm.  */
292 #undef TARGET_ASM_ALIGNED_HI_OP
293 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
294 #undef TARGET_ASM_ALIGNED_SI_OP
295 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
296 #undef TARGET_ASM_UNALIGNED_HI_OP
297 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
298 #undef TARGET_ASM_UNALIGNED_SI_OP
299 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
300 #undef TARGET_ASM_INTEGER
301 #define TARGET_ASM_INTEGER avr_assemble_integer
302 #undef TARGET_ASM_FILE_START
303 #define TARGET_ASM_FILE_START avr_file_start
304 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
305 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
306 #undef TARGET_ASM_FILE_END
307 #define TARGET_ASM_FILE_END avr_file_end
309 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
310 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
311 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
312 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
313 #undef TARGET_FUNCTION_VALUE
314 #define TARGET_FUNCTION_VALUE avr_function_value
315 #undef TARGET_ATTRIBUTE_TABLE
316 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
317 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
318 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
319 #undef TARGET_INSERT_ATTRIBUTES
320 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
321 #undef TARGET_SECTION_TYPE_FLAGS
322 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
323 #undef TARGET_RTX_COSTS
324 #define TARGET_RTX_COSTS avr_rtx_costs
325 #undef TARGET_ADDRESS_COST
326 #define TARGET_ADDRESS_COST avr_address_cost
327 #undef TARGET_MACHINE_DEPENDENT_REORG
328 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
330 #undef TARGET_RETURN_IN_MEMORY
331 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
333 #undef TARGET_STRICT_ARGUMENT_NAMING
334 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
336 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
337 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
339 #undef TARGET_HARD_REGNO_SCRATCH_OK
340 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
/* The single definition of the target hook vector for this backend.  */
342 struct gcc_target targetm = TARGET_INITIALIZER;
/* Process -mmcu= and related options: look up the requested MCU,
   select the architecture entry and its CPP macro, tune the tablejump
   threshold, and create the tmp/zero register RTXes.  On an unknown
   MCU name the known names are listed on stderr.  */
345 avr_override_options (void)
347 const struct mcu_type_s *t;
/* NOTE(review): presumably disabled because address 0 can be a valid
   data address on AVR — confirm against the full source.  */
349 flag_delete_null_pointer_checks = 0;
351 for (t = avr_mcu_types; t->name; t++)
352 if (strcmp (t->name, avr_mcu_name) == 0)
357 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
359 for (t = avr_mcu_types; t->name; t++)
360 fprintf (stderr," %s\n", t->name);
363 avr_current_arch = &avr_arch_types[t->arch];
364 avr_extra_arch_macro = t->macro;
/* Allow tablejumps when optimizing: cheaper threshold when RJMP/RCALL
   only or call-prologues are in use.  */
366 if (optimize && !TARGET_NO_TABLEJUMP)
367 avr_case_values_threshold =
368 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
370 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
371 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
373 init_machine_status = avr_init_machine_status;
376 /* return register class from register number. */
/* Indexed by hard register number 0..33 (r0-r31 plus SPL/SPH);
   consumed by avr_regno_reg_class below.  */
378 static const int reg_class_tab[]={
379 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
380 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
381 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
382 GENERAL_REGS, /* r0 - r15 */
383 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
384 LD_REGS, /* r16 - 23 */
385 ADDW_REGS,ADDW_REGS, /* r24,r25 */
386 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
387 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
388 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
389 STACK_REG,STACK_REG /* SPL,SPH */
392 /* Function to set up the backend function structure. */
/* GC-allocated and zero-cleared, so every per-function flag
   (is_naked, is_interrupt, ...) starts out false.  */
394 static struct machine_function *
395 avr_init_machine_status (void)
397 return ((struct machine_function *)
398 ggc_alloc_cleared (sizeof (struct machine_function)));
401 /* Return register class for register R. */
/* Simple table lookup into reg_class_tab above.  */
404 avr_regno_reg_class (int r)
407 return reg_class_tab[r];
411 /* Return nonzero if FUNC is a naked function. */
/* "naked" is a type attribute, hence the TYPE_ATTRIBUTES lookup
   (contrast with interrupt/signal, which are decl attributes).  */
414 avr_naked_function_p (tree func)
418 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
420 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
421 return a != NULL_TREE;
424 /* Return nonzero if FUNC is an interrupt function as specified
425 by the "interrupt" attribute. */
/* Non-FUNCTION_DECL trees are tolerated here (no assert), unlike
   avr_naked_function_p above.  */
428 interrupt_function_p (tree func)
432 if (TREE_CODE (func) != FUNCTION_DECL)
435 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
436 return a != NULL_TREE;
439 /* Return nonzero if FUNC is a signal function as specified
440 by the "signal" attribute. */
/* Same shape as interrupt_function_p; "signal" handlers differ from
   "interrupt" ones in how the prologue treats the I flag.  */
443 signal_function_p (tree func)
447 if (TREE_CODE (func) != FUNCTION_DECL)
450 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
451 return a != NULL_TREE;
454 /* Return nonzero if FUNC is a OS_task function. */
/* Looked up on the function TYPE, like "naked".  */
457 avr_OS_task_function_p (tree func)
461 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
463 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
464 return a != NULL_TREE;
467 /* Return nonzero if FUNC is a OS_main function. */
/* Looked up on the function TYPE, like "naked" and "OS_task".  */
470 avr_OS_main_function_p (tree func)
474 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
476 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
477 return a != NULL_TREE;
480 /* Return the number of hard registers to push/pop in the prologue/epilogue
481 of the current function, and optionally store these registers in SET.
   SET may be NULL when only the count is wanted.  */
484 avr_regs_to_save (HARD_REG_SET *set)
487 int int_or_sig_p = (interrupt_function_p (current_function_decl)
488 || signal_function_p (current_function_decl));
490 if (!reload_completed)
491 cfun->machine->is_leaf = leaf_function_p ();
494 CLEAR_HARD_REG_SET (*set);
497 /* No need to save any registers if the function never returns or
498 has the "OS_task" or "OS_main" attribute. */
499 if (TREE_THIS_VOLATILE (current_function_decl)
500 || cfun->machine->is_OS_task
501 || cfun->machine->is_OS_main)
504 for (reg = 0; reg < 32; reg++)
506 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
507 any global register variables. */
/* Save call-used regs in non-leaf interrupt/signal handlers, and any
   live register that the handler or callee-save convention requires,
   except the Y pair when it will serve as frame pointer.  */
511 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
512 || (df_regs_ever_live_p (reg)
513 && (int_or_sig_p || !call_used_regs[reg])
514 && !(frame_pointer_needed
515 && (reg == REG_Y || reg == (REG_Y+1)))))
518 SET_HARD_REG_BIT (*set, reg);
525 /* Compute offset between arg_pointer and frame_pointer. */
528 initial_elimination_offset (int from, int to)
530 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
/* 2 bytes for the saved frame pointer itself when one is used.  */
534 int offset = frame_pointer_needed ? 2 : 0;
/* Return address on the stack: 3 bytes on devices with EIJMP/EICALL
   (3-byte PC), otherwise 2.  */
535 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
537 offset += avr_regs_to_save (NULL);
538 return get_frame_size () + (avr_pc_size) + 1 + offset;
542 /* Actual start of frame is virtual_stack_vars_rtx; this is offset from
543 the frame pointer by +STARTING_FRAME_OFFSET.
544 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
545 avoids creating add/sub of offset in nonlocal goto and setjmp. */
547 rtx avr_builtin_setjmp_frame_value (void)
549 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
550 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
553 /* Return 1 if the function epilogue is just a single "ret".
   True only when there is nothing to restore: no frame, no frame
   pointer, no saved registers, and no special (interrupt/signal/
   naked/noreturn) handling.  */
556 avr_simple_epilogue (void)
558 return (! frame_pointer_needed
559 && get_frame_size () == 0
560 && avr_regs_to_save (NULL) == 0
561 && ! interrupt_function_p (current_function_decl)
562 && ! signal_function_p (current_function_decl)
563 && ! avr_naked_function_p (current_function_decl)
564 && ! TREE_THIS_VOLATILE (current_function_decl));
567 /* This function checks sequence of live registers.
   NOTE(review): returns the length of the contiguous live-register run
   (live_seq) used by the call-prologues save/restore helpers, or 0 when
   the live registers are not one contiguous sequence — confirm against
   the full source; several lines are elided in this listing.  */
570 sequent_regs_live (void)
576 for (reg = 0; reg < 18; ++reg)
578 if (!call_used_regs[reg])
580 if (df_regs_ever_live_p (reg))
590 if (!frame_pointer_needed)
592 if (df_regs_ever_live_p (REG_Y))
600 if (df_regs_ever_live_p (REG_Y+1))
613 return (cur_seq == live_seq) ? live_seq : 0;
616 /* Obtain the length sequence of insns.
   Sums the "length" attribute over every insn in INSNS; used to pick
   the shorter of two candidate prologue/epilogue sequences.  */
619 get_sequence_length (rtx insns)
624 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
625 length += get_attr_length (insn);
630 /* Output function prologue.
   Emits the RTL prologue: classifies the function from its attributes,
   saves SREG/RAMPZ and registers for interrupt/signal handlers, either
   uses the call-prologues library sequence or pushes registers one by
   one, then sets up the frame via whichever of two candidate insn
   sequences is shorter.  (Several lines are elided in this listing.)  */
633 expand_prologue (void)
638 HOST_WIDE_INT size = get_frame_size();
639 /* Define templates for push instructions. */
640 rtx pushbyte = gen_rtx_MEM (QImode,
641 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
642 rtx pushword = gen_rtx_MEM (HImode,
643 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
646 last_insn_address = 0;
648 /* Init cfun->machine. */
649 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
650 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
651 cfun->machine->is_signal = signal_function_p (current_function_decl);
652 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
653 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
655 /* Prologue: naked. */
656 if (cfun->machine->is_naked)
661 avr_regs_to_save (&set);
662 live_seq = sequent_regs_live ();
/* The out-of-line call-prologues sequence is not usable for any of the
   special function kinds.  */
663 minimize = (TARGET_CALL_PROLOGUES
664 && !cfun->machine->is_interrupt
665 && !cfun->machine->is_signal
666 && !cfun->machine->is_OS_task
667 && !cfun->machine->is_OS_main
670 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
672 if (cfun->machine->is_interrupt)
674 /* Enable interrupts. */
675 insn = emit_insn (gen_enable_interrupt ());
676 RTX_FRAME_RELATED_P (insn) = 1;
/* Push __zero_reg__ and __tmp_reg__, then save SREG through tmp_reg.  */
680 insn = emit_move_insn (pushbyte, zero_reg_rtx);
681 RTX_FRAME_RELATED_P (insn) = 1;
684 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
685 RTX_FRAME_RELATED_P (insn) = 1;
688 insn = emit_move_insn (tmp_reg_rtx,
689 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
690 RTX_FRAME_RELATED_P (insn) = 1;
691 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
692 RTX_FRAME_RELATED_P (insn) = 1;
/* Save RAMPZ too when the Z pair is clobbered (condition's first
   operand elided in this listing).  */
696 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
698 insn = emit_move_insn (tmp_reg_rtx,
699 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
700 RTX_FRAME_RELATED_P (insn) = 1;
701 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
702 RTX_FRAME_RELATED_P (insn) = 1;
705 /* Clear zero reg. */
706 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
707 RTX_FRAME_RELATED_P (insn) = 1;
709 /* Prevent any attempt to delete the setting of ZERO_REG! */
710 emit_use (zero_reg_rtx);
712 if (minimize && (frame_pointer_needed
713 || (AVR_2_BYTE_PC && live_seq > 6)
/* Out-of-line prologue: frame size in X, then call the library save
   sequence parameterized by live_seq.  */
716 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
717 gen_int_mode (size, HImode));
718 RTX_FRAME_RELATED_P (insn) = 1;
721 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
722 gen_int_mode (size + live_seq, HImode)));
723 RTX_FRAME_RELATED_P (insn) = 1;
728 for (reg = 0; reg < 32; ++reg)
730 if (TEST_HARD_REG_BIT (set, reg))
732 /* Emit push of register to save. */
733 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
734 RTX_FRAME_RELATED_P (insn) = 1;
737 if (frame_pointer_needed)
/* OS_task/OS_main skip saving the old frame pointer.  */
739 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
741 /* Push frame pointer. */
742 insn = emit_move_insn (pushword, frame_pointer_rtx);
743 RTX_FRAME_RELATED_P (insn) = 1;
748 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
749 RTX_FRAME_RELATED_P (insn) = 1;
753 /* Creating a frame can be done by direct manipulation of the
754 stack or via the frame pointer. These two methods are:
761 the optimum method depends on function type, stack and frame size.
762 To avoid a complex logic, both methods are tested and shortest
766 rtx sp_plus_insns = NULL_RTX;
768 if (TARGET_TINY_STACK)
770 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
771 over 'sbiw' (2 cycles, same size). */
772 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
776 /* Normal sized addition. */
777 myfp = frame_pointer_rtx;
780 /* Method 1-Adjust frame pointer. */
783 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
784 RTX_FRAME_RELATED_P (insn) = 1;
787 emit_move_insn (myfp,
788 gen_rtx_PLUS (GET_MODE(myfp), myfp,
791 RTX_FRAME_RELATED_P (insn) = 1;
793 /* Copy to stack pointer. */
/* The SP copy must be interrupt-safe on 2-byte SP devices; pick the
   irq-off/irq-on mover according to the function kind.  */
794 if (TARGET_TINY_STACK)
796 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
797 RTX_FRAME_RELATED_P (insn) = 1;
799 else if (TARGET_NO_INTERRUPTS
800 || cfun->machine->is_signal
801 || cfun->machine->is_OS_main
804 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
806 RTX_FRAME_RELATED_P (insn) = 1;
808 else if (cfun->machine->is_interrupt)
810 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
812 RTX_FRAME_RELATED_P (insn) = 1;
816 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
817 RTX_FRAME_RELATED_P (insn) = 1;
820 fp_plus_insns = get_insns ();
823 /* Method 2-Adjust Stack pointer. */
829 emit_move_insn (stack_pointer_rtx,
830 gen_rtx_PLUS (HImode,
834 RTX_FRAME_RELATED_P (insn) = 1;
837 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
838 RTX_FRAME_RELATED_P (insn) = 1;
840 sp_plus_insns = get_insns ();
844 /* Use shortest method. */
845 if (size <= 6 && (get_sequence_length (sp_plus_insns)
846 < get_sequence_length (fp_plus_insns)))
847 emit_insn (sp_plus_insns);
849 emit_insn (fp_plus_insns);
855 /* Output summary at end of function prologue.
   Emits a human-readable comment into the asm describing the prologue
   kind (naked / Interrupt / Signal / plain) and the frame size.  */
858 avr_asm_function_end_prologue (FILE *file)
860 if (cfun->machine->is_naked)
862 fputs ("/* prologue: naked */\n", file);
866 if (cfun->machine->is_interrupt)
868 fputs ("/* prologue: Interrupt */\n", file);
870 else if (cfun->machine->is_signal)
872 fputs ("/* prologue: Signal */\n", file);
875 fputs ("/* prologue: function */\n", file);
877 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
882 /* Implement EPILOGUE_USES.
   NOTE(review): the elided lines presumably restrict this to after
   reload and a specific REGNO; the visible condition makes the result
   depend on the function being an interrupt/signal handler.  */
885 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
889 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
894 /* Output RTL epilogue.
   Mirror of expand_prologue: tear down the frame (out-of-line restores
   or the shorter of two inline sequences), pop saved registers, restore
   RAMPZ/SREG/tmp/zero for interrupt and signal handlers, and emit the
   return.  (Several lines are elided in this listing.)  */
897 expand_epilogue (void)
903 HOST_WIDE_INT size = get_frame_size();
905 /* epilogue: naked */
906 if (cfun->machine->is_naked)
908 emit_jump_insn (gen_return ());
912 avr_regs_to_save (&set);
913 live_seq = sequent_regs_live ();
914 minimize = (TARGET_CALL_PROLOGUES
915 && !cfun->machine->is_interrupt
916 && !cfun->machine->is_signal
917 && !cfun->machine->is_OS_task
918 && !cfun->machine->is_OS_main
/* Out-of-line restore path, matching gen_call_prologue_saves.  */
921 if (minimize && (frame_pointer_needed || live_seq > 4))
923 if (frame_pointer_needed)
925 /* Get rid of frame. */
926 emit_move_insn(frame_pointer_rtx,
927 gen_rtx_PLUS (HImode, frame_pointer_rtx,
928 gen_int_mode (size, HImode)));
932 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
935 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
939 if (frame_pointer_needed)
943 /* Try two methods to adjust stack and select shortest. */
946 rtx sp_plus_insns = NULL_RTX;
948 if (TARGET_TINY_STACK)
950 /* The high byte (r29) doesn't change - prefer 'subi'
951 (1 cycle) over 'sbiw' (2 cycles, same size). */
952 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
956 /* Normal sized addition. */
957 myfp = frame_pointer_rtx;
960 /* Method 1-Adjust frame pointer. */
963 emit_move_insn (myfp,
964 gen_rtx_PLUS (HImode, myfp,
968 /* Copy to stack pointer. */
/* As in the prologue, the SP write must respect interrupt safety.  */
969 if (TARGET_TINY_STACK)
971 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
973 else if (TARGET_NO_INTERRUPTS
974 || cfun->machine->is_signal)
976 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
979 else if (cfun->machine->is_interrupt)
981 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
986 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
989 fp_plus_insns = get_insns ();
992 /* Method 2-Adjust Stack pointer. */
997 emit_move_insn (stack_pointer_rtx,
998 gen_rtx_PLUS (HImode, stack_pointer_rtx,
1002 sp_plus_insns = get_insns ();
1006 /* Use shortest method. */
1007 if (size <= 5 && (get_sequence_length (sp_plus_insns)
1008 < get_sequence_length (fp_plus_insns)))
1009 emit_insn (sp_plus_insns);
1011 emit_insn (fp_plus_insns);
1013 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1015 /* Restore previous frame_pointer. */
1016 emit_insn (gen_pophi (frame_pointer_rtx));
1019 /* Restore used registers. */
/* Pop in reverse order of the prologue's pushes.  */
1020 for (reg = 31; reg >= 0; --reg)
1022 if (TEST_HARD_REG_BIT (set, reg))
1023 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
1025 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1027 /* Restore RAMPZ using tmp reg as scratch. */
1029 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
1031 emit_insn (gen_popqi (tmp_reg_rtx));
1032 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
1036 /* Restore SREG using tmp reg as scratch. */
1037 emit_insn (gen_popqi (tmp_reg_rtx));
1039 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
1042 /* Restore tmp REG. */
1043 emit_insn (gen_popqi (tmp_reg_rtx));
1045 /* Restore zero REG. */
1046 emit_insn (gen_popqi (zero_reg_rtx));
1049 emit_jump_insn (gen_return ());
1053 /* Output summary messages at beginning of function epilogue.
   Just a marker comment in the generated assembly.  */
1056 avr_asm_function_begin_epilogue (FILE *file)
1058 fprintf (file, "/* epilogue start */\n");
1061 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1062 machine for a memory operand of mode MODE.
   Accepts: a base register (strict or non-strict check per STRICT),
   a constant address, base+constant-displacement within MAX_LD_OFFSET,
   and PRE_DEC/POST_INC addressing.  Returns the matching reg class as
   an int, or 0 for an illegitimate address.  */
1065 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1067 enum reg_class r = NO_REGS;
1069 if (TARGET_ALL_DEBUG)
1071 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1072 GET_MODE_NAME(mode),
1073 strict ? "(strict)": "",
1074 reload_completed ? "(reload_completed)": "",
1075 reload_in_progress ? "(reload_in_progress)": "",
1076 reg_renumber ? "(reg_renumber)" : "");
1077 if (GET_CODE (x) == PLUS
1078 && REG_P (XEXP (x, 0))
1079 && GET_CODE (XEXP (x, 1)) == CONST_INT
1080 && INTVAL (XEXP (x, 1)) >= 0
1081 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1084 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1085 true_regnum (XEXP (x, 0)));
1088 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1089 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1091 else if (CONSTANT_ADDRESS_P (x))
1093 else if (GET_CODE (x) == PLUS
1094 && REG_P (XEXP (x, 0))
1095 && GET_CODE (XEXP (x, 1)) == CONST_INT
1096 && INTVAL (XEXP (x, 1)) >= 0)
/* Displacement addressing: only Y/Z (and the soft frame/arg pointers)
   support LDD-style offsets; "fit" checks the displacement range.  */
1098 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1102 || REGNO (XEXP (x,0)) == REG_Y
1103 || REGNO (XEXP (x,0)) == REG_Z)
1104 r = BASE_POINTER_REGS;
1105 if (XEXP (x,0) == frame_pointer_rtx
1106 || XEXP (x,0) == arg_pointer_rtx)
1107 r = BASE_POINTER_REGS;
1109 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1112 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1113 && REG_P (XEXP (x, 0))
1114 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1115 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1119 if (TARGET_ALL_DEBUG)
1121 fprintf (stderr, " ret = %c\n", r + '0');
1123 return r == NO_REGS ? 0 : (int)r;
1126 /* Attempts to replace X with a valid
1127 memory address for an operand of mode MODE.
   reg+reg sums and reg+const sums whose offset exceeds MAX_LD_OFFSET
   are forced into a register; frame-pointer-based addresses are left
   alone.  */
1130 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1133 if (TARGET_ALL_DEBUG)
1135 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1139 if (GET_CODE (oldx) == PLUS
1140 && REG_P (XEXP (oldx,0)))
1142 if (REG_P (XEXP (oldx,1)))
1143 x = force_reg (GET_MODE (oldx), oldx);
1144 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1146 int offs = INTVAL (XEXP (oldx,1));
1147 if (frame_pointer_rtx != XEXP (oldx,0))
1148 if (offs > MAX_LD_OFFSET (mode))
1150 if (TARGET_ALL_DEBUG)
1151 fprintf (stderr, "force_reg (big offset)\n");
1152 x = force_reg (GET_MODE (oldx), oldx);
1160 /* Return a pointer register name as a string.
   REGNO must be one of the X/Y/Z pointer registers; anything else
   reports an operand constraint error.  */
1163 ptrreg_to_str (int regno)
1167 case REG_X: return "X";
1168 case REG_Y: return "Y";
1169 case REG_Z: return "Z";
1171 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1176 /* Return the condition name as a string.
1177 Used in conditional jump constructing.
   NOTE(review): the switch body is largely elided in this listing; the
   visible CC_OVERFLOW_UNUSABLE checks indicate that signed conditions
   map to different mnemonics when the V flag is not usable — confirm
   against the full source.  */
1180 cond_string (enum rtx_code code)
1189 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1194 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1207 /* Output ADDR to FILE as address.
   Handles plain pointer regs, PRE_DEC/POST_INC forms, and constant
   addresses; function/label addresses are wrapped in gs() for the
   linker stub mechanism.  */
1210 print_operand_address (FILE *file, rtx addr)
1212 switch (GET_CODE (addr))
/* NOTE(review): ptrreg_to_str's result is passed as the fprintf format
   string.  The returned names ("X"/"Y"/"Z") contain no '%', so this is
   harmless, but fputs would be the safer idiom.  */
1215 fprintf (file, ptrreg_to_str (REGNO (addr)));
1219 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1223 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1227 if (CONSTANT_ADDRESS_P (addr)
1228 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1229 || GET_CODE (addr) == LABEL_REF))
1231 fprintf (file, "gs(");
1232 output_addr_const (file,addr);
1233 fprintf (file ,")");
1236 output_addr_const (file, addr);
1241 /* Output X as assembler operand to file FILE.
   CODE selects a byte of a multi-byte operand ('A'..'D' add 0..3 to
   the register number or constant), or a special format: '~'/'!' for
   jump/call flavor, 'o' for a displacement, 'p'/'r' for pointer-reg
   forms, 'j'/'k' for condition strings.  */
1244 print_operand (FILE *file, rtx x, int code)
1248 if (code >= 'A' && code <= 'D')
1253 if (!AVR_HAVE_JMP_CALL)
1256 else if (code == '!')
1258 if (AVR_HAVE_EIJMP_EICALL)
1263 if (x == zero_reg_rtx)
1264 fprintf (file, "__zero_reg__");
/* NOTE(review): non-literal format string (register names contain no
   '%', so harmless); fputs would be the safer idiom.  */
1266 fprintf (file, reg_names[true_regnum (x) + abcd]);
1268 else if (GET_CODE (x) == CONST_INT)
1269 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1270 else if (GET_CODE (x) == MEM)
1272 rtx addr = XEXP (x,0);
1274 if (CONSTANT_P (addr) && abcd)
1277 output_address (addr);
1278 fprintf (file, ")+%d", abcd);
1280 else if (code == 'o')
1282 if (GET_CODE (addr) != PLUS)
1283 fatal_insn ("bad address, not (reg+disp):", addr);
1285 print_operand (file, XEXP (addr, 1), 0);
1287 else if (code == 'p' || code == 'r')
1289 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1290 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1293 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1295 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1297 else if (GET_CODE (addr) == PLUS)
1299 print_operand_address (file, XEXP (addr,0));
1300 if (REGNO (XEXP (addr, 0)) == REG_X)
1301 fatal_insn ("internal compiler error. Bad address:"
1304 print_operand (file, XEXP (addr,1), code);
1307 print_operand_address (file, addr);
1309 else if (GET_CODE (x) == CONST_DOUBLE)
1313 if (GET_MODE (x) != SFmode)
1314 fatal_insn ("internal compiler error. Unknown mode:", x);
1315 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1316 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1317 fprintf (file, "0x%lx", val);
1319 else if (code == 'j')
1320 fputs (cond_string (GET_CODE (x)), file);
1321 else if (code == 'k')
1322 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1324 print_operand_address (file, x);
1327 /* Update the condition code in the INSN. */
1330 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
/* Dispatch on the md-file "cc" attribute of INSN and record what the
   insn did to the flags in cc_status (case labels elided in this view). */
1334 switch (get_attr_cc (insn))
1337 /* Insn does not affect CC at all. */
1345 set = single_set (insn);
1349 cc_status.flags |= CC_NO_OVERFLOW;
1350 cc_status.value1 = SET_DEST (set);
1355 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1356 The V flag may or may not be known but that's ok because
1357 alter_cond will change tests to use EQ/NE. */
1358 set = single_set (insn);
1362 cc_status.value1 = SET_DEST (set);
1363 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
/* Compare insns: the flags reflect the compared value (SET_SRC). */
1368 set = single_set (insn);
1371 cc_status.value1 = SET_SRC (set);
1375 /* Insn doesn't leave CC in a usable state. */
1378 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1379 set = single_set (insn);
1382 rtx src = SET_SRC (set);
1384 if (GET_CODE (src) == ASHIFTRT
1385 && GET_MODE (src) == QImode)
1387 rtx x = XEXP (src, 1);
1389 if (GET_CODE (x) == CONST_INT
1393 cc_status.value1 = SET_DEST (set);
1394 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1402 /* Return maximum number of consecutive registers of
1403 class CLASS needed to hold a value of mode MODE. */
1406 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
/* Ceiling division of the mode size by the word size; the class is
   irrelevant on AVR since all registers are byte-wide. */
1408 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1411 /* Choose mode for jump insn:
1412 1 - relative jump in range -63 <= x <= 62 ;
1413 2 - relative jump in range -2046 <= x <= 2045 ;
1414 3 - absolute jump (only for ATmega[16]03). */
1417 avr_jump_mode (rtx x, rtx insn)
/* X may be a label_ref wrapper or the target insn itself; look up the
   byte addresses assigned by shorten_branches. */
1419 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1420 ? XEXP (x, 0) : x));
1421 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1422 int jump_distance = cur_addr - dest_addr;
/* Ranges match BRxx (7-bit) and RJMP (12-bit) displacement limits. */
1424 if (-63 <= jump_distance && jump_distance <= 62)
1426 else if (-2046 <= jump_distance && jump_distance <= 2045)
1428 else if (AVR_HAVE_JMP_CALL)
1434 /* return an AVR condition jump commands.
1435 X is a comparison RTX.
1436 LEN is a number returned by avr_jump_mode function.
1437 if REVERSE nonzero then condition code in X must be reversed. */
1440 ret_cond_branch (rtx x, int len, int reverse)
1442 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
/* Composite conditions (GT/GE-style) have no single AVR branch; they
   are synthesized from BREQ plus a signed/unsigned branch, with the
   skip offsets sized to the jump length LEN.  When V is unusable the
   N-flag variants (brmi/brpl) replace the signed ones. */
1447 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1448 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1450 len == 2 ? (AS1 (breq,.+4) CR_TAB
1451 AS1 (brmi,.+2) CR_TAB
1453 (AS1 (breq,.+6) CR_TAB
1454 AS1 (brmi,.+4) CR_TAB
1458 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1460 len == 2 ? (AS1 (breq,.+4) CR_TAB
1461 AS1 (brlt,.+2) CR_TAB
1463 (AS1 (breq,.+6) CR_TAB
1464 AS1 (brlt,.+4) CR_TAB
/* Unsigned "above" case uses brlo to skip over the taken jump. */
1467 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1469 len == 2 ? (AS1 (breq,.+4) CR_TAB
1470 AS1 (brlo,.+2) CR_TAB
1472 (AS1 (breq,.+6) CR_TAB
1473 AS1 (brlo,.+4) CR_TAB
/* "Greater or equal"-style cases: equality itself branches to %0. */
1476 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1477 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1479 len == 2 ? (AS1 (breq,.+2) CR_TAB
1480 AS1 (brpl,.+2) CR_TAB
1482 (AS1 (breq,.+2) CR_TAB
1483 AS1 (brpl,.+4) CR_TAB
1486 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1488 len == 2 ? (AS1 (breq,.+2) CR_TAB
1489 AS1 (brge,.+2) CR_TAB
1491 (AS1 (breq,.+2) CR_TAB
1492 AS1 (brge,.+4) CR_TAB
1495 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1497 len == 2 ? (AS1 (breq,.+2) CR_TAB
1498 AS1 (brsh,.+2) CR_TAB
1500 (AS1 (breq,.+2) CR_TAB
1501 AS1 (brsh,.+4) CR_TAB
/* Simple conditions map 1:1 onto br%j1 / br%k1 (possibly reversed),
   with RJMP/JMP fallbacks for longer reach. */
1509 return AS1 (br%k1,%0);
1511 return (AS1 (br%j1,.+2) CR_TAB
1514 return (AS1 (br%j1,.+4) CR_TAB
1523 return AS1 (br%j1,%0);
1525 return (AS1 (br%k1,.+2) CR_TAB
1528 return (AS1 (br%k1,.+4) CR_TAB
1536 /* Predicate function for immediate operand which fits to byte (8bit) */
1539 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
/* True for CONST_INT values in [0, 255]. */
1541 return (GET_CODE (op) == CONST_INT
1542 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1545 /* Output all insn addresses and their sizes into the assembly language
1546 output file. This is helpful for debugging whether the length attributes
1547 in the md file are correct.
1548 Output insn cost for next insn. */
1551 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1552 int num_operands ATTRIBUTE_UNUSED)
1554 int uid = INSN_UID (insn);
1556 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
/* Emit: address, size (delta to previous insn's address), rtx cost —
   as an assembler comment, only under the debug options above. */
1558 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1559 INSN_ADDRESSES (uid),
1560 INSN_ADDRESSES (uid) - last_insn_address,
1561 rtx_cost (PATTERN (insn), INSN));
/* Remember this insn's address for the next size computation. */
1563 last_insn_address = INSN_ADDRESSES (uid);
1566 /* Return 0 if undefined, 1 if always true or always false. */
1569 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
/* MAX is the all-ones value for the mode (0 for unsupported modes,
   which disables the whole check). */
1571 unsigned int max = (mode == QImode ? 0xff :
1572 mode == HImode ? 0xffff :
1573 mode == SImode ? 0xffffffff : 0);
1574 if (max && op && GET_CODE (x) == CONST_INT)
/* Only unsigned comparisons can be decided here. */
1576 if (unsigned_condition (op) != op)
1579 if (max != (INTVAL (x) & max)
1580 && INTVAL (x) != 0xff)
1587 /* Returns nonzero if REGNO is the number of a hard
1588 register in which function arguments are sometimes passed. */
1591 function_arg_regno_p(int r)
/* AVR ABI passes arguments in r8..r25. */
1593 return (r >= 8 && r <= 25);
1596 /* Initializing the variable cum for the state at the beginning
1597 of the argument list. */
1600 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1601 tree fndecl ATTRIBUTE_UNUSED)
1604 cum->regno = FIRST_CUM_REG;
1605 if (!libname && fntype)
/* A function is stdarg (varargs) if its arg list does not end with
   void_type_node; such functions get their register count adjusted
   (elided in this view). */
1607 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1608 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1609 != void_type_node));
1615 /* Returns the number of registers to allocate for a function argument. */
1618 avr_num_arg_regs (enum machine_mode mode, tree type)
/* BLKmode arguments get their size from the type; others from MODE. */
1622 if (mode == BLKmode)
1623 size = int_size_in_bytes (type);
1625 size = GET_MODE_SIZE (mode);
1627 /* Align all function arguments to start in even-numbered registers.
1628 Odd-sized arguments leave holes above them. */
1630 return (size + 1) & ~1;
1633 /* Controls whether a function argument is passed
1634 in a register, and which register. */
1637 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1638 int named ATTRIBUTE_UNUSED)
1640 int bytes = avr_num_arg_regs (mode, type);
/* Registers are assigned downward from cum->regno; the argument fits
   only if enough registers remain.  Otherwise it goes on the stack
   (the NULL return is elided in this view). */
1642 if (cum->nregs && bytes <= cum->nregs)
1643 return gen_rtx_REG (mode, cum->regno - bytes);
1648 /* Update the summarizer variable CUM to advance past an argument
1649 in the argument list. */
1652 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1653 int named ATTRIBUTE_UNUSED)
1655 int bytes = avr_num_arg_regs (mode, type);
/* Consume the registers downward; once exhausted, reset regno so all
   remaining arguments are treated as stack-passed. */
1657 cum->nregs -= bytes;
1658 cum->regno -= bytes;
1660 if (cum->nregs <= 0)
1663 cum->regno = FIRST_CUM_REG;
1667 /***********************************************************************
1668 Functions for outputting various mov's for a various modes
1669 ************************************************************************/
/* Emit assembler for a QImode move.  OPERANDS[0] is the destination,
   OPERANDS[1] the source; *L (if non-NULL) receives the insn length. */
1671 output_movqi (rtx insn, rtx operands[], int *l)
1674 rtx dest = operands[0];
1675 rtx src = operands[1];
1683 if (register_operand (dest, QImode))
1685 if (register_operand (src, QImode)) /* mov r,r */
/* Moves to/from the stack pointer use I/O instructions. */
1687 if (test_hard_reg_class (STACK_REG, dest))
1688 return AS2 (out,%0,%1);
1689 else if (test_hard_reg_class (STACK_REG, src))
1690 return AS2 (in,%0,%1);
1692 return AS2 (mov,%0,%1);
1694 else if (CONSTANT_P (src))
/* LDI only works on r16..r31 (LD_REGS). */
1696 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1697 return AS2 (ldi,%0,lo8(%1));
1699 if (GET_CODE (src) == CONST_INT)
/* Special constants 0, 1 and -1 can be built without an LD register. */
1701 if (src == const0_rtx) /* mov r,L */
1702 return AS1 (clr,%0);
1703 else if (src == const1_rtx)
1706 return (AS1 (clr,%0) CR_TAB
1709 else if (src == constm1_rtx)
1711 /* Immediate constants -1 to any register */
1713 return (AS1 (clr,%0) CR_TAB
/* Single-bit constants: clear then set the one bit via bld/bst. */
1718 int bit_nr = exact_log2 (INTVAL (src));
1724 output_asm_insn ((AS1 (clr,%0) CR_TAB
1727 avr_output_bld (operands, bit_nr);
1734 /* Last resort, larger than loading from memory. */
/* Bounce the constant through r31 (an LD register), preserving r31
   in __tmp_reg__. */
1736 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1737 AS2 (ldi,r31,lo8(%1)) CR_TAB
1738 AS2 (mov,%0,r31) CR_TAB
1739 AS2 (mov,r31,__tmp_reg__));
1741 else if (GET_CODE (src) == MEM)
1742 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1744 else if (GET_CODE (dest) == MEM)
/* Storing zero can use the fixed zero register directly. */
1748 if (src == const0_rtx)
1749 operands[1] = zero_reg_rtx;
1751 templ = out_movqi_mr_r (insn, operands, real_l);
1754 output_asm_insn (templ, operands);
/* Emit assembler for a HImode (16-bit) move; mirrors output_movqi but
   handles the stack pointer, MOVW-capable devices and 2-byte constants. */
1763 output_movhi (rtx insn, rtx operands[], int *l)
1766 rtx dest = operands[0];
1767 rtx src = operands[1];
1773 if (register_operand (dest, HImode))
1775 if (register_operand (src, HImode)) /* mov r,r */
1777 if (test_hard_reg_class (STACK_REG, dest))
/* TINY_STACK devices have only SPL. */
1779 if (TARGET_TINY_STACK)
1780 return *l = 1, AS2 (out,__SP_L__,%A1);
1781 /* Use simple load of stack pointer if no interrupts are
1783 else if (TARGET_NO_INTERRUPTS)
1784 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1785 AS2 (out,__SP_L__,%A1));
/* Otherwise disable interrupts around the two-byte SP update by
   saving SREG, writing SP_H with interrupts off, restoring SREG. */
1787 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1789 AS2 (out,__SP_H__,%B1) CR_TAB
1790 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1791 AS2 (out,__SP_L__,%A1));
1793 else if (test_hard_reg_class (STACK_REG, src))
1796 return (AS2 (in,%A0,__SP_L__) CR_TAB
1797 AS2 (in,%B0,__SP_H__));
/* MOVW copies a register pair in one insn where available. */
1803 return (AS2 (movw,%0,%1));
1808 return (AS2 (mov,%A0,%A1) CR_TAB
1812 else if (CONSTANT_P (src))
1814 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1817 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1818 AS2 (ldi,%B0,hi8(%1)));
1821 if (GET_CODE (src) == CONST_INT)
1823 if (src == const0_rtx) /* mov r,L */
1826 return (AS1 (clr,%A0) CR_TAB
1829 else if (src == const1_rtx)
1832 return (AS1 (clr,%A0) CR_TAB
1833 AS1 (clr,%B0) CR_TAB
1836 else if (src == constm1_rtx)
1838 /* Immediate constants -1 to any register */
1840 return (AS1 (clr,%0) CR_TAB
1841 AS1 (dec,%A0) CR_TAB
/* Single-bit constants via clr/clr + set + bld. */
1846 int bit_nr = exact_log2 (INTVAL (src));
1852 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1853 AS1 (clr,%B0) CR_TAB
1856 avr_output_bld (operands, bit_nr);
/* Constants with one zero byte only need one ldi bounce via r31. */
1862 if ((INTVAL (src) & 0xff) == 0)
1865 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1866 AS1 (clr,%A0) CR_TAB
1867 AS2 (ldi,r31,hi8(%1)) CR_TAB
1868 AS2 (mov,%B0,r31) CR_TAB
1869 AS2 (mov,r31,__tmp_reg__));
1871 else if ((INTVAL (src) & 0xff00) == 0)
1874 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1875 AS2 (ldi,r31,lo8(%1)) CR_TAB
1876 AS2 (mov,%A0,r31) CR_TAB
1877 AS1 (clr,%B0) CR_TAB
1878 AS2 (mov,r31,__tmp_reg__));
1882 /* Last resort, equal to loading from memory. */
1884 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1885 AS2 (ldi,r31,lo8(%1)) CR_TAB
1886 AS2 (mov,%A0,r31) CR_TAB
1887 AS2 (ldi,r31,hi8(%1)) CR_TAB
1888 AS2 (mov,%B0,r31) CR_TAB
1889 AS2 (mov,r31,__tmp_reg__));
1891 else if (GET_CODE (src) == MEM)
1892 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1894 else if (GET_CODE (dest) == MEM)
1898 if (src == const0_rtx)
1899 operands[1] = zero_reg_rtx;
1901 templ = out_movhi_mr_r (insn, operands, real_l);
1904 output_asm_insn (templ, operands);
1909 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode load (register <- memory).  OP[0] is the
   destination register, OP[1] the MEM; *L gets the insn count. */
1914 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1918 rtx x = XEXP (src, 0);
1924 if (CONSTANT_ADDRESS_P (x))
/* SREG and low I/O addresses can use IN instead of LDS. */
1926 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1929 return AS2 (in,%0,__SREG__);
1931 if (optimize > 0 && io_address_operand (x, QImode))
1934 return AS2 (in,%0,%1-0x20);
1937 return AS2 (lds,%0,%1);
1939 /* memory access by reg+disp */
1940 else if (GET_CODE (x) == PLUS
1941 && REG_P (XEXP (x,0))
1942 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement beyond LDD's 6-bit range: adjust the pointer around
   the access.  Only Y is expected here. */
1944 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1946 int disp = INTVAL (XEXP (x,1));
1947 if (REGNO (XEXP (x,0)) != REG_Y)
1948 fatal_insn ("incorrect insn:",insn);
1950 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1951 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1952 AS2 (ldd,%0,Y+63) CR_TAB
1953 AS2 (sbiw,r28,%o1-63));
/* Too far even for adiw: full 16-bit add/subtract of Y. */
1955 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1956 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1957 AS2 (ld,%0,Y) CR_TAB
1958 AS2 (subi,r28,lo8(%o1)) CR_TAB
1959 AS2 (sbci,r29,hi8(%o1)));
1961 else if (REGNO (XEXP (x,0)) == REG_X)
1963 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1964 it but I have this situation with extremal optimizing options. */
/* X has no displacement mode: adiw to the address, ld, and restore
   X only if it is still live afterwards. */
1965 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1966 || reg_unused_after (insn, XEXP (x,0)))
1967 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1970 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1971 AS2 (ld,%0,X) CR_TAB
1972 AS2 (sbiw,r26,%o1));
1975 return AS2 (ldd,%0,%1);
1978 return AS2 (ld,%0,%1);
/* Emit assembler for a HImode load (register pair <- memory). */
1982 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1986 rtx base = XEXP (src, 0);
1987 int reg_dest = true_regnum (dest);
1988 int reg_base = true_regnum (base);
1989 /* "volatile" forces reading low byte first, even if less efficient,
1990 for correct operation with 16-bit I/O registers. */
1991 int mem_volatile_p = MEM_VOLATILE_P (src);
/* Destination overlaps the pointer: buffer the low byte in tmp so the
   pointer is not clobbered before the high byte is read. */
1999 if (reg_dest == reg_base) /* R = (R) */
2002 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2003 AS2 (ld,%B0,%1) CR_TAB
2004 AS2 (mov,%A0,__tmp_reg__));
2006 else if (reg_base == REG_X) /* (R26) */
/* X has no LDD: use post-increment, restoring X when still live. */
2008 if (reg_unused_after (insn, base))
2011 return (AS2 (ld,%A0,X+) CR_TAB
2015 return (AS2 (ld,%A0,X+) CR_TAB
2016 AS2 (ld,%B0,X) CR_TAB
2022 return (AS2 (ld,%A0,%1) CR_TAB
2023 AS2 (ldd,%B0,%1+1));
2026 else if (GET_CODE (base) == PLUS) /* (R + i) */
2028 int disp = INTVAL (XEXP (base, 1));
2029 int reg_base = true_regnum (XEXP (base, 0));
/* Displacement past LDD range: shift Y, access, shift back. */
2031 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2033 if (REGNO (XEXP (base, 0)) != REG_Y)
2034 fatal_insn ("incorrect insn:",insn);
2036 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2037 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2038 AS2 (ldd,%A0,Y+62) CR_TAB
2039 AS2 (ldd,%B0,Y+63) CR_TAB
2040 AS2 (sbiw,r28,%o1-62));
2042 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2043 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2044 AS2 (ld,%A0,Y) CR_TAB
2045 AS2 (ldd,%B0,Y+1) CR_TAB
2046 AS2 (subi,r28,lo8(%o1)) CR_TAB
2047 AS2 (sbci,r29,hi8(%o1)));
2049 if (reg_base == REG_X)
2051 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2052 it but I have this situation with extremal
2053 optimization options. */
2056 if (reg_base == reg_dest)
2057 return (AS2 (adiw,r26,%o1) CR_TAB
2058 AS2 (ld,__tmp_reg__,X+) CR_TAB
2059 AS2 (ld,%B0,X) CR_TAB
2060 AS2 (mov,%A0,__tmp_reg__));
2062 return (AS2 (adiw,r26,%o1) CR_TAB
2063 AS2 (ld,%A0,X+) CR_TAB
2064 AS2 (ld,%B0,X) CR_TAB
2065 AS2 (sbiw,r26,%o1+1));
/* Pointer aliases destination: go high byte first via tmp. */
2068 if (reg_base == reg_dest)
2071 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2072 AS2 (ldd,%B0,%B1) CR_TAB
2073 AS2 (mov,%A0,__tmp_reg__));
2077 return (AS2 (ldd,%A0,%A1) CR_TAB
2080 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2082 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2083 fatal_insn ("incorrect insn:", insn);
/* X cannot pre-decrement by 2 directly: sbiw then post-inc loads. */
2087 if (REGNO (XEXP (base, 0)) == REG_X)
2090 return (AS2 (sbiw,r26,2) CR_TAB
2091 AS2 (ld,%A0,X+) CR_TAB
2092 AS2 (ld,%B0,X) CR_TAB
2098 return (AS2 (sbiw,%r1,2) CR_TAB
2099 AS2 (ld,%A0,%p1) CR_TAB
2100 AS2 (ldd,%B0,%p1+1));
2105 return (AS2 (ld,%B0,%1) CR_TAB
2108 else if (GET_CODE (base) == POST_INC) /* (R++) */
2110 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2111 fatal_insn ("incorrect insn:", insn);
2114 return (AS2 (ld,%A0,%1) CR_TAB
2117 else if (CONSTANT_ADDRESS_P (base))
/* I/O-mapped 16-bit registers can be read with two IN insns. */
2119 if (optimize > 0 && io_address_operand (base, HImode))
2122 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2123 AS2 (in,%B0,%B1-0x20));
2126 return (AS2 (lds,%A0,%A1) CR_TAB
2130 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode load (4-byte register group <- memory).
   The many branches handle overlap between the destination registers
   and the X pointer, since "ld r26,-X"-style accesses are undefined. */
2135 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2139 rtx base = XEXP (src, 0);
2140 int reg_dest = true_regnum (dest);
2141 int reg_base = true_regnum (base);
2149 if (reg_base == REG_X) /* (R26) */
2151 if (reg_dest == REG_X)
2152 /* "ld r26,-X" is undefined */
/* Load top-down, keeping r27's value in tmp until X is done. */
2153 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2154 AS2 (ld,r29,X) CR_TAB
2155 AS2 (ld,r28,-X) CR_TAB
2156 AS2 (ld,__tmp_reg__,-X) CR_TAB
2157 AS2 (sbiw,r26,1) CR_TAB
2158 AS2 (ld,r26,X) CR_TAB
2159 AS2 (mov,r27,__tmp_reg__));
/* Destination r24..r27 overlaps X in its upper half: buffer byte C. */
2160 else if (reg_dest == REG_X - 2)
2161 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2162 AS2 (ld,%B0,X+) CR_TAB
2163 AS2 (ld,__tmp_reg__,X+) CR_TAB
2164 AS2 (ld,%D0,X) CR_TAB
2165 AS2 (mov,%C0,__tmp_reg__));
2166 else if (reg_unused_after (insn, base))
2167 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2168 AS2 (ld,%B0,X+) CR_TAB
2169 AS2 (ld,%C0,X+) CR_TAB
2172 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2173 AS2 (ld,%B0,X+) CR_TAB
2174 AS2 (ld,%C0,X+) CR_TAB
2175 AS2 (ld,%D0,X) CR_TAB
/* Base is Y or Z: LDD with displacement, ordering loads so the
   pointer pair is overwritten last when it overlaps DEST. */
2180 if (reg_dest == reg_base)
2181 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2182 AS2 (ldd,%C0,%1+2) CR_TAB
2183 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2184 AS2 (ld,%A0,%1) CR_TAB
2185 AS2 (mov,%B0,__tmp_reg__));
2186 else if (reg_base == reg_dest + 2)
2187 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2188 AS2 (ldd,%B0,%1+1) CR_TAB
2189 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2190 AS2 (ldd,%D0,%1+3) CR_TAB
2191 AS2 (mov,%C0,__tmp_reg__));
2193 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2194 AS2 (ldd,%B0,%1+1) CR_TAB
2195 AS2 (ldd,%C0,%1+2) CR_TAB
2196 AS2 (ldd,%D0,%1+3));
2199 else if (GET_CODE (base) == PLUS) /* (R + i) */
2201 int disp = INTVAL (XEXP (base, 1));
/* Displacement past LDD range: shift Y around the four loads. */
2203 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2205 if (REGNO (XEXP (base, 0)) != REG_Y)
2206 fatal_insn ("incorrect insn:",insn);
2208 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2209 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2210 AS2 (ldd,%A0,Y+60) CR_TAB
2211 AS2 (ldd,%B0,Y+61) CR_TAB
2212 AS2 (ldd,%C0,Y+62) CR_TAB
2213 AS2 (ldd,%D0,Y+63) CR_TAB
2214 AS2 (sbiw,r28,%o1-60));
2216 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2217 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2218 AS2 (ld,%A0,Y) CR_TAB
2219 AS2 (ldd,%B0,Y+1) CR_TAB
2220 AS2 (ldd,%C0,Y+2) CR_TAB
2221 AS2 (ldd,%D0,Y+3) CR_TAB
2222 AS2 (subi,r28,lo8(%o1)) CR_TAB
2223 AS2 (sbci,r29,hi8(%o1)));
2226 reg_base = true_regnum (XEXP (base, 0));
2227 if (reg_base == REG_X)
2230 if (reg_dest == REG_X)
2233 /* "ld r26,-X" is undefined */
2234 return (AS2 (adiw,r26,%o1+3) CR_TAB
2235 AS2 (ld,r29,X) CR_TAB
2236 AS2 (ld,r28,-X) CR_TAB
2237 AS2 (ld,__tmp_reg__,-X) CR_TAB
2238 AS2 (sbiw,r26,1) CR_TAB
2239 AS2 (ld,r26,X) CR_TAB
2240 AS2 (mov,r27,__tmp_reg__));
2243 if (reg_dest == REG_X - 2)
2244 return (AS2 (adiw,r26,%o1) CR_TAB
2245 AS2 (ld,r24,X+) CR_TAB
2246 AS2 (ld,r25,X+) CR_TAB
2247 AS2 (ld,__tmp_reg__,X+) CR_TAB
2248 AS2 (ld,r27,X) CR_TAB
2249 AS2 (mov,r26,__tmp_reg__));
2251 return (AS2 (adiw,r26,%o1) CR_TAB
2252 AS2 (ld,%A0,X+) CR_TAB
2253 AS2 (ld,%B0,X+) CR_TAB
2254 AS2 (ld,%C0,X+) CR_TAB
2255 AS2 (ld,%D0,X) CR_TAB
2256 AS2 (sbiw,r26,%o1+3));
2258 if (reg_dest == reg_base)
2259 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2260 AS2 (ldd,%C0,%C1) CR_TAB
2261 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2262 AS2 (ldd,%A0,%A1) CR_TAB
2263 AS2 (mov,%B0,__tmp_reg__));
2264 else if (reg_dest == reg_base - 2)
2265 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2266 AS2 (ldd,%B0,%B1) CR_TAB
2267 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2268 AS2 (ldd,%D0,%D1) CR_TAB
2269 AS2 (mov,%C0,__tmp_reg__));
2270 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2271 AS2 (ldd,%B0,%B1) CR_TAB
2272 AS2 (ldd,%C0,%C1) CR_TAB
/* Pre-decrement: read high byte first so the pointer walks down. */
2275 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2276 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2277 AS2 (ld,%C0,%1) CR_TAB
2278 AS2 (ld,%B0,%1) CR_TAB
2280 else if (GET_CODE (base) == POST_INC) /* (R++) */
2281 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2282 AS2 (ld,%B0,%1) CR_TAB
2283 AS2 (ld,%C0,%1) CR_TAB
2285 else if (CONSTANT_ADDRESS_P (base))
2286 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2287 AS2 (lds,%B0,%B1) CR_TAB
2288 AS2 (lds,%C0,%C1) CR_TAB
2291 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for an SImode store (memory <- 4-byte register group).
   As with the load path, X-pointer overlap cases need tmp/zero-reg
   shuffles because "st X+,r26"-style stores are undefined. */
2296 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2300 rtx base = XEXP (dest, 0);
2301 int reg_base = true_regnum (base);
2302 int reg_src = true_regnum (src);
2308 if (CONSTANT_ADDRESS_P (base))
2309 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2310 AS2 (sts,%B0,%B1) CR_TAB
2311 AS2 (sts,%C0,%C1) CR_TAB
2313 if (reg_base > 0) /* (r) */
2315 if (reg_base == REG_X) /* (R26) */
2317 if (reg_src == REG_X)
2319 /* "st X+,r26" is undefined */
/* Park r27 in tmp, store r26 first, then the rest via X+. */
2320 if (reg_unused_after (insn, base))
2321 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2322 AS2 (st,X,r26) CR_TAB
2323 AS2 (adiw,r26,1) CR_TAB
2324 AS2 (st,X+,__tmp_reg__) CR_TAB
2325 AS2 (st,X+,r28) CR_TAB
2328 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2329 AS2 (st,X,r26) CR_TAB
2330 AS2 (adiw,r26,1) CR_TAB
2331 AS2 (st,X+,__tmp_reg__) CR_TAB
2332 AS2 (st,X+,r28) CR_TAB
2333 AS2 (st,X,r29) CR_TAB
/* Source r24..r27 overlaps X in its top half: copy the clobbered
   bytes into tmp/zero-reg first, and re-clear zero-reg afterwards. */
2336 else if (reg_base == reg_src + 2)
2338 if (reg_unused_after (insn, base))
2339 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2340 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2341 AS2 (st,%0+,%A1) CR_TAB
2342 AS2 (st,%0+,%B1) CR_TAB
2343 AS2 (st,%0+,__zero_reg__) CR_TAB
2344 AS2 (st,%0,__tmp_reg__) CR_TAB
2345 AS1 (clr,__zero_reg__));
2347 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2348 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2349 AS2 (st,%0+,%A1) CR_TAB
2350 AS2 (st,%0+,%B1) CR_TAB
2351 AS2 (st,%0+,__zero_reg__) CR_TAB
2352 AS2 (st,%0,__tmp_reg__) CR_TAB
2353 AS1 (clr,__zero_reg__) CR_TAB
2356 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2357 AS2 (st,%0+,%B1) CR_TAB
2358 AS2 (st,%0+,%C1) CR_TAB
2359 AS2 (st,%0,%D1) CR_TAB
/* Base is Y or Z: plain STD with displacements 0..3. */
2363 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2364 AS2 (std,%0+1,%B1) CR_TAB
2365 AS2 (std,%0+2,%C1) CR_TAB
2366 AS2 (std,%0+3,%D1));
2368 else if (GET_CODE (base) == PLUS) /* (R + i) */
2370 int disp = INTVAL (XEXP (base, 1));
2371 reg_base = REGNO (XEXP (base, 0));
2372 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2374 if (reg_base != REG_Y)
2375 fatal_insn ("incorrect insn:",insn);
2377 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2378 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2379 AS2 (std,Y+60,%A1) CR_TAB
2380 AS2 (std,Y+61,%B1) CR_TAB
2381 AS2 (std,Y+62,%C1) CR_TAB
2382 AS2 (std,Y+63,%D1) CR_TAB
2383 AS2 (sbiw,r28,%o0-60));
2385 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2386 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2387 AS2 (st,Y,%A1) CR_TAB
2388 AS2 (std,Y+1,%B1) CR_TAB
2389 AS2 (std,Y+2,%C1) CR_TAB
2390 AS2 (std,Y+3,%D1) CR_TAB
2391 AS2 (subi,r28,lo8(%o0)) CR_TAB
2392 AS2 (sbci,r29,hi8(%o0)));
2394 if (reg_base == REG_X)
2397 if (reg_src == REG_X)
/* Save X itself into tmp/zero-reg before adiw clobbers it. */
2400 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2401 AS2 (mov,__zero_reg__,r27) CR_TAB
2402 AS2 (adiw,r26,%o0) CR_TAB
2403 AS2 (st,X+,__tmp_reg__) CR_TAB
2404 AS2 (st,X+,__zero_reg__) CR_TAB
2405 AS2 (st,X+,r28) CR_TAB
2406 AS2 (st,X,r29) CR_TAB
2407 AS1 (clr,__zero_reg__) CR_TAB
2408 AS2 (sbiw,r26,%o0+3));
2410 else if (reg_src == REG_X - 2)
2413 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2414 AS2 (mov,__zero_reg__,r27) CR_TAB
2415 AS2 (adiw,r26,%o0) CR_TAB
2416 AS2 (st,X+,r24) CR_TAB
2417 AS2 (st,X+,r25) CR_TAB
2418 AS2 (st,X+,__tmp_reg__) CR_TAB
2419 AS2 (st,X,__zero_reg__) CR_TAB
2420 AS1 (clr,__zero_reg__) CR_TAB
2421 AS2 (sbiw,r26,%o0+3));
2424 return (AS2 (adiw,r26,%o0) CR_TAB
2425 AS2 (st,X+,%A1) CR_TAB
2426 AS2 (st,X+,%B1) CR_TAB
2427 AS2 (st,X+,%C1) CR_TAB
2428 AS2 (st,X,%D1) CR_TAB
2429 AS2 (sbiw,r26,%o0+3));
2431 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2432 AS2 (std,%B0,%B1) CR_TAB
2433 AS2 (std,%C0,%C1) CR_TAB
/* Pre-decrement: store high byte first so the pointer walks down. */
2436 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2437 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2438 AS2 (st,%0,%C1) CR_TAB
2439 AS2 (st,%0,%B1) CR_TAB
2441 else if (GET_CODE (base) == POST_INC) /* (R++) */
2442 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2443 AS2 (st,%0,%B1) CR_TAB
2444 AS2 (st,%0,%C1) CR_TAB
2446 fatal_insn ("unknown move insn:",insn);
/* Emit assembler for a 4-byte (SImode or SFmode) move; register/register,
   constant-to-register, and memory cases are delegated like the QI/HI
   variants. */
2451 output_movsisf(rtx insn, rtx operands[], int *l)
2454 rtx dest = operands[0];
2455 rtx src = operands[1];
2461 if (register_operand (dest, VOIDmode))
2463 if (register_operand (src, VOIDmode)) /* mov r,r */
/* Copy order depends on whether DEST is above SRC, so overlapping
   register groups are not clobbered mid-copy. */
2465 if (true_regnum (dest) > true_regnum (src))
2470 return (AS2 (movw,%C0,%C1) CR_TAB
2471 AS2 (movw,%A0,%A1));
2474 return (AS2 (mov,%D0,%D1) CR_TAB
2475 AS2 (mov,%C0,%C1) CR_TAB
2476 AS2 (mov,%B0,%B1) CR_TAB
2484 return (AS2 (movw,%A0,%A1) CR_TAB
2485 AS2 (movw,%C0,%C1));
2488 return (AS2 (mov,%A0,%A1) CR_TAB
2489 AS2 (mov,%B0,%B1) CR_TAB
2490 AS2 (mov,%C0,%C1) CR_TAB
2494 else if (CONSTANT_P (src))
2496 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2499 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2500 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2501 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2502 AS2 (ldi,%D0,hhi8(%1)));
2505 if (GET_CODE (src) == CONST_INT)
/* Shared "clear all four bytes" sequence; shorter with MOVW. */
2507 const char *const clr_op0 =
2508 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2509 AS1 (clr,%B0) CR_TAB
2511 : (AS1 (clr,%A0) CR_TAB
2512 AS1 (clr,%B0) CR_TAB
2513 AS1 (clr,%C0) CR_TAB
2516 if (src == const0_rtx) /* mov r,L */
2518 *l = AVR_HAVE_MOVW ? 3 : 4;
2521 else if (src == const1_rtx)
2524 output_asm_insn (clr_op0, operands);
2525 *l = AVR_HAVE_MOVW ? 4 : 5;
2526 return AS1 (inc,%A0);
2528 else if (src == constm1_rtx)
2530 /* Immediate constants -1 to any register */
2534 return (AS1 (clr,%A0) CR_TAB
2535 AS1 (dec,%A0) CR_TAB
2536 AS2 (mov,%B0,%A0) CR_TAB
2537 AS2 (movw,%C0,%A0));
2540 return (AS1 (clr,%A0) CR_TAB
2541 AS1 (dec,%A0) CR_TAB
2542 AS2 (mov,%B0,%A0) CR_TAB
2543 AS2 (mov,%C0,%A0) CR_TAB
/* Single-bit constants via clear + set + bld. */
2548 int bit_nr = exact_log2 (INTVAL (src));
2552 *l = AVR_HAVE_MOVW ? 5 : 6;
2555 output_asm_insn (clr_op0, operands);
2556 output_asm_insn ("set", operands);
2559 avr_output_bld (operands, bit_nr);
2566 /* Last resort, better than loading from memory. */
/* Bounce all four constant bytes through r31, preserving r31. */
2568 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2569 AS2 (ldi,r31,lo8(%1)) CR_TAB
2570 AS2 (mov,%A0,r31) CR_TAB
2571 AS2 (ldi,r31,hi8(%1)) CR_TAB
2572 AS2 (mov,%B0,r31) CR_TAB
2573 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2574 AS2 (mov,%C0,r31) CR_TAB
2575 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2576 AS2 (mov,%D0,r31) CR_TAB
2577 AS2 (mov,r31,__tmp_reg__));
2579 else if (GET_CODE (src) == MEM)
2580 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2582 else if (GET_CODE (dest) == MEM)
2586 if (src == const0_rtx)
2587 operands[1] = zero_reg_rtx;
2589 templ = out_movsi_mr_r (insn, operands, real_l);
2592 output_asm_insn (templ, operands);
2597 fatal_insn ("invalid insn:", insn);
/* Emit assembler for a QImode store (memory <- register). */
2602 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2606 rtx x = XEXP (dest, 0);
2612 if (CONSTANT_ADDRESS_P (x))
/* SREG and I/O-range addresses can use OUT instead of STS. */
2614 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2617 return AS2 (out,__SREG__,%1);
2619 if (optimize > 0 && io_address_operand (x, QImode))
2622 return AS2 (out,%0-0x20,%1);
2625 return AS2 (sts,%0,%1);
2627 /* memory access by reg+disp */
2628 else if (GET_CODE (x) == PLUS
2629 && REG_P (XEXP (x,0))
2630 && GET_CODE (XEXP (x,1)) == CONST_INT)
/* Displacement past STD's range: adjust Y around the store. */
2632 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2634 int disp = INTVAL (XEXP (x,1));
2635 if (REGNO (XEXP (x,0)) != REG_Y)
2636 fatal_insn ("incorrect insn:",insn);
2638 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2639 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2640 AS2 (std,Y+63,%1) CR_TAB
2641 AS2 (sbiw,r28,%o0-63));
2643 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2644 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2645 AS2 (st,Y,%1) CR_TAB
2646 AS2 (subi,r28,lo8(%o0)) CR_TAB
2647 AS2 (sbci,r29,hi8(%o0)));
2649 else if (REGNO (XEXP (x,0)) == REG_X)
/* If the source value lives in X itself, copy it to tmp before
   adiw modifies X. */
2651 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2653 if (reg_unused_after (insn, XEXP (x,0)))
2654 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2655 AS2 (adiw,r26,%o0) CR_TAB
2656 AS2 (st,X,__tmp_reg__));
2658 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2659 AS2 (adiw,r26,%o0) CR_TAB
2660 AS2 (st,X,__tmp_reg__) CR_TAB
2661 AS2 (sbiw,r26,%o0));
2665 if (reg_unused_after (insn, XEXP (x,0)))
2666 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2669 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2670 AS2 (st,X,%1) CR_TAB
2671 AS2 (sbiw,r26,%o0));
2675 return AS2 (std,%0,%1);
2678 return AS2 (st,%0,%1);
/* Emit assembler for a HImode store (memory <- register pair). */
2682 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2686 rtx base = XEXP (dest, 0);
2687 int reg_base = true_regnum (base);
2688 int reg_src = true_regnum (src);
2689 /* "volatile" forces writing high byte first, even if less efficient,
2690 for correct operation with 16-bit I/O registers. */
2691 int mem_volatile_p = MEM_VOLATILE_P (dest);
2696 if (CONSTANT_ADDRESS_P (base))
2698 if (optimize > 0 && io_address_operand (base, HImode))
/* 16-bit I/O registers: write high byte before low byte. */
2701 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2702 AS2 (out,%A0-0x20,%A1));
2704 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2709 if (reg_base == REG_X)
2711 if (reg_src == REG_X)
2713 /* "st X+,r26" and "st -X,r26" are undefined. */
/* Park r27 in tmp and store the two bytes without X-relative
   auto-modify on the clobbered register. */
2714 if (!mem_volatile_p && reg_unused_after (insn, src))
2715 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2716 AS2 (st,X,r26) CR_TAB
2717 AS2 (adiw,r26,1) CR_TAB
2718 AS2 (st,X,__tmp_reg__));
2720 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2721 AS2 (adiw,r26,1) CR_TAB
2722 AS2 (st,X,__tmp_reg__) CR_TAB
2723 AS2 (sbiw,r26,1) CR_TAB
2728 if (!mem_volatile_p && reg_unused_after (insn, base))
2729 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2732 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2733 AS2 (st,X,%B1) CR_TAB
/* Y/Z base: high byte via std, low byte via st (order per volatile
   comment above). */
2738 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2741 else if (GET_CODE (base) == PLUS)
2743 int disp = INTVAL (XEXP (base, 1));
2744 reg_base = REGNO (XEXP (base, 0));
2745 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2747 if (reg_base != REG_Y)
2748 fatal_insn ("incorrect insn:",insn);
2750 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2751 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2752 AS2 (std,Y+63,%B1) CR_TAB
2753 AS2 (std,Y+62,%A1) CR_TAB
2754 AS2 (sbiw,r28,%o0-62));
2756 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2757 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2758 AS2 (std,Y+1,%B1) CR_TAB
2759 AS2 (st,Y,%A1) CR_TAB
2760 AS2 (subi,r28,lo8(%o0)) CR_TAB
2761 AS2 (sbci,r29,hi8(%o0)));
2763 if (reg_base == REG_X)
2766 if (reg_src == REG_X)
/* X is both pointer and data: save it in tmp/zero-reg first. */
2769 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2770 AS2 (mov,__zero_reg__,r27) CR_TAB
2771 AS2 (adiw,r26,%o0+1) CR_TAB
2772 AS2 (st,X,__zero_reg__) CR_TAB
2773 AS2 (st,-X,__tmp_reg__) CR_TAB
2774 AS1 (clr,__zero_reg__) CR_TAB
2775 AS2 (sbiw,r26,%o0));
2778 return (AS2 (adiw,r26,%o0+1) CR_TAB
2779 AS2 (st,X,%B1) CR_TAB
2780 AS2 (st,-X,%A1) CR_TAB
2781 AS2 (sbiw,r26,%o0));
2783 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2786 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2787 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2789 else if (GET_CODE (base) == POST_INC) /* (R++) */
/* Volatile post-inc through X needs explicit adiw/st/-X ordering. */
2793 if (REGNO (XEXP (base, 0)) == REG_X)
2796 return (AS2 (adiw,r26,1) CR_TAB
2797 AS2 (st,X,%B1) CR_TAB
2798 AS2 (st,-X,%A1) CR_TAB
2804 return (AS2 (std,%p0+1,%B1) CR_TAB
2805 AS2 (st,%p0,%A1) CR_TAB
2811 return (AS2 (st,%0,%A1) CR_TAB
2814 fatal_insn ("unknown move insn:",insn);
2818 /* Return 1 if frame pointer for current function required. */
2821 frame_pointer_required_p (void)
/* Required for alloca, for stack-passed arguments (no free arg
   registers), or when the frame has local storage. */
2823 return (cfun->calls_alloca
2824 || crtl->args.info.nregs == 0
2825 || get_frame_size () > 0);
2828 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2831 compare_condition (rtx insn)
/* Peek at the next real insn: if it is a conditional jump, the
   comparison code sits in the first operand of its IF_THEN_ELSE. */
2833 rtx next = next_real_insn (insn);
2834 RTX_CODE cond = UNKNOWN;
2835 if (next && GET_CODE (next) == JUMP_INSN)
2837 rtx pat = PATTERN (next);
2838 rtx src = SET_SRC (pat);
2839 rtx t = XEXP (src, 0);
2840 cond = GET_CODE (t);
2845 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2848 compare_sign_p (rtx insn)
/* GE/LT against zero only need the N flag, i.e. a sign test. */
2850 RTX_CODE cond = compare_condition (insn);
2851 return (cond == GE || cond == LT);
2854 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2855 that needs to be swapped (GT, GTU, LE, LEU). */
2858 compare_diff_p (rtx insn)
2860 RTX_CODE cond = compare_condition (insn);
2861 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2864 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2867 compare_eq_p (rtx insn)
2869 RTX_CODE cond = compare_condition (insn);
2870 return (cond == EQ || cond == NE);
2874 /* Output test instruction for HImode. */
2877 out_tsthi (rtx insn, int *l)
2879 if (compare_sign_p (insn))
2882 return AS1 (tst,%B0);
2884 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2885 && compare_eq_p (insn))
2887 /* Faster than sbiw if we can clobber the operand. */
2889 return AS2 (or,%A0,%B0);
2891 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2894 return AS2 (sbiw,%0,0);
2897 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2898 AS2 (cpc,%B0,__zero_reg__));
2902 /* Output test instruction for SImode. */
2905 out_tstsi (rtx insn, int *l)
2907 if (compare_sign_p (insn))
2910 return AS1 (tst,%D0);
2912 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2915 return (AS2 (sbiw,%A0,0) CR_TAB
2916 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2917 AS2 (cpc,%D0,__zero_reg__));
2920 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2921 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2922 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2923 AS2 (cpc,%D0,__zero_reg__));
/* NOTE(review): this is a truncated listing -- every line carries an
   embedded original line number and many source lines (braces, length
   bookkeeping, else-branches) are elided.  It will not compile as-is;
   restore the full text from upstream GCC avr.c before modifying.
   Summary: emits a shift either inline (small constant counts) or as a
   dec/brne loop, choosing a loop counter from a scratch operand,
   __zero_reg__, or a saved LD_REGS register.  */
2927 /* Generate asm equivalent for various shifts.
2928 Shift count is a CONST_INT, MEM or REG.
2929 This only handles cases that are not already
2930 carefully hand-optimized in ?sh??i3_out. */
2933 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2934 int *len, int t_len)
2938 int second_label = 1;
2939 int saved_in_tmp = 0;
2940 int use_zero_reg = 0;
2942 op[0] = operands[0];
2943 op[1] = operands[1];
2944 op[2] = operands[2];
2945 op[3] = operands[3];
2951 if (GET_CODE (operands[2]) == CONST_INT)
2953 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2954 int count = INTVAL (operands[2]);
2955 int max_len = 10; /* If larger than this, always use a loop. */
2964 if (count < 8 && !scratch)
2968 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2970 if (t_len * count <= max_len)
2972 /* Output shifts inline with no loop - faster. */
2974 *len = t_len * count;
2978 output_asm_insn (templ, op);
2987 strcat (str, AS2 (ldi,%3,%2));
2989 else if (use_zero_reg)
2991 /* Hack to save one word: use __zero_reg__ as loop counter.
2992 Set one bit, then shift in a loop until it is 0 again. */
2994 op[3] = zero_reg_rtx;
2998 strcat (str, ("set" CR_TAB
2999 AS2 (bld,%3,%2-1)));
3003 /* No scratch register available, use one from LD_REGS (saved in
3004 __tmp_reg__) that doesn't overlap with registers to shift. */
3006 op[3] = gen_rtx_REG (QImode,
3007 ((true_regnum (operands[0]) - 1) & 15) + 16);
3008 op[4] = tmp_reg_rtx;
3012 *len = 3; /* Includes "mov %3,%4" after the loop. */
3014 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3020 else if (GET_CODE (operands[2]) == MEM)
3024 op[3] = op_mov[0] = tmp_reg_rtx;
3028 out_movqi_r_mr (insn, op_mov, len);
3030 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3032 else if (register_operand (operands[2], QImode))
3034 if (reg_unused_after (insn, operands[2]))
3038 op[3] = tmp_reg_rtx;
3040 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3044 fatal_insn ("bad shift insn:", insn);
3051 strcat (str, AS1 (rjmp,2f));
3055 *len += t_len + 2; /* template + dec + brXX */
3058 strcat (str, "\n1:\t");
3059 strcat (str, templ);
3060 strcat (str, second_label ? "\n2:\t" : "\n\t");
3061 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3062 strcat (str, CR_TAB);
3063 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3065 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3066 output_asm_insn (str, op);
3071 /* 8bit shift left ((char)x << i) */
3074 ashlqi3_out (rtx insn, rtx operands[], int *len)
3076 if (GET_CODE (operands[2]) == CONST_INT)
3083 switch (INTVAL (operands[2]))
3086 if (INTVAL (operands[2]) < 8)
3090 return AS1 (clr,%0);
3094 return AS1 (lsl,%0);
3098 return (AS1 (lsl,%0) CR_TAB
3103 return (AS1 (lsl,%0) CR_TAB
3108 if (test_hard_reg_class (LD_REGS, operands[0]))
3111 return (AS1 (swap,%0) CR_TAB
3112 AS2 (andi,%0,0xf0));
3115 return (AS1 (lsl,%0) CR_TAB
3121 if (test_hard_reg_class (LD_REGS, operands[0]))
3124 return (AS1 (swap,%0) CR_TAB
3126 AS2 (andi,%0,0xe0));
3129 return (AS1 (lsl,%0) CR_TAB
3136 if (test_hard_reg_class (LD_REGS, operands[0]))
3139 return (AS1 (swap,%0) CR_TAB
3142 AS2 (andi,%0,0xc0));
3145 return (AS1 (lsl,%0) CR_TAB
3154 return (AS1 (ror,%0) CR_TAB
3159 else if (CONSTANT_P (operands[2]))
3160 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3162 out_shift_with_cnt (AS1 (lsl,%0),
3163 insn, operands, len, 1);
/* NOTE(review): truncated listing -- embedded original line numbers,
   elided case labels / *len assignments / braces.  Not compilable
   as-is; restore full text from upstream GCC avr.c before modifying.
   Summary: HImode left shift by constant, with per-count optimized
   sequences (swap/andi nibble tricks, byte moves for count >= 8,
   hardware MUL when available) and out_shift_with_cnt fallback.  */
3168 /* 16bit shift left ((short)x << i) */
3171 ashlhi3_out (rtx insn, rtx operands[], int *len)
3173 if (GET_CODE (operands[2]) == CONST_INT)
3175 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3176 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3183 switch (INTVAL (operands[2]))
3186 if (INTVAL (operands[2]) < 16)
3190 return (AS1 (clr,%B0) CR_TAB
3194 if (optimize_size && scratch)
3199 return (AS1 (swap,%A0) CR_TAB
3200 AS1 (swap,%B0) CR_TAB
3201 AS2 (andi,%B0,0xf0) CR_TAB
3202 AS2 (eor,%B0,%A0) CR_TAB
3203 AS2 (andi,%A0,0xf0) CR_TAB
3209 return (AS1 (swap,%A0) CR_TAB
3210 AS1 (swap,%B0) CR_TAB
3211 AS2 (ldi,%3,0xf0) CR_TAB
3212 AS2 (and,%B0,%3) CR_TAB
3213 AS2 (eor,%B0,%A0) CR_TAB
3214 AS2 (and,%A0,%3) CR_TAB
3217 break; /* optimize_size ? 6 : 8 */
3221 break; /* scratch ? 5 : 6 */
3225 return (AS1 (lsl,%A0) CR_TAB
3226 AS1 (rol,%B0) CR_TAB
3227 AS1 (swap,%A0) CR_TAB
3228 AS1 (swap,%B0) CR_TAB
3229 AS2 (andi,%B0,0xf0) CR_TAB
3230 AS2 (eor,%B0,%A0) CR_TAB
3231 AS2 (andi,%A0,0xf0) CR_TAB
3237 return (AS1 (lsl,%A0) CR_TAB
3238 AS1 (rol,%B0) CR_TAB
3239 AS1 (swap,%A0) CR_TAB
3240 AS1 (swap,%B0) CR_TAB
3241 AS2 (ldi,%3,0xf0) CR_TAB
3242 AS2 (and,%B0,%3) CR_TAB
3243 AS2 (eor,%B0,%A0) CR_TAB
3244 AS2 (and,%A0,%3) CR_TAB
3251 break; /* scratch ? 5 : 6 */
3253 return (AS1 (clr,__tmp_reg__) CR_TAB
3254 AS1 (lsr,%B0) CR_TAB
3255 AS1 (ror,%A0) CR_TAB
3256 AS1 (ror,__tmp_reg__) CR_TAB
3257 AS1 (lsr,%B0) CR_TAB
3258 AS1 (ror,%A0) CR_TAB
3259 AS1 (ror,__tmp_reg__) CR_TAB
3260 AS2 (mov,%B0,%A0) CR_TAB
3261 AS2 (mov,%A0,__tmp_reg__));
3265 return (AS1 (lsr,%B0) CR_TAB
3266 AS2 (mov,%B0,%A0) CR_TAB
3267 AS1 (clr,%A0) CR_TAB
3268 AS1 (ror,%B0) CR_TAB
3272 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3277 return (AS2 (mov,%B0,%A0) CR_TAB
3278 AS1 (clr,%A0) CR_TAB
3283 return (AS2 (mov,%B0,%A0) CR_TAB
3284 AS1 (clr,%A0) CR_TAB
3285 AS1 (lsl,%B0) CR_TAB
3290 return (AS2 (mov,%B0,%A0) CR_TAB
3291 AS1 (clr,%A0) CR_TAB
3292 AS1 (lsl,%B0) CR_TAB
3293 AS1 (lsl,%B0) CR_TAB
3300 return (AS2 (mov,%B0,%A0) CR_TAB
3301 AS1 (clr,%A0) CR_TAB
3302 AS1 (swap,%B0) CR_TAB
3303 AS2 (andi,%B0,0xf0));
3308 return (AS2 (mov,%B0,%A0) CR_TAB
3309 AS1 (clr,%A0) CR_TAB
3310 AS1 (swap,%B0) CR_TAB
3311 AS2 (ldi,%3,0xf0) CR_TAB
3315 return (AS2 (mov,%B0,%A0) CR_TAB
3316 AS1 (clr,%A0) CR_TAB
3317 AS1 (lsl,%B0) CR_TAB
3318 AS1 (lsl,%B0) CR_TAB
3319 AS1 (lsl,%B0) CR_TAB
3326 return (AS2 (mov,%B0,%A0) CR_TAB
3327 AS1 (clr,%A0) CR_TAB
3328 AS1 (swap,%B0) CR_TAB
3329 AS1 (lsl,%B0) CR_TAB
3330 AS2 (andi,%B0,0xe0));
3332 if (AVR_HAVE_MUL && scratch)
3335 return (AS2 (ldi,%3,0x20) CR_TAB
3336 AS2 (mul,%A0,%3) CR_TAB
3337 AS2 (mov,%B0,r0) CR_TAB
3338 AS1 (clr,%A0) CR_TAB
3339 AS1 (clr,__zero_reg__));
3341 if (optimize_size && scratch)
3346 return (AS2 (mov,%B0,%A0) CR_TAB
3347 AS1 (clr,%A0) CR_TAB
3348 AS1 (swap,%B0) CR_TAB
3349 AS1 (lsl,%B0) CR_TAB
3350 AS2 (ldi,%3,0xe0) CR_TAB
3356 return ("set" CR_TAB
3357 AS2 (bld,r1,5) CR_TAB
3358 AS2 (mul,%A0,r1) CR_TAB
3359 AS2 (mov,%B0,r0) CR_TAB
3360 AS1 (clr,%A0) CR_TAB
3361 AS1 (clr,__zero_reg__));
3364 return (AS2 (mov,%B0,%A0) CR_TAB
3365 AS1 (clr,%A0) CR_TAB
3366 AS1 (lsl,%B0) CR_TAB
3367 AS1 (lsl,%B0) CR_TAB
3368 AS1 (lsl,%B0) CR_TAB
3369 AS1 (lsl,%B0) CR_TAB
3373 if (AVR_HAVE_MUL && ldi_ok)
3376 return (AS2 (ldi,%B0,0x40) CR_TAB
3377 AS2 (mul,%A0,%B0) CR_TAB
3378 AS2 (mov,%B0,r0) CR_TAB
3379 AS1 (clr,%A0) CR_TAB
3380 AS1 (clr,__zero_reg__));
3382 if (AVR_HAVE_MUL && scratch)
3385 return (AS2 (ldi,%3,0x40) CR_TAB
3386 AS2 (mul,%A0,%3) CR_TAB
3387 AS2 (mov,%B0,r0) CR_TAB
3388 AS1 (clr,%A0) CR_TAB
3389 AS1 (clr,__zero_reg__));
3391 if (optimize_size && ldi_ok)
3394 return (AS2 (mov,%B0,%A0) CR_TAB
3395 AS2 (ldi,%A0,6) "\n1:\t"
3396 AS1 (lsl,%B0) CR_TAB
3397 AS1 (dec,%A0) CR_TAB
3400 if (optimize_size && scratch)
3403 return (AS1 (clr,%B0) CR_TAB
3404 AS1 (lsr,%A0) CR_TAB
3405 AS1 (ror,%B0) CR_TAB
3406 AS1 (lsr,%A0) CR_TAB
3407 AS1 (ror,%B0) CR_TAB
3412 return (AS1 (clr,%B0) CR_TAB
3413 AS1 (lsr,%A0) CR_TAB
3414 AS1 (ror,%B0) CR_TAB
3419 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3421 insn, operands, len, 2);
/* NOTE(review): truncated listing -- embedded original line numbers
   and elided lines (case labels, *len assignments, braces).  Not
   compilable as-is; restore from upstream GCC avr.c before modifying.
   Summary: SImode left shift by constant; whole-byte shifts are done
   with register moves (movw when source/dest overlap allows), else
   fall back to out_shift_with_cnt with a 4-insn lsl/rol template.  */
3426 /* 32bit shift left ((long)x << i) */
3429 ashlsi3_out (rtx insn, rtx operands[], int *len)
3431 if (GET_CODE (operands[2]) == CONST_INT)
3439 switch (INTVAL (operands[2]))
3442 if (INTVAL (operands[2]) < 32)
3446 return *len = 3, (AS1 (clr,%D0) CR_TAB
3447 AS1 (clr,%C0) CR_TAB
3448 AS2 (movw,%A0,%C0));
3450 return (AS1 (clr,%D0) CR_TAB
3451 AS1 (clr,%C0) CR_TAB
3452 AS1 (clr,%B0) CR_TAB
3457 int reg0 = true_regnum (operands[0]);
3458 int reg1 = true_regnum (operands[1]);
3461 return (AS2 (mov,%D0,%C1) CR_TAB
3462 AS2 (mov,%C0,%B1) CR_TAB
3463 AS2 (mov,%B0,%A1) CR_TAB
3466 return (AS1 (clr,%A0) CR_TAB
3467 AS2 (mov,%B0,%A1) CR_TAB
3468 AS2 (mov,%C0,%B1) CR_TAB
3474 int reg0 = true_regnum (operands[0]);
3475 int reg1 = true_regnum (operands[1]);
3476 if (reg0 + 2 == reg1)
3477 return *len = 2, (AS1 (clr,%B0) CR_TAB
3480 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3481 AS1 (clr,%B0) CR_TAB
3484 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3485 AS2 (mov,%D0,%B1) CR_TAB
3486 AS1 (clr,%B0) CR_TAB
3492 return (AS2 (mov,%D0,%A1) CR_TAB
3493 AS1 (clr,%C0) CR_TAB
3494 AS1 (clr,%B0) CR_TAB
3499 return (AS1 (clr,%D0) CR_TAB
3500 AS1 (lsr,%A0) CR_TAB
3501 AS1 (ror,%D0) CR_TAB
3502 AS1 (clr,%C0) CR_TAB
3503 AS1 (clr,%B0) CR_TAB
3508 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3509 AS1 (rol,%B0) CR_TAB
3510 AS1 (rol,%C0) CR_TAB
3512 insn, operands, len, 4);
3516 /* 8bit arithmetic shift right ((signed char)x >> i) */
3519 ashrqi3_out (rtx insn, rtx operands[], int *len)
3521 if (GET_CODE (operands[2]) == CONST_INT)
3528 switch (INTVAL (operands[2]))
3532 return AS1 (asr,%0);
3536 return (AS1 (asr,%0) CR_TAB
3541 return (AS1 (asr,%0) CR_TAB
3547 return (AS1 (asr,%0) CR_TAB
3554 return (AS1 (asr,%0) CR_TAB
3562 return (AS2 (bst,%0,6) CR_TAB
3564 AS2 (sbc,%0,%0) CR_TAB
3568 if (INTVAL (operands[2]) < 8)
3575 return (AS1 (lsl,%0) CR_TAB
3579 else if (CONSTANT_P (operands[2]))
3580 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3582 out_shift_with_cnt (AS1 (asr,%0),
3583 insn, operands, len, 1);
/* NOTE(review): truncated listing -- embedded original line numbers,
   elided case labels / *len assignments / braces.  Not compilable
   as-is; restore full text from upstream GCC avr.c before modifying.
   Summary: HImode arithmetic right shift by constant with per-count
   sequences (sign smearing via lsl/sbc, signed MUL when available)
   and an out_shift_with_cnt asr/ror fallback.  */
3588 /* 16bit arithmetic shift right ((signed short)x >> i) */
3591 ashrhi3_out (rtx insn, rtx operands[], int *len)
3593 if (GET_CODE (operands[2]) == CONST_INT)
3595 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3596 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3603 switch (INTVAL (operands[2]))
3607 /* XXX try to optimize this too? */
3612 break; /* scratch ? 5 : 6 */
3614 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3615 AS2 (mov,%A0,%B0) CR_TAB
3616 AS1 (lsl,__tmp_reg__) CR_TAB
3617 AS1 (rol,%A0) CR_TAB
3618 AS2 (sbc,%B0,%B0) CR_TAB
3619 AS1 (lsl,__tmp_reg__) CR_TAB
3620 AS1 (rol,%A0) CR_TAB
3625 return (AS1 (lsl,%A0) CR_TAB
3626 AS2 (mov,%A0,%B0) CR_TAB
3627 AS1 (rol,%A0) CR_TAB
3632 int reg0 = true_regnum (operands[0]);
3633 int reg1 = true_regnum (operands[1]);
3636 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3637 AS1 (lsl,%B0) CR_TAB
3640 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3641 AS1 (clr,%B0) CR_TAB
3642 AS2 (sbrc,%A0,7) CR_TAB
3648 return (AS2 (mov,%A0,%B0) CR_TAB
3649 AS1 (lsl,%B0) CR_TAB
3650 AS2 (sbc,%B0,%B0) CR_TAB
3655 return (AS2 (mov,%A0,%B0) CR_TAB
3656 AS1 (lsl,%B0) CR_TAB
3657 AS2 (sbc,%B0,%B0) CR_TAB
3658 AS1 (asr,%A0) CR_TAB
3662 if (AVR_HAVE_MUL && ldi_ok)
3665 return (AS2 (ldi,%A0,0x20) CR_TAB
3666 AS2 (muls,%B0,%A0) CR_TAB
3667 AS2 (mov,%A0,r1) CR_TAB
3668 AS2 (sbc,%B0,%B0) CR_TAB
3669 AS1 (clr,__zero_reg__));
3671 if (optimize_size && scratch)
3674 return (AS2 (mov,%A0,%B0) CR_TAB
3675 AS1 (lsl,%B0) CR_TAB
3676 AS2 (sbc,%B0,%B0) CR_TAB
3677 AS1 (asr,%A0) CR_TAB
3678 AS1 (asr,%A0) CR_TAB
3682 if (AVR_HAVE_MUL && ldi_ok)
3685 return (AS2 (ldi,%A0,0x10) CR_TAB
3686 AS2 (muls,%B0,%A0) CR_TAB
3687 AS2 (mov,%A0,r1) CR_TAB
3688 AS2 (sbc,%B0,%B0) CR_TAB
3689 AS1 (clr,__zero_reg__));
3691 if (optimize_size && scratch)
3694 return (AS2 (mov,%A0,%B0) CR_TAB
3695 AS1 (lsl,%B0) CR_TAB
3696 AS2 (sbc,%B0,%B0) CR_TAB
3697 AS1 (asr,%A0) CR_TAB
3698 AS1 (asr,%A0) CR_TAB
3699 AS1 (asr,%A0) CR_TAB
3703 if (AVR_HAVE_MUL && ldi_ok)
3706 return (AS2 (ldi,%A0,0x08) CR_TAB
3707 AS2 (muls,%B0,%A0) CR_TAB
3708 AS2 (mov,%A0,r1) CR_TAB
3709 AS2 (sbc,%B0,%B0) CR_TAB
3710 AS1 (clr,__zero_reg__));
3713 break; /* scratch ? 5 : 7 */
3715 return (AS2 (mov,%A0,%B0) CR_TAB
3716 AS1 (lsl,%B0) CR_TAB
3717 AS2 (sbc,%B0,%B0) CR_TAB
3718 AS1 (asr,%A0) CR_TAB
3719 AS1 (asr,%A0) CR_TAB
3720 AS1 (asr,%A0) CR_TAB
3721 AS1 (asr,%A0) CR_TAB
3726 return (AS1 (lsl,%B0) CR_TAB
3727 AS2 (sbc,%A0,%A0) CR_TAB
3728 AS1 (lsl,%B0) CR_TAB
3729 AS2 (mov,%B0,%A0) CR_TAB
3733 if (INTVAL (operands[2]) < 16)
3739 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3740 AS2 (sbc,%A0,%A0) CR_TAB
3745 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3747 insn, operands, len, 2);
/* NOTE(review): truncated listing -- embedded original line numbers
   and elided lines.  Not compilable as-is; restore from upstream GCC
   avr.c before modifying.  Summary: SImode arithmetic right shift by
   constant; byte-granular shifts via moves plus sign extension
   (sbrc/com or dec), sign fill for counts >= 31, out_shift_with_cnt
   asr/ror fallback for the rest.  */
3752 /* 32bit arithmetic shift right ((signed long)x >> i) */
3755 ashrsi3_out (rtx insn, rtx operands[], int *len)
3757 if (GET_CODE (operands[2]) == CONST_INT)
3765 switch (INTVAL (operands[2]))
3769 int reg0 = true_regnum (operands[0]);
3770 int reg1 = true_regnum (operands[1]);
3773 return (AS2 (mov,%A0,%B1) CR_TAB
3774 AS2 (mov,%B0,%C1) CR_TAB
3775 AS2 (mov,%C0,%D1) CR_TAB
3776 AS1 (clr,%D0) CR_TAB
3777 AS2 (sbrc,%C0,7) CR_TAB
3780 return (AS1 (clr,%D0) CR_TAB
3781 AS2 (sbrc,%D1,7) CR_TAB
3782 AS1 (dec,%D0) CR_TAB
3783 AS2 (mov,%C0,%D1) CR_TAB
3784 AS2 (mov,%B0,%C1) CR_TAB
3790 int reg0 = true_regnum (operands[0]);
3791 int reg1 = true_regnum (operands[1]);
3793 if (reg0 == reg1 + 2)
3794 return *len = 4, (AS1 (clr,%D0) CR_TAB
3795 AS2 (sbrc,%B0,7) CR_TAB
3796 AS1 (com,%D0) CR_TAB
3799 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3800 AS1 (clr,%D0) CR_TAB
3801 AS2 (sbrc,%B0,7) CR_TAB
3802 AS1 (com,%D0) CR_TAB
3805 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3806 AS2 (mov,%A0,%C1) CR_TAB
3807 AS1 (clr,%D0) CR_TAB
3808 AS2 (sbrc,%B0,7) CR_TAB
3809 AS1 (com,%D0) CR_TAB
3814 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3815 AS1 (clr,%D0) CR_TAB
3816 AS2 (sbrc,%A0,7) CR_TAB
3817 AS1 (com,%D0) CR_TAB
3818 AS2 (mov,%B0,%D0) CR_TAB
3822 if (INTVAL (operands[2]) < 32)
3829 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3830 AS2 (sbc,%A0,%A0) CR_TAB
3831 AS2 (mov,%B0,%A0) CR_TAB
3832 AS2 (movw,%C0,%A0));
3834 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3835 AS2 (sbc,%A0,%A0) CR_TAB
3836 AS2 (mov,%B0,%A0) CR_TAB
3837 AS2 (mov,%C0,%A0) CR_TAB
3842 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3843 AS1 (ror,%C0) CR_TAB
3844 AS1 (ror,%B0) CR_TAB
3846 insn, operands, len, 4);
3850 /* 8bit logic shift right ((unsigned char)x >> i) */
3853 lshrqi3_out (rtx insn, rtx operands[], int *len)
3855 if (GET_CODE (operands[2]) == CONST_INT)
3862 switch (INTVAL (operands[2]))
3865 if (INTVAL (operands[2]) < 8)
3869 return AS1 (clr,%0);
3873 return AS1 (lsr,%0);
3877 return (AS1 (lsr,%0) CR_TAB
3881 return (AS1 (lsr,%0) CR_TAB
3886 if (test_hard_reg_class (LD_REGS, operands[0]))
3889 return (AS1 (swap,%0) CR_TAB
3890 AS2 (andi,%0,0x0f));
3893 return (AS1 (lsr,%0) CR_TAB
3899 if (test_hard_reg_class (LD_REGS, operands[0]))
3902 return (AS1 (swap,%0) CR_TAB
3907 return (AS1 (lsr,%0) CR_TAB
3914 if (test_hard_reg_class (LD_REGS, operands[0]))
3917 return (AS1 (swap,%0) CR_TAB
3923 return (AS1 (lsr,%0) CR_TAB
3932 return (AS1 (rol,%0) CR_TAB
3937 else if (CONSTANT_P (operands[2]))
3938 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3940 out_shift_with_cnt (AS1 (lsr,%0),
3941 insn, operands, len, 1);
/* NOTE(review): truncated listing -- embedded original line numbers,
   elided case labels / *len assignments / braces.  Not compilable
   as-is; restore full text from upstream GCC avr.c before modifying.
   Summary: HImode logical right shift by constant, mirroring
   ashlhi3_out (swap/andi nibble tricks, byte moves for count >= 8,
   hardware MUL where profitable) with an out_shift_with_cnt
   lsr/ror fallback.  */
3945 /* 16bit logic shift right ((unsigned short)x >> i) */
3948 lshrhi3_out (rtx insn, rtx operands[], int *len)
3950 if (GET_CODE (operands[2]) == CONST_INT)
3952 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3953 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3960 switch (INTVAL (operands[2]))
3963 if (INTVAL (operands[2]) < 16)
3967 return (AS1 (clr,%B0) CR_TAB
3971 if (optimize_size && scratch)
3976 return (AS1 (swap,%B0) CR_TAB
3977 AS1 (swap,%A0) CR_TAB
3978 AS2 (andi,%A0,0x0f) CR_TAB
3979 AS2 (eor,%A0,%B0) CR_TAB
3980 AS2 (andi,%B0,0x0f) CR_TAB
3986 return (AS1 (swap,%B0) CR_TAB
3987 AS1 (swap,%A0) CR_TAB
3988 AS2 (ldi,%3,0x0f) CR_TAB
3989 AS2 (and,%A0,%3) CR_TAB
3990 AS2 (eor,%A0,%B0) CR_TAB
3991 AS2 (and,%B0,%3) CR_TAB
3994 break; /* optimize_size ? 6 : 8 */
3998 break; /* scratch ? 5 : 6 */
4002 return (AS1 (lsr,%B0) CR_TAB
4003 AS1 (ror,%A0) CR_TAB
4004 AS1 (swap,%B0) CR_TAB
4005 AS1 (swap,%A0) CR_TAB
4006 AS2 (andi,%A0,0x0f) CR_TAB
4007 AS2 (eor,%A0,%B0) CR_TAB
4008 AS2 (andi,%B0,0x0f) CR_TAB
4014 return (AS1 (lsr,%B0) CR_TAB
4015 AS1 (ror,%A0) CR_TAB
4016 AS1 (swap,%B0) CR_TAB
4017 AS1 (swap,%A0) CR_TAB
4018 AS2 (ldi,%3,0x0f) CR_TAB
4019 AS2 (and,%A0,%3) CR_TAB
4020 AS2 (eor,%A0,%B0) CR_TAB
4021 AS2 (and,%B0,%3) CR_TAB
4028 break; /* scratch ? 5 : 6 */
4030 return (AS1 (clr,__tmp_reg__) CR_TAB
4031 AS1 (lsl,%A0) CR_TAB
4032 AS1 (rol,%B0) CR_TAB
4033 AS1 (rol,__tmp_reg__) CR_TAB
4034 AS1 (lsl,%A0) CR_TAB
4035 AS1 (rol,%B0) CR_TAB
4036 AS1 (rol,__tmp_reg__) CR_TAB
4037 AS2 (mov,%A0,%B0) CR_TAB
4038 AS2 (mov,%B0,__tmp_reg__));
4042 return (AS1 (lsl,%A0) CR_TAB
4043 AS2 (mov,%A0,%B0) CR_TAB
4044 AS1 (rol,%A0) CR_TAB
4045 AS2 (sbc,%B0,%B0) CR_TAB
4049 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4054 return (AS2 (mov,%A0,%B0) CR_TAB
4055 AS1 (clr,%B0) CR_TAB
4060 return (AS2 (mov,%A0,%B0) CR_TAB
4061 AS1 (clr,%B0) CR_TAB
4062 AS1 (lsr,%A0) CR_TAB
4067 return (AS2 (mov,%A0,%B0) CR_TAB
4068 AS1 (clr,%B0) CR_TAB
4069 AS1 (lsr,%A0) CR_TAB
4070 AS1 (lsr,%A0) CR_TAB
4077 return (AS2 (mov,%A0,%B0) CR_TAB
4078 AS1 (clr,%B0) CR_TAB
4079 AS1 (swap,%A0) CR_TAB
4080 AS2 (andi,%A0,0x0f));
4085 return (AS2 (mov,%A0,%B0) CR_TAB
4086 AS1 (clr,%B0) CR_TAB
4087 AS1 (swap,%A0) CR_TAB
4088 AS2 (ldi,%3,0x0f) CR_TAB
4092 return (AS2 (mov,%A0,%B0) CR_TAB
4093 AS1 (clr,%B0) CR_TAB
4094 AS1 (lsr,%A0) CR_TAB
4095 AS1 (lsr,%A0) CR_TAB
4096 AS1 (lsr,%A0) CR_TAB
4103 return (AS2 (mov,%A0,%B0) CR_TAB
4104 AS1 (clr,%B0) CR_TAB
4105 AS1 (swap,%A0) CR_TAB
4106 AS1 (lsr,%A0) CR_TAB
4107 AS2 (andi,%A0,0x07));
4109 if (AVR_HAVE_MUL && scratch)
4112 return (AS2 (ldi,%3,0x08) CR_TAB
4113 AS2 (mul,%B0,%3) CR_TAB
4114 AS2 (mov,%A0,r1) CR_TAB
4115 AS1 (clr,%B0) CR_TAB
4116 AS1 (clr,__zero_reg__));
4118 if (optimize_size && scratch)
4123 return (AS2 (mov,%A0,%B0) CR_TAB
4124 AS1 (clr,%B0) CR_TAB
4125 AS1 (swap,%A0) CR_TAB
4126 AS1 (lsr,%A0) CR_TAB
4127 AS2 (ldi,%3,0x07) CR_TAB
4133 return ("set" CR_TAB
4134 AS2 (bld,r1,3) CR_TAB
4135 AS2 (mul,%B0,r1) CR_TAB
4136 AS2 (mov,%A0,r1) CR_TAB
4137 AS1 (clr,%B0) CR_TAB
4138 AS1 (clr,__zero_reg__));
4141 return (AS2 (mov,%A0,%B0) CR_TAB
4142 AS1 (clr,%B0) CR_TAB
4143 AS1 (lsr,%A0) CR_TAB
4144 AS1 (lsr,%A0) CR_TAB
4145 AS1 (lsr,%A0) CR_TAB
4146 AS1 (lsr,%A0) CR_TAB
4150 if (AVR_HAVE_MUL && ldi_ok)
4153 return (AS2 (ldi,%A0,0x04) CR_TAB
4154 AS2 (mul,%B0,%A0) CR_TAB
4155 AS2 (mov,%A0,r1) CR_TAB
4156 AS1 (clr,%B0) CR_TAB
4157 AS1 (clr,__zero_reg__));
4159 if (AVR_HAVE_MUL && scratch)
4162 return (AS2 (ldi,%3,0x04) CR_TAB
4163 AS2 (mul,%B0,%3) CR_TAB
4164 AS2 (mov,%A0,r1) CR_TAB
4165 AS1 (clr,%B0) CR_TAB
4166 AS1 (clr,__zero_reg__));
4168 if (optimize_size && ldi_ok)
4171 return (AS2 (mov,%A0,%B0) CR_TAB
4172 AS2 (ldi,%B0,6) "\n1:\t"
4173 AS1 (lsr,%A0) CR_TAB
4174 AS1 (dec,%B0) CR_TAB
4177 if (optimize_size && scratch)
4180 return (AS1 (clr,%A0) CR_TAB
4181 AS1 (lsl,%B0) CR_TAB
4182 AS1 (rol,%A0) CR_TAB
4183 AS1 (lsl,%B0) CR_TAB
4184 AS1 (rol,%A0) CR_TAB
4189 return (AS1 (clr,%A0) CR_TAB
4190 AS1 (lsl,%B0) CR_TAB
4191 AS1 (rol,%A0) CR_TAB
4196 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4198 insn, operands, len, 2);
/* NOTE(review): truncated listing -- embedded original line numbers
   and elided lines.  Not compilable as-is; restore from upstream GCC
   avr.c before modifying.  Summary: SImode logical right shift by
   constant; byte-granular shifts via register moves (movw where
   possible), bit-31 extraction for count 31, out_shift_with_cnt
   lsr/ror fallback for the rest.  */
4202 /* 32bit logic shift right ((unsigned int)x >> i) */
4205 lshrsi3_out (rtx insn, rtx operands[], int *len)
4207 if (GET_CODE (operands[2]) == CONST_INT)
4215 switch (INTVAL (operands[2]))
4218 if (INTVAL (operands[2]) < 32)
4222 return *len = 3, (AS1 (clr,%D0) CR_TAB
4223 AS1 (clr,%C0) CR_TAB
4224 AS2 (movw,%A0,%C0));
4226 return (AS1 (clr,%D0) CR_TAB
4227 AS1 (clr,%C0) CR_TAB
4228 AS1 (clr,%B0) CR_TAB
4233 int reg0 = true_regnum (operands[0]);
4234 int reg1 = true_regnum (operands[1]);
4237 return (AS2 (mov,%A0,%B1) CR_TAB
4238 AS2 (mov,%B0,%C1) CR_TAB
4239 AS2 (mov,%C0,%D1) CR_TAB
4242 return (AS1 (clr,%D0) CR_TAB
4243 AS2 (mov,%C0,%D1) CR_TAB
4244 AS2 (mov,%B0,%C1) CR_TAB
4250 int reg0 = true_regnum (operands[0]);
4251 int reg1 = true_regnum (operands[1]);
4253 if (reg0 == reg1 + 2)
4254 return *len = 2, (AS1 (clr,%C0) CR_TAB
4257 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4258 AS1 (clr,%C0) CR_TAB
4261 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4262 AS2 (mov,%A0,%C1) CR_TAB
4263 AS1 (clr,%C0) CR_TAB
4268 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4269 AS1 (clr,%B0) CR_TAB
4270 AS1 (clr,%C0) CR_TAB
4275 return (AS1 (clr,%A0) CR_TAB
4276 AS2 (sbrc,%D0,7) CR_TAB
4277 AS1 (inc,%A0) CR_TAB
4278 AS1 (clr,%B0) CR_TAB
4279 AS1 (clr,%C0) CR_TAB
4284 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4285 AS1 (ror,%C0) CR_TAB
4286 AS1 (ror,%B0) CR_TAB
4288 insn, operands, len, 4);
/* NOTE(review): truncated listing -- embedded original line numbers
   and elided lines (braces, case labels, returns).  Not compilable
   as-is; restore from upstream GCC avr.c before modifying.  Summary:
   re-derives the length of move/test/and/ior/reload/shift insns by
   calling the corresponding output functions with a length pointer;
   returns the (possibly adjusted) LEN.  */
4292 /* Modifies the length assigned to instruction INSN
4293 LEN is the initially computed length of the insn. */
4296 adjust_insn_length (rtx insn, int len)
4298 rtx patt = PATTERN (insn);
4301 if (GET_CODE (patt) == SET)
4304 op[1] = SET_SRC (patt);
4305 op[0] = SET_DEST (patt);
4306 if (general_operand (op[1], VOIDmode)
4307 && general_operand (op[0], VOIDmode))
4309 switch (GET_MODE (op[0]))
4312 output_movqi (insn, op, &len);
4315 output_movhi (insn, op, &len);
4319 output_movsisf (insn, op, &len);
4325 else if (op[0] == cc0_rtx && REG_P (op[1]))
4327 switch (GET_MODE (op[1]))
4329 case HImode: out_tsthi (insn,&len); break;
4330 case SImode: out_tstsi (insn,&len); break;
4334 else if (GET_CODE (op[1]) == AND)
4336 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4338 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4339 if (GET_MODE (op[1]) == SImode)
4340 len = (((mask & 0xff) != 0xff)
4341 + ((mask & 0xff00) != 0xff00)
4342 + ((mask & 0xff0000L) != 0xff0000L)
4343 + ((mask & 0xff000000L) != 0xff000000L));
4344 else if (GET_MODE (op[1]) == HImode)
4345 len = (((mask & 0xff) != 0xff)
4346 + ((mask & 0xff00) != 0xff00));
4349 else if (GET_CODE (op[1]) == IOR)
4351 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4353 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4354 if (GET_MODE (op[1]) == SImode)
4355 len = (((mask & 0xff) != 0)
4356 + ((mask & 0xff00) != 0)
4357 + ((mask & 0xff0000L) != 0)
4358 + ((mask & 0xff000000L) != 0));
4359 else if (GET_MODE (op[1]) == HImode)
4360 len = (((mask & 0xff) != 0)
4361 + ((mask & 0xff00) != 0));
4365 set = single_set (insn);
4370 op[1] = SET_SRC (set);
4371 op[0] = SET_DEST (set);
4373 if (GET_CODE (patt) == PARALLEL
4374 && general_operand (op[1], VOIDmode)
4375 && general_operand (op[0], VOIDmode))
4377 if (XVECLEN (patt, 0) == 2)
4378 op[2] = XVECEXP (patt, 0, 1);
4380 switch (GET_MODE (op[0]))
4386 output_reload_inhi (insn, op, &len);
4390 output_reload_insisf (insn, op, &len);
4396 else if (GET_CODE (op[1]) == ASHIFT
4397 || GET_CODE (op[1]) == ASHIFTRT
4398 || GET_CODE (op[1]) == LSHIFTRT)
4402 ops[1] = XEXP (op[1],0);
4403 ops[2] = XEXP (op[1],1);
4404 switch (GET_CODE (op[1]))
4407 switch (GET_MODE (op[0]))
4409 case QImode: ashlqi3_out (insn,ops,&len); break;
4410 case HImode: ashlhi3_out (insn,ops,&len); break;
4411 case SImode: ashlsi3_out (insn,ops,&len); break;
4416 switch (GET_MODE (op[0]))
4418 case QImode: ashrqi3_out (insn,ops,&len); break;
4419 case HImode: ashrhi3_out (insn,ops,&len); break;
4420 case SImode: ashrsi3_out (insn,ops,&len); break;
4425 switch (GET_MODE (op[0]))
4427 case QImode: lshrqi3_out (insn,ops,&len); break;
4428 case HImode: lshrhi3_out (insn,ops,&len); break;
4429 case SImode: lshrsi3_out (insn,ops,&len); break;
4441 /* Return nonzero if register REG dead after INSN. */
4444 reg_unused_after (rtx insn, rtx reg)
4446 return (dead_or_set_p (insn, reg)
4447 || (REG_P(reg) && _reg_unused_after (insn, reg)));
/* NOTE(review): truncated listing -- embedded original line numbers,
   elided braces/returns.  Not compilable as-is; restore from upstream
   GCC avr.c before modifying.  Summary: scans forward from INSN,
   handling labels, jumps, call usage and delay-slot SEQUENCEs, to
   decide whether REG is used again.  */
4450 /* Return nonzero if REG is not used after INSN.
4451 We assume REG is a reload reg, and therefore does
4452 not live past labels. It may live past calls or jumps though. */
4455 _reg_unused_after (rtx insn, rtx reg)
4460 /* If the reg is set by this instruction, then it is safe for our
4461 case. Disregard the case where this is a store to memory, since
4462 we are checking a register used in the store address. */
4463 set = single_set (insn);
4464 if (set && GET_CODE (SET_DEST (set)) != MEM
4465 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4468 while ((insn = NEXT_INSN (insn)))
4471 code = GET_CODE (insn);
4474 /* If this is a label that existed before reload, then the register
4475 if dead here. However, if this is a label added by reorg, then
4476 the register may still be live here. We can't tell the difference,
4477 so we just ignore labels completely. */
4478 if (code == CODE_LABEL)
4486 if (code == JUMP_INSN)
4489 /* If this is a sequence, we must handle them all at once.
4490 We could have for instance a call that sets the target register,
4491 and an insn in a delay slot that uses the register. In this case,
4492 we must return 0. */
4493 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4498 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4500 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4501 rtx set = single_set (this_insn);
4503 if (GET_CODE (this_insn) == CALL_INSN)
4505 else if (GET_CODE (this_insn) == JUMP_INSN)
4507 if (INSN_ANNULLED_BRANCH_P (this_insn))
4512 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4514 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4516 if (GET_CODE (SET_DEST (set)) != MEM)
4522 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4527 else if (code == JUMP_INSN)
4531 if (code == CALL_INSN)
4534 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4535 if (GET_CODE (XEXP (tem, 0)) == USE
4536 && REG_P (XEXP (XEXP (tem, 0), 0))
4537 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4539 if (call_used_regs[REGNO (reg)])
4543 set = single_set (insn);
4545 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4547 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4548 return GET_CODE (SET_DEST (set)) != MEM;
4549 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4555 /* Target hook for assembling integer objects. The AVR version needs
4556 special handling for references to certain labels. */
4559 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4561 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4562 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4563 || GET_CODE (x) == LABEL_REF))
4565 fputs ("\t.word\tgs(", asm_out_file);
4566 output_addr_const (asm_out_file, x);
4567 fputs (")\n", asm_out_file);
4570 return default_assemble_integer (x, size, aligned_p);
4573 /* The routine used to output NUL terminated strings. We use a special
4574 version of this for most svr4 targets because doing so makes the
4575 generated assembly code more compact (and thus faster to assemble)
4576 as well as more readable, especially for targets like the i386
4577 (where the only alternative is to output character sequences as
4578 comma separated lists of numbers). */
4581 gas_output_limited_string(FILE *file, const char *str)
4583 const unsigned char *_limited_str = (const unsigned char *) str;
4585 fprintf (file, "%s\"", STRING_ASM_OP);
4586 for (; (ch = *_limited_str); _limited_str++)
4589 switch (escape = ESCAPES[ch])
4595 fprintf (file, "\\%03o", ch);
4599 putc (escape, file);
4603 fprintf (file, "\"\n");
4606 /* The routine used to output sequences of byte values. We use a special
4607 version of this for most svr4 targets because doing so makes the
4608 generated assembly code more compact (and thus faster to assemble)
4609 as well as more readable. Note that if we find subparts of the
4610 character sequence which end with NUL (and which are shorter than
4611 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4614 gas_output_ascii(FILE *file, const char *str, size_t length)
4616 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4617 const unsigned char *limit = _ascii_bytes + length;
4618 unsigned bytes_in_chunk = 0;
4619 for (; _ascii_bytes < limit; _ascii_bytes++)
4621 const unsigned char *p;
4622 if (bytes_in_chunk >= 60)
4624 fprintf (file, "\"\n");
4627 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4629 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4631 if (bytes_in_chunk > 0)
4633 fprintf (file, "\"\n");
4636 gas_output_limited_string (file, (const char*)_ascii_bytes);
4643 if (bytes_in_chunk == 0)
4644 fprintf (file, "\t.ascii\t\"");
4645 switch (escape = ESCAPES[ch = *_ascii_bytes])
4652 fprintf (file, "\\%03o", ch);
4653 bytes_in_chunk += 4;
4657 putc (escape, file);
4658 bytes_in_chunk += 2;
4663 if (bytes_in_chunk > 0)
4664 fprintf (file, "\"\n");
4667 /* Return value is nonzero if pseudos that have been
4668 assigned to registers of class CLASS would likely be spilled
4669 because registers of CLASS are needed for spill registers. */
4672 class_likely_spilled_p (int c)
4674 return (c != ALL_REGS && c != ADDW_REGS);
4677 /* Valid attributes:
4678 progmem - put data to program memory;
4679 signal - make a function to be hardware interrupt. After function
4680 prologue interrupts are disabled;
4681 interrupt - make a function to be hardware interrupt. After function
4682 prologue interrupts are enabled;
4683 naked - don't generate function prologue/epilogue and `ret' command.
4685 Only `progmem' attribute valid for type. */
4687 const struct attribute_spec avr_attribute_table[] =
4689 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4690 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4691 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4692 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4693 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4694 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4695 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
4696 { NULL, 0, 0, false, false, false, NULL }
4699 /* Handle a "progmem" attribute; arguments as in
4700 struct attribute_spec.handler. */
/* Validate a `progmem' attribute.  NODE is the decl/type it was applied
   to, NAME the attribute identifier.  Sets *no_add_attrs when the
   attribute should not be attached as-is.
   NOTE(review): several original lines (braces, return) are elided in
   this listing.  */
4702 avr_handle_progmem_attribute (tree *node, tree name,
4703 tree args ATTRIBUTE_UNUSED,
4704 int flags ATTRIBUTE_UNUSED,
4709 if (TREE_CODE (*node) == TYPE_DECL)
4711 /* This is really a decl attribute, not a type attribute,
4712 but try to handle it for GCC 3.0 backwards compatibility. */
/* Move the attribute onto a new variant of the underlying type.  */
4714 tree type = TREE_TYPE (*node);
4715 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4716 tree newtype = build_type_attribute_variant (type, attr);
4718 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4719 TREE_TYPE (*node) = newtype;
4720 *no_add_attrs = true;
/* Static-storage or external variable: progmem only makes sense for
   initialized, non-external definitions.  */
4722 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4724 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4726 warning (0, "only initialized variables can be placed into "
4727 "program memory area");
4728 *no_add_attrs = true;
/* Anything else: reject the attribute with a warning.  */
4733 warning (OPT_Wattributes, "%qs attribute ignored",
4734 IDENTIFIER_POINTER (name));
4735 *no_add_attrs = true;
4742 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4743 struct attribute_spec.handler. */
/* Validate an attribute that requires a FUNCTION_DECL (`signal' or
   `interrupt').  Rejects non-function decls, then sanity-checks the
   assembler name against the "__vector" prefix used for interrupt
   vectors, to catch misspelled handler names.
   NOTE(review): braces and the warning argument lines are elided in
   this listing.  */
4746 avr_handle_fndecl_attribute (tree *node, tree name,
4747 tree args ATTRIBUTE_UNUSED,
4748 int flags ATTRIBUTE_UNUSED,
4751 if (TREE_CODE (*node) != FUNCTION_DECL)
4753 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4754 IDENTIFIER_POINTER (name));
4755 *no_add_attrs = true;
4759 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4760 const char *attr = IDENTIFIER_POINTER (name);
4762 /* If the function has the 'signal' or 'interrupt' attribute, test to
4763 make sure that the name of the function is "__vector_NN" so as to
4764 catch when the user misspells the interrupt vector name. */
4766 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4768 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4770 warning (0, "%qs appears to be a misspelled interrupt handler",
4774 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4776 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4778 warning (0, "%qs appears to be a misspelled signal handler",
/* Validate an attribute that requires a FUNCTION_TYPE (`naked',
   `OS_task', `OS_main'): warn and drop it when applied to anything
   that is not a function type.  */
4788 avr_handle_fntype_attribute (tree *node, tree name,
4789 tree args ATTRIBUTE_UNUSED,
4790 int flags ATTRIBUTE_UNUSED,
4793 if (TREE_CODE (*node) != FUNCTION_TYPE)
4795 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4796 IDENTIFIER_POINTER (name));
4797 *no_add_attrs = true;
4803 /* Look for attribute `progmem' in DECL
4804 if found return 1, otherwise 0. */
/* Return 1 if DECL (a VAR_DECL) carries the `progmem' attribute either
   directly in ATTRIBUTES or on its (possibly multi-dimensional array)
   element type; 0 otherwise.
   NOTE(review): the loop header stripping ARRAY_TYPEs and the return
   statements are elided in this listing.  */
4807 avr_progmem_p (tree decl, tree attributes)
4811 if (TREE_CODE (decl) != VAR_DECL)
4815 != lookup_attribute ("progmem", attributes))
/* Strip array types to reach the element type.  */
4821 while (TREE_CODE (a) == ARRAY_TYPE);
4823 if (a == error_mark_node)
4826 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4832 /* Add the section attribute if the variable is in progmem. */
/* Target hook: for a static/external variable marked progmem, attach a
   section attribute forcing it into ".progmem.data" and make the decl
   read-only.  */
4835 avr_insert_attributes (tree node, tree *attributes)
4837 if (TREE_CODE (node) == VAR_DECL
4838 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4839 && avr_progmem_p (node, *attributes))
4841 static const char dsec[] = ".progmem.data";
/* Prepend section(".progmem.data") to the attribute list.  */
4842 *attributes = tree_cons (get_identifier ("section"),
4843 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4846 /* ??? This seems sketchy. Why can't the user declare the
4847 thing const in the first place? */
4848 TREE_READONLY (node) = 1;
4852 /* A get_unnamed_section callback for switching to progmem_section. */
/* get_unnamed_section callback: emit the assembler directives that
   switch to the progmem jump-table section.  The section is allocatable
   ("a"); on devices without JMP/CALL it also holds rjmp stubs and so is
   marked executable as well ("ax").  */
4855 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4857 fprintf (asm_out_file,
4858 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4859 AVR_HAVE_JMP_CALL ? "a" : "ax");
4860 /* Should already be aligned, this is just to be safe if it isn't. */
4861 fprintf (asm_out_file, "\t.p2align 1\n");
4864 /* Implement TARGET_ASM_INIT_SECTIONS. */
/* Implement TARGET_ASM_INIT_SECTIONS: create the progmem section and
   redirect read-only data into the data section (AVR flash is not
   directly addressable as data).  */
4867 avr_asm_init_sections (void)
/* SECTION_CODE when jump tables contain rjmp stubs (no JMP/CALL).  */
4869 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4870 avr_output_progmem_section_asm_op,
4872 readonly_data_section = data_section;
/* Target hook: compute section flags for NAME.  Sections named
   ".noinit*" must only hold uninitialized variables and get the BSS
   (@nobits) flag; otherwise warn.
   NOTE(review): the else branch and final return are elided in this
   listing.  */
4876 avr_section_type_flags (tree decl, const char *name, int reloc)
4878 unsigned int flags = default_section_type_flags (decl, name, reloc);
4880 if (strncmp (name, ".noinit", 7) == 0)
4882 if (decl && TREE_CODE (decl) == VAR_DECL
4883 && DECL_INITIAL (decl) == NULL_TREE)
4884 flags |= SECTION_BSS; /* @nobits */
4886 warning (0, "only uninitialized variables can be placed in the "
4893 /* Outputs some appropriate text to go at the start of an assembler
/* Target hook: emit the assembler-file prologue -- reject asm-only
   MCUs, then define the well-known register/SP symbols and declare the
   libgcc startup entry points.  */
4897 avr_file_start (void)
4899 if (avr_current_arch->asm_only)
4900 error ("MCU %qs supported for assembler only", avr_mcu_name);
4902 default_file_start ();
4904 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
/* I/O addresses of SREG and the stack pointer registers.  */
4905 fputs ("__SREG__ = 0x3f\n"
4907 "__SP_L__ = 0x3d\n", asm_out_file);
/* Fixed registers used by the ABI: r0 is the temp, r1 holds zero.  */
4909 fputs ("__tmp_reg__ = 0\n"
4910 "__zero_reg__ = 1\n", asm_out_file);
4912 /* FIXME: output these only if there is anything in the .data / .bss
4913 sections - some code size could be saved by not linking in the
4914 initialization code from libgcc if one or both sections are empty. */
4915 fputs ("\t.global __do_copy_data\n", asm_out_file);
4916 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4919 /* Outputs to the stdio stream FILE some
4920 appropriate text to go at the end of an assembler file. */
4927 /* Choose the order in which to allocate hard registers for
4928 pseudo-registers local to a basic block.
4930 Store the desired register order in the array `reg_alloc_order'.
4931 Element 0 should be the register to allocate first; element 1, the
4932 next register; and so on. */
/* Fill reg_alloc_order with one of three preference orders, selected
   by the -morder1/-morder2 options (order_0 is the default).
   NOTE(review): most entries of the three tables are elided in this
   listing.  */
4935 order_regs_for_local_alloc (void)
4938 static const int order_0[] = {
4946 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4950 static const int order_1[] = {
4958 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4962 static const int order_2[] = {
4971 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
/* Pick the table requested on the command line.  */
4976 const int *order = (TARGET_ORDER_1 ? order_1 :
4977 TARGET_ORDER_2 ? order_2 :
4979 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4980 reg_alloc_order[i] = order[i];
4984 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4985 cost of an RTX operand given its context. X is the rtx of the
4986 operand, MODE is its mode, and OUTER is the rtx_code of this
4987 operand's parent operator. */
/* Mutually recursive helper of avr_rtx_costs: cost of operand X of
   mode MODE appearing under an operator of code OUTER.
   NOTE(review): the switch over CODE is almost entirely elided in this
   listing.  */
4990 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4992 enum rtx_code code = GET_CODE (x);
5003 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
/* Fall back to the full cost computation.  */
5010 avr_rtx_costs (x, code, outer, &total);
5014 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5015 is to be calculated. Return true if the complete cost has been
5016 computed, and false if subexpressions should be scanned. In either
5017 case, *TOTAL contains the cost result. */
/* The AVR rtx_costs hook: estimate the cost of expression X in units of
   COSTS_N_INSNS, recursing into operands via avr_operand_rtx_cost.
   Costs generally scale with GET_MODE_SIZE and are tuned differently
   for -Os (optimize_size) vs. speed.
   NOTE(review): this listing elides the case labels, braces, breaks and
   returns of the big switch; only the cost assignments are visible, so
   which rtx code each group belongs to cannot be confirmed from here.  */
5020 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
5022 enum machine_mode mode = GET_MODE (x);
5029 /* Immediate constants are as cheap as registers. */
5037 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5045 *total = COSTS_N_INSNS (1);
5049 *total = COSTS_N_INSNS (3);
5053 *total = COSTS_N_INSNS (7);
5059 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5067 *total = COSTS_N_INSNS (1);
5073 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* One instruction per byte of the mode.  */
5077 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5078 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Widening: cost is the number of extra bytes produced.  */
5082 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5083 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5084 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5088 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5089 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5090 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5097 *total = COSTS_N_INSNS (1);
5098 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5099 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5103 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5105 *total = COSTS_N_INSNS (2);
5106 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Small constants fit adiw/sbiw-style two-byte adjustments.  */
5108 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5109 *total = COSTS_N_INSNS (1);
5111 *total = COSTS_N_INSNS (2);
5115 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5117 *total = COSTS_N_INSNS (4);
5118 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5120 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5121 *total = COSTS_N_INSNS (1);
5123 *total = COSTS_N_INSNS (4);
5129 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5135 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5136 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5137 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5138 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5142 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5143 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5144 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Multiply/divide-like costs depend on -Os and on whether the device
   family has JMP/CALL (library call overhead differs).  */
5152 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
5153 else if (optimize_size)
5154 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5161 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
5162 else if (optimize_size)
5163 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5171 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5172 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5180 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5183 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5184 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
/* Shifts: variable shift counts are expensive loops; constant counts
   are tabulated per-count below.  */
5191 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5193 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5194 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5198 val = INTVAL (XEXP (x, 1));
5200 *total = COSTS_N_INSNS (3);
5201 else if (val >= 0 && val <= 7)
5202 *total = COSTS_N_INSNS (val);
5204 *total = COSTS_N_INSNS (1);
5209 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5211 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5212 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5215 switch (INTVAL (XEXP (x, 1)))
5222 *total = COSTS_N_INSNS (2);
5225 *total = COSTS_N_INSNS (3);
5231 *total = COSTS_N_INSNS (4);
5236 *total = COSTS_N_INSNS (5);
5239 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5242 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5245 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5248 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5249 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5254 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5256 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5257 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5260 switch (INTVAL (XEXP (x, 1)))
5266 *total = COSTS_N_INSNS (3);
5271 *total = COSTS_N_INSNS (4);
5274 *total = COSTS_N_INSNS (6);
5277 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5280 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5281 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5288 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5295 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5297 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5298 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5302 val = INTVAL (XEXP (x, 1));
5304 *total = COSTS_N_INSNS (4);
5306 *total = COSTS_N_INSNS (2);
5307 else if (val >= 0 && val <= 7)
5308 *total = COSTS_N_INSNS (val);
5310 *total = COSTS_N_INSNS (1);
5315 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5317 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5318 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5321 switch (INTVAL (XEXP (x, 1)))
5327 *total = COSTS_N_INSNS (2);
5330 *total = COSTS_N_INSNS (3);
5336 *total = COSTS_N_INSNS (4);
5340 *total = COSTS_N_INSNS (5);
5343 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5346 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5350 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5353 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5354 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5359 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5361 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5362 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5365 switch (INTVAL (XEXP (x, 1)))
5371 *total = COSTS_N_INSNS (4);
5376 *total = COSTS_N_INSNS (6);
5379 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
/* MOVW halves the cost of a 16-bit register move where available.  */
5382 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5385 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5386 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5393 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5400 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5402 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5403 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5407 val = INTVAL (XEXP (x, 1));
5409 *total = COSTS_N_INSNS (3);
5410 else if (val >= 0 && val <= 7)
5411 *total = COSTS_N_INSNS (val);
5413 *total = COSTS_N_INSNS (1);
5418 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5420 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5421 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5424 switch (INTVAL (XEXP (x, 1)))
5431 *total = COSTS_N_INSNS (2);
5434 *total = COSTS_N_INSNS (3);
5439 *total = COSTS_N_INSNS (4);
5443 *total = COSTS_N_INSNS (5);
5449 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5452 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5456 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5459 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5460 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5465 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5467 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5468 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5471 switch (INTVAL (XEXP (x, 1)))
5477 *total = COSTS_N_INSNS (4);
5480 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5485 *total = COSTS_N_INSNS (4);
5488 *total = COSTS_N_INSNS (6);
5491 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5492 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5499 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
/* Comparison costs depend on the mode of the compared operand; wider
   modes need more instructions, nonzero constants add set-up cost.  */
5503 switch (GET_MODE (XEXP (x, 0)))
5506 *total = COSTS_N_INSNS (1);
5507 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5508 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5512 *total = COSTS_N_INSNS (2);
5513 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5514 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5515 else if (INTVAL (XEXP (x, 1)) != 0)
5516 *total += COSTS_N_INSNS (1);
5520 *total = COSTS_N_INSNS (4);
5521 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5522 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5523 else if (INTVAL (XEXP (x, 1)) != 0)
5524 *total += COSTS_N_INSNS (3);
5530 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5539 /* Calculate the cost of a memory address. */
/* Target hook: relative cost of memory address X.  Base+offset
   addresses whose displacement exceeds the LD offset range, and
   constant addresses that are not I/O-addressable, are more expensive.
   NOTE(review): the returned cost values are elided in this listing.  */
5542 avr_address_cost (rtx x)
5544 if (GET_CODE (x) == PLUS
5545 && GET_CODE (XEXP (x,1)) == CONST_INT
5546 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5547 && INTVAL (XEXP (x,1)) >= 61)
5549 if (CONSTANT_ADDRESS_P (x))
5551 if (optimize > 0 && io_address_operand (x, QImode))
5558 /* Test for extra memory constraint 'Q'.
5559 It's a memory address based on Y or Z pointer with valid displacement. */
/* Implement the extra memory constraint 'Q': a memory operand whose
   address is a base register plus a displacement within the LD offset
   range.  Accepts pseudos (before reload), the Y/Z pointer registers,
   and the frame/arg pointers.  */
5562 extra_constraint_Q (rtx x)
5564 if (GET_CODE (XEXP (x,0)) == PLUS
5565 && REG_P (XEXP (XEXP (x,0), 0))
5566 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5567 && (INTVAL (XEXP (XEXP (x,0), 1))
5568 <= MAX_LD_OFFSET (GET_MODE (x))))
5570 rtx xx = XEXP (XEXP (x,0), 0);
5571 int regno = REGNO (xx);
/* Debug tracing of reload state when -mall-debug is given.  */
5572 if (TARGET_ALL_DEBUG)
5574 fprintf (stderr, ("extra_constraint:\n"
5575 "reload_completed: %d\n"
5576 "reload_in_progress: %d\n"),
5577 reload_completed, reload_in_progress);
5580 if (regno >= FIRST_PSEUDO_REGISTER)
5581 return 1; /* allocate pseudos */
5582 else if (regno == REG_Z || regno == REG_Y)
5583 return 1; /* strictly check */
5584 else if (xx == frame_pointer_rtx
5585 || xx == arg_pointer_rtx)
5586 return 1; /* XXX frame & arg pointer checks */
5591 /* Convert condition code CONDITION to the valid AVR condition code. */
5594 avr_normalize_condition (RTX_CODE condition)
5611 /* This function optimizes conditional jumps. */
/* Body of the conditional-jump optimization pass (the enclosing
   function's signature -- presumably avr_reorg -- is not visible in
   this excerpt).  Walks all insns looking for cc0-setting compares and
   canonicalizes them: swaps commuted register compares, strengthens
   reg-vs-constant compares via avr_simplify_comparison_p, and rewrites
   tst insns, fixing up the condition of the following branch each
   time.  */
5618 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
/* Only look at real insns that are a single SET.  */
5620 if (! (GET_CODE (insn) == INSN
5621 || GET_CODE (insn) == CALL_INSN
5622 || GET_CODE (insn) == JUMP_INSN)
5623 || !single_set (insn))
5626 pattern = PATTERN (insn);
5628 if (GET_CODE (pattern) == PARALLEL)
5629 pattern = XVECEXP (pattern, 0, 0);
5630 if (GET_CODE (pattern) == SET
5631 && SET_DEST (pattern) == cc0_rtx
5632 && compare_diff_p (insn))
5634 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5636 /* Now we work under compare insn. */
5638 pattern = SET_SRC (pattern);
/* reg-vs-reg compare: swap operands and reverse the branch
   condition accordingly.  */
5639 if (true_regnum (XEXP (pattern,0)) >= 0
5640 && true_regnum (XEXP (pattern,1)) >= 0 )
5642 rtx x = XEXP (pattern,0);
5643 rtx next = next_real_insn (insn);
5644 rtx pat = PATTERN (next);
5645 rtx src = SET_SRC (pat);
5646 rtx t = XEXP (src,0);
5647 PUT_CODE (t, swap_condition (GET_CODE (t)));
5648 XEXP (pattern,0) = XEXP (pattern,1);
5649 XEXP (pattern,1) = x;
/* Force re-recognition of the modified branch.  */
5650 INSN_CODE (next) = -1;
/* reg-vs-constant compare: bump the constant by one when that
   lets us use a cheaper condition code.  */
5652 else if (true_regnum (XEXP (pattern,0)) >= 0
5653 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5655 rtx x = XEXP (pattern,1);
5656 rtx next = next_real_insn (insn);
5657 rtx pat = PATTERN (next);
5658 rtx src = SET_SRC (pat);
5659 rtx t = XEXP (src,0);
5660 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5662 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5664 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5665 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5666 INSN_CODE (next) = -1;
5667 INSN_CODE (insn) = -1;
5671 else if (true_regnum (SET_SRC (pattern)) >= 0)
5673 /* This is a tst insn */
5674 rtx next = next_real_insn (insn);
5675 rtx pat = PATTERN (next);
5676 rtx src = SET_SRC (pat);
5677 rtx t = XEXP (src,0);
/* Negate the tested value and reverse the branch condition.  */
5679 PUT_CODE (t, swap_condition (GET_CODE (t)));
5680 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5682 INSN_CODE (next) = -1;
5683 INSN_CODE (insn) = -1;
5689 /* Returns register number for function return value.*/
5692 avr_ret_register (void)
5697 /* Create an RTX representing the place where a
5698 library function returns a value of mode MODE. */
/* RTX for a library-call return value of MODE: values are returned in
   the register pair ending at RET_REGISTER + 1, so the start register
   is RET_REGISTER + 2 minus the value's size in bytes.  */
5701 avr_libcall_value (enum machine_mode mode)
5703 int offs = GET_MODE_SIZE (mode);
5706 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5709 /* Create an RTX representing the place where a
5710 function returns a value of data type VALTYPE. */
/* RTX for a function return value of data type TYPE.  Non-BLKmode
   types are handled like libcall values; BLKmode sizes are rounded up
   to the next of {4, 8} bytes before computing the start register.  */
5713 avr_function_value (const_tree type,
5714 const_tree func ATTRIBUTE_UNUSED,
5715 bool outgoing ATTRIBUTE_UNUSED)
5719 if (TYPE_MODE (type) != BLKmode)
5720 return avr_libcall_value (TYPE_MODE (type));
5722 offs = int_size_in_bytes (type);
/* Round 3 up to 4, and 5..7 up to 8 bytes.  */
5725 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5726 offs = GET_MODE_SIZE (SImode);
5727 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5728 offs = GET_MODE_SIZE (DImode);
5730 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5733 /* Places additional restrictions on the register class to
5734 use when it is necessary to copy value X into a register
5738 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
/* Nonzero if the hard register backing X belongs to class RCLASS.
   NOTE(review): the early-out for invalid regno and the returns are
   elided in this listing.  */
5744 test_hard_reg_class (enum reg_class rclass, rtx x)
5746 int regno = true_regnum (x);
5750 if (TEST_HARD_REG_CLASS (rclass, regno))
/* Return nonzero if the jump INSN to DEST skips over exactly one
   following insn (i.e. the branch distance equals the insn's own
   length plus one word).  */
5758 jump_over_one_insn_p (rtx insn, rtx dest)
/* DEST may be a LABEL_REF wrapping the label or the label itself.  */
5760 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5763 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5764 int dest_addr = INSN_ADDRESSES (uid);
5765 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5768 /* Returns 1 if a value of mode MODE can be stored starting with hard
5769 register number REGNO. On the enhanced core, anything larger than
5770 1 byte must start in even numbered register for "movw" to work
5771 (this way we don't have to check for odd registers everywhere). */
/* Return 1 if a value of MODE may be stored starting at hard register
   REGNO: keep QImode out of the stack-pointer bytes, reserve r28:r29
   (Y) for Pmode, and require multi-byte values to fit and to start in
   an even register (so "movw" works on enhanced cores).  */
5774 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5776 /* Disallow QImode in stack pointer regs. */
5777 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5780 /* The only thing that can go into registers r28:r29 is a Pmode. */
5781 if (regno == REG_Y && mode == Pmode)
5784 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5785 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5791 /* Modes larger than QImode occupy consecutive registers. */
5792 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5795 /* All modes larger than QImode should start in an even register. */
5796 return !(regno & 1);
/* Emit assembler for reloading a 16-bit value into a register pair via
   the scratch register %2, choosing shorter sequences when the low
   byte, the high byte, or both bytes of a constant coincide or are
   zero (__zero_reg__ holds 0).
   NOTE(review): the *len bookkeeping and some sequence tails are
   elided in this listing.  */
5800 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5806 if (GET_CODE (operands[1]) == CONST_INT)
5808 int val = INTVAL (operands[1]);
/* Low byte zero: copy zero, load only the high byte.  */
5809 if ((val & 0xff) == 0)
5812 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5813 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* High byte zero: load only the low byte.  */
5816 else if ((val & 0xff00) == 0)
5819 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5820 AS2 (mov,%A0,%2) CR_TAB
5821 AS2 (mov,%B0,__zero_reg__));
/* Both bytes equal: one ldi feeds both halves.  */
5823 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5826 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5827 AS2 (mov,%A0,%2) CR_TAB
/* General case: load each byte through the scratch register.  */
5832 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5833 AS2 (mov,%A0,%2) CR_TAB
5834 AS2 (ldi,%2,hi8(%1)) CR_TAB
/* Emit assembler for reloading a 32-bit (SImode/SFmode) value into
   registers via the scratch register %2.  For a constant source, each
   all-zero byte is copied from __zero_reg__ instead of being loaded,
   and *len is the resulting instruction count (4 loads + up to 4
   movs).  */
5840 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5842 rtx src = operands[1];
5843 int cnst = (GET_CODE (src) == CONST_INT);
/* Length: one ldi+mov pair per nonzero byte, one mov per zero byte.  */
5848 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5849 + ((INTVAL (src) & 0xff00) != 0)
5850 + ((INTVAL (src) & 0xff0000) != 0)
5851 + ((INTVAL (src) & 0xff000000) != 0);
/* Byte A (bits 0-7).  */
5858 if (cnst && ((INTVAL (src) & 0xff) == 0))
5859 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5862 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5863 output_asm_insn (AS2 (mov, %A0, %2), operands);
/* Byte B (bits 8-15).  */
5865 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5866 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5869 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5870 output_asm_insn (AS2 (mov, %B0, %2), operands);
/* Byte C (bits 16-23).  */
5872 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5873 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5876 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5877 output_asm_insn (AS2 (mov, %C0, %2), operands);
/* Byte D (bits 24-31).  */
5879 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5880 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5883 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5884 output_asm_insn (AS2 (mov, %D0, %2), operands);
/* Emit a "bld %X0,N" insn for bit BIT_NR of the multi-byte operand 0:
   patch the byte letter (A + byte index) and bit digit (0-7) directly
   into the static template before printing it.  */
5890 avr_output_bld (rtx operands[], int bit_nr)
5892 static char s[] = "bld %A0,0";
5894 s[5] = 'A' + (bit_nr >> 3);
5895 s[8] = '0' + (bit_nr & 7);
5896 output_asm_insn (s, operands);
/* Output one element of a jump-table (addr_vec) into the progmem
   section: a word-sized gs() address on devices with JMP/CALL,
   otherwise an rjmp to the label.  */
5900 avr_output_addr_vec_elt (FILE *stream, int value)
5902 switch_to_section (progmem_section);
5903 if (AVR_HAVE_JMP_CALL)
5904 fprintf (stream, "\t.word gs(.L%d)\n", value);
5906 fprintf (stream, "\trjmp .L%d\n", value);
5909 /* Returns true if register REGNO is safe to be allocated as a scratch
5910 register (for a define_peephole2) in the current function. */
/* Target hook: may REGNO be used as a peephole2 scratch register in
   the current function?  Not in interrupt/signal handlers unless the
   register is already live (and thus saved by the prologue).
   NOTE(review): the return statements are elided in this listing.  */
5913 avr_hard_regno_scratch_ok (unsigned int regno)
5915 /* Interrupt functions can only use registers that have already been saved
5916 by the prologue, even if they would normally be call-clobbered. */
5918 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5919 && !df_regs_ever_live_p (regno))
5925 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
/* Return nonzero if OLD_REG may be renamed to NEW_REG: in interrupt or
   signal handlers the new register must already be live so the
   prologue saves it.
   NOTE(review): the return statements are elided in this listing.  */
5928 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5929 unsigned int new_reg)
5931 /* Interrupt functions can only use registers that have already been
5932 saved by the prologue, even if they would normally be
5935 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5936 && !df_regs_ever_live_p (new_reg))
5942 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5943 or memory location in the I/O space (QImode only).
5945 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5946 Operand 1: register operand to test, or CONST_INT memory address.
5947 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5948 Operand 3: label to jump to if the test is true. */
/* Output a single-bit test-and-branch (see the comment block above the
   function for the operand layout).  Picks sbis/sbic for low I/O
   addresses, in+sbrs/sbrc for high I/O addresses, and sbrs/sbrc on a
   register otherwise; the sense is reversed for long jumps or when the
   branch skips exactly one insn.
   NOTE(review): some control-flow lines (braces, the comp==GE/LT
   handling, intermediate returns) are elided in this listing.  */
5951 avr_out_sbxx_branch (rtx insn, rtx operands[])
5953 enum rtx_code comp = GET_CODE (operands[0]);
5954 int long_jump = (get_attr_length (insn) >= 4);
5955 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5959 else if (comp == LT)
5963 comp = reverse_condition (comp);
/* CONST_INT operand 1: a memory address in the I/O space.  */
5965 if (GET_CODE (operands[1]) == CONST_INT)
5967 if (INTVAL (operands[1]) < 0x40)
/* Low I/O range: bit-testable directly with sbis/sbic.  */
5970 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5972 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
/* High I/O range: read into __tmp_reg__, then test the bit.  */
5976 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5978 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5980 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5983 else /* GET_CODE (operands[1]) == REG */
5985 if (GET_MODE (operands[1]) == QImode)
5988 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5990 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5992 else /* HImode or SImode */
/* Build "sbrc/sbrs %X1,N" from the bit mask in operand 2.  */
5994 static char buf[] = "sbrc %A1,0";
5995 int bit_nr = exact_log2 (INTVAL (operands[2])
5996 & GET_MODE_MASK (GET_MODE (operands[1])));
5998 buf[3] = (comp == EQ) ? 's' : 'c';
5999 buf[6] = 'A' + (bit_nr >> 3);
6000 buf[9] = '0' + (bit_nr & 7);
6001 output_asm_insn (buf, operands);
/* Long jump: skip over the 2-word jump; otherwise a plain rjmp.  */
6006 return (AS1 (rjmp,.+4) CR_TAB
6009 return AS1 (rjmp,%3);
6013 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
/* TARGET_ASM_CONSTRUCTOR worker: pull in libgcc's constructor-runner
   before emitting the default ctor table entry.  */
6016 avr_asm_out_ctor (rtx symbol, int priority)
6018 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6019 default_ctor_section_asm_out_constructor (symbol, priority);
6022 /* Worker function for TARGET_ASM_DESTRUCTOR. */
/* TARGET_ASM_DESTRUCTOR worker: pull in libgcc's destructor-runner
   before emitting the default dtor table entry.  */
6025 avr_asm_out_dtor (rtx symbol, int priority)
6027 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6028 default_dtor_section_asm_out_destructor (symbol, priority);
6031 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6034 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6036 if (TYPE_MODE (type) == BLKmode)
6038 HOST_WIDE_INT size = int_size_in_bytes (type);
6039 return (size == -1 || size > 8);