1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
64 static RTX_CODE compare_condition (rtx insn);
65 static int compare_sign_p (rtx insn);
66 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
69 const struct attribute_spec avr_attribute_table[];
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static void avr_asm_function_end_prologue (FILE *);
74 static void avr_asm_function_begin_epilogue (FILE *);
75 static rtx avr_function_value (const_tree, const_tree, bool);
76 static void avr_insert_attributes (tree, tree *);
77 static void avr_asm_init_sections (void);
78 static unsigned int avr_section_type_flags (tree, const char *, int);
80 static void avr_reorg (void);
81 static void avr_asm_out_ctor (rtx, int);
82 static void avr_asm_out_dtor (rtx, int);
83 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
84 static bool avr_rtx_costs (rtx, int, int, int *, bool);
85 static int avr_address_cost (rtx, bool);
86 static bool avr_return_in_memory (const_tree, const_tree);
87 static struct machine_function * avr_init_machine_status (void);
88 static rtx avr_builtin_setjmp_frame_value (void);
89 static bool avr_hard_regno_scratch_ok (unsigned int);
91 /* Allocate registers from r25 to r8 for parameters for function calls. */
92 #define FIRST_CUM_REG 26
94 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
95 static GTY(()) rtx tmp_reg_rtx;
97 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
98 static GTY(()) rtx zero_reg_rtx;
100 /* AVR register names {"r0", "r1", ..., "r31"} */
101 static const char *const avr_regnames[] = REGISTER_NAMES;
103 /* This holds the last insn address. */
104 static int last_insn_address = 0;
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro;
109 /* Current architecture. */
110 const struct base_arch_s *avr_current_arch;
112 section *progmem_section;
114 static const struct base_arch_s avr_arch_types[] = {
115 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
116 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
117 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
118 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
119 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
120 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
121 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
122 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
123 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
124 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
125 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
128 /* These names are used as the index into the avr_arch_types[] table
147 const char *const name;
148 int arch; /* index in avr_arch_types[] */
149 /* Must lie outside user's namespace. NULL == no macro. */
150 const char *const macro;
153 /* List of all known AVR MCU types - if updated, it has to be kept
154 in sync in several places (FIXME: is there a better way?):
156 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
157 - t-avr (MULTILIB_MATCHES)
158 - gas/config/tc-avr.c
161 static const struct mcu_type_s avr_mcu_types[] = {
162 /* Classic, <= 8K. */
163 { "avr2", ARCH_AVR2, NULL },
164 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
165 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
166 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
167 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
168 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
169 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
170 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
171 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
172 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
173 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
174 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
175 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
176 /* Classic + MOVW, <= 8K. */
177 { "avr25", ARCH_AVR25, NULL },
178 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
179 { "attiny13a", ARCH_AVR25, "__AVR_ATtiny13A__" },
180 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
181 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
182 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
183 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
184 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
185 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
186 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
187 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
188 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
189 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
190 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
191 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
192 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
193 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
194 /* Classic, > 8K, <= 64K. */
195 { "avr3", ARCH_AVR3, NULL },
196 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
197 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
198 /* Classic, == 128K. */
199 { "avr31", ARCH_AVR31, NULL },
200 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
201 { "at43usb320", ARCH_AVR31, "__AVR_AT43USB320__" },
202 /* Classic + MOVW + JMP/CALL. */
203 { "avr35", ARCH_AVR35, NULL },
204 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
205 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
206 { "attiny167", ARCH_AVR35, "__AVR_ATtiny167__" },
207 /* Enhanced, <= 8K. */
208 { "avr4", ARCH_AVR4, NULL },
209 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
210 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
211 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
212 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
213 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
214 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
215 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
216 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
217 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
218 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
219 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
220 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
221 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
222 /* Enhanced, > 8K, <= 64K. */
223 { "avr5", ARCH_AVR5, NULL },
224 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
225 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
226 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
227 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
228 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
229 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
230 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
231 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
232 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
233 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
234 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
235 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
236 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
237 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
238 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
239 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
240 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
241 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
242 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
243 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
244 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
245 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
246 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
247 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
248 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
249 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
250 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
251 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
252 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
253 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
254 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
255 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
256 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
257 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
258 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
259 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
260 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
261 { "atmega32m1", ARCH_AVR5, "__AVR_ATmega32M1__" },
262 { "atmega32c1", ARCH_AVR5, "__AVR_ATmega32C1__" },
263 { "atmega32u4", ARCH_AVR5, "__AVR_ATmega32U4__" },
264 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
265 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
266 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
267 /* Enhanced, == 128K. */
268 { "avr51", ARCH_AVR51, NULL },
269 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
270 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
271 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
272 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
273 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
274 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
275 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
277 { "avr6", ARCH_AVR6, NULL },
278 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
279 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
280 /* Assembler only. */
281 { "avr1", ARCH_AVR1, NULL },
282 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
283 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
284 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
285 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
286 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
287 { NULL, ARCH_UNKNOWN, NULL }
290 int avr_case_values_threshold = 30000;
292 /* Initialize the GCC target structure. */
293 #undef TARGET_ASM_ALIGNED_HI_OP
294 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
295 #undef TARGET_ASM_ALIGNED_SI_OP
296 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
297 #undef TARGET_ASM_UNALIGNED_HI_OP
298 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
299 #undef TARGET_ASM_UNALIGNED_SI_OP
300 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
301 #undef TARGET_ASM_INTEGER
302 #define TARGET_ASM_INTEGER avr_assemble_integer
303 #undef TARGET_ASM_FILE_START
304 #define TARGET_ASM_FILE_START avr_file_start
305 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
306 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
307 #undef TARGET_ASM_FILE_END
308 #define TARGET_ASM_FILE_END avr_file_end
310 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
311 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
312 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
313 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
314 #undef TARGET_FUNCTION_VALUE
315 #define TARGET_FUNCTION_VALUE avr_function_value
316 #undef TARGET_ATTRIBUTE_TABLE
317 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
318 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
319 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
320 #undef TARGET_INSERT_ATTRIBUTES
321 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
322 #undef TARGET_SECTION_TYPE_FLAGS
323 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
324 #undef TARGET_RTX_COSTS
325 #define TARGET_RTX_COSTS avr_rtx_costs
326 #undef TARGET_ADDRESS_COST
327 #define TARGET_ADDRESS_COST avr_address_cost
328 #undef TARGET_MACHINE_DEPENDENT_REORG
329 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
331 #undef TARGET_RETURN_IN_MEMORY
332 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
334 #undef TARGET_STRICT_ARGUMENT_NAMING
335 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
337 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
338 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
340 #undef TARGET_HARD_REGNO_SCRATCH_OK
341 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
343 struct gcc_target targetm = TARGET_INITIALIZER;
346 avr_override_options (void)
348 const struct mcu_type_s *t;
350 flag_delete_null_pointer_checks = 0;
352 if (!PARAM_SET_P (PARAM_INLINE_CALL_COST))
353 set_param_value ("inline-call-cost", 5);
355 for (t = avr_mcu_types; t->name; t++)
356 if (strcmp (t->name, avr_mcu_name) == 0)
361 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
363 for (t = avr_mcu_types; t->name; t++)
364 fprintf (stderr," %s\n", t->name);
367 avr_current_arch = &avr_arch_types[t->arch];
368 avr_extra_arch_macro = t->macro;
370 if (optimize && !TARGET_NO_TABLEJUMP)
371 avr_case_values_threshold =
372 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
374 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
375 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
377 init_machine_status = avr_init_machine_status;
380 /* return register class from register number. */
382 static const int reg_class_tab[]={
383 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
384 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
385 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
386 GENERAL_REGS, /* r0 - r15 */
387 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
388 LD_REGS, /* r16 - 23 */
389 ADDW_REGS,ADDW_REGS, /* r24,r25 */
390 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
391 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
392 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
393 STACK_REG,STACK_REG /* SPL,SPH */
396 /* Function to set up the backend function structure. */
398 static struct machine_function *
399 avr_init_machine_status (void)
401 return ((struct machine_function *)
402 ggc_alloc_cleared (sizeof (struct machine_function)));
405 /* Return register class for register R. */
408 avr_regno_reg_class (int r)
411 return reg_class_tab[r];
415 /* Return nonzero if FUNC is a naked function. */
418 avr_naked_function_p (tree func)
422 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
424 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
425 return a != NULL_TREE;
428 /* Return nonzero if FUNC is an interrupt function as specified
429 by the "interrupt" attribute. */
432 interrupt_function_p (tree func)
436 if (TREE_CODE (func) != FUNCTION_DECL)
439 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
440 return a != NULL_TREE;
443 /* Return nonzero if FUNC is a signal function as specified
444 by the "signal" attribute. */
447 signal_function_p (tree func)
451 if (TREE_CODE (func) != FUNCTION_DECL)
454 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
455 return a != NULL_TREE;
458 /* Return nonzero if FUNC is a OS_task function. */
461 avr_OS_task_function_p (tree func)
465 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
467 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
468 return a != NULL_TREE;
471 /* Return nonzero if FUNC is a OS_main function. */
474 avr_OS_main_function_p (tree func)
478 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
480 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
481 return a != NULL_TREE;
484 /* Return the number of hard registers to push/pop in the prologue/epilogue
485 of the current function, and optionally store these registers in SET. */
488 avr_regs_to_save (HARD_REG_SET *set)
491 int int_or_sig_p = (interrupt_function_p (current_function_decl)
492 || signal_function_p (current_function_decl));
494 if (!reload_completed)
495 cfun->machine->is_leaf = leaf_function_p ();
498 CLEAR_HARD_REG_SET (*set);
501 /* No need to save any registers if the function never returns or
502 is have "OS_task" or "OS_main" attribute. */
503 if (TREE_THIS_VOLATILE (current_function_decl)
504 || cfun->machine->is_OS_task
505 || cfun->machine->is_OS_main)
508 for (reg = 0; reg < 32; reg++)
510 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
511 any global register variables. */
515 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
516 || (df_regs_ever_live_p (reg)
517 && (int_or_sig_p || !call_used_regs[reg])
518 && !(frame_pointer_needed
519 && (reg == REG_Y || reg == (REG_Y+1)))))
522 SET_HARD_REG_BIT (*set, reg);
529 /* Compute offset between arg_pointer and frame_pointer. */
532 initial_elimination_offset (int from, int to)
534 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
538 int offset = frame_pointer_needed ? 2 : 0;
539 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
541 offset += avr_regs_to_save (NULL);
542 return get_frame_size () + (avr_pc_size) + 1 + offset;
546 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
547 frame pointer by +STARTING_FRAME_OFFSET.
548 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
549 avoids creating add/sub of offset in nonlocal goto and setjmp. */
551 rtx avr_builtin_setjmp_frame_value (void)
553 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
554 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
557 /* Return 1 if the function epilogue is just a single "ret". */
560 avr_simple_epilogue (void)
562 return (! frame_pointer_needed
563 && get_frame_size () == 0
564 && avr_regs_to_save (NULL) == 0
565 && ! interrupt_function_p (current_function_decl)
566 && ! signal_function_p (current_function_decl)
567 && ! avr_naked_function_p (current_function_decl)
568 && ! TREE_THIS_VOLATILE (current_function_decl));
571 /* This function checks sequence of live registers. */
574 sequent_regs_live (void)
580 for (reg = 0; reg < 18; ++reg)
582 if (!call_used_regs[reg])
584 if (df_regs_ever_live_p (reg))
594 if (!frame_pointer_needed)
596 if (df_regs_ever_live_p (REG_Y))
604 if (df_regs_ever_live_p (REG_Y+1))
617 return (cur_seq == live_seq) ? live_seq : 0;
620 /* Obtain the length sequence of insns. */
623 get_sequence_length (rtx insns)
628 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
629 length += get_attr_length (insn);
634 /* Output function prologue. */
637 expand_prologue (void)
642 HOST_WIDE_INT size = get_frame_size();
643 /* Define templates for push instructions. */
644 rtx pushbyte = gen_rtx_MEM (QImode,
645 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
646 rtx pushword = gen_rtx_MEM (HImode,
647 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
650 last_insn_address = 0;
652 /* Init cfun->machine. */
653 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
654 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
655 cfun->machine->is_signal = signal_function_p (current_function_decl);
656 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
657 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
659 /* Prologue: naked. */
660 if (cfun->machine->is_naked)
665 avr_regs_to_save (&set);
666 live_seq = sequent_regs_live ();
667 minimize = (TARGET_CALL_PROLOGUES
668 && !cfun->machine->is_interrupt
669 && !cfun->machine->is_signal
670 && !cfun->machine->is_OS_task
671 && !cfun->machine->is_OS_main
674 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
676 if (cfun->machine->is_interrupt)
678 /* Enable interrupts. */
679 insn = emit_insn (gen_enable_interrupt ());
680 RTX_FRAME_RELATED_P (insn) = 1;
684 insn = emit_move_insn (pushbyte, zero_reg_rtx);
685 RTX_FRAME_RELATED_P (insn) = 1;
688 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
689 RTX_FRAME_RELATED_P (insn) = 1;
692 insn = emit_move_insn (tmp_reg_rtx,
693 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
694 RTX_FRAME_RELATED_P (insn) = 1;
695 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
696 RTX_FRAME_RELATED_P (insn) = 1;
700 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
702 insn = emit_move_insn (tmp_reg_rtx,
703 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
704 RTX_FRAME_RELATED_P (insn) = 1;
705 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
706 RTX_FRAME_RELATED_P (insn) = 1;
709 /* Clear zero reg. */
710 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
711 RTX_FRAME_RELATED_P (insn) = 1;
713 /* Prevent any attempt to delete the setting of ZERO_REG! */
714 emit_use (zero_reg_rtx);
716 if (minimize && (frame_pointer_needed
717 || (AVR_2_BYTE_PC && live_seq > 6)
720 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
721 gen_int_mode (size, HImode));
722 RTX_FRAME_RELATED_P (insn) = 1;
725 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
726 gen_int_mode (size + live_seq, HImode)));
727 RTX_FRAME_RELATED_P (insn) = 1;
732 for (reg = 0; reg < 32; ++reg)
734 if (TEST_HARD_REG_BIT (set, reg))
736 /* Emit push of register to save. */
737 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
738 RTX_FRAME_RELATED_P (insn) = 1;
741 if (frame_pointer_needed)
743 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
745 /* Push frame pointer. */
746 insn = emit_move_insn (pushword, frame_pointer_rtx);
747 RTX_FRAME_RELATED_P (insn) = 1;
752 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
753 RTX_FRAME_RELATED_P (insn) = 1;
757 /* Creating a frame can be done by direct manipulation of the
758 stack or via the frame pointer. These two methods are:
765 the optimum method depends on function type, stack and frame size.
766 To avoid a complex logic, both methods are tested and shortest
770 rtx sp_plus_insns = NULL_RTX;
772 if (TARGET_TINY_STACK)
774 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
775 over 'sbiw' (2 cycles, same size). */
776 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
780 /* Normal sized addition. */
781 myfp = frame_pointer_rtx;
784 /* Method 1-Adjust frame pointer. */
787 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
788 RTX_FRAME_RELATED_P (insn) = 1;
791 emit_move_insn (myfp,
792 gen_rtx_PLUS (GET_MODE(myfp), myfp,
795 RTX_FRAME_RELATED_P (insn) = 1;
797 /* Copy to stack pointer. */
798 if (TARGET_TINY_STACK)
800 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
801 RTX_FRAME_RELATED_P (insn) = 1;
803 else if (TARGET_NO_INTERRUPTS
804 || cfun->machine->is_signal
805 || cfun->machine->is_OS_main)
808 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
810 RTX_FRAME_RELATED_P (insn) = 1;
812 else if (cfun->machine->is_interrupt)
814 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
816 RTX_FRAME_RELATED_P (insn) = 1;
820 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
821 RTX_FRAME_RELATED_P (insn) = 1;
824 fp_plus_insns = get_insns ();
827 /* Method 2-Adjust Stack pointer. */
833 emit_move_insn (stack_pointer_rtx,
834 gen_rtx_PLUS (HImode,
838 RTX_FRAME_RELATED_P (insn) = 1;
841 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
842 RTX_FRAME_RELATED_P (insn) = 1;
844 sp_plus_insns = get_insns ();
848 /* Use shortest method. */
849 if (size <= 6 && (get_sequence_length (sp_plus_insns)
850 < get_sequence_length (fp_plus_insns)))
851 emit_insn (sp_plus_insns);
853 emit_insn (fp_plus_insns);
859 /* Output summary at end of function prologue. */
862 avr_asm_function_end_prologue (FILE *file)
864 if (cfun->machine->is_naked)
866 fputs ("/* prologue: naked */\n", file);
870 if (cfun->machine->is_interrupt)
872 fputs ("/* prologue: Interrupt */\n", file);
874 else if (cfun->machine->is_signal)
876 fputs ("/* prologue: Signal */\n", file);
879 fputs ("/* prologue: function */\n", file);
881 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
886 /* Implement EPILOGUE_USES. */
889 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
893 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
898 /* Output RTL epilogue. */
901 expand_epilogue (void)
907 HOST_WIDE_INT size = get_frame_size();
909 /* epilogue: naked */
910 if (cfun->machine->is_naked)
912 emit_jump_insn (gen_return ());
916 avr_regs_to_save (&set);
917 live_seq = sequent_regs_live ();
918 minimize = (TARGET_CALL_PROLOGUES
919 && !cfun->machine->is_interrupt
920 && !cfun->machine->is_signal
921 && !cfun->machine->is_OS_task
922 && !cfun->machine->is_OS_main
925 if (minimize && (frame_pointer_needed || live_seq > 4))
927 if (frame_pointer_needed)
929 /* Get rid of frame. */
930 emit_move_insn(frame_pointer_rtx,
931 gen_rtx_PLUS (HImode, frame_pointer_rtx,
932 gen_int_mode (size, HImode)));
936 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
939 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
943 if (frame_pointer_needed)
947 /* Try two methods to adjust stack and select shortest. */
950 rtx sp_plus_insns = NULL_RTX;
952 if (TARGET_TINY_STACK)
954 /* The high byte (r29) doesn't change - prefer 'subi'
955 (1 cycle) over 'sbiw' (2 cycles, same size). */
956 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
960 /* Normal sized addition. */
961 myfp = frame_pointer_rtx;
964 /* Method 1-Adjust frame pointer. */
967 emit_move_insn (myfp,
968 gen_rtx_PLUS (HImode, myfp,
972 /* Copy to stack pointer. */
973 if (TARGET_TINY_STACK)
975 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
977 else if (TARGET_NO_INTERRUPTS
978 || cfun->machine->is_signal)
980 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
983 else if (cfun->machine->is_interrupt)
985 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
990 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
993 fp_plus_insns = get_insns ();
996 /* Method 2-Adjust Stack pointer. */
1001 emit_move_insn (stack_pointer_rtx,
1002 gen_rtx_PLUS (HImode, stack_pointer_rtx,
1006 sp_plus_insns = get_insns ();
1010 /* Use shortest method. */
1011 if (size <= 5 && (get_sequence_length (sp_plus_insns)
1012 < get_sequence_length (fp_plus_insns)))
1013 emit_insn (sp_plus_insns);
1015 emit_insn (fp_plus_insns);
1017 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1019 /* Restore previous frame_pointer. */
1020 emit_insn (gen_pophi (frame_pointer_rtx));
1023 /* Restore used registers. */
1024 for (reg = 31; reg >= 0; --reg)
1026 if (TEST_HARD_REG_BIT (set, reg))
1027 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
1029 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1031 /* Restore RAMPZ using tmp reg as scratch. */
1033 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
1035 emit_insn (gen_popqi (tmp_reg_rtx));
1036 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
1040 /* Restore SREG using tmp reg as scratch. */
1041 emit_insn (gen_popqi (tmp_reg_rtx));
1043 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
1046 /* Restore tmp REG. */
1047 emit_insn (gen_popqi (tmp_reg_rtx));
1049 /* Restore zero REG. */
1050 emit_insn (gen_popqi (zero_reg_rtx));
1053 emit_jump_insn (gen_return ());
1057 /* Output summary messages at beginning of function epilogue. */
1060 avr_asm_function_begin_epilogue (FILE *file)
1062 fprintf (file, "/* epilogue start */\n");
1065 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1066 machine for a memory operand of mode MODE. */
1069 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1071 enum reg_class r = NO_REGS;
1073 if (TARGET_ALL_DEBUG)
1075 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1076 GET_MODE_NAME(mode),
1077 strict ? "(strict)": "",
1078 reload_completed ? "(reload_completed)": "",
1079 reload_in_progress ? "(reload_in_progress)": "",
1080 reg_renumber ? "(reg_renumber)" : "");
1081 if (GET_CODE (x) == PLUS
1082 && REG_P (XEXP (x, 0))
1083 && GET_CODE (XEXP (x, 1)) == CONST_INT
1084 && INTVAL (XEXP (x, 1)) >= 0
1085 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1088 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1089 true_regnum (XEXP (x, 0)));
1092 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1093 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1095 else if (CONSTANT_ADDRESS_P (x))
1097 else if (GET_CODE (x) == PLUS
1098 && REG_P (XEXP (x, 0))
1099 && GET_CODE (XEXP (x, 1)) == CONST_INT
1100 && INTVAL (XEXP (x, 1)) >= 0)
1102 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1106 || REGNO (XEXP (x,0)) == REG_Y
1107 || REGNO (XEXP (x,0)) == REG_Z)
1108 r = BASE_POINTER_REGS;
1109 if (XEXP (x,0) == frame_pointer_rtx
1110 || XEXP (x,0) == arg_pointer_rtx)
1111 r = BASE_POINTER_REGS;
1113 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1116 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1117 && REG_P (XEXP (x, 0))
1118 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1119 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1123 if (TARGET_ALL_DEBUG)
1125 fprintf (stderr, " ret = %c\n", r + '0');
1127 return r == NO_REGS ? 0 : (int)r;
1130 /* Attempts to replace X with a valid
1131 memory address for an operand of mode MODE */
1134 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1137 if (TARGET_ALL_DEBUG)
1139 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1143 if (GET_CODE (oldx) == PLUS
1144 && REG_P (XEXP (oldx,0)))
1146 if (REG_P (XEXP (oldx,1)))
1147 x = force_reg (GET_MODE (oldx), oldx);
1148 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1150 int offs = INTVAL (XEXP (oldx,1));
1151 if (frame_pointer_rtx != XEXP (oldx,0))
1152 if (offs > MAX_LD_OFFSET (mode))
1154 if (TARGET_ALL_DEBUG)
1155 fprintf (stderr, "force_reg (big offset)\n");
1156 x = force_reg (GET_MODE (oldx), oldx);
1164 /* Return a pointer register name as a string. */
1167 ptrreg_to_str (int regno)
1171 case REG_X: return "X";
1172 case REG_Y: return "Y";
1173 case REG_Z: return "Z";
1175 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1180 /* Return the condition name as a string.
1181 Used in conditional jump constructing */
1184 cond_string (enum rtx_code code)
1193 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1198 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1211 /* Output ADDR to FILE as address. */
1214 print_operand_address (FILE *file, rtx addr)
1216 switch (GET_CODE (addr))
1219 fprintf (file, ptrreg_to_str (REGNO (addr)));
1223 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1227 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1231 if (CONSTANT_ADDRESS_P (addr)
1232 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1233 || GET_CODE (addr) == LABEL_REF))
1235 fprintf (file, "gs(");
1236 output_addr_const (file,addr);
1237 fprintf (file ,")");
1240 output_addr_const (file, addr);
1245 /* Output X as assembler operand to file FILE. */
1248 print_operand (FILE *file, rtx x, int code)
1252 if (code >= 'A' && code <= 'D')
1257 if (!AVR_HAVE_JMP_CALL)
1260 else if (code == '!')
1262 if (AVR_HAVE_EIJMP_EICALL)
1267 if (x == zero_reg_rtx)
1268 fprintf (file, "__zero_reg__");
1270 fprintf (file, reg_names[true_regnum (x) + abcd]);
1272 else if (GET_CODE (x) == CONST_INT)
1273 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1274 else if (GET_CODE (x) == MEM)
1276 rtx addr = XEXP (x,0);
1278 if (CONSTANT_P (addr) && abcd)
1281 output_address (addr);
1282 fprintf (file, ")+%d", abcd);
1284 else if (code == 'o')
1286 if (GET_CODE (addr) != PLUS)
1287 fatal_insn ("bad address, not (reg+disp):", addr);
1289 print_operand (file, XEXP (addr, 1), 0);
1291 else if (code == 'p' || code == 'r')
1293 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1294 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1297 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1299 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1301 else if (GET_CODE (addr) == PLUS)
1303 print_operand_address (file, XEXP (addr,0));
1304 if (REGNO (XEXP (addr, 0)) == REG_X)
1305 fatal_insn ("internal compiler error. Bad address:"
1308 print_operand (file, XEXP (addr,1), code);
1311 print_operand_address (file, addr);
1313 else if (GET_CODE (x) == CONST_DOUBLE)
1317 if (GET_MODE (x) != SFmode)
1318 fatal_insn ("internal compiler error. Unknown mode:", x);
1319 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1320 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1321 fprintf (file, "0x%lx", val);
1323 else if (code == 'j')
1324 fputs (cond_string (GET_CODE (x)), file);
1325 else if (code == 'k')
1326 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1328 print_operand_address (file, x);
1331 /* Update the condition code in the INSN. */
1334 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1338 switch (get_attr_cc (insn))
1341 /* Insn does not affect CC at all. */
1349 set = single_set (insn);
1353 cc_status.flags |= CC_NO_OVERFLOW;
1354 cc_status.value1 = SET_DEST (set);
1359 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1360 The V flag may or may not be known but that's ok because
1361 alter_cond will change tests to use EQ/NE. */
1362 set = single_set (insn);
1366 cc_status.value1 = SET_DEST (set);
1367 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1372 set = single_set (insn);
1375 cc_status.value1 = SET_SRC (set);
1379 /* Insn doesn't leave CC in a usable state. */
1382 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1383 set = single_set (insn);
1386 rtx src = SET_SRC (set);
1388 if (GET_CODE (src) == ASHIFTRT
1389 && GET_MODE (src) == QImode)
1391 rtx x = XEXP (src, 1);
1393 if (GET_CODE (x) == CONST_INT
1397 cc_status.value1 = SET_DEST (set);
1398 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1406 /* Return maximum number of consecutive registers of
1407 class CLASS needed to hold a value of mode MODE. */
1410 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1412 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1415 /* Choose mode for jump insn:
1416 1 - relative jump in range -63 <= x <= 62 ;
1417 2 - relative jump in range -2046 <= x <= 2045 ;
1418 3 - absolute jump (only for ATmega[16]03). */
1421 avr_jump_mode (rtx x, rtx insn)
1423 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1424 ? XEXP (x, 0) : x));
1425 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1426 int jump_distance = cur_addr - dest_addr;
1428 if (-63 <= jump_distance && jump_distance <= 62)
1430 else if (-2046 <= jump_distance && jump_distance <= 2045)
1432 else if (AVR_HAVE_JMP_CALL)
1438 /* return an AVR condition jump commands.
1439 X is a comparison RTX.
1440 LEN is a number returned by avr_jump_mode function.
1441 if REVERSE nonzero then condition code in X must be reversed. */
1444 ret_cond_branch (rtx x, int len, int reverse)
1446 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1451 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1452 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1454 len == 2 ? (AS1 (breq,.+4) CR_TAB
1455 AS1 (brmi,.+2) CR_TAB
1457 (AS1 (breq,.+6) CR_TAB
1458 AS1 (brmi,.+4) CR_TAB
1462 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1464 len == 2 ? (AS1 (breq,.+4) CR_TAB
1465 AS1 (brlt,.+2) CR_TAB
1467 (AS1 (breq,.+6) CR_TAB
1468 AS1 (brlt,.+4) CR_TAB
1471 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1473 len == 2 ? (AS1 (breq,.+4) CR_TAB
1474 AS1 (brlo,.+2) CR_TAB
1476 (AS1 (breq,.+6) CR_TAB
1477 AS1 (brlo,.+4) CR_TAB
1480 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1481 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1483 len == 2 ? (AS1 (breq,.+2) CR_TAB
1484 AS1 (brpl,.+2) CR_TAB
1486 (AS1 (breq,.+2) CR_TAB
1487 AS1 (brpl,.+4) CR_TAB
1490 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1492 len == 2 ? (AS1 (breq,.+2) CR_TAB
1493 AS1 (brge,.+2) CR_TAB
1495 (AS1 (breq,.+2) CR_TAB
1496 AS1 (brge,.+4) CR_TAB
1499 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1501 len == 2 ? (AS1 (breq,.+2) CR_TAB
1502 AS1 (brsh,.+2) CR_TAB
1504 (AS1 (breq,.+2) CR_TAB
1505 AS1 (brsh,.+4) CR_TAB
1513 return AS1 (br%k1,%0);
1515 return (AS1 (br%j1,.+2) CR_TAB
1518 return (AS1 (br%j1,.+4) CR_TAB
1527 return AS1 (br%j1,%0);
1529 return (AS1 (br%k1,.+2) CR_TAB
1532 return (AS1 (br%k1,.+4) CR_TAB
1540 /* Predicate function for immediate operand which fits to byte (8bit) */
1543 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1545 return (GET_CODE (op) == CONST_INT
1546 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1549 /* Output all insn addresses and their sizes into the assembly language
1550 output file. This is helpful for debugging whether the length attributes
1551 in the md file are correct.
1552 Output insn cost for next insn. */
1555 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1556 int num_operands ATTRIBUTE_UNUSED)
1558 int uid = INSN_UID (insn);
1560 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1562 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1563 INSN_ADDRESSES (uid),
1564 INSN_ADDRESSES (uid) - last_insn_address,
1565 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1567 last_insn_address = INSN_ADDRESSES (uid);
1570 /* Return 0 if undefined, 1 if always true or always false. */
1573 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1575 unsigned int max = (mode == QImode ? 0xff :
1576 mode == HImode ? 0xffff :
1577 mode == SImode ? 0xffffffff : 0);
1578 if (max && op && GET_CODE (x) == CONST_INT)
1580 if (unsigned_condition (op) != op)
1583 if (max != (INTVAL (x) & max)
1584 && INTVAL (x) != 0xff)
1591 /* Returns nonzero if REGNO is the number of a hard
1592 register in which function arguments are sometimes passed. */
1595 function_arg_regno_p(int r)
1597 return (r >= 8 && r <= 25);
1600 /* Initializing the variable cum for the state at the beginning
1601 of the argument list. */
1604 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1605 tree fndecl ATTRIBUTE_UNUSED)
1608 cum->regno = FIRST_CUM_REG;
1609 if (!libname && fntype)
1611 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1612 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1613 != void_type_node));
1619 /* Returns the number of registers to allocate for a function argument. */
1622 avr_num_arg_regs (enum machine_mode mode, tree type)
1626 if (mode == BLKmode)
1627 size = int_size_in_bytes (type);
1629 size = GET_MODE_SIZE (mode);
1631 /* Align all function arguments to start in even-numbered registers.
1632 Odd-sized arguments leave holes above them. */
1634 return (size + 1) & ~1;
1637 /* Controls whether a function argument is passed
1638 in a register, and which register. */
1641 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1642 int named ATTRIBUTE_UNUSED)
1644 int bytes = avr_num_arg_regs (mode, type);
1646 if (cum->nregs && bytes <= cum->nregs)
1647 return gen_rtx_REG (mode, cum->regno - bytes);
1652 /* Update the summarizer variable CUM to advance past an argument
1653 in the argument list. */
1656 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1657 int named ATTRIBUTE_UNUSED)
1659 int bytes = avr_num_arg_regs (mode, type);
1661 cum->nregs -= bytes;
1662 cum->regno -= bytes;
1664 if (cum->nregs <= 0)
1667 cum->regno = FIRST_CUM_REG;
1671 /***********************************************************************
1672 Functions for outputting various mov's for a various modes
1673 ************************************************************************/
1675 output_movqi (rtx insn, rtx operands[], int *l)
1678 rtx dest = operands[0];
1679 rtx src = operands[1];
1687 if (register_operand (dest, QImode))
1689 if (register_operand (src, QImode)) /* mov r,r */
1691 if (test_hard_reg_class (STACK_REG, dest))
1692 return AS2 (out,%0,%1);
1693 else if (test_hard_reg_class (STACK_REG, src))
1694 return AS2 (in,%0,%1);
1696 return AS2 (mov,%0,%1);
1698 else if (CONSTANT_P (src))
1700 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1701 return AS2 (ldi,%0,lo8(%1));
1703 if (GET_CODE (src) == CONST_INT)
1705 if (src == const0_rtx) /* mov r,L */
1706 return AS1 (clr,%0);
1707 else if (src == const1_rtx)
1710 return (AS1 (clr,%0) CR_TAB
1713 else if (src == constm1_rtx)
1715 /* Immediate constants -1 to any register */
1717 return (AS1 (clr,%0) CR_TAB
1722 int bit_nr = exact_log2 (INTVAL (src));
1728 output_asm_insn ((AS1 (clr,%0) CR_TAB
1731 avr_output_bld (operands, bit_nr);
1738 /* Last resort, larger than loading from memory. */
1740 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1741 AS2 (ldi,r31,lo8(%1)) CR_TAB
1742 AS2 (mov,%0,r31) CR_TAB
1743 AS2 (mov,r31,__tmp_reg__));
1745 else if (GET_CODE (src) == MEM)
1746 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1748 else if (GET_CODE (dest) == MEM)
1752 if (src == const0_rtx)
1753 operands[1] = zero_reg_rtx;
1755 templ = out_movqi_mr_r (insn, operands, real_l);
1758 output_asm_insn (templ, operands);
1767 output_movhi (rtx insn, rtx operands[], int *l)
1770 rtx dest = operands[0];
1771 rtx src = operands[1];
1777 if (register_operand (dest, HImode))
1779 if (register_operand (src, HImode)) /* mov r,r */
1781 if (test_hard_reg_class (STACK_REG, dest))
1783 if (TARGET_TINY_STACK)
1784 return *l = 1, AS2 (out,__SP_L__,%A1);
1785 /* Use simple load of stack pointer if no interrupts are
1787 else if (TARGET_NO_INTERRUPTS)
1788 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1789 AS2 (out,__SP_L__,%A1));
1791 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1793 AS2 (out,__SP_H__,%B1) CR_TAB
1794 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1795 AS2 (out,__SP_L__,%A1));
1797 else if (test_hard_reg_class (STACK_REG, src))
1800 return (AS2 (in,%A0,__SP_L__) CR_TAB
1801 AS2 (in,%B0,__SP_H__));
1807 return (AS2 (movw,%0,%1));
1812 return (AS2 (mov,%A0,%A1) CR_TAB
1816 else if (CONSTANT_P (src))
1818 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1821 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1822 AS2 (ldi,%B0,hi8(%1)));
1825 if (GET_CODE (src) == CONST_INT)
1827 if (src == const0_rtx) /* mov r,L */
1830 return (AS1 (clr,%A0) CR_TAB
1833 else if (src == const1_rtx)
1836 return (AS1 (clr,%A0) CR_TAB
1837 AS1 (clr,%B0) CR_TAB
1840 else if (src == constm1_rtx)
1842 /* Immediate constants -1 to any register */
1844 return (AS1 (clr,%0) CR_TAB
1845 AS1 (dec,%A0) CR_TAB
1850 int bit_nr = exact_log2 (INTVAL (src));
1856 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1857 AS1 (clr,%B0) CR_TAB
1860 avr_output_bld (operands, bit_nr);
1866 if ((INTVAL (src) & 0xff) == 0)
1869 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1870 AS1 (clr,%A0) CR_TAB
1871 AS2 (ldi,r31,hi8(%1)) CR_TAB
1872 AS2 (mov,%B0,r31) CR_TAB
1873 AS2 (mov,r31,__tmp_reg__));
1875 else if ((INTVAL (src) & 0xff00) == 0)
1878 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1879 AS2 (ldi,r31,lo8(%1)) CR_TAB
1880 AS2 (mov,%A0,r31) CR_TAB
1881 AS1 (clr,%B0) CR_TAB
1882 AS2 (mov,r31,__tmp_reg__));
1886 /* Last resort, equal to loading from memory. */
1888 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1889 AS2 (ldi,r31,lo8(%1)) CR_TAB
1890 AS2 (mov,%A0,r31) CR_TAB
1891 AS2 (ldi,r31,hi8(%1)) CR_TAB
1892 AS2 (mov,%B0,r31) CR_TAB
1893 AS2 (mov,r31,__tmp_reg__));
1895 else if (GET_CODE (src) == MEM)
1896 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1898 else if (GET_CODE (dest) == MEM)
1902 if (src == const0_rtx)
1903 operands[1] = zero_reg_rtx;
1905 templ = out_movhi_mr_r (insn, operands, real_l);
1908 output_asm_insn (templ, operands);
1913 fatal_insn ("invalid insn:", insn);
1918 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1922 rtx x = XEXP (src, 0);
1928 if (CONSTANT_ADDRESS_P (x))
1930 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1933 return AS2 (in,%0,__SREG__);
1935 if (optimize > 0 && io_address_operand (x, QImode))
1938 return AS2 (in,%0,%1-0x20);
1941 return AS2 (lds,%0,%1);
1943 /* memory access by reg+disp */
1944 else if (GET_CODE (x) == PLUS
1945 && REG_P (XEXP (x,0))
1946 && GET_CODE (XEXP (x,1)) == CONST_INT)
1948 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1950 int disp = INTVAL (XEXP (x,1));
1951 if (REGNO (XEXP (x,0)) != REG_Y)
1952 fatal_insn ("incorrect insn:",insn);
1954 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1955 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1956 AS2 (ldd,%0,Y+63) CR_TAB
1957 AS2 (sbiw,r28,%o1-63));
1959 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1960 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1961 AS2 (ld,%0,Y) CR_TAB
1962 AS2 (subi,r28,lo8(%o1)) CR_TAB
1963 AS2 (sbci,r29,hi8(%o1)));
1965 else if (REGNO (XEXP (x,0)) == REG_X)
1967 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
1968 it but I have this situation with extremal optimizing options. */
1969 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1970 || reg_unused_after (insn, XEXP (x,0)))
1971 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1974 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1975 AS2 (ld,%0,X) CR_TAB
1976 AS2 (sbiw,r26,%o1));
1979 return AS2 (ldd,%0,%1);
1982 return AS2 (ld,%0,%1);
1986 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1990 rtx base = XEXP (src, 0);
1991 int reg_dest = true_regnum (dest);
1992 int reg_base = true_regnum (base);
1993 /* "volatile" forces reading low byte first, even if less efficient,
1994 for correct operation with 16-bit I/O registers. */
1995 int mem_volatile_p = MEM_VOLATILE_P (src);
2003 if (reg_dest == reg_base) /* R = (R) */
2006 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2007 AS2 (ld,%B0,%1) CR_TAB
2008 AS2 (mov,%A0,__tmp_reg__));
2010 else if (reg_base == REG_X) /* (R26) */
2012 if (reg_unused_after (insn, base))
2015 return (AS2 (ld,%A0,X+) CR_TAB
2019 return (AS2 (ld,%A0,X+) CR_TAB
2020 AS2 (ld,%B0,X) CR_TAB
2026 return (AS2 (ld,%A0,%1) CR_TAB
2027 AS2 (ldd,%B0,%1+1));
2030 else if (GET_CODE (base) == PLUS) /* (R + i) */
2032 int disp = INTVAL (XEXP (base, 1));
2033 int reg_base = true_regnum (XEXP (base, 0));
2035 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2037 if (REGNO (XEXP (base, 0)) != REG_Y)
2038 fatal_insn ("incorrect insn:",insn);
2040 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2041 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2042 AS2 (ldd,%A0,Y+62) CR_TAB
2043 AS2 (ldd,%B0,Y+63) CR_TAB
2044 AS2 (sbiw,r28,%o1-62));
2046 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2047 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2048 AS2 (ld,%A0,Y) CR_TAB
2049 AS2 (ldd,%B0,Y+1) CR_TAB
2050 AS2 (subi,r28,lo8(%o1)) CR_TAB
2051 AS2 (sbci,r29,hi8(%o1)));
2053 if (reg_base == REG_X)
2055 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2056 it but I have this situation with extremal
2057 optimization options. */
2060 if (reg_base == reg_dest)
2061 return (AS2 (adiw,r26,%o1) CR_TAB
2062 AS2 (ld,__tmp_reg__,X+) CR_TAB
2063 AS2 (ld,%B0,X) CR_TAB
2064 AS2 (mov,%A0,__tmp_reg__));
2066 return (AS2 (adiw,r26,%o1) CR_TAB
2067 AS2 (ld,%A0,X+) CR_TAB
2068 AS2 (ld,%B0,X) CR_TAB
2069 AS2 (sbiw,r26,%o1+1));
2072 if (reg_base == reg_dest)
2075 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2076 AS2 (ldd,%B0,%B1) CR_TAB
2077 AS2 (mov,%A0,__tmp_reg__));
2081 return (AS2 (ldd,%A0,%A1) CR_TAB
2084 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2086 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2087 fatal_insn ("incorrect insn:", insn);
2091 if (REGNO (XEXP (base, 0)) == REG_X)
2094 return (AS2 (sbiw,r26,2) CR_TAB
2095 AS2 (ld,%A0,X+) CR_TAB
2096 AS2 (ld,%B0,X) CR_TAB
2102 return (AS2 (sbiw,%r1,2) CR_TAB
2103 AS2 (ld,%A0,%p1) CR_TAB
2104 AS2 (ldd,%B0,%p1+1));
2109 return (AS2 (ld,%B0,%1) CR_TAB
2112 else if (GET_CODE (base) == POST_INC) /* (R++) */
2114 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2115 fatal_insn ("incorrect insn:", insn);
2118 return (AS2 (ld,%A0,%1) CR_TAB
2121 else if (CONSTANT_ADDRESS_P (base))
2123 if (optimize > 0 && io_address_operand (base, HImode))
2126 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2127 AS2 (in,%B0,%B1-0x20));
2130 return (AS2 (lds,%A0,%A1) CR_TAB
2134 fatal_insn ("unknown move insn:",insn);
2139 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2143 rtx base = XEXP (src, 0);
2144 int reg_dest = true_regnum (dest);
2145 int reg_base = true_regnum (base);
2153 if (reg_base == REG_X) /* (R26) */
2155 if (reg_dest == REG_X)
2156 /* "ld r26,-X" is undefined */
2157 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2158 AS2 (ld,r29,X) CR_TAB
2159 AS2 (ld,r28,-X) CR_TAB
2160 AS2 (ld,__tmp_reg__,-X) CR_TAB
2161 AS2 (sbiw,r26,1) CR_TAB
2162 AS2 (ld,r26,X) CR_TAB
2163 AS2 (mov,r27,__tmp_reg__));
2164 else if (reg_dest == REG_X - 2)
2165 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2166 AS2 (ld,%B0,X+) CR_TAB
2167 AS2 (ld,__tmp_reg__,X+) CR_TAB
2168 AS2 (ld,%D0,X) CR_TAB
2169 AS2 (mov,%C0,__tmp_reg__));
2170 else if (reg_unused_after (insn, base))
2171 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2172 AS2 (ld,%B0,X+) CR_TAB
2173 AS2 (ld,%C0,X+) CR_TAB
2176 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2177 AS2 (ld,%B0,X+) CR_TAB
2178 AS2 (ld,%C0,X+) CR_TAB
2179 AS2 (ld,%D0,X) CR_TAB
2184 if (reg_dest == reg_base)
2185 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2186 AS2 (ldd,%C0,%1+2) CR_TAB
2187 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2188 AS2 (ld,%A0,%1) CR_TAB
2189 AS2 (mov,%B0,__tmp_reg__));
2190 else if (reg_base == reg_dest + 2)
2191 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2192 AS2 (ldd,%B0,%1+1) CR_TAB
2193 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2194 AS2 (ldd,%D0,%1+3) CR_TAB
2195 AS2 (mov,%C0,__tmp_reg__));
2197 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2198 AS2 (ldd,%B0,%1+1) CR_TAB
2199 AS2 (ldd,%C0,%1+2) CR_TAB
2200 AS2 (ldd,%D0,%1+3));
2203 else if (GET_CODE (base) == PLUS) /* (R + i) */
2205 int disp = INTVAL (XEXP (base, 1));
2207 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2209 if (REGNO (XEXP (base, 0)) != REG_Y)
2210 fatal_insn ("incorrect insn:",insn);
2212 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2213 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2214 AS2 (ldd,%A0,Y+60) CR_TAB
2215 AS2 (ldd,%B0,Y+61) CR_TAB
2216 AS2 (ldd,%C0,Y+62) CR_TAB
2217 AS2 (ldd,%D0,Y+63) CR_TAB
2218 AS2 (sbiw,r28,%o1-60));
2220 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2221 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2222 AS2 (ld,%A0,Y) CR_TAB
2223 AS2 (ldd,%B0,Y+1) CR_TAB
2224 AS2 (ldd,%C0,Y+2) CR_TAB
2225 AS2 (ldd,%D0,Y+3) CR_TAB
2226 AS2 (subi,r28,lo8(%o1)) CR_TAB
2227 AS2 (sbci,r29,hi8(%o1)));
2230 reg_base = true_regnum (XEXP (base, 0));
2231 if (reg_base == REG_X)
2234 if (reg_dest == REG_X)
2237 /* "ld r26,-X" is undefined */
2238 return (AS2 (adiw,r26,%o1+3) CR_TAB
2239 AS2 (ld,r29,X) CR_TAB
2240 AS2 (ld,r28,-X) CR_TAB
2241 AS2 (ld,__tmp_reg__,-X) CR_TAB
2242 AS2 (sbiw,r26,1) CR_TAB
2243 AS2 (ld,r26,X) CR_TAB
2244 AS2 (mov,r27,__tmp_reg__));
2247 if (reg_dest == REG_X - 2)
2248 return (AS2 (adiw,r26,%o1) CR_TAB
2249 AS2 (ld,r24,X+) CR_TAB
2250 AS2 (ld,r25,X+) CR_TAB
2251 AS2 (ld,__tmp_reg__,X+) CR_TAB
2252 AS2 (ld,r27,X) CR_TAB
2253 AS2 (mov,r26,__tmp_reg__));
2255 return (AS2 (adiw,r26,%o1) CR_TAB
2256 AS2 (ld,%A0,X+) CR_TAB
2257 AS2 (ld,%B0,X+) CR_TAB
2258 AS2 (ld,%C0,X+) CR_TAB
2259 AS2 (ld,%D0,X) CR_TAB
2260 AS2 (sbiw,r26,%o1+3));
2262 if (reg_dest == reg_base)
2263 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2264 AS2 (ldd,%C0,%C1) CR_TAB
2265 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2266 AS2 (ldd,%A0,%A1) CR_TAB
2267 AS2 (mov,%B0,__tmp_reg__));
2268 else if (reg_dest == reg_base - 2)
2269 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2270 AS2 (ldd,%B0,%B1) CR_TAB
2271 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2272 AS2 (ldd,%D0,%D1) CR_TAB
2273 AS2 (mov,%C0,__tmp_reg__));
2274 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2275 AS2 (ldd,%B0,%B1) CR_TAB
2276 AS2 (ldd,%C0,%C1) CR_TAB
2279 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2280 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2281 AS2 (ld,%C0,%1) CR_TAB
2282 AS2 (ld,%B0,%1) CR_TAB
2284 else if (GET_CODE (base) == POST_INC) /* (R++) */
2285 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2286 AS2 (ld,%B0,%1) CR_TAB
2287 AS2 (ld,%C0,%1) CR_TAB
2289 else if (CONSTANT_ADDRESS_P (base))
2290 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2291 AS2 (lds,%B0,%B1) CR_TAB
2292 AS2 (lds,%C0,%C1) CR_TAB
2295 fatal_insn ("unknown move insn:",insn);
2300 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2304 rtx base = XEXP (dest, 0);
2305 int reg_base = true_regnum (base);
2306 int reg_src = true_regnum (src);
2312 if (CONSTANT_ADDRESS_P (base))
2313 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2314 AS2 (sts,%B0,%B1) CR_TAB
2315 AS2 (sts,%C0,%C1) CR_TAB
2317 if (reg_base > 0) /* (r) */
2319 if (reg_base == REG_X) /* (R26) */
2321 if (reg_src == REG_X)
2323 /* "st X+,r26" is undefined */
2324 if (reg_unused_after (insn, base))
2325 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2326 AS2 (st,X,r26) CR_TAB
2327 AS2 (adiw,r26,1) CR_TAB
2328 AS2 (st,X+,__tmp_reg__) CR_TAB
2329 AS2 (st,X+,r28) CR_TAB
2332 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2333 AS2 (st,X,r26) CR_TAB
2334 AS2 (adiw,r26,1) CR_TAB
2335 AS2 (st,X+,__tmp_reg__) CR_TAB
2336 AS2 (st,X+,r28) CR_TAB
2337 AS2 (st,X,r29) CR_TAB
2340 else if (reg_base == reg_src + 2)
2342 if (reg_unused_after (insn, base))
2343 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2344 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2345 AS2 (st,%0+,%A1) CR_TAB
2346 AS2 (st,%0+,%B1) CR_TAB
2347 AS2 (st,%0+,__zero_reg__) CR_TAB
2348 AS2 (st,%0,__tmp_reg__) CR_TAB
2349 AS1 (clr,__zero_reg__));
2351 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2352 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2353 AS2 (st,%0+,%A1) CR_TAB
2354 AS2 (st,%0+,%B1) CR_TAB
2355 AS2 (st,%0+,__zero_reg__) CR_TAB
2356 AS2 (st,%0,__tmp_reg__) CR_TAB
2357 AS1 (clr,__zero_reg__) CR_TAB
2360 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2361 AS2 (st,%0+,%B1) CR_TAB
2362 AS2 (st,%0+,%C1) CR_TAB
2363 AS2 (st,%0,%D1) CR_TAB
2367 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2368 AS2 (std,%0+1,%B1) CR_TAB
2369 AS2 (std,%0+2,%C1) CR_TAB
2370 AS2 (std,%0+3,%D1));
2372 else if (GET_CODE (base) == PLUS) /* (R + i) */
2374 int disp = INTVAL (XEXP (base, 1));
2375 reg_base = REGNO (XEXP (base, 0));
2376 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2378 if (reg_base != REG_Y)
2379 fatal_insn ("incorrect insn:",insn);
2381 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2382 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2383 AS2 (std,Y+60,%A1) CR_TAB
2384 AS2 (std,Y+61,%B1) CR_TAB
2385 AS2 (std,Y+62,%C1) CR_TAB
2386 AS2 (std,Y+63,%D1) CR_TAB
2387 AS2 (sbiw,r28,%o0-60));
2389 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2390 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2391 AS2 (st,Y,%A1) CR_TAB
2392 AS2 (std,Y+1,%B1) CR_TAB
2393 AS2 (std,Y+2,%C1) CR_TAB
2394 AS2 (std,Y+3,%D1) CR_TAB
2395 AS2 (subi,r28,lo8(%o0)) CR_TAB
2396 AS2 (sbci,r29,hi8(%o0)));
2398 if (reg_base == REG_X)
2401 if (reg_src == REG_X)
2404 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2405 AS2 (mov,__zero_reg__,r27) CR_TAB
2406 AS2 (adiw,r26,%o0) CR_TAB
2407 AS2 (st,X+,__tmp_reg__) CR_TAB
2408 AS2 (st,X+,__zero_reg__) CR_TAB
2409 AS2 (st,X+,r28) CR_TAB
2410 AS2 (st,X,r29) CR_TAB
2411 AS1 (clr,__zero_reg__) CR_TAB
2412 AS2 (sbiw,r26,%o0+3));
2414 else if (reg_src == REG_X - 2)
2417 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2418 AS2 (mov,__zero_reg__,r27) CR_TAB
2419 AS2 (adiw,r26,%o0) CR_TAB
2420 AS2 (st,X+,r24) CR_TAB
2421 AS2 (st,X+,r25) CR_TAB
2422 AS2 (st,X+,__tmp_reg__) CR_TAB
2423 AS2 (st,X,__zero_reg__) CR_TAB
2424 AS1 (clr,__zero_reg__) CR_TAB
2425 AS2 (sbiw,r26,%o0+3));
2428 return (AS2 (adiw,r26,%o0) CR_TAB
2429 AS2 (st,X+,%A1) CR_TAB
2430 AS2 (st,X+,%B1) CR_TAB
2431 AS2 (st,X+,%C1) CR_TAB
2432 AS2 (st,X,%D1) CR_TAB
2433 AS2 (sbiw,r26,%o0+3));
2435 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2436 AS2 (std,%B0,%B1) CR_TAB
2437 AS2 (std,%C0,%C1) CR_TAB
2440 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2441 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2442 AS2 (st,%0,%C1) CR_TAB
2443 AS2 (st,%0,%B1) CR_TAB
2445 else if (GET_CODE (base) == POST_INC) /* (R++) */
2446 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2447 AS2 (st,%0,%B1) CR_TAB
2448 AS2 (st,%0,%C1) CR_TAB
2450 fatal_insn ("unknown move insn:",insn);
2455 output_movsisf(rtx insn, rtx operands[], int *l)
2458 rtx dest = operands[0];
2459 rtx src = operands[1];
2465 if (register_operand (dest, VOIDmode))
2467 if (register_operand (src, VOIDmode)) /* mov r,r */
2469 if (true_regnum (dest) > true_regnum (src))
2474 return (AS2 (movw,%C0,%C1) CR_TAB
2475 AS2 (movw,%A0,%A1));
2478 return (AS2 (mov,%D0,%D1) CR_TAB
2479 AS2 (mov,%C0,%C1) CR_TAB
2480 AS2 (mov,%B0,%B1) CR_TAB
2488 return (AS2 (movw,%A0,%A1) CR_TAB
2489 AS2 (movw,%C0,%C1));
2492 return (AS2 (mov,%A0,%A1) CR_TAB
2493 AS2 (mov,%B0,%B1) CR_TAB
2494 AS2 (mov,%C0,%C1) CR_TAB
2498 else if (CONSTANT_P (src))
2500 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2503 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2504 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2505 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2506 AS2 (ldi,%D0,hhi8(%1)));
2509 if (GET_CODE (src) == CONST_INT)
2511 const char *const clr_op0 =
2512 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2513 AS1 (clr,%B0) CR_TAB
2515 : (AS1 (clr,%A0) CR_TAB
2516 AS1 (clr,%B0) CR_TAB
2517 AS1 (clr,%C0) CR_TAB
2520 if (src == const0_rtx) /* mov r,L */
2522 *l = AVR_HAVE_MOVW ? 3 : 4;
2525 else if (src == const1_rtx)
2528 output_asm_insn (clr_op0, operands);
2529 *l = AVR_HAVE_MOVW ? 4 : 5;
2530 return AS1 (inc,%A0);
2532 else if (src == constm1_rtx)
2534 /* Immediate constants -1 to any register */
2538 return (AS1 (clr,%A0) CR_TAB
2539 AS1 (dec,%A0) CR_TAB
2540 AS2 (mov,%B0,%A0) CR_TAB
2541 AS2 (movw,%C0,%A0));
2544 return (AS1 (clr,%A0) CR_TAB
2545 AS1 (dec,%A0) CR_TAB
2546 AS2 (mov,%B0,%A0) CR_TAB
2547 AS2 (mov,%C0,%A0) CR_TAB
2552 int bit_nr = exact_log2 (INTVAL (src));
2556 *l = AVR_HAVE_MOVW ? 5 : 6;
2559 output_asm_insn (clr_op0, operands);
2560 output_asm_insn ("set", operands);
2563 avr_output_bld (operands, bit_nr);
2570 /* Last resort, better than loading from memory. */
2572 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2573 AS2 (ldi,r31,lo8(%1)) CR_TAB
2574 AS2 (mov,%A0,r31) CR_TAB
2575 AS2 (ldi,r31,hi8(%1)) CR_TAB
2576 AS2 (mov,%B0,r31) CR_TAB
2577 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2578 AS2 (mov,%C0,r31) CR_TAB
2579 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2580 AS2 (mov,%D0,r31) CR_TAB
2581 AS2 (mov,r31,__tmp_reg__));
2583 else if (GET_CODE (src) == MEM)
2584 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2586 else if (GET_CODE (dest) == MEM)
2590 if (src == const0_rtx)
2591 operands[1] = zero_reg_rtx;
2593 templ = out_movsi_mr_r (insn, operands, real_l);
2596 output_asm_insn (templ, operands);
2601 fatal_insn ("invalid insn:", insn);
2606 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2610 rtx x = XEXP (dest, 0);
2616 if (CONSTANT_ADDRESS_P (x))
2618 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2621 return AS2 (out,__SREG__,%1);
2623 if (optimize > 0 && io_address_operand (x, QImode))
2626 return AS2 (out,%0-0x20,%1);
2629 return AS2 (sts,%0,%1);
2631 /* memory access by reg+disp */
2632 else if (GET_CODE (x) == PLUS
2633 && REG_P (XEXP (x,0))
2634 && GET_CODE (XEXP (x,1)) == CONST_INT)
2636 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2638 int disp = INTVAL (XEXP (x,1));
2639 if (REGNO (XEXP (x,0)) != REG_Y)
2640 fatal_insn ("incorrect insn:",insn);
2642 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2643 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2644 AS2 (std,Y+63,%1) CR_TAB
2645 AS2 (sbiw,r28,%o0-63));
2647 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2648 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2649 AS2 (st,Y,%1) CR_TAB
2650 AS2 (subi,r28,lo8(%o0)) CR_TAB
2651 AS2 (sbci,r29,hi8(%o0)));
2653 else if (REGNO (XEXP (x,0)) == REG_X)
2655 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2657 if (reg_unused_after (insn, XEXP (x,0)))
2658 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2659 AS2 (adiw,r26,%o0) CR_TAB
2660 AS2 (st,X,__tmp_reg__));
2662 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2663 AS2 (adiw,r26,%o0) CR_TAB
2664 AS2 (st,X,__tmp_reg__) CR_TAB
2665 AS2 (sbiw,r26,%o0));
2669 if (reg_unused_after (insn, XEXP (x,0)))
2670 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2673 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2674 AS2 (st,X,%1) CR_TAB
2675 AS2 (sbiw,r26,%o0));
2679 return AS2 (std,%0,%1);
2682 return AS2 (st,%0,%1);
2686 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2690 rtx base = XEXP (dest, 0);
2691 int reg_base = true_regnum (base);
2692 int reg_src = true_regnum (src);
2693 /* "volatile" forces writing high byte first, even if less efficient,
2694 for correct operation with 16-bit I/O registers. */
2695 int mem_volatile_p = MEM_VOLATILE_P (dest);
2700 if (CONSTANT_ADDRESS_P (base))
2702 if (optimize > 0 && io_address_operand (base, HImode))
2705 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2706 AS2 (out,%A0-0x20,%A1));
2708 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2713 if (reg_base == REG_X)
2715 if (reg_src == REG_X)
2717 /* "st X+,r26" and "st -X,r26" are undefined. */
2718 if (!mem_volatile_p && reg_unused_after (insn, src))
2719 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2720 AS2 (st,X,r26) CR_TAB
2721 AS2 (adiw,r26,1) CR_TAB
2722 AS2 (st,X,__tmp_reg__));
2724 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2725 AS2 (adiw,r26,1) CR_TAB
2726 AS2 (st,X,__tmp_reg__) CR_TAB
2727 AS2 (sbiw,r26,1) CR_TAB
2732 if (!mem_volatile_p && reg_unused_after (insn, base))
2733 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2736 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2737 AS2 (st,X,%B1) CR_TAB
2742 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2745 else if (GET_CODE (base) == PLUS)
2747 int disp = INTVAL (XEXP (base, 1));
2748 reg_base = REGNO (XEXP (base, 0));
2749 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2751 if (reg_base != REG_Y)
2752 fatal_insn ("incorrect insn:",insn);
2754 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2755 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2756 AS2 (std,Y+63,%B1) CR_TAB
2757 AS2 (std,Y+62,%A1) CR_TAB
2758 AS2 (sbiw,r28,%o0-62));
2760 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2761 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2762 AS2 (std,Y+1,%B1) CR_TAB
2763 AS2 (st,Y,%A1) CR_TAB
2764 AS2 (subi,r28,lo8(%o0)) CR_TAB
2765 AS2 (sbci,r29,hi8(%o0)));
2767 if (reg_base == REG_X)
2770 if (reg_src == REG_X)
2773 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2774 AS2 (mov,__zero_reg__,r27) CR_TAB
2775 AS2 (adiw,r26,%o0+1) CR_TAB
2776 AS2 (st,X,__zero_reg__) CR_TAB
2777 AS2 (st,-X,__tmp_reg__) CR_TAB
2778 AS1 (clr,__zero_reg__) CR_TAB
2779 AS2 (sbiw,r26,%o0));
2782 return (AS2 (adiw,r26,%o0+1) CR_TAB
2783 AS2 (st,X,%B1) CR_TAB
2784 AS2 (st,-X,%A1) CR_TAB
2785 AS2 (sbiw,r26,%o0));
2787 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2790 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2791 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2793 else if (GET_CODE (base) == POST_INC) /* (R++) */
2797 if (REGNO (XEXP (base, 0)) == REG_X)
2800 return (AS2 (adiw,r26,1) CR_TAB
2801 AS2 (st,X,%B1) CR_TAB
2802 AS2 (st,-X,%A1) CR_TAB
2808 return (AS2 (std,%p0+1,%B1) CR_TAB
2809 AS2 (st,%p0,%A1) CR_TAB
2815 return (AS2 (st,%0,%A1) CR_TAB
2818 fatal_insn ("unknown move insn:",insn);
2822 /* Return 1 if frame pointer for current function required. */
2825 frame_pointer_required_p (void)
2827 return (cfun->calls_alloca
2828 || crtl->args.info.nregs == 0
2829 || get_frame_size () > 0);
2832 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2835 compare_condition (rtx insn)
2837 rtx next = next_real_insn (insn);
2838 RTX_CODE cond = UNKNOWN;
2839 if (next && GET_CODE (next) == JUMP_INSN)
2841 rtx pat = PATTERN (next);
2842 rtx src = SET_SRC (pat);
2843 rtx t = XEXP (src, 0);
2844 cond = GET_CODE (t);
2849 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2852 compare_sign_p (rtx insn)
2854 RTX_CODE cond = compare_condition (insn);
2855 return (cond == GE || cond == LT);
2858 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2859 that needs to be swapped (GT, GTU, LE, LEU). */
2862 compare_diff_p (rtx insn)
2864 RTX_CODE cond = compare_condition (insn);
2865 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2868 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2871 compare_eq_p (rtx insn)
2873 RTX_CODE cond = compare_condition (insn);
2874 return (cond == EQ || cond == NE);
2878 /* Output test instruction for HImode. */
2881 out_tsthi (rtx insn, int *l)
2883 if (compare_sign_p (insn))
2886 return AS1 (tst,%B0);
2888 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2889 && compare_eq_p (insn))
2891 /* Faster than sbiw if we can clobber the operand. */
2893 return AS2 (or,%A0,%B0);
2895 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2898 return AS2 (sbiw,%0,0);
2901 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2902 AS2 (cpc,%B0,__zero_reg__));
2906 /* Output test instruction for SImode. */
2909 out_tstsi (rtx insn, int *l)
2911 if (compare_sign_p (insn))
2914 return AS1 (tst,%D0);
2916 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2919 return (AS2 (sbiw,%A0,0) CR_TAB
2920 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2921 AS2 (cpc,%D0,__zero_reg__));
2924 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2925 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2926 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2927 AS2 (cpc,%D0,__zero_reg__));
2931 /* Generate asm equivalent for various shifts.
2932 Shift count is a CONST_INT, MEM or REG.
2933 This only handles cases that are not already
2934 carefully hand-optimized in ?sh??i3_out. */
2937 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2938 int *len, int t_len)
2942 int second_label = 1;
2943 int saved_in_tmp = 0;
2944 int use_zero_reg = 0;
2946 op[0] = operands[0];
2947 op[1] = operands[1];
2948 op[2] = operands[2];
2949 op[3] = operands[3];
2955 if (GET_CODE (operands[2]) == CONST_INT)
2957 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2958 int count = INTVAL (operands[2]);
2959 int max_len = 10; /* If larger than this, always use a loop. */
2968 if (count < 8 && !scratch)
2972 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2974 if (t_len * count <= max_len)
2976 /* Output shifts inline with no loop - faster. */
2978 *len = t_len * count;
2982 output_asm_insn (templ, op);
2991 strcat (str, AS2 (ldi,%3,%2));
2993 else if (use_zero_reg)
2995 /* Hack to save one word: use __zero_reg__ as loop counter.
2996 Set one bit, then shift in a loop until it is 0 again. */
2998 op[3] = zero_reg_rtx;
3002 strcat (str, ("set" CR_TAB
3003 AS2 (bld,%3,%2-1)));
3007 /* No scratch register available, use one from LD_REGS (saved in
3008 __tmp_reg__) that doesn't overlap with registers to shift. */
3010 op[3] = gen_rtx_REG (QImode,
3011 ((true_regnum (operands[0]) - 1) & 15) + 16);
3012 op[4] = tmp_reg_rtx;
3016 *len = 3; /* Includes "mov %3,%4" after the loop. */
3018 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3024 else if (GET_CODE (operands[2]) == MEM)
3028 op[3] = op_mov[0] = tmp_reg_rtx;
3032 out_movqi_r_mr (insn, op_mov, len);
3034 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3036 else if (register_operand (operands[2], QImode))
3038 if (reg_unused_after (insn, operands[2]))
3042 op[3] = tmp_reg_rtx;
3044 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3048 fatal_insn ("bad shift insn:", insn);
3055 strcat (str, AS1 (rjmp,2f));
3059 *len += t_len + 2; /* template + dec + brXX */
3062 strcat (str, "\n1:\t");
3063 strcat (str, templ);
3064 strcat (str, second_label ? "\n2:\t" : "\n\t");
3065 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3066 strcat (str, CR_TAB);
3067 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3069 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3070 output_asm_insn (str, op);
3075 /* 8bit shift left ((char)x << i) */
3078 ashlqi3_out (rtx insn, rtx operands[], int *len)
3080 if (GET_CODE (operands[2]) == CONST_INT)
3087 switch (INTVAL (operands[2]))
3090 if (INTVAL (operands[2]) < 8)
3094 return AS1 (clr,%0);
3098 return AS1 (lsl,%0);
3102 return (AS1 (lsl,%0) CR_TAB
3107 return (AS1 (lsl,%0) CR_TAB
3112 if (test_hard_reg_class (LD_REGS, operands[0]))
3115 return (AS1 (swap,%0) CR_TAB
3116 AS2 (andi,%0,0xf0));
3119 return (AS1 (lsl,%0) CR_TAB
3125 if (test_hard_reg_class (LD_REGS, operands[0]))
3128 return (AS1 (swap,%0) CR_TAB
3130 AS2 (andi,%0,0xe0));
3133 return (AS1 (lsl,%0) CR_TAB
3140 if (test_hard_reg_class (LD_REGS, operands[0]))
3143 return (AS1 (swap,%0) CR_TAB
3146 AS2 (andi,%0,0xc0));
3149 return (AS1 (lsl,%0) CR_TAB
3158 return (AS1 (ror,%0) CR_TAB
3163 else if (CONSTANT_P (operands[2]))
3164 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3166 out_shift_with_cnt (AS1 (lsl,%0),
3167 insn, operands, len, 1);
3172 /* 16bit shift left ((short)x << i) */
3175 ashlhi3_out (rtx insn, rtx operands[], int *len)
3177 if (GET_CODE (operands[2]) == CONST_INT)
3179 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3180 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3187 switch (INTVAL (operands[2]))
3190 if (INTVAL (operands[2]) < 16)
3194 return (AS1 (clr,%B0) CR_TAB
3198 if (optimize_size && scratch)
3203 return (AS1 (swap,%A0) CR_TAB
3204 AS1 (swap,%B0) CR_TAB
3205 AS2 (andi,%B0,0xf0) CR_TAB
3206 AS2 (eor,%B0,%A0) CR_TAB
3207 AS2 (andi,%A0,0xf0) CR_TAB
3213 return (AS1 (swap,%A0) CR_TAB
3214 AS1 (swap,%B0) CR_TAB
3215 AS2 (ldi,%3,0xf0) CR_TAB
3216 AS2 (and,%B0,%3) CR_TAB
3217 AS2 (eor,%B0,%A0) CR_TAB
3218 AS2 (and,%A0,%3) CR_TAB
3221 break; /* optimize_size ? 6 : 8 */
3225 break; /* scratch ? 5 : 6 */
3229 return (AS1 (lsl,%A0) CR_TAB
3230 AS1 (rol,%B0) CR_TAB
3231 AS1 (swap,%A0) CR_TAB
3232 AS1 (swap,%B0) CR_TAB
3233 AS2 (andi,%B0,0xf0) CR_TAB
3234 AS2 (eor,%B0,%A0) CR_TAB
3235 AS2 (andi,%A0,0xf0) CR_TAB
3241 return (AS1 (lsl,%A0) CR_TAB
3242 AS1 (rol,%B0) CR_TAB
3243 AS1 (swap,%A0) CR_TAB
3244 AS1 (swap,%B0) CR_TAB
3245 AS2 (ldi,%3,0xf0) CR_TAB
3246 AS2 (and,%B0,%3) CR_TAB
3247 AS2 (eor,%B0,%A0) CR_TAB
3248 AS2 (and,%A0,%3) CR_TAB
3255 break; /* scratch ? 5 : 6 */
3257 return (AS1 (clr,__tmp_reg__) CR_TAB
3258 AS1 (lsr,%B0) CR_TAB
3259 AS1 (ror,%A0) CR_TAB
3260 AS1 (ror,__tmp_reg__) CR_TAB
3261 AS1 (lsr,%B0) CR_TAB
3262 AS1 (ror,%A0) CR_TAB
3263 AS1 (ror,__tmp_reg__) CR_TAB
3264 AS2 (mov,%B0,%A0) CR_TAB
3265 AS2 (mov,%A0,__tmp_reg__));
3269 return (AS1 (lsr,%B0) CR_TAB
3270 AS2 (mov,%B0,%A0) CR_TAB
3271 AS1 (clr,%A0) CR_TAB
3272 AS1 (ror,%B0) CR_TAB
3276 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3281 return (AS2 (mov,%B0,%A0) CR_TAB
3282 AS1 (clr,%A0) CR_TAB
3287 return (AS2 (mov,%B0,%A0) CR_TAB
3288 AS1 (clr,%A0) CR_TAB
3289 AS1 (lsl,%B0) CR_TAB
3294 return (AS2 (mov,%B0,%A0) CR_TAB
3295 AS1 (clr,%A0) CR_TAB
3296 AS1 (lsl,%B0) CR_TAB
3297 AS1 (lsl,%B0) CR_TAB
3304 return (AS2 (mov,%B0,%A0) CR_TAB
3305 AS1 (clr,%A0) CR_TAB
3306 AS1 (swap,%B0) CR_TAB
3307 AS2 (andi,%B0,0xf0));
3312 return (AS2 (mov,%B0,%A0) CR_TAB
3313 AS1 (clr,%A0) CR_TAB
3314 AS1 (swap,%B0) CR_TAB
3315 AS2 (ldi,%3,0xf0) CR_TAB
3319 return (AS2 (mov,%B0,%A0) CR_TAB
3320 AS1 (clr,%A0) CR_TAB
3321 AS1 (lsl,%B0) CR_TAB
3322 AS1 (lsl,%B0) CR_TAB
3323 AS1 (lsl,%B0) CR_TAB
3330 return (AS2 (mov,%B0,%A0) CR_TAB
3331 AS1 (clr,%A0) CR_TAB
3332 AS1 (swap,%B0) CR_TAB
3333 AS1 (lsl,%B0) CR_TAB
3334 AS2 (andi,%B0,0xe0));
3336 if (AVR_HAVE_MUL && scratch)
3339 return (AS2 (ldi,%3,0x20) CR_TAB
3340 AS2 (mul,%A0,%3) CR_TAB
3341 AS2 (mov,%B0,r0) CR_TAB
3342 AS1 (clr,%A0) CR_TAB
3343 AS1 (clr,__zero_reg__));
3345 if (optimize_size && scratch)
3350 return (AS2 (mov,%B0,%A0) CR_TAB
3351 AS1 (clr,%A0) CR_TAB
3352 AS1 (swap,%B0) CR_TAB
3353 AS1 (lsl,%B0) CR_TAB
3354 AS2 (ldi,%3,0xe0) CR_TAB
3360 return ("set" CR_TAB
3361 AS2 (bld,r1,5) CR_TAB
3362 AS2 (mul,%A0,r1) CR_TAB
3363 AS2 (mov,%B0,r0) CR_TAB
3364 AS1 (clr,%A0) CR_TAB
3365 AS1 (clr,__zero_reg__));
3368 return (AS2 (mov,%B0,%A0) CR_TAB
3369 AS1 (clr,%A0) CR_TAB
3370 AS1 (lsl,%B0) CR_TAB
3371 AS1 (lsl,%B0) CR_TAB
3372 AS1 (lsl,%B0) CR_TAB
3373 AS1 (lsl,%B0) CR_TAB
3377 if (AVR_HAVE_MUL && ldi_ok)
3380 return (AS2 (ldi,%B0,0x40) CR_TAB
3381 AS2 (mul,%A0,%B0) CR_TAB
3382 AS2 (mov,%B0,r0) CR_TAB
3383 AS1 (clr,%A0) CR_TAB
3384 AS1 (clr,__zero_reg__));
3386 if (AVR_HAVE_MUL && scratch)
3389 return (AS2 (ldi,%3,0x40) CR_TAB
3390 AS2 (mul,%A0,%3) CR_TAB
3391 AS2 (mov,%B0,r0) CR_TAB
3392 AS1 (clr,%A0) CR_TAB
3393 AS1 (clr,__zero_reg__));
3395 if (optimize_size && ldi_ok)
3398 return (AS2 (mov,%B0,%A0) CR_TAB
3399 AS2 (ldi,%A0,6) "\n1:\t"
3400 AS1 (lsl,%B0) CR_TAB
3401 AS1 (dec,%A0) CR_TAB
3404 if (optimize_size && scratch)
3407 return (AS1 (clr,%B0) CR_TAB
3408 AS1 (lsr,%A0) CR_TAB
3409 AS1 (ror,%B0) CR_TAB
3410 AS1 (lsr,%A0) CR_TAB
3411 AS1 (ror,%B0) CR_TAB
3416 return (AS1 (clr,%B0) CR_TAB
3417 AS1 (lsr,%A0) CR_TAB
3418 AS1 (ror,%B0) CR_TAB
3423 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3425 insn, operands, len, 2);
3430 /* 32bit shift left ((long)x << i) */
3433 ashlsi3_out (rtx insn, rtx operands[], int *len)
3435 if (GET_CODE (operands[2]) == CONST_INT)
3443 switch (INTVAL (operands[2]))
3446 if (INTVAL (operands[2]) < 32)
3450 return *len = 3, (AS1 (clr,%D0) CR_TAB
3451 AS1 (clr,%C0) CR_TAB
3452 AS2 (movw,%A0,%C0));
3454 return (AS1 (clr,%D0) CR_TAB
3455 AS1 (clr,%C0) CR_TAB
3456 AS1 (clr,%B0) CR_TAB
3461 int reg0 = true_regnum (operands[0]);
3462 int reg1 = true_regnum (operands[1]);
3465 return (AS2 (mov,%D0,%C1) CR_TAB
3466 AS2 (mov,%C0,%B1) CR_TAB
3467 AS2 (mov,%B0,%A1) CR_TAB
3470 return (AS1 (clr,%A0) CR_TAB
3471 AS2 (mov,%B0,%A1) CR_TAB
3472 AS2 (mov,%C0,%B1) CR_TAB
3478 int reg0 = true_regnum (operands[0]);
3479 int reg1 = true_regnum (operands[1]);
3480 if (reg0 + 2 == reg1)
3481 return *len = 2, (AS1 (clr,%B0) CR_TAB
3484 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3485 AS1 (clr,%B0) CR_TAB
3488 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3489 AS2 (mov,%D0,%B1) CR_TAB
3490 AS1 (clr,%B0) CR_TAB
3496 return (AS2 (mov,%D0,%A1) CR_TAB
3497 AS1 (clr,%C0) CR_TAB
3498 AS1 (clr,%B0) CR_TAB
3503 return (AS1 (clr,%D0) CR_TAB
3504 AS1 (lsr,%A0) CR_TAB
3505 AS1 (ror,%D0) CR_TAB
3506 AS1 (clr,%C0) CR_TAB
3507 AS1 (clr,%B0) CR_TAB
3512 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3513 AS1 (rol,%B0) CR_TAB
3514 AS1 (rol,%C0) CR_TAB
3516 insn, operands, len, 4);
3520 /* 8bit arithmetic shift right ((signed char)x >> i) */
3523 ashrqi3_out (rtx insn, rtx operands[], int *len)
3525 if (GET_CODE (operands[2]) == CONST_INT)
3532 switch (INTVAL (operands[2]))
3536 return AS1 (asr,%0);
3540 return (AS1 (asr,%0) CR_TAB
3545 return (AS1 (asr,%0) CR_TAB
3551 return (AS1 (asr,%0) CR_TAB
3558 return (AS1 (asr,%0) CR_TAB
3566 return (AS2 (bst,%0,6) CR_TAB
3568 AS2 (sbc,%0,%0) CR_TAB
3572 if (INTVAL (operands[2]) < 8)
3579 return (AS1 (lsl,%0) CR_TAB
3583 else if (CONSTANT_P (operands[2]))
3584 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3586 out_shift_with_cnt (AS1 (asr,%0),
3587 insn, operands, len, 1);
3592 /* 16bit arithmetic shift right ((signed short)x >> i) */
3595 ashrhi3_out (rtx insn, rtx operands[], int *len)
3597 if (GET_CODE (operands[2]) == CONST_INT)
3599 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3600 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3607 switch (INTVAL (operands[2]))
3611 /* XXX try to optimize this too? */
3616 break; /* scratch ? 5 : 6 */
3618 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3619 AS2 (mov,%A0,%B0) CR_TAB
3620 AS1 (lsl,__tmp_reg__) CR_TAB
3621 AS1 (rol,%A0) CR_TAB
3622 AS2 (sbc,%B0,%B0) CR_TAB
3623 AS1 (lsl,__tmp_reg__) CR_TAB
3624 AS1 (rol,%A0) CR_TAB
3629 return (AS1 (lsl,%A0) CR_TAB
3630 AS2 (mov,%A0,%B0) CR_TAB
3631 AS1 (rol,%A0) CR_TAB
3636 int reg0 = true_regnum (operands[0]);
3637 int reg1 = true_regnum (operands[1]);
3640 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3641 AS1 (lsl,%B0) CR_TAB
3644 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3645 AS1 (clr,%B0) CR_TAB
3646 AS2 (sbrc,%A0,7) CR_TAB
3652 return (AS2 (mov,%A0,%B0) CR_TAB
3653 AS1 (lsl,%B0) CR_TAB
3654 AS2 (sbc,%B0,%B0) CR_TAB
3659 return (AS2 (mov,%A0,%B0) CR_TAB
3660 AS1 (lsl,%B0) CR_TAB
3661 AS2 (sbc,%B0,%B0) CR_TAB
3662 AS1 (asr,%A0) CR_TAB
3666 if (AVR_HAVE_MUL && ldi_ok)
3669 return (AS2 (ldi,%A0,0x20) CR_TAB
3670 AS2 (muls,%B0,%A0) CR_TAB
3671 AS2 (mov,%A0,r1) CR_TAB
3672 AS2 (sbc,%B0,%B0) CR_TAB
3673 AS1 (clr,__zero_reg__));
3675 if (optimize_size && scratch)
3678 return (AS2 (mov,%A0,%B0) CR_TAB
3679 AS1 (lsl,%B0) CR_TAB
3680 AS2 (sbc,%B0,%B0) CR_TAB
3681 AS1 (asr,%A0) CR_TAB
3682 AS1 (asr,%A0) CR_TAB
3686 if (AVR_HAVE_MUL && ldi_ok)
3689 return (AS2 (ldi,%A0,0x10) CR_TAB
3690 AS2 (muls,%B0,%A0) CR_TAB
3691 AS2 (mov,%A0,r1) CR_TAB
3692 AS2 (sbc,%B0,%B0) CR_TAB
3693 AS1 (clr,__zero_reg__));
3695 if (optimize_size && scratch)
3698 return (AS2 (mov,%A0,%B0) CR_TAB
3699 AS1 (lsl,%B0) CR_TAB
3700 AS2 (sbc,%B0,%B0) CR_TAB
3701 AS1 (asr,%A0) CR_TAB
3702 AS1 (asr,%A0) CR_TAB
3703 AS1 (asr,%A0) CR_TAB
3707 if (AVR_HAVE_MUL && ldi_ok)
3710 return (AS2 (ldi,%A0,0x08) CR_TAB
3711 AS2 (muls,%B0,%A0) CR_TAB
3712 AS2 (mov,%A0,r1) CR_TAB
3713 AS2 (sbc,%B0,%B0) CR_TAB
3714 AS1 (clr,__zero_reg__));
3717 break; /* scratch ? 5 : 7 */
3719 return (AS2 (mov,%A0,%B0) CR_TAB
3720 AS1 (lsl,%B0) CR_TAB
3721 AS2 (sbc,%B0,%B0) CR_TAB
3722 AS1 (asr,%A0) CR_TAB
3723 AS1 (asr,%A0) CR_TAB
3724 AS1 (asr,%A0) CR_TAB
3725 AS1 (asr,%A0) CR_TAB
3730 return (AS1 (lsl,%B0) CR_TAB
3731 AS2 (sbc,%A0,%A0) CR_TAB
3732 AS1 (lsl,%B0) CR_TAB
3733 AS2 (mov,%B0,%A0) CR_TAB
3737 if (INTVAL (operands[2]) < 16)
3743 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3744 AS2 (sbc,%A0,%A0) CR_TAB
3749 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3751 insn, operands, len, 2);
3756 /* 32bit arithmetic shift right ((signed long)x >> i) */
3759 ashrsi3_out (rtx insn, rtx operands[], int *len)
3761 if (GET_CODE (operands[2]) == CONST_INT)
3769 switch (INTVAL (operands[2]))
3773 int reg0 = true_regnum (operands[0]);
3774 int reg1 = true_regnum (operands[1]);
3777 return (AS2 (mov,%A0,%B1) CR_TAB
3778 AS2 (mov,%B0,%C1) CR_TAB
3779 AS2 (mov,%C0,%D1) CR_TAB
3780 AS1 (clr,%D0) CR_TAB
3781 AS2 (sbrc,%C0,7) CR_TAB
3784 return (AS1 (clr,%D0) CR_TAB
3785 AS2 (sbrc,%D1,7) CR_TAB
3786 AS1 (dec,%D0) CR_TAB
3787 AS2 (mov,%C0,%D1) CR_TAB
3788 AS2 (mov,%B0,%C1) CR_TAB
3794 int reg0 = true_regnum (operands[0]);
3795 int reg1 = true_regnum (operands[1]);
3797 if (reg0 == reg1 + 2)
3798 return *len = 4, (AS1 (clr,%D0) CR_TAB
3799 AS2 (sbrc,%B0,7) CR_TAB
3800 AS1 (com,%D0) CR_TAB
3803 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3804 AS1 (clr,%D0) CR_TAB
3805 AS2 (sbrc,%B0,7) CR_TAB
3806 AS1 (com,%D0) CR_TAB
3809 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3810 AS2 (mov,%A0,%C1) CR_TAB
3811 AS1 (clr,%D0) CR_TAB
3812 AS2 (sbrc,%B0,7) CR_TAB
3813 AS1 (com,%D0) CR_TAB
3818 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3819 AS1 (clr,%D0) CR_TAB
3820 AS2 (sbrc,%A0,7) CR_TAB
3821 AS1 (com,%D0) CR_TAB
3822 AS2 (mov,%B0,%D0) CR_TAB
3826 if (INTVAL (operands[2]) < 32)
3833 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3834 AS2 (sbc,%A0,%A0) CR_TAB
3835 AS2 (mov,%B0,%A0) CR_TAB
3836 AS2 (movw,%C0,%A0));
3838 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3839 AS2 (sbc,%A0,%A0) CR_TAB
3840 AS2 (mov,%B0,%A0) CR_TAB
3841 AS2 (mov,%C0,%A0) CR_TAB
3846 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3847 AS1 (ror,%C0) CR_TAB
3848 AS1 (ror,%B0) CR_TAB
3850 insn, operands, len, 4);
3854 /* 8bit logic shift right ((unsigned char)x >> i) */
3857 lshrqi3_out (rtx insn, rtx operands[], int *len)
3859 if (GET_CODE (operands[2]) == CONST_INT)
3866 switch (INTVAL (operands[2]))
3869 if (INTVAL (operands[2]) < 8)
3873 return AS1 (clr,%0);
3877 return AS1 (lsr,%0);
3881 return (AS1 (lsr,%0) CR_TAB
3885 return (AS1 (lsr,%0) CR_TAB
3890 if (test_hard_reg_class (LD_REGS, operands[0]))
3893 return (AS1 (swap,%0) CR_TAB
3894 AS2 (andi,%0,0x0f));
3897 return (AS1 (lsr,%0) CR_TAB
3903 if (test_hard_reg_class (LD_REGS, operands[0]))
3906 return (AS1 (swap,%0) CR_TAB
3911 return (AS1 (lsr,%0) CR_TAB
3918 if (test_hard_reg_class (LD_REGS, operands[0]))
3921 return (AS1 (swap,%0) CR_TAB
3927 return (AS1 (lsr,%0) CR_TAB
3936 return (AS1 (rol,%0) CR_TAB
3941 else if (CONSTANT_P (operands[2]))
3942 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3944 out_shift_with_cnt (AS1 (lsr,%0),
3945 insn, operands, len, 1);
3949 /* 16bit logic shift right ((unsigned short)x >> i) */
3952 lshrhi3_out (rtx insn, rtx operands[], int *len)
3954 if (GET_CODE (operands[2]) == CONST_INT)
3956 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3957 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3964 switch (INTVAL (operands[2]))
3967 if (INTVAL (operands[2]) < 16)
3971 return (AS1 (clr,%B0) CR_TAB
3975 if (optimize_size && scratch)
3980 return (AS1 (swap,%B0) CR_TAB
3981 AS1 (swap,%A0) CR_TAB
3982 AS2 (andi,%A0,0x0f) CR_TAB
3983 AS2 (eor,%A0,%B0) CR_TAB
3984 AS2 (andi,%B0,0x0f) CR_TAB
3990 return (AS1 (swap,%B0) CR_TAB
3991 AS1 (swap,%A0) CR_TAB
3992 AS2 (ldi,%3,0x0f) CR_TAB
3993 AS2 (and,%A0,%3) CR_TAB
3994 AS2 (eor,%A0,%B0) CR_TAB
3995 AS2 (and,%B0,%3) CR_TAB
3998 break; /* optimize_size ? 6 : 8 */
4002 break; /* scratch ? 5 : 6 */
4006 return (AS1 (lsr,%B0) CR_TAB
4007 AS1 (ror,%A0) CR_TAB
4008 AS1 (swap,%B0) CR_TAB
4009 AS1 (swap,%A0) CR_TAB
4010 AS2 (andi,%A0,0x0f) CR_TAB
4011 AS2 (eor,%A0,%B0) CR_TAB
4012 AS2 (andi,%B0,0x0f) CR_TAB
4018 return (AS1 (lsr,%B0) CR_TAB
4019 AS1 (ror,%A0) CR_TAB
4020 AS1 (swap,%B0) CR_TAB
4021 AS1 (swap,%A0) CR_TAB
4022 AS2 (ldi,%3,0x0f) CR_TAB
4023 AS2 (and,%A0,%3) CR_TAB
4024 AS2 (eor,%A0,%B0) CR_TAB
4025 AS2 (and,%B0,%3) CR_TAB
4032 break; /* scratch ? 5 : 6 */
4034 return (AS1 (clr,__tmp_reg__) CR_TAB
4035 AS1 (lsl,%A0) CR_TAB
4036 AS1 (rol,%B0) CR_TAB
4037 AS1 (rol,__tmp_reg__) CR_TAB
4038 AS1 (lsl,%A0) CR_TAB
4039 AS1 (rol,%B0) CR_TAB
4040 AS1 (rol,__tmp_reg__) CR_TAB
4041 AS2 (mov,%A0,%B0) CR_TAB
4042 AS2 (mov,%B0,__tmp_reg__));
4046 return (AS1 (lsl,%A0) CR_TAB
4047 AS2 (mov,%A0,%B0) CR_TAB
4048 AS1 (rol,%A0) CR_TAB
4049 AS2 (sbc,%B0,%B0) CR_TAB
4053 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4058 return (AS2 (mov,%A0,%B0) CR_TAB
4059 AS1 (clr,%B0) CR_TAB
4064 return (AS2 (mov,%A0,%B0) CR_TAB
4065 AS1 (clr,%B0) CR_TAB
4066 AS1 (lsr,%A0) CR_TAB
4071 return (AS2 (mov,%A0,%B0) CR_TAB
4072 AS1 (clr,%B0) CR_TAB
4073 AS1 (lsr,%A0) CR_TAB
4074 AS1 (lsr,%A0) CR_TAB
4081 return (AS2 (mov,%A0,%B0) CR_TAB
4082 AS1 (clr,%B0) CR_TAB
4083 AS1 (swap,%A0) CR_TAB
4084 AS2 (andi,%A0,0x0f));
4089 return (AS2 (mov,%A0,%B0) CR_TAB
4090 AS1 (clr,%B0) CR_TAB
4091 AS1 (swap,%A0) CR_TAB
4092 AS2 (ldi,%3,0x0f) CR_TAB
4096 return (AS2 (mov,%A0,%B0) CR_TAB
4097 AS1 (clr,%B0) CR_TAB
4098 AS1 (lsr,%A0) CR_TAB
4099 AS1 (lsr,%A0) CR_TAB
4100 AS1 (lsr,%A0) CR_TAB
4107 return (AS2 (mov,%A0,%B0) CR_TAB
4108 AS1 (clr,%B0) CR_TAB
4109 AS1 (swap,%A0) CR_TAB
4110 AS1 (lsr,%A0) CR_TAB
4111 AS2 (andi,%A0,0x07));
4113 if (AVR_HAVE_MUL && scratch)
4116 return (AS2 (ldi,%3,0x08) CR_TAB
4117 AS2 (mul,%B0,%3) CR_TAB
4118 AS2 (mov,%A0,r1) CR_TAB
4119 AS1 (clr,%B0) CR_TAB
4120 AS1 (clr,__zero_reg__));
4122 if (optimize_size && scratch)
4127 return (AS2 (mov,%A0,%B0) CR_TAB
4128 AS1 (clr,%B0) CR_TAB
4129 AS1 (swap,%A0) CR_TAB
4130 AS1 (lsr,%A0) CR_TAB
4131 AS2 (ldi,%3,0x07) CR_TAB
4137 return ("set" CR_TAB
4138 AS2 (bld,r1,3) CR_TAB
4139 AS2 (mul,%B0,r1) CR_TAB
4140 AS2 (mov,%A0,r1) CR_TAB
4141 AS1 (clr,%B0) CR_TAB
4142 AS1 (clr,__zero_reg__));
4145 return (AS2 (mov,%A0,%B0) CR_TAB
4146 AS1 (clr,%B0) CR_TAB
4147 AS1 (lsr,%A0) CR_TAB
4148 AS1 (lsr,%A0) CR_TAB
4149 AS1 (lsr,%A0) CR_TAB
4150 AS1 (lsr,%A0) CR_TAB
4154 if (AVR_HAVE_MUL && ldi_ok)
4157 return (AS2 (ldi,%A0,0x04) CR_TAB
4158 AS2 (mul,%B0,%A0) CR_TAB
4159 AS2 (mov,%A0,r1) CR_TAB
4160 AS1 (clr,%B0) CR_TAB
4161 AS1 (clr,__zero_reg__));
4163 if (AVR_HAVE_MUL && scratch)
4166 return (AS2 (ldi,%3,0x04) CR_TAB
4167 AS2 (mul,%B0,%3) CR_TAB
4168 AS2 (mov,%A0,r1) CR_TAB
4169 AS1 (clr,%B0) CR_TAB
4170 AS1 (clr,__zero_reg__));
4172 if (optimize_size && ldi_ok)
4175 return (AS2 (mov,%A0,%B0) CR_TAB
4176 AS2 (ldi,%B0,6) "\n1:\t"
4177 AS1 (lsr,%A0) CR_TAB
4178 AS1 (dec,%B0) CR_TAB
4181 if (optimize_size && scratch)
4184 return (AS1 (clr,%A0) CR_TAB
4185 AS1 (lsl,%B0) CR_TAB
4186 AS1 (rol,%A0) CR_TAB
4187 AS1 (lsl,%B0) CR_TAB
4188 AS1 (rol,%A0) CR_TAB
4193 return (AS1 (clr,%A0) CR_TAB
4194 AS1 (lsl,%B0) CR_TAB
4195 AS1 (rol,%A0) CR_TAB
4200 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4202 insn, operands, len, 2);
4206 /* 32bit logic shift right ((unsigned int)x >> i) */
4209 lshrsi3_out (rtx insn, rtx operands[], int *len)
4211 if (GET_CODE (operands[2]) == CONST_INT)
4219 switch (INTVAL (operands[2]))
4222 if (INTVAL (operands[2]) < 32)
4226 return *len = 3, (AS1 (clr,%D0) CR_TAB
4227 AS1 (clr,%C0) CR_TAB
4228 AS2 (movw,%A0,%C0));
4230 return (AS1 (clr,%D0) CR_TAB
4231 AS1 (clr,%C0) CR_TAB
4232 AS1 (clr,%B0) CR_TAB
4237 int reg0 = true_regnum (operands[0]);
4238 int reg1 = true_regnum (operands[1]);
4241 return (AS2 (mov,%A0,%B1) CR_TAB
4242 AS2 (mov,%B0,%C1) CR_TAB
4243 AS2 (mov,%C0,%D1) CR_TAB
4246 return (AS1 (clr,%D0) CR_TAB
4247 AS2 (mov,%C0,%D1) CR_TAB
4248 AS2 (mov,%B0,%C1) CR_TAB
4254 int reg0 = true_regnum (operands[0]);
4255 int reg1 = true_regnum (operands[1]);
4257 if (reg0 == reg1 + 2)
4258 return *len = 2, (AS1 (clr,%C0) CR_TAB
4261 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4262 AS1 (clr,%C0) CR_TAB
4265 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4266 AS2 (mov,%A0,%C1) CR_TAB
4267 AS1 (clr,%C0) CR_TAB
4272 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4273 AS1 (clr,%B0) CR_TAB
4274 AS1 (clr,%C0) CR_TAB
4279 return (AS1 (clr,%A0) CR_TAB
4280 AS2 (sbrc,%D0,7) CR_TAB
4281 AS1 (inc,%A0) CR_TAB
4282 AS1 (clr,%B0) CR_TAB
4283 AS1 (clr,%C0) CR_TAB
4288 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4289 AS1 (ror,%C0) CR_TAB
4290 AS1 (ror,%B0) CR_TAB
4292 insn, operands, len, 4);
4296 /* Modifies the length assigned to instruction INSN
4297 LEN is the initially computed length of the insn. */
4300 adjust_insn_length (rtx insn, int len)
4302 rtx patt = PATTERN (insn);
4305 if (GET_CODE (patt) == SET)
4308 op[1] = SET_SRC (patt);
4309 op[0] = SET_DEST (patt);
4310 if (general_operand (op[1], VOIDmode)
4311 && general_operand (op[0], VOIDmode))
4313 switch (GET_MODE (op[0]))
4316 output_movqi (insn, op, &len);
4319 output_movhi (insn, op, &len);
4323 output_movsisf (insn, op, &len);
4329 else if (op[0] == cc0_rtx && REG_P (op[1]))
4331 switch (GET_MODE (op[1]))
4333 case HImode: out_tsthi (insn,&len); break;
4334 case SImode: out_tstsi (insn,&len); break;
4338 else if (GET_CODE (op[1]) == AND)
4340 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4342 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4343 if (GET_MODE (op[1]) == SImode)
4344 len = (((mask & 0xff) != 0xff)
4345 + ((mask & 0xff00) != 0xff00)
4346 + ((mask & 0xff0000L) != 0xff0000L)
4347 + ((mask & 0xff000000L) != 0xff000000L));
4348 else if (GET_MODE (op[1]) == HImode)
4349 len = (((mask & 0xff) != 0xff)
4350 + ((mask & 0xff00) != 0xff00));
4353 else if (GET_CODE (op[1]) == IOR)
4355 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4357 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4358 if (GET_MODE (op[1]) == SImode)
4359 len = (((mask & 0xff) != 0)
4360 + ((mask & 0xff00) != 0)
4361 + ((mask & 0xff0000L) != 0)
4362 + ((mask & 0xff000000L) != 0));
4363 else if (GET_MODE (op[1]) == HImode)
4364 len = (((mask & 0xff) != 0)
4365 + ((mask & 0xff00) != 0));
4369 set = single_set (insn);
4374 op[1] = SET_SRC (set);
4375 op[0] = SET_DEST (set);
4377 if (GET_CODE (patt) == PARALLEL
4378 && general_operand (op[1], VOIDmode)
4379 && general_operand (op[0], VOIDmode))
4381 if (XVECLEN (patt, 0) == 2)
4382 op[2] = XVECEXP (patt, 0, 1);
4384 switch (GET_MODE (op[0]))
4390 output_reload_inhi (insn, op, &len);
4394 output_reload_insisf (insn, op, &len);
4400 else if (GET_CODE (op[1]) == ASHIFT
4401 || GET_CODE (op[1]) == ASHIFTRT
4402 || GET_CODE (op[1]) == LSHIFTRT)
4406 ops[1] = XEXP (op[1],0);
4407 ops[2] = XEXP (op[1],1);
4408 switch (GET_CODE (op[1]))
4411 switch (GET_MODE (op[0]))
4413 case QImode: ashlqi3_out (insn,ops,&len); break;
4414 case HImode: ashlhi3_out (insn,ops,&len); break;
4415 case SImode: ashlsi3_out (insn,ops,&len); break;
4420 switch (GET_MODE (op[0]))
4422 case QImode: ashrqi3_out (insn,ops,&len); break;
4423 case HImode: ashrhi3_out (insn,ops,&len); break;
4424 case SImode: ashrsi3_out (insn,ops,&len); break;
4429 switch (GET_MODE (op[0]))
4431 case QImode: lshrqi3_out (insn,ops,&len); break;
4432 case HImode: lshrhi3_out (insn,ops,&len); break;
4433 case SImode: lshrsi3_out (insn,ops,&len); break;
4445 /* Return nonzero if register REG dead after INSN. */
4448 reg_unused_after (rtx insn, rtx reg)
4450 return (dead_or_set_p (insn, reg)
4451 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4454 /* Return nonzero if REG is not used after INSN.
4455 We assume REG is a reload reg, and therefore does
4456 not live past labels. It may live past calls or jumps though. */
4459 _reg_unused_after (rtx insn, rtx reg)
4464 /* If the reg is set by this instruction, then it is safe for our
4465 case. Disregard the case where this is a store to memory, since
4466 we are checking a register used in the store address. */
4467 set = single_set (insn);
4468 if (set && GET_CODE (SET_DEST (set)) != MEM
4469 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4472 while ((insn = NEXT_INSN (insn)))
4475 code = GET_CODE (insn);
4478 /* If this is a label that existed before reload, then the register
4479 if dead here. However, if this is a label added by reorg, then
4480 the register may still be live here. We can't tell the difference,
4481 so we just ignore labels completely. */
4482 if (code == CODE_LABEL)
4490 if (code == JUMP_INSN)
4493 /* If this is a sequence, we must handle them all at once.
4494 We could have for instance a call that sets the target register,
4495 and an insn in a delay slot that uses the register. In this case,
4496 we must return 0. */
4497 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4502 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4504 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4505 rtx set = single_set (this_insn);
4507 if (GET_CODE (this_insn) == CALL_INSN)
4509 else if (GET_CODE (this_insn) == JUMP_INSN)
4511 if (INSN_ANNULLED_BRANCH_P (this_insn))
4516 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4518 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4520 if (GET_CODE (SET_DEST (set)) != MEM)
4526 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4531 else if (code == JUMP_INSN)
4535 if (code == CALL_INSN)
4538 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4539 if (GET_CODE (XEXP (tem, 0)) == USE
4540 && REG_P (XEXP (XEXP (tem, 0), 0))
4541 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4543 if (call_used_regs[REGNO (reg)])
4547 set = single_set (insn);
4549 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4551 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4552 return GET_CODE (SET_DEST (set)) != MEM;
4553 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4559 /* Target hook for assembling integer objects. The AVR version needs
4560 special handling for references to certain labels. */
4563 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4565 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4566 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4567 || GET_CODE (x) == LABEL_REF))
4569 fputs ("\t.word\tgs(", asm_out_file);
4570 output_addr_const (asm_out_file, x);
4571 fputs (")\n", asm_out_file);
4574 return default_assemble_integer (x, size, aligned_p);
4577 /* The routine used to output NUL terminated strings. We use a special
4578 version of this for most svr4 targets because doing so makes the
4579 generated assembly code more compact (and thus faster to assemble)
4580 as well as more readable, especially for targets like the i386
4581 (where the only alternative is to output character sequences as
4582 comma separated lists of numbers). */
4585 gas_output_limited_string(FILE *file, const char *str)
4587 const unsigned char *_limited_str = (const unsigned char *) str;
4589 fprintf (file, "%s\"", STRING_ASM_OP);
4590 for (; (ch = *_limited_str); _limited_str++)
4593 switch (escape = ESCAPES[ch])
4599 fprintf (file, "\\%03o", ch);
4603 putc (escape, file);
4607 fprintf (file, "\"\n");
4610 /* The routine used to output sequences of byte values. We use a special
4611 version of this for most svr4 targets because doing so makes the
4612 generated assembly code more compact (and thus faster to assemble)
4613 as well as more readable. Note that if we find subparts of the
4614 character sequence which end with NUL (and which are shorter than
4615 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4618 gas_output_ascii(FILE *file, const char *str, size_t length)
4620 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4621 const unsigned char *limit = _ascii_bytes + length;
4622 unsigned bytes_in_chunk = 0;
4623 for (; _ascii_bytes < limit; _ascii_bytes++)
4625 const unsigned char *p;
4626 if (bytes_in_chunk >= 60)
4628 fprintf (file, "\"\n");
4631 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4633 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4635 if (bytes_in_chunk > 0)
4637 fprintf (file, "\"\n");
4640 gas_output_limited_string (file, (const char*)_ascii_bytes);
4647 if (bytes_in_chunk == 0)
4648 fprintf (file, "\t.ascii\t\"");
4649 switch (escape = ESCAPES[ch = *_ascii_bytes])
4656 fprintf (file, "\\%03o", ch);
4657 bytes_in_chunk += 4;
4661 putc (escape, file);
4662 bytes_in_chunk += 2;
4667 if (bytes_in_chunk > 0)
4668 fprintf (file, "\"\n");
4671 /* Return value is nonzero if pseudos that have been
4672 assigned to registers of class CLASS would likely be spilled
4673 because registers of CLASS are needed for spill registers. */
4676 class_likely_spilled_p (int c)
4678 return (c != ALL_REGS && c != ADDW_REGS);
4681 /* Valid attributes:
4682 progmem - put data to program memory;
4683 signal - make a function to be hardware interrupt. After function
4684 prologue interrupts are disabled;
4685 interrupt - make a function to be hardware interrupt. After function
4686 prologue interrupts are enabled;
4687 naked - don't generate function prologue/epilogue and `ret' command.
4689 Only `progmem' attribute valid for type. */
4691 const struct attribute_spec avr_attribute_table[] =
4693 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4694 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4695 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4696 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4697 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4698 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4699 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
4700 { NULL, 0, 0, false, false, false, NULL }
4703 /* Handle a "progmem" attribute; arguments as in
4704 struct attribute_spec.handler. */
4706 avr_handle_progmem_attribute (tree *node, tree name,
4707 tree args ATTRIBUTE_UNUSED,
4708 int flags ATTRIBUTE_UNUSED,
4713 if (TREE_CODE (*node) == TYPE_DECL)
4715 /* This is really a decl attribute, not a type attribute,
4716 but try to handle it for GCC 3.0 backwards compatibility. */
4718 tree type = TREE_TYPE (*node);
4719 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4720 tree newtype = build_type_attribute_variant (type, attr);
4722 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4723 TREE_TYPE (*node) = newtype;
4724 *no_add_attrs = true;
4726 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4728 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4730 warning (0, "only initialized variables can be placed into "
4731 "program memory area");
4732 *no_add_attrs = true;
4737 warning (OPT_Wattributes, "%qs attribute ignored",
4738 IDENTIFIER_POINTER (name));
4739 *no_add_attrs = true;
4746 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4747 struct attribute_spec.handler. */
4750 avr_handle_fndecl_attribute (tree *node, tree name,
4751 tree args ATTRIBUTE_UNUSED,
4752 int flags ATTRIBUTE_UNUSED,
4755 if (TREE_CODE (*node) != FUNCTION_DECL)
4757 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4758 IDENTIFIER_POINTER (name));
4759 *no_add_attrs = true;
4763 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4764 const char *attr = IDENTIFIER_POINTER (name);
4766 /* If the function has the 'signal' or 'interrupt' attribute, test to
4767 make sure that the name of the function is "__vector_NN" so as to
4768 catch when the user misspells the interrupt vector name. */
4770 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4772 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4774 warning (0, "%qs appears to be a misspelled interrupt handler",
4778 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4780 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4782 warning (0, "%qs appears to be a misspelled signal handler",
4792 avr_handle_fntype_attribute (tree *node, tree name,
4793 tree args ATTRIBUTE_UNUSED,
4794 int flags ATTRIBUTE_UNUSED,
4797 if (TREE_CODE (*node) != FUNCTION_TYPE)
4799 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4800 IDENTIFIER_POINTER (name));
4801 *no_add_attrs = true;
4807 /* Look for attribute `progmem' in DECL
4808 if found return 1, otherwise 0. */
4811 avr_progmem_p (tree decl, tree attributes)
4815 if (TREE_CODE (decl) != VAR_DECL)
4819 != lookup_attribute ("progmem", attributes))
4825 while (TREE_CODE (a) == ARRAY_TYPE);
4827 if (a == error_mark_node)
4830 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4836 /* Add the section attribute if the variable is in progmem. */
4839 avr_insert_attributes (tree node, tree *attributes)
4841 if (TREE_CODE (node) == VAR_DECL
4842 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4843 && avr_progmem_p (node, *attributes))
4845 static const char dsec[] = ".progmem.data";
4846 *attributes = tree_cons (get_identifier ("section"),
4847 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4850 /* ??? This seems sketchy. Why can't the user declare the
4851 thing const in the first place? */
4852 TREE_READONLY (node) = 1;
4856 /* A get_unnamed_section callback for switching to progmem_section. */
4859 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4861 fprintf (asm_out_file,
4862 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4863 AVR_HAVE_JMP_CALL ? "a" : "ax");
4864 /* Should already be aligned, this is just to be safe if it isn't. */
4865 fprintf (asm_out_file, "\t.p2align 1\n");
4868 /* Implement TARGET_ASM_INIT_SECTIONS. */
4871 avr_asm_init_sections (void)
4873 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4874 avr_output_progmem_section_asm_op,
4876 readonly_data_section = data_section;
4880 avr_section_type_flags (tree decl, const char *name, int reloc)
4882 unsigned int flags = default_section_type_flags (decl, name, reloc);
4884 if (strncmp (name, ".noinit", 7) == 0)
4886 if (decl && TREE_CODE (decl) == VAR_DECL
4887 && DECL_INITIAL (decl) == NULL_TREE)
4888 flags |= SECTION_BSS; /* @nobits */
4890 warning (0, "only uninitialized variables can be placed in the "
4897 /* Outputs some appropriate text to go at the start of an assembler
4901 avr_file_start (void)
4903 if (avr_current_arch->asm_only)
4904 error ("MCU %qs supported for assembler only", avr_mcu_name);
4906 default_file_start ();
4908 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4909 fputs ("__SREG__ = 0x3f\n"
4911 "__SP_L__ = 0x3d\n", asm_out_file);
4913 fputs ("__tmp_reg__ = 0\n"
4914 "__zero_reg__ = 1\n", asm_out_file);
4916 /* FIXME: output these only if there is anything in the .data / .bss
4917 sections - some code size could be saved by not linking in the
4918 initialization code from libgcc if one or both sections are empty. */
4919 fputs ("\t.global __do_copy_data\n", asm_out_file);
4920 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4923 /* Outputs to the stdio stream FILE some
4924 appropriate text to go at the end of an assembler file. */
4931 /* Choose the order in which to allocate hard registers for
4932 pseudo-registers local to a basic block.
4934 Store the desired register order in the array `reg_alloc_order'.
4935 Element 0 should be the register to allocate first; element 1, the
4936 next register; and so on. */
4939 order_regs_for_local_alloc (void)
4942 static const int order_0[] = {
4950 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4954 static const int order_1[] = {
4962 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4966 static const int order_2[] = {
4975 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4980 const int *order = (TARGET_ORDER_1 ? order_1 :
4981 TARGET_ORDER_2 ? order_2 :
4983 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4984 reg_alloc_order[i] = order[i];
4988 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4989 cost of an RTX operand given its context. X is the rtx of the
4990 operand, MODE is its mode, and OUTER is the rtx_code of this
4991 operand's parent operator. */
4994 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
4995 bool speed ATTRIBUTE_UNUSED)
4997 enum rtx_code code = GET_CODE (x);
5008 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5015 avr_rtx_costs (x, code, outer, &total);
5019 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5020 is to be calculated. Return true if the complete cost has been
5021 computed, and false if subexpressions should be scanned. In either
5022 case, *TOTAL contains the cost result. */
5025 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
5028 enum machine_mode mode = GET_MODE (x);
5035 /* Immediate constants are as cheap as registers. */
5043 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5051 *total = COSTS_N_INSNS (1);
5055 *total = COSTS_N_INSNS (3);
5059 *total = COSTS_N_INSNS (7);
5065 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5073 *total = COSTS_N_INSNS (1);
5079 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5083 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5084 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5088 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5089 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5090 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5094 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5095 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5096 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5103 *total = COSTS_N_INSNS (1);
5104 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5105 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5109 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5111 *total = COSTS_N_INSNS (2);
5112 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5114 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5115 *total = COSTS_N_INSNS (1);
5117 *total = COSTS_N_INSNS (2);
5121 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5123 *total = COSTS_N_INSNS (4);
5124 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5126 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5127 *total = COSTS_N_INSNS (1);
5129 *total = COSTS_N_INSNS (4);
5135 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5141 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5142 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5143 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5144 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5148 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5149 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5150 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5158 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5160 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5167 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5169 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5177 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5178 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5186 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5189 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5190 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5197 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5199 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5200 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5204 val = INTVAL (XEXP (x, 1));
5206 *total = COSTS_N_INSNS (3);
5207 else if (val >= 0 && val <= 7)
5208 *total = COSTS_N_INSNS (val);
5210 *total = COSTS_N_INSNS (1);
5215 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5217 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5218 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5221 switch (INTVAL (XEXP (x, 1)))
5228 *total = COSTS_N_INSNS (2);
5231 *total = COSTS_N_INSNS (3);
5237 *total = COSTS_N_INSNS (4);
5242 *total = COSTS_N_INSNS (5);
5245 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5248 *total = COSTS_N_INSNS (ptimize_size ? 5 : 9);
5251 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5254 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5255 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5260 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5262 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5263 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5266 switch (INTVAL (XEXP (x, 1)))
5272 *total = COSTS_N_INSNS (3);
5277 *total = COSTS_N_INSNS (4);
5280 *total = COSTS_N_INSNS (6);
5283 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5286 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5287 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5294 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5301 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5303 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5304 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5308 val = INTVAL (XEXP (x, 1));
5310 *total = COSTS_N_INSNS (4);
5312 *total = COSTS_N_INSNS (2);
5313 else if (val >= 0 && val <= 7)
5314 *total = COSTS_N_INSNS (val);
5316 *total = COSTS_N_INSNS (1);
5321 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5323 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5324 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5327 switch (INTVAL (XEXP (x, 1)))
5333 *total = COSTS_N_INSNS (2);
5336 *total = COSTS_N_INSNS (3);
5342 *total = COSTS_N_INSNS (4);
5346 *total = COSTS_N_INSNS (5);
5349 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5352 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5356 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5359 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5360 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5365 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5367 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5368 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5371 switch (INTVAL (XEXP (x, 1)))
5377 *total = COSTS_N_INSNS (4);
5382 *total = COSTS_N_INSNS (6);
5385 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5388 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5391 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5392 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5399 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5406 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5408 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5409 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5413 val = INTVAL (XEXP (x, 1));
5415 *total = COSTS_N_INSNS (3);
5416 else if (val >= 0 && val <= 7)
5417 *total = COSTS_N_INSNS (val);
5419 *total = COSTS_N_INSNS (1);
5424 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5426 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5427 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5430 switch (INTVAL (XEXP (x, 1)))
5437 *total = COSTS_N_INSNS (2);
5440 *total = COSTS_N_INSNS (3);
5445 *total = COSTS_N_INSNS (4);
5449 *total = COSTS_N_INSNS (5);
5455 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5458 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5462 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5465 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5466 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5471 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5473 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5474 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5477 switch (INTVAL (XEXP (x, 1)))
5483 *total = COSTS_N_INSNS (4);
5486 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5491 *total = COSTS_N_INSNS (4);
5494 *total = COSTS_N_INSNS (6);
5497 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5498 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5505 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5509 switch (GET_MODE (XEXP (x, 0)))
5512 *total = COSTS_N_INSNS (1);
5513 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5514 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5518 *total = COSTS_N_INSNS (2);
5519 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5520 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5521 else if (INTVAL (XEXP (x, 1)) != 0)
5522 *total += COSTS_N_INSNS (1);
5526 *total = COSTS_N_INSNS (4);
5527 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5528 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5529 else if (INTVAL (XEXP (x, 1)) != 0)
5530 *total += COSTS_N_INSNS (3);
5536 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5545 /* Calculate the cost of a memory address. */
5548 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5550 if (GET_CODE (x) == PLUS
5551 && GET_CODE (XEXP (x,1)) == CONST_INT
5552 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5553 && INTVAL (XEXP (x,1)) >= 61)
5555 if (CONSTANT_ADDRESS_P (x))
5557 if (optimize > 0 && io_address_operand (x, QImode))
5564 /* Test for extra memory constraint 'Q'.
5565 It's a memory address based on Y or Z pointer with valid displacement. */
5568 extra_constraint_Q (rtx x)
5570 if (GET_CODE (XEXP (x,0)) == PLUS
5571 && REG_P (XEXP (XEXP (x,0), 0))
5572 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5573 && (INTVAL (XEXP (XEXP (x,0), 1))
5574 <= MAX_LD_OFFSET (GET_MODE (x))))
5576 rtx xx = XEXP (XEXP (x,0), 0);
5577 int regno = REGNO (xx);
5578 if (TARGET_ALL_DEBUG)
5580 fprintf (stderr, ("extra_constraint:\n"
5581 "reload_completed: %d\n"
5582 "reload_in_progress: %d\n"),
5583 reload_completed, reload_in_progress);
5586 if (regno >= FIRST_PSEUDO_REGISTER)
5587 return 1; /* allocate pseudos */
5588 else if (regno == REG_Z || regno == REG_Y)
5589 return 1; /* strictly check */
5590 else if (xx == frame_pointer_rtx
5591 || xx == arg_pointer_rtx)
5592 return 1; /* XXX frame & arg pointer checks */
5597 /* Convert condition code CONDITION to the valid AVR condition code. */
5600 avr_normalize_condition (RTX_CODE condition)
5617 /* This function optimizes conditional jumps. */
5624 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5626 if (! (GET_CODE (insn) == INSN
5627 || GET_CODE (insn) == CALL_INSN
5628 || GET_CODE (insn) == JUMP_INSN)
5629 || !single_set (insn))
5632 pattern = PATTERN (insn);
5634 if (GET_CODE (pattern) == PARALLEL)
5635 pattern = XVECEXP (pattern, 0, 0);
5636 if (GET_CODE (pattern) == SET
5637 && SET_DEST (pattern) == cc0_rtx
5638 && compare_diff_p (insn))
5640 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5642 /* Now we work under compare insn. */
5644 pattern = SET_SRC (pattern);
5645 if (true_regnum (XEXP (pattern,0)) >= 0
5646 && true_regnum (XEXP (pattern,1)) >= 0 )
5648 rtx x = XEXP (pattern,0);
5649 rtx next = next_real_insn (insn);
5650 rtx pat = PATTERN (next);
5651 rtx src = SET_SRC (pat);
5652 rtx t = XEXP (src,0);
5653 PUT_CODE (t, swap_condition (GET_CODE (t)));
5654 XEXP (pattern,0) = XEXP (pattern,1);
5655 XEXP (pattern,1) = x;
5656 INSN_CODE (next) = -1;
5658 else if (true_regnum (XEXP (pattern,0)) >= 0
5659 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5661 rtx x = XEXP (pattern,1);
5662 rtx next = next_real_insn (insn);
5663 rtx pat = PATTERN (next);
5664 rtx src = SET_SRC (pat);
5665 rtx t = XEXP (src,0);
5666 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5668 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5670 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5671 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5672 INSN_CODE (next) = -1;
5673 INSN_CODE (insn) = -1;
5677 else if (true_regnum (SET_SRC (pattern)) >= 0)
5679 /* This is a tst insn */
5680 rtx next = next_real_insn (insn);
5681 rtx pat = PATTERN (next);
5682 rtx src = SET_SRC (pat);
5683 rtx t = XEXP (src,0);
5685 PUT_CODE (t, swap_condition (GET_CODE (t)));
5686 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5688 INSN_CODE (next) = -1;
5689 INSN_CODE (insn) = -1;
5695 /* Returns register number for function return value.*/
5698 avr_ret_register (void)
5703 /* Create an RTX representing the place where a
5704 library function returns a value of mode MODE. */
5707 avr_libcall_value (enum machine_mode mode)
5709 int offs = GET_MODE_SIZE (mode);
5712 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5715 /* Create an RTX representing the place where a
5716 function returns a value of data type VALTYPE. */
5719 avr_function_value (const_tree type,
5720 const_tree func ATTRIBUTE_UNUSED,
5721 bool outgoing ATTRIBUTE_UNUSED)
5725 if (TYPE_MODE (type) != BLKmode)
5726 return avr_libcall_value (TYPE_MODE (type));
5728 offs = int_size_in_bytes (type);
5731 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5732 offs = GET_MODE_SIZE (SImode);
5733 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5734 offs = GET_MODE_SIZE (DImode);
5736 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5739 /* Places additional restrictions on the register class to
5740 use when it is necessary to copy value X into a register
5744 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
5750 test_hard_reg_class (enum reg_class rclass, rtx x)
5752 int regno = true_regnum (x);
5756 if (TEST_HARD_REG_CLASS (rclass, regno))
5764 jump_over_one_insn_p (rtx insn, rtx dest)
5766 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5769 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5770 int dest_addr = INSN_ADDRESSES (uid);
5771 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5774 /* Returns 1 if a value of mode MODE can be stored starting with hard
5775 register number REGNO. On the enhanced core, anything larger than
5776 1 byte must start in even numbered register for "movw" to work
5777 (this way we don't have to check for odd registers everywhere). */
5780 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5782 /* Disallow QImode in stack pointer regs. */
5783 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5786 /* The only thing that can go into registers r28:r29 is a Pmode. */
5787 if (regno == REG_Y && mode == Pmode)
5790 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5791 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5797 /* Modes larger than QImode occupy consecutive registers. */
5798 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5801 /* All modes larger than QImode should start in an even register. */
5802 return !(regno & 1);
5806 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5812 if (GET_CODE (operands[1]) == CONST_INT)
5814 int val = INTVAL (operands[1]);
5815 if ((val & 0xff) == 0)
5818 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5819 AS2 (ldi,%2,hi8(%1)) CR_TAB
5822 else if ((val & 0xff00) == 0)
5825 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5826 AS2 (mov,%A0,%2) CR_TAB
5827 AS2 (mov,%B0,__zero_reg__));
5829 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5832 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5833 AS2 (mov,%A0,%2) CR_TAB
5838 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5839 AS2 (mov,%A0,%2) CR_TAB
5840 AS2 (ldi,%2,hi8(%1)) CR_TAB
5846 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5848 rtx src = operands[1];
5849 int cnst = (GET_CODE (src) == CONST_INT);
5854 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5855 + ((INTVAL (src) & 0xff00) != 0)
5856 + ((INTVAL (src) & 0xff0000) != 0)
5857 + ((INTVAL (src) & 0xff000000) != 0);
5864 if (cnst && ((INTVAL (src) & 0xff) == 0))
5865 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5868 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5869 output_asm_insn (AS2 (mov, %A0, %2), operands);
5871 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5872 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5875 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5876 output_asm_insn (AS2 (mov, %B0, %2), operands);
5878 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5879 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5882 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5883 output_asm_insn (AS2 (mov, %C0, %2), operands);
5885 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5886 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5889 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5890 output_asm_insn (AS2 (mov, %D0, %2), operands);
5896 avr_output_bld (rtx operands[], int bit_nr)
5898 static char s[] = "bld %A0,0";
5900 s[5] = 'A' + (bit_nr >> 3);
5901 s[8] = '0' + (bit_nr & 7);
5902 output_asm_insn (s, operands);
5906 avr_output_addr_vec_elt (FILE *stream, int value)
5908 switch_to_section (progmem_section);
5909 if (AVR_HAVE_JMP_CALL)
5910 fprintf (stream, "\t.word gs(.L%d)\n", value);
5912 fprintf (stream, "\trjmp .L%d\n", value);
5915 /* Returns true if SCRATCH are safe to be allocated as a scratch
5916 registers (for a define_peephole2) in the current function. */
5919 avr_hard_regno_scratch_ok (unsigned int regno)
5921 /* Interrupt functions can only use registers that have already been saved
5922 by the prologue, even if they would normally be call-clobbered. */
5924 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5925 && !df_regs_ever_live_p (regno))
5931 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5934 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5935 unsigned int new_reg)
5937 /* Interrupt functions can only use registers that have already been
5938 saved by the prologue, even if they would normally be
5941 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5942 && !df_regs_ever_live_p (new_reg))
5948 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5949 or memory location in the I/O space (QImode only).
5951 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5952 Operand 1: register operand to test, or CONST_INT memory address.
5953 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5954 Operand 3: label to jump to if the test is true. */
5957 avr_out_sbxx_branch (rtx insn, rtx operands[])
5959 enum rtx_code comp = GET_CODE (operands[0]);
5960 int long_jump = (get_attr_length (insn) >= 4);
5961 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5965 else if (comp == LT)
5969 comp = reverse_condition (comp);
5971 if (GET_CODE (operands[1]) == CONST_INT)
5973 if (INTVAL (operands[1]) < 0x40)
5976 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5978 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5982 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5984 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5986 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5989 else /* GET_CODE (operands[1]) == REG */
5991 if (GET_MODE (operands[1]) == QImode)
5994 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5996 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5998 else /* HImode or SImode */
6000 static char buf[] = "sbrc %A1,0";
6001 int bit_nr = exact_log2 (INTVAL (operands[2])
6002 & GET_MODE_MASK (GET_MODE (operands[1])));
6004 buf[3] = (comp == EQ) ? 's' : 'c';
6005 buf[6] = 'A' + (bit_nr >> 3);
6006 buf[9] = '0' + (bit_nr & 7);
6007 output_asm_insn (buf, operands);
6012 return (AS1 (rjmp,.+4) CR_TAB
6015 return AS1 (rjmp,%3);
6019 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6022 avr_asm_out_ctor (rtx symbol, int priority)
6024 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6025 default_ctor_section_asm_out_constructor (symbol, priority);
6028 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6031 avr_asm_out_dtor (rtx symbol, int priority)
6033 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6034 default_dtor_section_asm_out_destructor (symbol, priority);
6037 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6040 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6042 if (TYPE_MODE (type) == BLKmode)
6044 HOST_WIDE_INT size = int_size_in_bytes (type);
6045 return (size == -1 || size > 8);