1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
64 && easy_vector_same (x, y))
66 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
68 && easy_vector_same (x, y))
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
73 /* Structure used to define the rs6000 stack */
74 typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
81 int toc_save_p; /* true if the TOC needs to be saved */
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Support adjust_priority scheduler hook
128 and -mprioritize-restricted-insns= option. */
129 const char *rs6000_sched_restricted_insns_priority_str;
130 int rs6000_sched_restricted_insns_priority;
132 /* Support for -msched-costly-dep option. */
133 const char *rs6000_sched_costly_dep_str;
134 enum rs6000_dependence_cost rs6000_sched_costly_dep;
136 /* Support for -minsert-sched-nops option. */
137 const char *rs6000_sched_insert_nops_str;
138 enum rs6000_nop_insertion rs6000_sched_insert_nops;
140 /* Size of long double */
141 const char *rs6000_long_double_size_string;
142 int rs6000_long_double_type_size;
144 /* Whether -mabi=altivec has appeared */
145 int rs6000_altivec_abi;
147 /* Whether VRSAVE instructions should be generated. */
148 int rs6000_altivec_vrsave;
150 /* String from -mvrsave= option. */
151 const char *rs6000_altivec_vrsave_string;
153 /* Nonzero if we want SPE ABI extensions. */
156 /* Whether isel instructions should be generated. */
159 /* Whether SPE simd instructions should be generated. */
162 /* Nonzero if floating point operations are done in the GPRs. */
163 int rs6000_float_gprs = 0;
165 /* String from -mfloat-gprs=. */
166 const char *rs6000_float_gprs_string;
168 /* String from -misel=. */
169 const char *rs6000_isel_string;
171 /* String from -mspe=. */
172 const char *rs6000_spe_string;
174 /* Set to nonzero once AIX common-mode calls have been defined. */
175 static GTY(()) int common_mode_defined;
177 /* Save information from a "cmpxx" operation until the branch or scc is
179 rtx rs6000_compare_op0, rs6000_compare_op1;
180 int rs6000_compare_fp_p;
182 /* Label number of label created for -mrelocatable, to call to so we can
183 get the address of the GOT section */
184 int rs6000_pic_labelno;
187 /* Which abi to adhere to */
188 const char *rs6000_abi_name;
190 /* Semantics of the small data area */
191 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
193 /* Which small data model to use */
194 const char *rs6000_sdata_name = (char *)0;
196 /* Counter for labels which are to be placed in .fixup. */
197 int fixuplabelno = 0;
200 /* Bit size of immediate TLS offsets and string from which it is decoded. */
201 int rs6000_tls_size = 32;
202 const char *rs6000_tls_size_string;
204 /* ABI enumeration available for subtarget to use. */
205 enum rs6000_abi rs6000_current_abi;
207 /* ABI string from -mabi= option. */
208 const char *rs6000_abi_string;
211 const char *rs6000_debug_name;
212 int rs6000_debug_stack; /* debug stack applications */
213 int rs6000_debug_arg; /* debug argument handling */
216 static GTY(()) tree opaque_V2SI_type_node;
217 static GTY(()) tree opaque_V2SF_type_node;
218 static GTY(()) tree opaque_p_V2SI_type_node;
220 const char *rs6000_traceback_name;
222 traceback_default = 0,
228 /* Flag to say the TOC is initialized */
230 char toc_label_name[10];
232 /* Alias set for saves and restores from the rs6000 stack. */
233 static GTY(()) int rs6000_sr_alias_set;
235 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
236 The only place that looks at this is rs6000_set_default_type_attributes;
237 everywhere else should rely on the presence or absence of a longcall
238 attribute on the function declaration. */
239 int rs6000_default_long_calls;
240 const char *rs6000_longcall_switch;
242 /* Control alignment for fields within structures. */
243 /* String from -malign-XXXXX. */
244 const char *rs6000_alignment_string;
245 int rs6000_alignment_flags;
247 struct builtin_description
249 /* mask is not const because we're going to alter it below. This
250 nonsense will go away when we rewrite the -march infrastructure
251 to give us more target flag bits. */
253 const enum insn_code icode;
254 const char *const name;
255 const enum rs6000_builtins code;
258 static bool rs6000_function_ok_for_sibcall (tree, tree);
259 static int num_insns_constant_wide (HOST_WIDE_INT);
260 static void validate_condition_mode (enum rtx_code, enum machine_mode);
261 static rtx rs6000_generate_compare (enum rtx_code);
262 static void rs6000_maybe_dead (rtx);
263 static void rs6000_emit_stack_tie (void);
264 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
265 static rtx spe_synthesize_frame_save (rtx);
266 static bool spe_func_has_64bit_regs_p (void);
267 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
269 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
270 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
271 static unsigned rs6000_hash_constant (rtx);
272 static unsigned toc_hash_function (const void *);
273 static int toc_hash_eq (const void *, const void *);
274 static int constant_pool_expr_1 (rtx, int *, int *);
275 static bool constant_pool_expr_p (rtx);
276 static bool toc_relative_expr_p (rtx);
277 static bool legitimate_small_data_p (enum machine_mode, rtx);
278 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
279 static bool legitimate_indexed_address_p (rtx, int);
280 static bool legitimate_indirect_address_p (rtx, int);
281 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
282 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
283 static struct machine_function * rs6000_init_machine_status (void);
284 static bool rs6000_assemble_integer (rtx, unsigned int, int);
285 #ifdef HAVE_GAS_HIDDEN
286 static void rs6000_assemble_visibility (tree, int);
288 static int rs6000_ra_ever_killed (void);
289 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
290 extern const struct attribute_spec rs6000_attribute_table[];
291 static void rs6000_set_default_type_attributes (tree);
292 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
293 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
294 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
296 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
297 static bool rs6000_return_in_memory (tree, tree);
298 static void rs6000_file_start (void);
300 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
301 static void rs6000_elf_asm_out_constructor (rtx, int);
302 static void rs6000_elf_asm_out_destructor (rtx, int);
303 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
304 static void rs6000_elf_unique_section (tree, int);
305 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
306 unsigned HOST_WIDE_INT);
307 static void rs6000_elf_encode_section_info (tree, rtx, int)
309 static bool rs6000_elf_in_small_data_p (tree);
312 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
313 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
314 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
315 static void rs6000_xcoff_unique_section (tree, int);
316 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
317 unsigned HOST_WIDE_INT);
318 static const char * rs6000_xcoff_strip_name_encoding (const char *);
319 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
320 static void rs6000_xcoff_file_start (void);
321 static void rs6000_xcoff_file_end (void);
324 static bool rs6000_binds_local_p (tree);
326 static int rs6000_use_dfa_pipeline_interface (void);
327 static int rs6000_variable_issue (FILE *, int, rtx, int);
328 static bool rs6000_rtx_costs (rtx, int, int, int *);
329 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
330 static bool is_microcoded_insn (rtx);
331 static int is_dispatch_slot_restricted (rtx);
332 static bool is_cracked_insn (rtx);
333 static bool is_branch_slot_insn (rtx);
334 static int rs6000_adjust_priority (rtx, int);
335 static int rs6000_issue_rate (void);
336 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
337 static rtx get_next_active_insn (rtx, rtx);
338 static bool insn_terminates_group_p (rtx , enum group_termination);
339 static bool is_costly_group (rtx *, rtx);
340 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
341 static int redefine_groups (FILE *, int, rtx, rtx);
342 static int pad_groups (FILE *, int, rtx, rtx);
343 static void rs6000_sched_finish (FILE *, int);
344 static int rs6000_use_sched_lookahead (void);
346 static void rs6000_init_builtins (void);
347 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
348 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
349 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
350 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
351 static void altivec_init_builtins (void);
352 static void rs6000_common_init_builtins (void);
353 static void rs6000_init_libfuncs (void);
355 static void enable_mask_for_builtins (struct builtin_description *, int,
356 enum rs6000_builtins,
357 enum rs6000_builtins);
358 static void spe_init_builtins (void);
359 static rtx spe_expand_builtin (tree, rtx, bool *);
360 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
361 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
362 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
363 static rs6000_stack_t *rs6000_stack_info (void);
364 static void debug_stack_info (rs6000_stack_t *);
366 static rtx altivec_expand_builtin (tree, rtx, bool *);
367 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
368 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
369 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
370 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
371 static rtx altivec_expand_predicate_builtin (enum insn_code,
372 const char *, tree, rtx);
373 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
374 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
375 static void rs6000_parse_abi_options (void);
376 static void rs6000_parse_alignment_option (void);
377 static void rs6000_parse_tls_size_option (void);
378 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
379 static int first_altivec_reg_to_save (void);
380 static unsigned int compute_vrsave_mask (void);
381 static void is_altivec_return_reg (rtx, void *);
382 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
383 int easy_vector_constant (rtx, enum machine_mode);
384 static int easy_vector_same (rtx, enum machine_mode);
385 static bool is_ev64_opaque_type (tree);
386 static rtx rs6000_dwarf_register_span (rtx);
387 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
388 static rtx rs6000_tls_get_addr (void);
389 static rtx rs6000_got_sym (void);
390 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
391 static const char *rs6000_get_some_local_dynamic_name (void);
392 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
393 static rtx rs6000_complex_function_value (enum machine_mode);
394 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
395 enum machine_mode, tree);
396 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
397 enum machine_mode, tree, int);
398 static void rs6000_move_block_from_reg(int regno, rtx x, int nregs);
399 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
400 enum machine_mode, tree,
403 static void macho_branch_islands (void);
404 static void add_compiler_branch_island (tree, tree, int);
405 static int no_previous_def (tree function_name);
406 static tree get_prev_label (tree function_name);
409 static tree rs6000_build_builtin_va_list (void);
411 /* Hash table stuff for keeping track of TOC entries. */
413 struct toc_hash_struct GTY(())
415 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
416 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
418 enum machine_mode key_mode;
422 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
424 /* Default register names. */
425 char rs6000_reg_names[][8] =
427 "0", "1", "2", "3", "4", "5", "6", "7",
428 "8", "9", "10", "11", "12", "13", "14", "15",
429 "16", "17", "18", "19", "20", "21", "22", "23",
430 "24", "25", "26", "27", "28", "29", "30", "31",
431 "0", "1", "2", "3", "4", "5", "6", "7",
432 "8", "9", "10", "11", "12", "13", "14", "15",
433 "16", "17", "18", "19", "20", "21", "22", "23",
434 "24", "25", "26", "27", "28", "29", "30", "31",
435 "mq", "lr", "ctr","ap",
436 "0", "1", "2", "3", "4", "5", "6", "7",
438 /* AltiVec registers. */
439 "0", "1", "2", "3", "4", "5", "6", "7",
440 "8", "9", "10", "11", "12", "13", "14", "15",
441 "16", "17", "18", "19", "20", "21", "22", "23",
442 "24", "25", "26", "27", "28", "29", "30", "31",
448 #ifdef TARGET_REGNAMES
449 static const char alt_reg_names[][8] =
451 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
452 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
453 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
454 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
455 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
456 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
457 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
458 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
459 "mq", "lr", "ctr", "ap",
460 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
462 /* AltiVec registers. */
463 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
464 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
465 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
466 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
473 #ifndef MASK_STRICT_ALIGN
474 #define MASK_STRICT_ALIGN 0
476 #ifndef TARGET_PROFILE_KERNEL
477 #define TARGET_PROFILE_KERNEL 0
480 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
481 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
483 /* Return 1 for a symbol ref for a thread-local storage symbol. */
484 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
485 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
487 /* Initialize the GCC target structure. */
488 #undef TARGET_ATTRIBUTE_TABLE
489 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
490 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
491 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
493 #undef TARGET_ASM_ALIGNED_DI_OP
494 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
496 /* Default unaligned ops are only provided for ELF. Find the ops needed
497 for non-ELF systems. */
498 #ifndef OBJECT_FORMAT_ELF
500 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
502 #undef TARGET_ASM_UNALIGNED_HI_OP
503 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
504 #undef TARGET_ASM_UNALIGNED_SI_OP
505 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
506 #undef TARGET_ASM_UNALIGNED_DI_OP
507 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
510 #undef TARGET_ASM_UNALIGNED_HI_OP
511 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
512 #undef TARGET_ASM_UNALIGNED_SI_OP
513 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
517 /* This hook deals with fixups for relocatable code and DI-mode objects
519 #undef TARGET_ASM_INTEGER
520 #define TARGET_ASM_INTEGER rs6000_assemble_integer
522 #ifdef HAVE_GAS_HIDDEN
523 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
524 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
527 #undef TARGET_HAVE_TLS
528 #define TARGET_HAVE_TLS HAVE_AS_TLS
530 #undef TARGET_CANNOT_FORCE_CONST_MEM
531 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
533 #undef TARGET_ASM_FUNCTION_PROLOGUE
534 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
535 #undef TARGET_ASM_FUNCTION_EPILOGUE
536 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
538 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
539 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
540 #undef TARGET_SCHED_VARIABLE_ISSUE
541 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
543 #undef TARGET_SCHED_ISSUE_RATE
544 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
545 #undef TARGET_SCHED_ADJUST_COST
546 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
547 #undef TARGET_SCHED_ADJUST_PRIORITY
548 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
549 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
550 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
551 #undef TARGET_SCHED_FINISH
552 #define TARGET_SCHED_FINISH rs6000_sched_finish
554 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
555 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
557 #undef TARGET_INIT_BUILTINS
558 #define TARGET_INIT_BUILTINS rs6000_init_builtins
560 #undef TARGET_EXPAND_BUILTIN
561 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
563 #undef TARGET_INIT_LIBFUNCS
564 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
567 #undef TARGET_BINDS_LOCAL_P
568 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
571 #undef TARGET_ASM_OUTPUT_MI_THUNK
572 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
574 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
575 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
577 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
578 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
580 #undef TARGET_RTX_COSTS
581 #define TARGET_RTX_COSTS rs6000_rtx_costs
582 #undef TARGET_ADDRESS_COST
583 #define TARGET_ADDRESS_COST hook_int_rtx_0
585 #undef TARGET_VECTOR_OPAQUE_P
586 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
588 #undef TARGET_DWARF_REGISTER_SPAN
589 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
591 /* On rs6000, function arguments are promoted, as are function return
593 #undef TARGET_PROMOTE_FUNCTION_ARGS
594 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
595 #undef TARGET_PROMOTE_FUNCTION_RETURN
596 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
598 /* Structure return values are passed as an extra parameter. */
599 #undef TARGET_STRUCT_VALUE_RTX
600 #define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
602 #undef TARGET_RETURN_IN_MEMORY
603 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
605 #undef TARGET_SETUP_INCOMING_VARARGS
606 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
608 /* Always strict argument naming on rs6000. */
609 #undef TARGET_STRICT_ARGUMENT_NAMING
610 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
611 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
612 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
614 #undef TARGET_BUILD_BUILTIN_VA_LIST
615 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
617 struct gcc_target targetm = TARGET_INITIALIZER;
619 /* Override command line options. Mostly we process the processor
620 type and sometimes adjust other TARGET_ options. */
623 rs6000_override_options (const char *default_cpu)
626 struct rs6000_cpu_select *ptr;
629 /* Simplifications for entries below. */
632 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
633 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
636 /* This table occasionally claims that a processor does not support
637 a particular feature even though it does, but the feature is slower
638 than the alternative. Thus, it shouldn't be relied on as a
639 complete description of the processor's support.
641 Please keep this list in order, and don't forget to update the
642 documentation in invoke.texi when adding a new processor or
646 const char *const name; /* Canonical processor name. */
647 const enum processor_type processor; /* Processor type enum value. */
648 const int target_enable; /* Target flags to enable. */
649 } const processor_target_table[]
650 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
651 {"403", PROCESSOR_PPC403,
652 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
653 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
654 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
655 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
656 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
657 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
658 {"601", PROCESSOR_PPC601,
659 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
660 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
661 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
662 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
663 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
664 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
665 {"620", PROCESSOR_PPC620,
666 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
667 {"630", PROCESSOR_PPC630,
668 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
669 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
670 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
671 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
672 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
673 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
674 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
675 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
676 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
677 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
678 {"970", PROCESSOR_POWER4,
679 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
680 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
681 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
682 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
683 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
684 {"G5", PROCESSOR_POWER4,
685 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
686 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
687 {"power2", PROCESSOR_POWER,
688 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
689 {"power3", PROCESSOR_PPC630,
690 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
691 {"power4", PROCESSOR_POWER4,
692 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
693 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
694 {"powerpc64", PROCESSOR_POWERPC64,
695 POWERPC_BASE_MASK | MASK_POWERPC64},
696 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
697 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
698 {"rios2", PROCESSOR_RIOS2,
699 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
700 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
701 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
702 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
705 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
707 /* Save current -mmultiple/-mno-multiple status. */
708 int multiple = TARGET_MULTIPLE;
709 /* Save current -mstring/-mno-string status. */
710 int string = TARGET_STRING;
712 /* Some OSs don't support saving the high part of 64-bit registers on
713 context switch. Other OSs don't support saving Altivec registers.
714 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
715 settings; if the user wants either, the user must explicitly specify
716 them and we won't interfere with the user's specification. */
719 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
720 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
721 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
724 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
725 #ifdef OS_MISSING_POWERPC64
726 if (OS_MISSING_POWERPC64)
727 set_masks &= ~MASK_POWERPC64;
729 #ifdef OS_MISSING_ALTIVEC
730 if (OS_MISSING_ALTIVEC)
731 set_masks &= ~MASK_ALTIVEC;
734 /* Identify the processor type. */
735 rs6000_select[0].string = default_cpu;
736 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
738 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
740 ptr = &rs6000_select[i];
741 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
743 for (j = 0; j < ptt_size; j++)
744 if (! strcmp (ptr->string, processor_target_table[j].name))
747 rs6000_cpu = processor_target_table[j].processor;
751 target_flags &= ~set_masks;
752 target_flags |= (processor_target_table[j].target_enable
759 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
766 /* If we are optimizing big endian systems for space, use the load/store
767 multiple and string instructions. */
768 if (BYTES_BIG_ENDIAN && optimize_size)
769 target_flags |= MASK_MULTIPLE | MASK_STRING;
771 /* If -mmultiple or -mno-multiple was explicitly used, don't
772 override with the processor default */
773 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
774 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
776 /* If -mstring or -mno-string was explicitly used, don't override
777 with the processor default. */
778 if ((target_flags_explicit & MASK_STRING) != 0)
779 target_flags = (target_flags & ~MASK_STRING) | string;
781 /* Don't allow -mmultiple or -mstring on little endian systems
782 unless the cpu is a 750, because the hardware doesn't support the
783 instructions used in little endian mode, and causes an alignment
784 trap. The 750 does not cause an alignment trap (except when the
785 target is unaligned). */
787 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
791 target_flags &= ~MASK_MULTIPLE;
792 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
793 warning ("-mmultiple is not supported on little endian systems");
798 target_flags &= ~MASK_STRING;
799 if ((target_flags_explicit & MASK_STRING) != 0)
800 warning ("-mstring is not supported on little endian systems");
804 /* Set debug flags */
805 if (rs6000_debug_name)
807 if (! strcmp (rs6000_debug_name, "all"))
808 rs6000_debug_stack = rs6000_debug_arg = 1;
809 else if (! strcmp (rs6000_debug_name, "stack"))
810 rs6000_debug_stack = 1;
811 else if (! strcmp (rs6000_debug_name, "arg"))
812 rs6000_debug_arg = 1;
814 error ("unknown -mdebug-%s switch", rs6000_debug_name);
817 if (rs6000_traceback_name)
819 if (! strncmp (rs6000_traceback_name, "full", 4))
820 rs6000_traceback = traceback_full;
821 else if (! strncmp (rs6000_traceback_name, "part", 4))
822 rs6000_traceback = traceback_part;
823 else if (! strncmp (rs6000_traceback_name, "no", 2))
824 rs6000_traceback = traceback_none;
826 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
827 rs6000_traceback_name);
830 /* Set size of long double */
831 rs6000_long_double_type_size = 64;
832 if (rs6000_long_double_size_string)
835 int size = strtol (rs6000_long_double_size_string, &tail, 10);
836 if (*tail != '\0' || (size != 64 && size != 128))
837 error ("Unknown switch -mlong-double-%s",
838 rs6000_long_double_size_string);
840 rs6000_long_double_type_size = size;
843 /* Handle -mabi= options. */
844 rs6000_parse_abi_options ();
846 /* Handle -malign-XXXXX option. */
847 rs6000_parse_alignment_option ();
849 /* Handle generic -mFOO=YES/NO options. */
850 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
851 &rs6000_altivec_vrsave);
852 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
854 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
855 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
858 /* Handle -mtls-size option. */
859 rs6000_parse_tls_size_option ();
861 #ifdef SUBTARGET_OVERRIDE_OPTIONS
862 SUBTARGET_OVERRIDE_OPTIONS;
864 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
865 SUBSUBTARGET_OVERRIDE_OPTIONS;
870 /* The e500 does not have string instructions, and we set
871 MASK_STRING above when optimizing for size. */
872 if ((target_flags & MASK_STRING) != 0)
873 target_flags = target_flags & ~MASK_STRING;
875 /* No SPE means 64-bit long doubles, even if an E500. */
876 if (rs6000_spe_string != 0
877 && !strcmp (rs6000_spe_string, "no"))
878 rs6000_long_double_type_size = 64;
880 else if (rs6000_select[1].string != NULL)
882 /* For the powerpc-eabispe configuration, we set all these by
883 default, so let's unset them if we manually set another
884 CPU that is not the E500. */
885 if (rs6000_abi_string == 0)
887 if (rs6000_spe_string == 0)
889 if (rs6000_float_gprs_string == 0)
890 rs6000_float_gprs = 0;
891 if (rs6000_isel_string == 0)
893 if (rs6000_long_double_size_string == 0)
894 rs6000_long_double_type_size = 64;
897 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
898 using TARGET_OPTIONS to handle a toggle switch, but we're out of
899 bits in target_flags so TARGET_SWITCHES cannot be used.
900 Assumption here is that rs6000_longcall_switch points into the
901 text of the complete option, rather than being a copy, so we can
902 scan back for the presence or absence of the no- modifier. */
903 if (rs6000_longcall_switch)
905 const char *base = rs6000_longcall_switch;
906 while (base[-1] != 'm') base--;
908 if (*rs6000_longcall_switch != '\0')
909 error ("invalid option `%s'", base);
910 rs6000_default_long_calls = (base[0] != 'n');
913 /* Handle -mprioritize-restricted-insns option. */
914 rs6000_sched_restricted_insns_priority = DEFAULT_RESTRICTED_INSNS_PRIORITY;
915 if (rs6000_sched_restricted_insns_priority_str)
916 rs6000_sched_restricted_insns_priority =
917 atoi (rs6000_sched_restricted_insns_priority_str);
919 /* Handle -msched-costly-dep option. */
920 rs6000_sched_costly_dep = DEFAULT_SCHED_COSTLY_DEP;
921 if (rs6000_sched_costly_dep_str)
923 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
924 rs6000_sched_costly_dep = no_dep_costly;
925 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
926 rs6000_sched_costly_dep = all_deps_costly;
927 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
928 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
929 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
930 rs6000_sched_costly_dep = store_to_load_dep_costly;
932 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
935 /* Handle -minsert-sched-nops option. */
936 rs6000_sched_insert_nops = DEFAULT_SCHED_FINISH_NOP_INSERTION_SCHEME;
937 if (rs6000_sched_insert_nops_str)
939 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
940 rs6000_sched_insert_nops = sched_finish_none;
941 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
942 rs6000_sched_insert_nops = sched_finish_pad_groups;
943 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
944 rs6000_sched_insert_nops = sched_finish_regroup_exact;
946 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
949 #ifdef TARGET_REGNAMES
950 /* If the user desires alternate register names, copy in the
951 alternate names now. */
953 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
956 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
957 If -maix-struct-return or -msvr4-struct-return was explicitly
958 used, don't override with the ABI default. */
959 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
961 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
962 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
964 target_flags |= MASK_AIX_STRUCT_RET;
967 if (TARGET_LONG_DOUBLE_128
968 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
969 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
971 /* Allocate an alias set for register saves & restores from stack. */
972 rs6000_sr_alias_set = new_alias_set ();
975 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
977 /* We can only guarantee the availability of DI pseudo-ops when
978 assembling for 64-bit targets. */
981 targetm.asm_out.aligned_op.di = NULL;
982 targetm.asm_out.unaligned_op.di = NULL;
985 /* Set maximum branch target alignment at two instructions, eight bytes. */
986 align_jumps_max_skip = 8;
987 align_loops_max_skip = 8;
989 /* Arrange to save and restore machine status around nested functions. */
990 init_machine_status = rs6000_init_machine_status;
993 /* Handle generic options of the form -mfoo=yes/no.
994 NAME is the option name.
995 VALUE is the option value.
996 FLAG is the pointer to the flag where to store a 1 or 0, depending on
997 whether the option value is 'yes' or 'no' respectively. */
999 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1003 else if (!strcmp (value, "yes"))
1005 else if (!strcmp (value, "no"))
1008 error ("unknown -m%s= option specified: '%s'", name, value);
1011 /* Handle -mabi= options. */
1013 rs6000_parse_abi_options (void)
1015 if (rs6000_abi_string == 0)
1017 else if (! strcmp (rs6000_abi_string, "altivec"))
1018 rs6000_altivec_abi = 1;
1019 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1020 rs6000_altivec_abi = 0;
1021 else if (! strcmp (rs6000_abi_string, "spe"))
1024 if (!TARGET_SPE_ABI)
1025 error ("not configured for ABI: '%s'", rs6000_abi_string);
1028 else if (! strcmp (rs6000_abi_string, "no-spe"))
1031 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1034 /* Handle -malign-XXXXXX options. */
1036 rs6000_parse_alignment_option (void)
1038 if (rs6000_alignment_string == 0)
1040 else if (! strcmp (rs6000_alignment_string, "power"))
1041 rs6000_alignment_flags = MASK_ALIGN_POWER;
1042 else if (! strcmp (rs6000_alignment_string, "natural"))
1043 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1045 error ("unknown -malign-XXXXX option specified: '%s'",
1046 rs6000_alignment_string);
1049 /* Validate and record the size specified with the -mtls-size option. */
1052 rs6000_parse_tls_size_option (void)
1054 if (rs6000_tls_size_string == 0)
1056 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1057 rs6000_tls_size = 16;
1058 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1059 rs6000_tls_size = 32;
1060 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1061 rs6000_tls_size = 64;
1063 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1067 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1071 /* Do anything needed at the start of the asm file. */
1074 rs6000_file_start (void)
1078 const char *start = buffer;
1079 struct rs6000_cpu_select *ptr;
1080 const char *default_cpu = TARGET_CPU_DEFAULT;
1081 FILE *file = asm_out_file;
1083 default_file_start ();
1085 #ifdef TARGET_BI_ARCH
1086 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1090 if (flag_verbose_asm)
1092 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1093 rs6000_select[0].string = default_cpu;
1095 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1097 ptr = &rs6000_select[i];
1098 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1100 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1105 #ifdef USING_ELFOS_H
1106 switch (rs6000_sdata)
1108 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1109 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1110 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1111 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1114 if (rs6000_sdata && g_switch_value)
1116 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1127 /* Return nonzero if this function is known to have a null epilogue. */
1130 direct_return (void)
1132 if (reload_completed)
1134 rs6000_stack_t *info = rs6000_stack_info ();
1136 if (info->first_gp_reg_save == 32
1137 && info->first_fp_reg_save == 64
1138 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1139 && ! info->lr_save_p
1140 && ! info->cr_save_p
1141 && info->vrsave_mask == 0
1149 /* Returns 1 always. */
1152 any_operand (rtx op ATTRIBUTE_UNUSED,
1153 enum machine_mode mode ATTRIBUTE_UNUSED)
1158 /* Returns 1 if op is the count register. */
1160 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1162 if (GET_CODE (op) != REG)
1165 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1168 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1174 /* Returns 1 if op is an altivec register. */
1176 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1179 return (register_operand (op, mode)
1180 && (GET_CODE (op) != REG
1181 || REGNO (op) > FIRST_PSEUDO_REGISTER
1182 || ALTIVEC_REGNO_P (REGNO (op))));
1186 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1188 if (GET_CODE (op) != REG)
1191 if (XER_REGNO_P (REGNO (op)))
1197 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1198 by such constants completes more quickly. */
1201 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1203 return ( GET_CODE (op) == CONST_INT
1204 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1207 /* Return 1 if OP is a constant that can fit in a D field. */
1210 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1212 return (GET_CODE (op) == CONST_INT
1213 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1216 /* Similar for an unsigned D field. */
1219 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1221 return (GET_CODE (op) == CONST_INT
1222 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1225 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1228 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1230 return (GET_CODE (op) == CONST_INT
1231 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1234 /* Returns 1 if OP is a CONST_INT that is a positive value
1235 and an exact power of 2. */
1238 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1240 return (GET_CODE (op) == CONST_INT
1242 && exact_log2 (INTVAL (op)) >= 0);
1245 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1249 gpc_reg_operand (rtx op, enum machine_mode mode)
1251 return (register_operand (op, mode)
1252 && (GET_CODE (op) != REG
1253 || (REGNO (op) >= ARG_POINTER_REGNUM
1254 && !XER_REGNO_P (REGNO (op)))
1255 || REGNO (op) < MQ_REGNO));
1258 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1262 cc_reg_operand (rtx op, enum machine_mode mode)
1264 return (register_operand (op, mode)
1265 && (GET_CODE (op) != REG
1266 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1267 || CR_REGNO_P (REGNO (op))));
1270 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1271 CR field that isn't CR0. */
1274 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1276 return (register_operand (op, mode)
1277 && (GET_CODE (op) != REG
1278 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1279 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1282 /* Returns 1 if OP is either a constant integer valid for a D-field or
1283 a non-special register. If a register, it must be in the proper
1284 mode unless MODE is VOIDmode. */
1287 reg_or_short_operand (rtx op, enum machine_mode mode)
1289 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1292 /* Similar, except check if the negation of the constant would be
1293 valid for a D-field. */
1296 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1298 if (GET_CODE (op) == CONST_INT)
1299 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1301 return gpc_reg_operand (op, mode);
1304 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1305 a non-special register. If a register, it must be in the proper
1306 mode unless MODE is VOIDmode. */
1309 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1311 if (gpc_reg_operand (op, mode))
1313 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1320 /* Return 1 if the operand is either a register or an integer whose
1321 high-order 16 bits are zero. */
1324 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1326 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1329 /* Return 1 is the operand is either a non-special register or ANY
1330 constant integer. */
1333 reg_or_cint_operand (rtx op, enum machine_mode mode)
1335 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1338 /* Return 1 is the operand is either a non-special register or ANY
1339 32-bit signed constant integer. */
1342 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1344 return (gpc_reg_operand (op, mode)
1345 || (GET_CODE (op) == CONST_INT
1346 #if HOST_BITS_PER_WIDE_INT != 32
1347 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1348 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1353 /* Return 1 is the operand is either a non-special register or a 32-bit
1354 signed constant integer valid for 64-bit addition. */
1357 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1359 return (gpc_reg_operand (op, mode)
1360 || (GET_CODE (op) == CONST_INT
1361 #if HOST_BITS_PER_WIDE_INT == 32
1362 && INTVAL (op) < 0x7fff8000
1364 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1370 /* Return 1 is the operand is either a non-special register or a 32-bit
1371 signed constant integer valid for 64-bit subtraction. */
1374 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1376 return (gpc_reg_operand (op, mode)
1377 || (GET_CODE (op) == CONST_INT
1378 #if HOST_BITS_PER_WIDE_INT == 32
1379 && (- INTVAL (op)) < 0x7fff8000
1381 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1387 /* Return 1 is the operand is either a non-special register or ANY
1388 32-bit unsigned constant integer. */
1391 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1393 if (GET_CODE (op) == CONST_INT)
1395 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1397 if (GET_MODE_BITSIZE (mode) <= 32)
1400 if (INTVAL (op) < 0)
1404 return ((INTVAL (op) & GET_MODE_MASK (mode)
1405 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1407 else if (GET_CODE (op) == CONST_DOUBLE)
1409 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1413 return CONST_DOUBLE_HIGH (op) == 0;
1416 return gpc_reg_operand (op, mode);
1419 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1422 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1424 return (GET_CODE (op) == SYMBOL_REF
1425 || GET_CODE (op) == CONST
1426 || GET_CODE (op) == LABEL_REF);
1429 /* Return 1 if the operand is a simple references that can be loaded via
1430 the GOT (labels involving addition aren't allowed). */
1433 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1435 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1438 /* Return the number of instructions it takes to form a constant in an
1439 integer register. */
1442 num_insns_constant_wide (HOST_WIDE_INT value)
1444 /* signed constant loadable with {cal|addi} */
1445 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1448 /* constant loadable with {cau|addis} */
1449 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1452 #if HOST_BITS_PER_WIDE_INT == 64
1453 else if (TARGET_POWERPC64)
1455 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1456 HOST_WIDE_INT high = value >> 31;
1458 if (high == 0 || high == -1)
1464 return num_insns_constant_wide (high) + 1;
1466 return (num_insns_constant_wide (high)
1467 + num_insns_constant_wide (low) + 1);
1476 num_insns_constant (rtx op, enum machine_mode mode)
1478 if (GET_CODE (op) == CONST_INT)
1480 #if HOST_BITS_PER_WIDE_INT == 64
1481 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1482 && mask64_operand (op, mode))
1486 return num_insns_constant_wide (INTVAL (op));
1489 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1494 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1495 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1496 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1499 else if (GET_CODE (op) == CONST_DOUBLE)
1505 int endian = (WORDS_BIG_ENDIAN == 0);
1507 if (mode == VOIDmode || mode == DImode)
1509 high = CONST_DOUBLE_HIGH (op);
1510 low = CONST_DOUBLE_LOW (op);
1514 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1515 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1517 low = l[1 - endian];
1521 return (num_insns_constant_wide (low)
1522 + num_insns_constant_wide (high));
1526 if (high == 0 && low >= 0)
1527 return num_insns_constant_wide (low);
1529 else if (high == -1 && low < 0)
1530 return num_insns_constant_wide (low);
1532 else if (mask64_operand (op, mode))
1536 return num_insns_constant_wide (high) + 1;
1539 return (num_insns_constant_wide (high)
1540 + num_insns_constant_wide (low) + 1);
1548 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1549 register with one instruction per word. We only do this if we can
1550 safely read CONST_DOUBLE_{LOW,HIGH}. */
1553 easy_fp_constant (rtx op, enum machine_mode mode)
1555 if (GET_CODE (op) != CONST_DOUBLE
1556 || GET_MODE (op) != mode
1557 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1560 /* Consider all constants with -msoft-float to be easy. */
1561 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1565 /* If we are using V.4 style PIC, consider all constants to be hard. */
1566 if (flag_pic && DEFAULT_ABI == ABI_V4)
1569 #ifdef TARGET_RELOCATABLE
1570 /* Similarly if we are using -mrelocatable, consider all constants
1572 if (TARGET_RELOCATABLE)
1581 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1582 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1584 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1585 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1586 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1587 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1590 else if (mode == DFmode)
1595 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1596 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1598 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1599 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1602 else if (mode == SFmode)
1607 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1608 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1610 return num_insns_constant_wide (l) == 1;
1613 else if (mode == DImode)
1614 return ((TARGET_POWERPC64
1615 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1616 || (num_insns_constant (op, DImode) <= 2));
1618 else if (mode == SImode)
1624 /* Return nonzero if all elements of a vector have the same value. */
1627 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1631 units = CONST_VECTOR_NUNITS (op);
1633 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1634 for (i = 1; i < units; ++i)
1635 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1642 /* Return 1 if the operand is a CONST_INT and can be put into a
1643 register without using memory. */
1646 easy_vector_constant (rtx op, enum machine_mode mode)
1650 if (GET_CODE (op) != CONST_VECTOR
1655 if (zero_constant (op, mode)
1656 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1657 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1660 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1663 if (TARGET_SPE && mode == V1DImode)
1666 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1667 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1669 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1671 evmergelo r0, r0, r0
1674 I don't know how efficient it would be to allow bigger constants,
1675 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1676 instructions is better than a 64-bit memory load, but I don't
1677 have the e500 timing specs. */
1678 if (TARGET_SPE && mode == V2SImode
1679 && cst >= -0x7fff && cst <= 0x7fff
1680 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1687 if (EASY_VECTOR_15 (cst, op, mode))
1689 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
1693 if (EASY_VECTOR_15 (cst, op, mode))
1695 if ((cst & 0xff) != ((cst >> 8) & 0xff))
1699 if (EASY_VECTOR_15 (cst, op, mode))
1705 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1711 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1714 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1718 if (!easy_vector_constant (op, mode))
1721 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1723 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1727 output_vec_const_move (rtx *operands)
1730 enum machine_mode mode;
1736 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1737 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1738 mode = GET_MODE (dest);
1742 if (zero_constant (vec, mode))
1743 return "vxor %0,%0,%0";
1744 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1746 else if (easy_vector_constant (vec, mode))
1748 operands[1] = GEN_INT (cst);
1752 if (EASY_VECTOR_15 (cst, vec, mode))
1754 operands[1] = GEN_INT (cst);
1755 return "vspltisw %0,%1";
1759 if (EASY_VECTOR_15 (cst, vec, mode))
1761 operands[1] = GEN_INT (cst);
1762 return "vspltish %0,%1";
1766 if (EASY_VECTOR_15 (cst, vec, mode))
1768 operands[1] = GEN_INT (cst);
1769 return "vspltisb %0,%1";
1781 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1782 pattern of V1DI, V4HI, and V2SF.
1784 FIXME: We should probably return # and add post reload
1785 splitters for these, but this way is so easy ;-).
1787 operands[1] = GEN_INT (cst);
1788 operands[2] = GEN_INT (cst2);
1790 return "li %0,%1\n\tevmergelo %0,%0,%0";
1792 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1798 /* Return 1 if the operand is the constant 0. This works for scalars
1799 as well as vectors. */
1801 zero_constant (rtx op, enum machine_mode mode)
1803 return op == CONST0_RTX (mode);
1806 /* Return 1 if the operand is 0.0. */
1808 zero_fp_constant (rtx op, enum machine_mode mode)
1810 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1813 /* Return 1 if the operand is in volatile memory. Note that during
1814 the RTL generation phase, memory_operand does not return TRUE for
1815 volatile memory references. So this function allows us to
1816 recognize volatile references where its safe. */
1819 volatile_mem_operand (rtx op, enum machine_mode mode)
1821 if (GET_CODE (op) != MEM)
1824 if (!MEM_VOLATILE_P (op))
1827 if (mode != GET_MODE (op))
1830 if (reload_completed)
1831 return memory_operand (op, mode);
1833 if (reload_in_progress)
1834 return strict_memory_address_p (mode, XEXP (op, 0));
1836 return memory_address_p (mode, XEXP (op, 0));
1839 /* Return 1 if the operand is an offsettable memory operand. */
1842 offsettable_mem_operand (rtx op, enum machine_mode mode)
1844 return ((GET_CODE (op) == MEM)
1845 && offsettable_address_p (reload_completed || reload_in_progress,
1846 mode, XEXP (op, 0)));
1849 /* Return 1 if the operand is either an easy FP constant (see above) or
1853 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1855 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1858 /* Return 1 if the operand is either a non-special register or an item
1859 that can be used as the operand of a `mode' add insn. */
1862 add_operand (rtx op, enum machine_mode mode)
1864 if (GET_CODE (op) == CONST_INT)
1865 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1866 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1868 return gpc_reg_operand (op, mode);
1871 /* Return 1 if OP is a constant but not a valid add_operand. */
1874 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1876 return (GET_CODE (op) == CONST_INT
1877 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1878 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1881 /* Return 1 if the operand is a non-special register or a constant that
1882 can be used as the operand of an OR or XOR insn on the RS/6000. */
1885 logical_operand (rtx op, enum machine_mode mode)
1887 HOST_WIDE_INT opl, oph;
1889 if (gpc_reg_operand (op, mode))
1892 if (GET_CODE (op) == CONST_INT)
1894 opl = INTVAL (op) & GET_MODE_MASK (mode);
1896 #if HOST_BITS_PER_WIDE_INT <= 32
1897 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1901 else if (GET_CODE (op) == CONST_DOUBLE)
1903 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1906 opl = CONST_DOUBLE_LOW (op);
1907 oph = CONST_DOUBLE_HIGH (op);
1914 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1915 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1918 /* Return 1 if C is a constant that is not a logical operand (as
1919 above), but could be split into one. */
1922 non_logical_cint_operand (rtx op, enum machine_mode mode)
1924 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1925 && ! logical_operand (op, mode)
1926 && reg_or_logical_cint_operand (op, mode));
1929 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1930 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1931 Reject all ones and all zeros, since these should have been optimized
1932 away and confuse the making of MB and ME. */
1935 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1937 HOST_WIDE_INT c, lsb;
1939 if (GET_CODE (op) != CONST_INT)
1944 /* Fail in 64-bit mode if the mask wraps around because the upper
1945 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1946 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1949 /* We don't change the number of transitions by inverting,
1950 so make sure we start with the LS bit zero. */
1954 /* Reject all zeros or all ones. */
1958 /* Find the first transition. */
1961 /* Invert to look for a second transition. */
1964 /* Erase first transition. */
1967 /* Find the second transition (if any). */
1970 /* Match if all the bits above are 1's (or c is zero). */
1974 /* Return 1 for the PowerPC64 rlwinm corner case. */
1977 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1979 HOST_WIDE_INT c, lsb;
1981 if (GET_CODE (op) != CONST_INT)
1986 if ((c & 0x80000001) != 0x80000001)
2000 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2001 It is if there are no more than one 1->0 or 0->1 transitions.
2002 Reject all zeros, since zero should have been optimized away and
2003 confuses the making of MB and ME. */
2006 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2008 if (GET_CODE (op) == CONST_INT)
2010 HOST_WIDE_INT c, lsb;
2014 /* Reject all zeros. */
2018 /* We don't change the number of transitions by inverting,
2019 so make sure we start with the LS bit zero. */
2023 /* Find the transition, and check that all bits above are 1's. */
2026 /* Match if all the bits above are 1's (or c is zero). */
2032 /* Like mask64_operand, but allow up to three transitions. This
2033 predicate is used by insn patterns that generate two rldicl or
2034 rldicr machine insns. */
2037 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2039 if (GET_CODE (op) == CONST_INT)
2041 HOST_WIDE_INT c, lsb;
2045 /* Disallow all zeros. */
2049 /* We don't change the number of transitions by inverting,
2050 so make sure we start with the LS bit zero. */
2054 /* Find the first transition. */
2057 /* Invert to look for a second transition. */
2060 /* Erase first transition. */
2063 /* Find the second transition. */
2066 /* Invert to look for a third transition. */
2069 /* Erase second transition. */
2072 /* Find the third transition (if any). */
2075 /* Match if all the bits above are 1's (or c is zero). */
2081 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2082 implement ANDing by the mask IN. */
2084 build_mask64_2_operands (rtx in, rtx *out)
2086 #if HOST_BITS_PER_WIDE_INT >= 64
2087 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2090 if (GET_CODE (in) != CONST_INT)
2096 /* Assume c initially something like 0x00fff000000fffff. The idea
2097 is to rotate the word so that the middle ^^^^^^ group of zeros
2098 is at the MS end and can be cleared with an rldicl mask. We then
2099 rotate back and clear off the MS ^^ group of zeros with a
2101 c = ~c; /* c == 0xff000ffffff00000 */
2102 lsb = c & -c; /* lsb == 0x0000000000100000 */
2103 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2104 c = ~c; /* c == 0x00fff000000fffff */
2105 c &= -lsb; /* c == 0x00fff00000000000 */
2106 lsb = c & -c; /* lsb == 0x0000100000000000 */
2107 c = ~c; /* c == 0xff000fffffffffff */
2108 c &= -lsb; /* c == 0xff00000000000000 */
2110 while ((lsb >>= 1) != 0)
2111 shift++; /* shift == 44 on exit from loop */
2112 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2113 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2114 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2118 /* Assume c initially something like 0xff000f0000000000. The idea
2119 is to rotate the word so that the ^^^ middle group of zeros
2120 is at the LS end and can be cleared with an rldicr mask. We then
2121 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2123 lsb = c & -c; /* lsb == 0x0000010000000000 */
2124 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2125 c = ~c; /* c == 0x00fff0ffffffffff */
2126 c &= -lsb; /* c == 0x00fff00000000000 */
2127 lsb = c & -c; /* lsb == 0x0000100000000000 */
2128 c = ~c; /* c == 0xff000fffffffffff */
2129 c &= -lsb; /* c == 0xff00000000000000 */
2131 while ((lsb >>= 1) != 0)
2132 shift++; /* shift == 44 on exit from loop */
2133 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2134 m1 >>= shift; /* m1 == 0x0000000000000fff */
2135 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2138 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2139 masks will be all 1's. We are guaranteed more than one transition. */
2140 out[0] = GEN_INT (64 - shift);
2141 out[1] = GEN_INT (m1);
2142 out[2] = GEN_INT (shift);
2143 out[3] = GEN_INT (m2);
2151 /* Return 1 if the operand is either a non-special register or a constant
2152 that can be used as the operand of a PowerPC64 logical AND insn. */
2155 and64_operand (rtx op, enum machine_mode mode)
2157 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2158 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2160 return (logical_operand (op, mode) || mask64_operand (op, mode));
2163 /* Like the above, but also match constants that can be implemented
2164 with two rldicl or rldicr insns. */
2167 and64_2_operand (rtx op, enum machine_mode mode)
2169 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2170 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2172 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2175 /* Return 1 if the operand is either a non-special register or a
2176 constant that can be used as the operand of an RS/6000 logical AND insn. */
2179 and_operand (rtx op, enum machine_mode mode)
2181 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2182 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2184 return (logical_operand (op, mode) || mask_operand (op, mode));
2187 /* Return 1 if the operand is a general register or memory operand. */
2190 reg_or_mem_operand (rtx op, enum machine_mode mode)
2192 return (gpc_reg_operand (op, mode)
2193 || memory_operand (op, mode)
2194 || macho_lo_sum_memory_operand (op, mode)
2195 || volatile_mem_operand (op, mode));
2198 /* Return 1 if the operand is a general register or memory operand without
2199 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2203 lwa_operand (rtx op, enum machine_mode mode)
2207 if (reload_completed && GET_CODE (inner) == SUBREG)
2208 inner = SUBREG_REG (inner);
2210 return gpc_reg_operand (inner, mode)
2211 || (memory_operand (inner, mode)
2212 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2213 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2214 && (GET_CODE (XEXP (inner, 0)) != PLUS
2215 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2216 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2219 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2222 symbol_ref_operand (rtx op, enum machine_mode mode)
2224 if (mode != VOIDmode && GET_MODE (op) != mode)
2227 return (GET_CODE (op) == SYMBOL_REF
2228 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2231 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2232 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2235 call_operand (rtx op, enum machine_mode mode)
2237 if (mode != VOIDmode && GET_MODE (op) != mode)
2240 return (GET_CODE (op) == SYMBOL_REF
2241 || (GET_CODE (op) == REG
2242 && (REGNO (op) == LINK_REGISTER_REGNUM
2243 || REGNO (op) == COUNT_REGISTER_REGNUM
2244 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2247 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2251 current_file_function_operand (rtx op,
2252 enum machine_mode mode ATTRIBUTE_UNUSED)
2254 return (GET_CODE (op) == SYMBOL_REF
2255 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2256 && (SYMBOL_REF_LOCAL_P (op)
2257 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2260 /* Return 1 if this operand is a valid input for a move insn. */
2263 input_operand (rtx op, enum machine_mode mode)
2265 /* Memory is always valid. */
2266 if (memory_operand (op, mode))
2269 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2270 if (GET_CODE (op) == CONSTANT_P_RTX)
2273 /* For floating-point, easy constants are valid. */
2274 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2276 && easy_fp_constant (op, mode))
2279 /* Allow any integer constant. */
2280 if (GET_MODE_CLASS (mode) == MODE_INT
2281 && (GET_CODE (op) == CONST_INT
2282 || GET_CODE (op) == CONST_DOUBLE))
2285 /* Allow easy vector constants. */
2286 if (GET_CODE (op) == CONST_VECTOR
2287 && easy_vector_constant (op, mode))
2290 /* For floating-point or multi-word mode, the only remaining valid type
2292 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2293 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2294 return register_operand (op, mode);
2296 /* The only cases left are integral modes one word or smaller (we
2297 do not get called for MODE_CC values). These can be in any
2299 if (register_operand (op, mode))
2302 /* A SYMBOL_REF referring to the TOC is valid. */
2303 if (legitimate_constant_pool_address_p (op))
2306 /* A constant pool expression (relative to the TOC) is valid */
2307 if (toc_relative_expr_p (op))
2310 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2312 if (DEFAULT_ABI == ABI_V4
2313 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2314 && small_data_operand (op, Pmode))
2321 /* Darwin, AIX increases natural record alignment to doubleword if the first
2322 field is an FP double while the FP fields remain word aligned. */
2325 rs6000_special_round_type_align (tree type, int computed, int specified)
2327 tree field = TYPE_FIELDS (type);
2329 /* Skip all the static variables only if ABI is greater than
2331 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2332 field = TREE_CHAIN (field);
2334 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2335 return MAX (computed, specified);
2337 return MAX (MAX (computed, specified), 64);
2340 /* Return 1 for an operand in small memory on V.4/eabi. */
2343 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2344 enum machine_mode mode ATTRIBUTE_UNUSED)
2349 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2352 if (DEFAULT_ABI != ABI_V4)
2355 if (GET_CODE (op) == SYMBOL_REF)
2358 else if (GET_CODE (op) != CONST
2359 || GET_CODE (XEXP (op, 0)) != PLUS
2360 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2361 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2366 rtx sum = XEXP (op, 0);
2367 HOST_WIDE_INT summand;
2369 /* We have to be careful here, because it is the referenced address
2370 that must be 32k from _SDA_BASE_, not just the symbol. */
2371 summand = INTVAL (XEXP (sum, 1));
2372 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2375 sym_ref = XEXP (sum, 0);
2378 return SYMBOL_REF_SMALL_P (sym_ref);
2384 /* Return true, if operand is a memory operand and has a
2385 displacement divisible by 4. */
2388 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2393 if (!memory_operand (op, mode))
2396 addr = XEXP (op, 0);
2397 if (GET_CODE (addr) == PLUS
2398 && GET_CODE (XEXP (addr, 0)) == REG
2399 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2400 off = INTVAL (XEXP (addr, 1));
2402 return (off % 4) == 0;
2405 /* Return true if either operand is a general purpose register. */
2408 gpr_or_gpr_p (rtx op0, rtx op1)
2410 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2411 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2415 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2418 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2420 switch (GET_CODE(op))
2423 if (RS6000_SYMBOL_REF_TLS_P (op))
2425 else if (CONSTANT_POOL_ADDRESS_P (op))
2427 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2435 else if (! strcmp (XSTR (op, 0), toc_label_name))
2444 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2445 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2447 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2456 constant_pool_expr_p (rtx op)
2460 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2464 toc_relative_expr_p (rtx op)
2468 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2471 /* SPE offset addressing is limited to 5-bits worth of double words. */
2472 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2475 legitimate_constant_pool_address_p (rtx x)
2478 && GET_CODE (x) == PLUS
2479 && GET_CODE (XEXP (x, 0)) == REG
2480 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2481 && constant_pool_expr_p (XEXP (x, 1)));
2485 legitimate_small_data_p (enum machine_mode mode, rtx x)
2487 return (DEFAULT_ABI == ABI_V4
2488 && !flag_pic && !TARGET_TOC
2489 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2490 && small_data_operand (x, mode));
2494 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2496 unsigned HOST_WIDE_INT offset, extra;
2498 if (GET_CODE (x) != PLUS)
2500 if (GET_CODE (XEXP (x, 0)) != REG)
2502 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2504 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2507 offset = INTVAL (XEXP (x, 1));
2515 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2516 which leaves the only valid constant offset of zero, which by
2517 canonicalization rules is also invalid. */
2524 /* SPE vector modes. */
2525 return SPE_CONST_OFFSET_OK (offset);
2529 if (mode == DFmode || !TARGET_POWERPC64)
2531 else if (offset & 3)
2537 if (mode == TFmode || !TARGET_POWERPC64)
2539 else if (offset & 3)
2549 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2553 legitimate_indexed_address_p (rtx x, int strict)
2557 if (GET_CODE (x) != PLUS)
2562 if (!REG_P (op0) || !REG_P (op1))
2565 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2566 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2567 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2568 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2572 legitimate_indirect_address_p (rtx x, int strict)
2574 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2578 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2580 if (!TARGET_MACHO || !flag_pic
2581 || mode != SImode || GET_CODE(x) != MEM)
2585 if (GET_CODE (x) != LO_SUM)
2587 if (GET_CODE (XEXP (x, 0)) != REG)
2589 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2593 return CONSTANT_P (x);
2597 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2599 if (GET_CODE (x) != LO_SUM)
2601 if (GET_CODE (XEXP (x, 0)) != REG)
2603 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2607 if (TARGET_ELF || TARGET_MACHO)
2609 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2613 if (GET_MODE_NUNITS (mode) != 1)
2615 if (GET_MODE_BITSIZE (mode) > 32
2616 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2619 return CONSTANT_P (x);
2626 /* Try machine-dependent ways of modifying an illegitimate address
2627 to be legitimate. If we find one, return the new, valid address.
2628 This is used from only one place: `memory_address' in explow.c.
2630 OLDX is the address as it was before break_out_memory_refs was
2631 called. In some cases it is useful to look at this to decide what
2634 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2636 It is always safe for this function to do nothing. It exists to
2637 recognize opportunities to optimize the output.
2639 On RS/6000, first check for the sum of a register with a constant
2640 integer that is out of range. If so, generate code to add the
2641 constant with the low-order 16 bits masked to the register and force
2642 this result into another register (this can be done with `cau').
2643 Then generate an address of REG+(CONST&0xffff), allowing for the
2644 possibility of bit 16 being a one.
2646 Then check for the sum of a register and something not constant, try to
2647 load the other things into a register and return the sum. */
2650 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2651 enum machine_mode mode)
2653 if (GET_CODE (x) == SYMBOL_REF)
2655 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2657 return rs6000_legitimize_tls_address (x, model);
2660 if (GET_CODE (x) == PLUS
2661 && GET_CODE (XEXP (x, 0)) == REG
2662 && GET_CODE (XEXP (x, 1)) == CONST_INT
2663 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2665 HOST_WIDE_INT high_int, low_int;
2667 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2668 high_int = INTVAL (XEXP (x, 1)) - low_int;
2669 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2670 GEN_INT (high_int)), 0);
2671 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2673 else if (GET_CODE (x) == PLUS
2674 && GET_CODE (XEXP (x, 0)) == REG
2675 && GET_CODE (XEXP (x, 1)) != CONST_INT
2676 && GET_MODE_NUNITS (mode) == 1
2677 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2679 || (mode != DFmode && mode != TFmode))
2680 && (TARGET_POWERPC64 || mode != DImode)
2683 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2684 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2686 else if (ALTIVEC_VECTOR_MODE (mode))
2690 /* Make sure both operands are registers. */
2691 if (GET_CODE (x) == PLUS)
2692 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2693 force_reg (Pmode, XEXP (x, 1)));
2695 reg = force_reg (Pmode, x);
2698 else if (SPE_VECTOR_MODE (mode))
2700 /* We accept [reg + reg] and [reg + OFFSET]. */
2702 if (GET_CODE (x) == PLUS)
2704 rtx op1 = XEXP (x, 0);
2705 rtx op2 = XEXP (x, 1);
2707 op1 = force_reg (Pmode, op1);
2709 if (GET_CODE (op2) != REG
2710 && (GET_CODE (op2) != CONST_INT
2711 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2712 op2 = force_reg (Pmode, op2);
2714 return gen_rtx_PLUS (Pmode, op1, op2);
2717 return force_reg (Pmode, x);
2723 && GET_CODE (x) != CONST_INT
2724 && GET_CODE (x) != CONST_DOUBLE
2726 && GET_MODE_NUNITS (mode) == 1
2727 && (GET_MODE_BITSIZE (mode) <= 32
2728 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2730 rtx reg = gen_reg_rtx (Pmode);
2731 emit_insn (gen_elf_high (reg, x));
2732 return gen_rtx_LO_SUM (Pmode, reg, x);
2734 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2737 && ! MACHO_DYNAMIC_NO_PIC_P
2739 && GET_CODE (x) != CONST_INT
2740 && GET_CODE (x) != CONST_DOUBLE
2742 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2746 rtx reg = gen_reg_rtx (Pmode);
2747 emit_insn (gen_macho_high (reg, x));
2748 return gen_rtx_LO_SUM (Pmode, reg, x);
2751 && constant_pool_expr_p (x)
2752 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2754 return create_TOC_reference (x);
2760 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2762 static GTY(()) rtx rs6000_tls_symbol;
2764 rs6000_tls_get_addr (void)
2766 if (!rs6000_tls_symbol)
2767 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2769 return rs6000_tls_symbol;
2772 /* Construct the SYMBOL_REF for TLS GOT references. */
2774 static GTY(()) rtx rs6000_got_symbol;
2776 rs6000_got_sym (void)
2778 if (!rs6000_got_symbol)
2780 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2781 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2782 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2785 return rs6000_got_symbol;
2788 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2789 this (thread-local) address. */
2792 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2796 dest = gen_reg_rtx (Pmode);
2797 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2803 tlsreg = gen_rtx_REG (Pmode, 13);
2804 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2808 tlsreg = gen_rtx_REG (Pmode, 2);
2809 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2813 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2817 tmp = gen_reg_rtx (Pmode);
2820 tlsreg = gen_rtx_REG (Pmode, 13);
2821 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2825 tlsreg = gen_rtx_REG (Pmode, 2);
2826 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2830 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2832 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2837 rtx r3, got, tga, tmp1, tmp2, eqv;
2840 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2844 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2847 rtx gsym = rs6000_got_sym ();
2848 got = gen_reg_rtx (Pmode);
2850 rs6000_emit_move (got, gsym, Pmode);
2854 static int tls_got_labelno = 0;
2855 rtx tempLR, lab, tmp3, mem;
2858 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2859 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2860 tempLR = gen_reg_rtx (Pmode);
2861 tmp1 = gen_reg_rtx (Pmode);
2862 tmp2 = gen_reg_rtx (Pmode);
2863 tmp3 = gen_reg_rtx (Pmode);
2864 mem = gen_rtx_MEM (Pmode, tmp1);
2865 RTX_UNCHANGING_P (mem) = 1;
2867 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2869 emit_move_insn (tmp1, tempLR);
2870 emit_move_insn (tmp2, mem);
2871 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2872 last = emit_move_insn (got, tmp3);
2873 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2875 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2877 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2883 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2885 r3 = gen_rtx_REG (Pmode, 3);
2887 insn = gen_tls_gd_64 (r3, got, addr);
2889 insn = gen_tls_gd_32 (r3, got, addr);
2892 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2893 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2894 insn = emit_call_insn (insn);
2895 CONST_OR_PURE_CALL_P (insn) = 1;
2896 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2897 insn = get_insns ();
2899 emit_libcall_block (insn, dest, r3, addr);
2901 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2903 r3 = gen_rtx_REG (Pmode, 3);
2905 insn = gen_tls_ld_64 (r3, got);
2907 insn = gen_tls_ld_32 (r3, got);
2910 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2911 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2912 insn = emit_call_insn (insn);
2913 CONST_OR_PURE_CALL_P (insn) = 1;
2914 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2915 insn = get_insns ();
2917 tmp1 = gen_reg_rtx (Pmode);
2918 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2920 emit_libcall_block (insn, tmp1, r3, eqv);
2921 if (rs6000_tls_size == 16)
2924 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2926 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2928 else if (rs6000_tls_size == 32)
2930 tmp2 = gen_reg_rtx (Pmode);
2932 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2934 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2937 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2939 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2943 tmp2 = gen_reg_rtx (Pmode);
2945 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2947 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2949 insn = gen_rtx_SET (Pmode, dest,
2950 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2956 /* IE, or 64 bit offset LE. */
2957 tmp2 = gen_reg_rtx (Pmode);
2959 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2961 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2964 insn = gen_tls_tls_64 (dest, tmp2, addr);
2966 insn = gen_tls_tls_32 (dest, tmp2, addr);
2974 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2975 instruction definitions. */
2978 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
2980 return RS6000_SYMBOL_REF_TLS_P (x);
2983 /* Return 1 if X contains a thread-local symbol. */
2986 rs6000_tls_referenced_p (rtx x)
2988 if (! TARGET_HAVE_TLS)
2991 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2994 /* Return 1 if *X is a thread-local symbol. This is the same as
2995 rs6000_tls_symbol_ref except for the type of the unused argument. */
2998 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3000 return RS6000_SYMBOL_REF_TLS_P (*x);
3003 /* The convention appears to be to define this wherever it is used.
3004 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3005 is now used here. */
3006 #ifndef REG_MODE_OK_FOR_BASE_P
3007 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3010 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3011 replace the input X, or the original X if no replacement is called for.
3012 The output parameter *WIN is 1 if the calling macro should goto WIN,
3015 For RS/6000, we wish to handle large displacements off a base
3016 register by splitting the addend across an addiu/addis and the mem insn.
3017 This cuts number of extra insns needed from 3 to 1.
3019 On Darwin, we use this to generate code for floating point constants.
3020 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3021 The Darwin code is inside #if TARGET_MACHO because only then is
3022 machopic_function_base_name() defined. */
3024 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3025 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3027 /* We must recognize output that we have already generated ourselves. */
3028 if (GET_CODE (x) == PLUS
3029 && GET_CODE (XEXP (x, 0)) == PLUS
3030 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3031 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3032 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3034 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3035 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3036 opnum, (enum reload_type)type);
3042 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3043 && GET_CODE (x) == LO_SUM
3044 && GET_CODE (XEXP (x, 0)) == PLUS
3045 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3046 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3047 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3048 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3049 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3050 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3051 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3053 /* Result of previous invocation of this function on Darwin
3054 floating point constant. */
3055 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3056 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3057 opnum, (enum reload_type)type);
3062 if (GET_CODE (x) == PLUS
3063 && GET_CODE (XEXP (x, 0)) == REG
3064 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3065 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3066 && GET_CODE (XEXP (x, 1)) == CONST_INT
3067 && !SPE_VECTOR_MODE (mode)
3068 && !ALTIVEC_VECTOR_MODE (mode))
3070 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3071 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3073 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3075 /* Check for 32-bit overflow. */
3076 if (high + low != val)
3082 /* Reload the high part into a base reg; leave the low part
3083 in the mem directly. */
3085 x = gen_rtx_PLUS (GET_MODE (x),
3086 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3090 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3091 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3092 opnum, (enum reload_type)type);
3097 if (GET_CODE (x) == SYMBOL_REF
3098 && DEFAULT_ABI == ABI_DARWIN
3099 && !ALTIVEC_VECTOR_MODE (mode)
3100 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3101 /* Don't do this for TFmode, since the result isn't offsettable. */
3106 rtx offset = gen_rtx_CONST (Pmode,
3107 gen_rtx_MINUS (Pmode, x,
3108 gen_rtx_SYMBOL_REF (Pmode,
3109 machopic_function_base_name ())));
3110 x = gen_rtx_LO_SUM (GET_MODE (x),
3111 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3112 gen_rtx_HIGH (Pmode, offset)), offset);
3115 x = gen_rtx_LO_SUM (GET_MODE (x),
3116 gen_rtx_HIGH (Pmode, x), x);
3118 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3119 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3120 opnum, (enum reload_type)type);
3126 && constant_pool_expr_p (x)
3127 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3129 (x) = create_TOC_reference (x);
3137 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3138 that is a valid memory address for an instruction.
3139 The MODE argument is the machine mode for the MEM expression
3140 that wants to use this address.
3142 On the RS/6000, there are four valid address: a SYMBOL_REF that
3143 refers to a constant pool entry of an address (or the sum of it
3144 plus a constant), a short (16-bit signed) constant plus a register,
3145 the sum of two registers, or a register indirect, possibly with an
3146 auto-increment. For DFmode and DImode with a constant plus register,
3147 we must ensure that both words are addressable or PowerPC64 with offset
3150 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3151 32-bit DImode, TImode), indexed addressing cannot be used because
3152 adjacent memory cells are accessed by adding word-sized offsets
3153 during assembly output. */
3155 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3157 if (RS6000_SYMBOL_REF_TLS_P (x))
3159 if (legitimate_indirect_address_p (x, reg_ok_strict))
3161 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3162 && !ALTIVEC_VECTOR_MODE (mode)
3163 && !SPE_VECTOR_MODE (mode)
3165 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3167 if (legitimate_small_data_p (mode, x))
3169 if (legitimate_constant_pool_address_p (x))
3171 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3173 && GET_CODE (x) == PLUS
3174 && GET_CODE (XEXP (x, 0)) == REG
3175 && XEXP (x, 0) == virtual_stack_vars_rtx
3176 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3178 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3181 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3183 || (mode != DFmode && mode != TFmode))
3184 && (TARGET_POWERPC64 || mode != DImode)
3185 && legitimate_indexed_address_p (x, reg_ok_strict))
3187 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3192 /* Go to LABEL if ADDR (a legitimate address expression)
3193 has an effect that depends on the machine mode it is used for.
3195 On the RS/6000 this is true of all integral offsets (since AltiVec
3196 modes don't allow them) or is a pre-increment or decrement.
3198 ??? Except that due to conceptual problems in offsettable_address_p
3199 we can't really report the problems of integral offsets. So leave
3200 this assuming that the adjustable offset must be valid for the
3201 sub-words of a TFmode operand, which is what we had before. */
3204 rs6000_mode_dependent_address (rtx addr)
3206 switch (GET_CODE (addr))
3209 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3211 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3212 return val + 12 + 0x8000 >= 0x10000;
3221 return TARGET_UPDATE;
3230 /* Try to output insns to set TARGET equal to the constant C if it can
3231 be done in less than N insns. Do all computations in MODE.
3232 Returns the place where the output has been placed if it can be
3233 done and the insns have been emitted. If it would take more than N
3234 insns, zero is returned and no insns and emitted. */
3237 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3238 rtx source, int n ATTRIBUTE_UNUSED)
3240 rtx result, insn, set;
3241 HOST_WIDE_INT c0, c1;
3243 if (mode == QImode || mode == HImode)
3246 dest = gen_reg_rtx (mode);
3247 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3250 else if (mode == SImode)
3252 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3254 emit_insn (gen_rtx_SET (VOIDmode, result,
3255 GEN_INT (INTVAL (source)
3256 & (~ (HOST_WIDE_INT) 0xffff))));
3257 emit_insn (gen_rtx_SET (VOIDmode, dest,
3258 gen_rtx_IOR (SImode, result,
3259 GEN_INT (INTVAL (source) & 0xffff))));
3262 else if (mode == DImode)
3264 if (GET_CODE (source) == CONST_INT)
3266 c0 = INTVAL (source);
3269 else if (GET_CODE (source) == CONST_DOUBLE)
3271 #if HOST_BITS_PER_WIDE_INT >= 64
3272 c0 = CONST_DOUBLE_LOW (source);
3275 c0 = CONST_DOUBLE_LOW (source);
3276 c1 = CONST_DOUBLE_HIGH (source);
3282 result = rs6000_emit_set_long_const (dest, c0, c1);
3287 insn = get_last_insn ();
3288 set = single_set (insn);
3289 if (! CONSTANT_P (SET_SRC (set)))
3290 set_unique_reg_note (insn, REG_EQUAL, source);
3295 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3296 fall back to a straight forward decomposition. We do this to avoid
3297 exponential run times encountered when looking for longer sequences
3298 with rs6000_emit_set_const. */
3300 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3302 if (!TARGET_POWERPC64)
3304 rtx operand1, operand2;
3306 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3308 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3310 emit_move_insn (operand1, GEN_INT (c1));
3311 emit_move_insn (operand2, GEN_INT (c2));
3315 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3318 ud2 = (c1 & 0xffff0000) >> 16;
3319 #if HOST_BITS_PER_WIDE_INT >= 64
3323 ud4 = (c2 & 0xffff0000) >> 16;
3325 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3326 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3329 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3331 emit_move_insn (dest, GEN_INT (ud1));
3334 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3335 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3338 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3341 emit_move_insn (dest, GEN_INT (ud2 << 16));
3343 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3345 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3346 || (ud4 == 0 && ! (ud3 & 0x8000)))
3349 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3352 emit_move_insn (dest, GEN_INT (ud3 << 16));
3355 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3356 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3358 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3363 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3366 emit_move_insn (dest, GEN_INT (ud4 << 16));
3369 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3371 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3373 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3374 GEN_INT (ud2 << 16)));
3376 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3382 /* Emit a move from SOURCE to DEST in mode MODE. */
3384 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3388 operands[1] = source;
3390 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3391 if (GET_CODE (operands[1]) == CONST_DOUBLE
3392 && ! FLOAT_MODE_P (mode)
3393 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3395 /* FIXME. This should never happen. */
3396 /* Since it seems that it does, do the safe thing and convert
3398 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3400 if (GET_CODE (operands[1]) == CONST_DOUBLE
3401 && ! FLOAT_MODE_P (mode)
3402 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3403 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3404 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3405 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3408 /* Check if GCC is setting up a block move that will end up using FP
3409 registers as temporaries. We must make sure this is acceptable. */
3410 if (GET_CODE (operands[0]) == MEM
3411 && GET_CODE (operands[1]) == MEM
3413 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3414 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3415 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3416 ? 32 : MEM_ALIGN (operands[0])))
3417 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3419 : MEM_ALIGN (operands[1]))))
3420 && ! MEM_VOLATILE_P (operands [0])
3421 && ! MEM_VOLATILE_P (operands [1]))
3423 emit_move_insn (adjust_address (operands[0], SImode, 0),
3424 adjust_address (operands[1], SImode, 0));
3425 emit_move_insn (adjust_address (operands[0], SImode, 4),
3426 adjust_address (operands[1], SImode, 4));
3430 else if (mode == DImode && TARGET_POWERPC64
3431 && GET_CODE (operands[0]) == REG
3432 && GET_CODE (operands[1]) == MEM && optimize > 0
3433 && SLOW_UNALIGNED_ACCESS (DImode,
3434 MEM_ALIGN (operands[1]) > 32
3436 : MEM_ALIGN (operands[1]))
3439 rtx reg = gen_reg_rtx (SImode);
3440 emit_insn (gen_rtx_SET (SImode, reg,
3441 adjust_address (operands[1], SImode, 0)));
3442 reg = simplify_gen_subreg (DImode, reg, SImode, 0);
3443 emit_insn (gen_insvdi (operands[0], GEN_INT (32), const0_rtx, reg));
3444 reg = gen_reg_rtx (SImode);
3445 emit_insn (gen_rtx_SET (SImode, reg,
3446 adjust_address (operands[1], SImode, 4)));
3447 reg = simplify_gen_subreg (DImode, reg, SImode, 0);
3448 emit_insn (gen_insvdi (operands[0], GEN_INT (32), GEN_INT (32), reg));
3451 else if (mode == DImode && TARGET_POWERPC64
3452 && GET_CODE (operands[1]) == REG
3453 && GET_CODE (operands[0]) == MEM && optimize > 0
3454 && SLOW_UNALIGNED_ACCESS (DImode,
3455 MEM_ALIGN (operands[0]) > 32
3457 : MEM_ALIGN (operands[0]))
3460 rtx reg = gen_reg_rtx (DImode);
3461 emit_move_insn (reg,
3462 gen_rtx_LSHIFTRT (DImode, operands[1], GEN_INT (32)));
3463 emit_move_insn (adjust_address (operands[0], SImode, 0),
3464 simplify_gen_subreg (SImode, reg, DImode, 0));
3465 emit_move_insn (adjust_address (operands[0], SImode, 4),
3466 simplify_gen_subreg (SImode, operands[1], DImode, 0));
3471 if (!no_new_pseudos)
3473 if (GET_CODE (operands[1]) == MEM && optimize > 0
3474 && (mode == QImode || mode == HImode || mode == SImode)
3475 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3477 rtx reg = gen_reg_rtx (word_mode);
3479 emit_insn (gen_rtx_SET (word_mode, reg,
3480 gen_rtx_ZERO_EXTEND (word_mode,
3482 operands[1] = gen_lowpart (mode, reg);
3484 if (GET_CODE (operands[0]) != REG)
3485 operands[1] = force_reg (mode, operands[1]);
3488 if (mode == SFmode && ! TARGET_POWERPC
3489 && TARGET_HARD_FLOAT && TARGET_FPRS
3490 && GET_CODE (operands[0]) == MEM)
3494 if (reload_in_progress || reload_completed)
3495 regnum = true_regnum (operands[1]);
3496 else if (GET_CODE (operands[1]) == REG)
3497 regnum = REGNO (operands[1]);
3501 /* If operands[1] is a register, on POWER it may have
3502 double-precision data in it, so truncate it to single
3504 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3507 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3508 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3509 operands[1] = newreg;
3513 /* Recognize the case where operand[1] is a reference to thread-local
3514 data and load its address to a register. */
3515 if (GET_CODE (operands[1]) == SYMBOL_REF)
3517 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3519 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3522 /* Handle the case where reload calls us with an invalid address. */
3523 if (reload_in_progress && mode == Pmode
3524 && (! general_operand (operands[1], mode)
3525 || ! nonimmediate_operand (operands[0], mode)))
3528 /* Handle the case of CONSTANT_P_RTX. */
3529 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3532 /* 128-bit constant floating-point values on Darwin should really be
3533 loaded as two parts. */
3534 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3535 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3536 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3538 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3539 know how to get a DFmode SUBREG of a TFmode. */
3540 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3541 simplify_gen_subreg (DImode, operands[1], mode, 0),
3543 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3544 GET_MODE_SIZE (DImode)),
3545 simplify_gen_subreg (DImode, operands[1], mode,
3546 GET_MODE_SIZE (DImode)),
3551 /* FIXME: In the long term, this switch statement should go away
3552 and be replaced by a sequence of tests based on things like
3558 if (CONSTANT_P (operands[1])
3559 && GET_CODE (operands[1]) != CONST_INT)
3560 operands[1] = force_const_mem (mode, operands[1]);
3566 if (CONSTANT_P (operands[1])
3567 && ! easy_fp_constant (operands[1], mode))
3568 operands[1] = force_const_mem (mode, operands[1]);
3579 if (CONSTANT_P (operands[1])
3580 && !easy_vector_constant (operands[1], mode))
3581 operands[1] = force_const_mem (mode, operands[1]);
3586 /* Use default pattern for address of ELF small data */
3589 && DEFAULT_ABI == ABI_V4
3590 && (GET_CODE (operands[1]) == SYMBOL_REF
3591 || GET_CODE (operands[1]) == CONST)
3592 && small_data_operand (operands[1], mode))
3594 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3598 if (DEFAULT_ABI == ABI_V4
3599 && mode == Pmode && mode == SImode
3600 && flag_pic == 1 && got_operand (operands[1], mode))
3602 emit_insn (gen_movsi_got (operands[0], operands[1]));
3606 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3610 && CONSTANT_P (operands[1])
3611 && GET_CODE (operands[1]) != HIGH
3612 && GET_CODE (operands[1]) != CONST_INT)
3614 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3616 /* If this is a function address on -mcall-aixdesc,
3617 convert it to the address of the descriptor. */
3618 if (DEFAULT_ABI == ABI_AIX
3619 && GET_CODE (operands[1]) == SYMBOL_REF
3620 && XSTR (operands[1], 0)[0] == '.')
3622 const char *name = XSTR (operands[1], 0);
3624 while (*name == '.')
3626 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3627 CONSTANT_POOL_ADDRESS_P (new_ref)
3628 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3629 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3630 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3631 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3632 operands[1] = new_ref;
3635 if (DEFAULT_ABI == ABI_DARWIN)
3638 if (MACHO_DYNAMIC_NO_PIC_P)
3640 /* Take care of any required data indirection. */
3641 operands[1] = rs6000_machopic_legitimize_pic_address (
3642 operands[1], mode, operands[0]);
3643 if (operands[0] != operands[1])
3644 emit_insn (gen_rtx_SET (VOIDmode,
3645 operands[0], operands[1]));
3649 emit_insn (gen_macho_high (target, operands[1]));
3650 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3654 emit_insn (gen_elf_high (target, operands[1]));
3655 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3659 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3660 and we have put it in the TOC, we just need to make a TOC-relative
3663 && GET_CODE (operands[1]) == SYMBOL_REF
3664 && constant_pool_expr_p (operands[1])
3665 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3666 get_pool_mode (operands[1])))
3668 operands[1] = create_TOC_reference (operands[1]);
3670 else if (mode == Pmode
3671 && CONSTANT_P (operands[1])
3672 && ((GET_CODE (operands[1]) != CONST_INT
3673 && ! easy_fp_constant (operands[1], mode))
3674 || (GET_CODE (operands[1]) == CONST_INT
3675 && num_insns_constant (operands[1], mode) > 2)
3676 || (GET_CODE (operands[0]) == REG
3677 && FP_REGNO_P (REGNO (operands[0]))))
3678 && GET_CODE (operands[1]) != HIGH
3679 && ! legitimate_constant_pool_address_p (operands[1])
3680 && ! toc_relative_expr_p (operands[1]))
3682 /* Emit a USE operation so that the constant isn't deleted if
3683 expensive optimizations are turned on because nobody
3684 references it. This should only be done for operands that
3685 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3686 This should not be done for operands that contain LABEL_REFs.
3687 For now, we just handle the obvious case. */
3688 if (GET_CODE (operands[1]) != LABEL_REF)
3689 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3692 /* Darwin uses a special PIC legitimizer. */
3693 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3696 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3698 if (operands[0] != operands[1])
3699 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3704 /* If we are to limit the number of things we put in the TOC and
3705 this is a symbol plus a constant we can add in one insn,
3706 just put the symbol in the TOC and add the constant. Don't do
3707 this if reload is in progress. */
3708 if (GET_CODE (operands[1]) == CONST
3709 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3710 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3711 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3712 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3713 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3714 && ! side_effects_p (operands[0]))
3717 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3718 rtx other = XEXP (XEXP (operands[1], 0), 1);
3720 sym = force_reg (mode, sym);
3722 emit_insn (gen_addsi3 (operands[0], sym, other));
3724 emit_insn (gen_adddi3 (operands[0], sym, other));
3728 operands[1] = force_const_mem (mode, operands[1]);
3731 && constant_pool_expr_p (XEXP (operands[1], 0))
3732 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3733 get_pool_constant (XEXP (operands[1], 0)),
3734 get_pool_mode (XEXP (operands[1], 0))))
3737 = gen_rtx_MEM (mode,
3738 create_TOC_reference (XEXP (operands[1], 0)));
3739 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3740 RTX_UNCHANGING_P (operands[1]) = 1;
3746 if (GET_CODE (operands[0]) == MEM
3747 && GET_CODE (XEXP (operands[0], 0)) != REG
3748 && ! reload_in_progress)
3750 = replace_equiv_address (operands[0],
3751 copy_addr_to_reg (XEXP (operands[0], 0)));
3753 if (GET_CODE (operands[1]) == MEM
3754 && GET_CODE (XEXP (operands[1], 0)) != REG
3755 && ! reload_in_progress)
3757 = replace_equiv_address (operands[1],
3758 copy_addr_to_reg (XEXP (operands[1], 0)));
3761 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3763 gen_rtx_SET (VOIDmode,
3764 operands[0], operands[1]),
3765 gen_rtx_CLOBBER (VOIDmode,
3766 gen_rtx_SCRATCH (SImode)))));
3775 /* Above, we may have called force_const_mem which may have returned
3776 an invalid address. If we can, fix this up; otherwise, reload will
3777 have to deal with it. */
3778 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3779 operands[1] = validize_mem (operands[1]);
3782 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3785 /* Nonzero if we can use a floating-point register to pass this arg. */
3786 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3787 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3788 && (CUM)->fregno <= FP_ARG_MAX_REG \
3789 && TARGET_HARD_FLOAT && TARGET_FPRS)
3791 /* Nonzero if we can use an AltiVec register to pass this arg. */
3792 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3793 (ALTIVEC_VECTOR_MODE (MODE) \
3794 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3795 && TARGET_ALTIVEC_ABI \
3798 /* Return a nonzero value to say to return the function value in
3799 memory, just as large structures are always returned. TYPE will be
3800 the data type of the value, and FNTYPE will be the type of the
3801 function doing the returning, or @code{NULL} for libcalls.
3803 The AIX ABI for the RS/6000 specifies that all structures are
3804 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3805 specifies that structures <= 8 bytes are returned in r3/r4, but a
3806 draft put them in memory, and GCC used to implement the draft
3807 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3808 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3809 compatibility can change DRAFT_V4_STRUCT_RET to override the
3810 default, and -m switches get the final word. See
3811 rs6000_override_options for more details.
3813 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3814 long double support is enabled. These values are returned in memory.
3816 int_size_in_bytes returns -1 for variable size objects, which go in
3817 memory always. The cast to unsigned makes -1 > 8. */
3820 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3822 if (AGGREGATE_TYPE_P (type)
3823 && (TARGET_AIX_STRUCT_RET
3824 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3826 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3831 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3832 for a call to a function whose data type is FNTYPE.
3833 For a library call, FNTYPE is 0.
3835 For incoming args we set the number of arguments in the prototype large
3836 so we never return a PARALLEL. */
3839 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3840 rtx libname ATTRIBUTE_UNUSED, int incoming, int libcall)
3842 static CUMULATIVE_ARGS zero_cumulative;
3844 *cum = zero_cumulative;
3846 cum->fregno = FP_ARG_MIN_REG;
3847 cum->vregno = ALTIVEC_ARG_MIN_REG;
3848 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3849 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3850 ? CALL_LIBCALL : CALL_NORMAL);
3851 cum->sysv_gregno = GP_ARG_MIN_REG;
3852 cum->stdarg = fntype
3853 && (TYPE_ARG_TYPES (fntype) != 0
3854 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3855 != void_type_node));
3858 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3860 else if (cum->prototype)
3861 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3862 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3863 || rs6000_return_in_memory (TREE_TYPE (fntype),
3867 cum->nargs_prototype = 0;
3869 /* Check for a longcall attribute. */
3871 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3872 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3873 cum->call_cookie = CALL_LONG;
3875 if (TARGET_DEBUG_ARG)
3877 fprintf (stderr, "\ninit_cumulative_args:");
3880 tree ret_type = TREE_TYPE (fntype);
3881 fprintf (stderr, " ret code = %s,",
3882 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3885 if (cum->call_cookie & CALL_LONG)
3886 fprintf (stderr, " longcall,");
3888 fprintf (stderr, " proto = %d, nargs = %d\n",
3889 cum->prototype, cum->nargs_prototype);
3893 /* If defined, a C expression which determines whether, and in which
3894 direction, to pad out an argument with extra space. The value
3895 should be of type `enum direction': either `upward' to pad above
3896 the argument, `downward' to pad below, or `none' to inhibit
3899 For the AIX ABI structs are always stored left shifted in their
3903 function_arg_padding (enum machine_mode mode, tree type)
3905 #ifndef AGGREGATE_PADDING_FIXED
3906 #define AGGREGATE_PADDING_FIXED 0
3908 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3909 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3912 if (!AGGREGATE_PADDING_FIXED)
3914 /* GCC used to pass structures of the same size as integer types as
3915 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3916 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3917 passed padded downward, except that -mstrict-align further
3918 muddied the water in that multi-component structures of 2 and 4
3919 bytes in size were passed padded upward.
3921 The following arranges for best compatibility with previous
3922 versions of gcc, but removes the -mstrict-align dependency. */
3923 if (BYTES_BIG_ENDIAN)
3925 HOST_WIDE_INT size = 0;
3927 if (mode == BLKmode)
3929 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3930 size = int_size_in_bytes (type);
3933 size = GET_MODE_SIZE (mode);
3935 if (size == 1 || size == 2 || size == 4)
3941 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3943 if (type != 0 && AGGREGATE_TYPE_P (type))
3947 /* Fall back to the default. */
3948 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
3951 /* If defined, a C expression that gives the alignment boundary, in bits,
3952 of an argument with the specified mode and type. If it is not defined,
3953 PARM_BOUNDARY is used for all arguments.
3955 V.4 wants long longs to be double word aligned. */
3958 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
3960 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3962 else if (SPE_VECTOR_MODE (mode))
3964 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3967 return PARM_BOUNDARY;
3970 /* Update the data in CUM to advance over an argument
3971 of mode MODE and data type TYPE.
3972 (TYPE is null for libcalls where that information may not be available.) */
3975 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3976 tree type, int named)
3978 cum->nargs_prototype--;
3980 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3982 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
3985 /* PowerPC64 Linux and AIX allocates GPRs for a vector argument
3986 even if it is going to be passed in a vector register.
3987 Darwin does the same for variable-argument functions. */
3988 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
3989 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
3993 /* Vector parameters must be 16-byte aligned. This places
3994 them at 2 mod 4 in terms of words in 32-bit mode, since
3995 the parameter save area starts at offset 24 from the
3996 stack. In 64-bit mode, they just have to start on an
3997 even word, since the parameter save area is 16-byte
3998 aligned. Space for GPRs is reserved even if the argument
3999 will be passed in memory. */
4001 align = ((6 - (cum->words & 3)) & 3);
4003 align = cum->words & 1;
4004 cum->words += align + RS6000_ARG_SIZE (mode, type);
4006 if (TARGET_DEBUG_ARG)
4008 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4010 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4011 cum->nargs_prototype, cum->prototype,
4012 GET_MODE_NAME (mode));
4016 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4018 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4020 else if (DEFAULT_ABI == ABI_V4)
4022 if (TARGET_HARD_FLOAT && TARGET_FPRS
4023 && (mode == SFmode || mode == DFmode))
4025 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4030 cum->words += cum->words & 1;
4031 cum->words += RS6000_ARG_SIZE (mode, type);
4037 int gregno = cum->sysv_gregno;
4039 /* Aggregates and IEEE quad get passed by reference. */
4040 if ((type && AGGREGATE_TYPE_P (type))
4044 n_words = RS6000_ARG_SIZE (mode, type);
4046 /* Long long and SPE vectors are put in odd registers. */
4047 if (n_words == 2 && (gregno & 1) == 0)
4050 /* Long long and SPE vectors are not split between registers
4052 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4054 /* Long long is aligned on the stack. */
4056 cum->words += cum->words & 1;
4057 cum->words += n_words;
4060 /* Note: continuing to accumulate gregno past when we've started
4061 spilling to the stack indicates the fact that we've started
4062 spilling to the stack to expand_builtin_saveregs. */
4063 cum->sysv_gregno = gregno + n_words;
4066 if (TARGET_DEBUG_ARG)
4068 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4069 cum->words, cum->fregno);
4070 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4071 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4072 fprintf (stderr, "mode = %4s, named = %d\n",
4073 GET_MODE_NAME (mode), named);
4078 int align = (TARGET_32BIT && (cum->words & 1) != 0
4079 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4081 cum->words += align + RS6000_ARG_SIZE (mode, type);
4083 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4084 && TARGET_HARD_FLOAT && TARGET_FPRS)
4085 cum->fregno += (mode == TFmode ? 2 : 1);
4087 if (TARGET_DEBUG_ARG)
4089 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4090 cum->words, cum->fregno);
4091 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4092 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4093 fprintf (stderr, "named = %d, align = %d\n", named, align);
4098 /* Determine where to put a SIMD argument on the SPE. */
4101 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4106 int gregno = cum->sysv_gregno;
4107 int n_words = RS6000_ARG_SIZE (mode, type);
4109 /* SPE vectors are put in odd registers. */
4110 if (n_words == 2 && (gregno & 1) == 0)
4113 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4116 enum machine_mode m = SImode;
4118 r1 = gen_rtx_REG (m, gregno);
4119 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4120 r2 = gen_rtx_REG (m, gregno + 1);
4121 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4122 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4129 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4130 return gen_rtx_REG (mode, cum->sysv_gregno);
4136 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4139 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4140 tree type, int align_words)
4144 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4145 in vararg list into zero, one or two GPRs */
4146 if (align_words >= GP_ARG_NUM_REG)
4147 return gen_rtx_PARALLEL (DFmode,
4149 gen_rtx_EXPR_LIST (VOIDmode,
4150 NULL_RTX, const0_rtx),
4151 gen_rtx_EXPR_LIST (VOIDmode,
4155 else if (align_words + RS6000_ARG_SIZE (mode, type)
4157 /* If this is partially on the stack, then we only
4158 include the portion actually in registers here. */
4159 return gen_rtx_PARALLEL (DFmode,
4161 gen_rtx_EXPR_LIST (VOIDmode,
4162 gen_rtx_REG (SImode,
4166 gen_rtx_EXPR_LIST (VOIDmode,
4171 /* split a DFmode arg into two GPRs */
4172 return gen_rtx_PARALLEL (DFmode,
4174 gen_rtx_EXPR_LIST (VOIDmode,
4175 gen_rtx_REG (SImode,
4179 gen_rtx_EXPR_LIST (VOIDmode,
4180 gen_rtx_REG (SImode,
4184 gen_rtx_EXPR_LIST (VOIDmode,
4185 gen_rtx_REG (mode, cum->fregno),
4188 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4190 else if (mode == DImode)
4192 if (align_words < GP_ARG_NUM_REG - 1)
4193 return gen_rtx_PARALLEL (DImode,
4195 gen_rtx_EXPR_LIST (VOIDmode,
4196 gen_rtx_REG (SImode,
4200 gen_rtx_EXPR_LIST (VOIDmode,
4201 gen_rtx_REG (SImode,
4205 else if (align_words == GP_ARG_NUM_REG - 1)
4206 return gen_rtx_PARALLEL (DImode,
4208 gen_rtx_EXPR_LIST (VOIDmode,
4209 NULL_RTX, const0_rtx),
4210 gen_rtx_EXPR_LIST (VOIDmode,
4211 gen_rtx_REG (SImode,
4216 else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
4219 int size = int_size_in_bytes (type);
4220 int no_units = ((size - 1) / 4) + 1;
4221 int max_no_words = GP_ARG_NUM_REG - align_words;
4222 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4223 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4225 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4227 for (k=0; k < rtlvec_len; k++)
4228 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4229 gen_rtx_REG (SImode,
4232 k == 0 ? const0_rtx : GEN_INT (k*4));
4234 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
4240 /* Determine where to put an argument to a function.
4241 Value is zero to push the argument on the stack,
4242 or a hard register in which to store the argument.
4244 MODE is the argument's machine mode.
4245 TYPE is the data type of the argument (as a tree).
4246 This is null for libcalls where that information may
4248 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4249 the preceding args and about the function being called.
4250 NAMED is nonzero if this argument is a named parameter
4251 (otherwise it is an extra parameter matching an ellipsis).
4253 On RS/6000 the first eight words of non-FP are normally in registers
4254 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4255 Under V.4, the first 8 FP args are in registers.
4257 If this is floating-point and no prototype is specified, we use
4258 both an FP and integer register (or possibly FP reg and stack). Library
4259 functions (when CALL_LIBCALL is set) always have the proper types for args,
4260 so we can pass the FP value just in one register. emit_library_function
4261 doesn't support PARALLEL anyway. */
4264 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4265 tree type, int named)
4267 enum rs6000_abi abi = DEFAULT_ABI;
4269 /* Return a marker to indicate whether CR1 needs to set or clear the
4270 bit that V.4 uses to say fp args were passed in registers.
4271 Assume that we don't need the marker for software floating point,
4272 or compiler generated library calls. */
4273 if (mode == VOIDmode)
4276 && cum->nargs_prototype < 0
4277 && (cum->call_cookie & CALL_LIBCALL) == 0
4278 && (cum->prototype || TARGET_NO_PROTOTYPE))
4280 /* For the SPE, we need to crxor CR6 always. */
4282 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4283 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4284 return GEN_INT (cum->call_cookie
4285 | ((cum->fregno == FP_ARG_MIN_REG)
4286 ? CALL_V4_SET_FP_ARGS
4287 : CALL_V4_CLEAR_FP_ARGS));
4290 return GEN_INT (cum->call_cookie);
4293 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4294 if (TARGET_64BIT && ! cum->prototype)
4296 /* Vector parameters get passed in vector register
4297 and also in GPRs or memory, in absence of prototype. */
4300 align_words = (cum->words + 1) & ~1;
4302 if (align_words >= GP_ARG_NUM_REG)
4308 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4310 return gen_rtx_PARALLEL (mode,
4312 gen_rtx_EXPR_LIST (VOIDmode,
4314 gen_rtx_EXPR_LIST (VOIDmode,
4315 gen_rtx_REG (mode, cum->vregno),
4319 return gen_rtx_REG (mode, cum->vregno);
4320 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4322 if (named || abi == ABI_V4)
4326 /* Vector parameters to varargs functions under AIX or Darwin
4327 get passed in memory and possibly also in GPRs. */
4328 int align, align_words;
4329 enum machine_mode part_mode = mode;
4331 /* Vector parameters must be 16-byte aligned. This places them at
4332 2 mod 4 in terms of words in 32-bit mode, since the parameter
4333 save area starts at offset 24 from the stack. In 64-bit mode,
4334 they just have to start on an even word, since the parameter
4335 save area is 16-byte aligned. */
4337 align = ((6 - (cum->words & 3)) & 3);
4339 align = cum->words & 1;
4340 align_words = cum->words + align;
4342 /* Out of registers? Memory, then. */
4343 if (align_words >= GP_ARG_NUM_REG)
4346 /* The vector value goes in GPRs. Only the part of the
4347 value in GPRs is reported here. */
4348 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4350 /* Fortunately, there are only two possibilities, the value
4351 is either wholly in GPRs or half in GPRs and half not. */
4354 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4357 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4358 return rs6000_spe_function_arg (cum, mode, type);
4359 else if (abi == ABI_V4)
4361 if (TARGET_HARD_FLOAT && TARGET_FPRS
4362 && (mode == SFmode || mode == DFmode))
4364 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4365 return gen_rtx_REG (mode, cum->fregno);
4372 int gregno = cum->sysv_gregno;
4374 /* Aggregates and IEEE quad get passed by reference. */
4375 if ((type && AGGREGATE_TYPE_P (type))
4379 n_words = RS6000_ARG_SIZE (mode, type);
4381 /* Long long and SPE vectors are put in odd registers. */
4382 if (n_words == 2 && (gregno & 1) == 0)
4385 /* Long long do not split between registers and stack. */
4386 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4387 return gen_rtx_REG (mode, gregno);
4394 int align = (TARGET_32BIT && (cum->words & 1) != 0
4395 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4396 int align_words = cum->words + align;
4398 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4401 if (TARGET_32BIT && TARGET_POWERPC64
4402 && (mode == DFmode || mode == DImode || mode == BLKmode))
4403 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4405 if (USE_FP_FOR_ARG_P (cum, mode, type))
4408 || ((cum->nargs_prototype > 0)
4409 /* IBM AIX extended its linkage convention definition always
4410 to require FP args after register save area hole on the
4412 && (DEFAULT_ABI != ABI_AIX
4414 || (align_words < GP_ARG_NUM_REG))))
4415 return gen_rtx_REG (mode, cum->fregno);
4417 return gen_rtx_PARALLEL (mode,
4419 gen_rtx_EXPR_LIST (VOIDmode,
4420 ((align_words >= GP_ARG_NUM_REG)
4423 + RS6000_ARG_SIZE (mode, type)
4425 /* If this is partially on the stack, then
4426 we only include the portion actually
4427 in registers here. */
4428 ? gen_rtx_REG (SImode,
4429 GP_ARG_MIN_REG + align_words)
4430 : gen_rtx_REG (mode,
4431 GP_ARG_MIN_REG + align_words))),
4433 gen_rtx_EXPR_LIST (VOIDmode,
4434 gen_rtx_REG (mode, cum->fregno),
4437 else if (align_words < GP_ARG_NUM_REG)
4438 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4444 /* For an arg passed partly in registers and partly in memory,
4445 this is the number of registers used.
4446 For args passed entirely in registers or entirely in memory, zero. */
4449 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4450 tree type, int named)
4452 if (DEFAULT_ABI == ABI_V4)
4455 if (USE_FP_FOR_ARG_P (cum, mode, type)
4456 || USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4458 if (cum->nargs_prototype >= 0)
4462 if (cum->words < GP_ARG_NUM_REG
4463 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4465 int ret = GP_ARG_NUM_REG - cum->words;
4466 if (ret && TARGET_DEBUG_ARG)
4467 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4475 /* A C expression that indicates when an argument must be passed by
4476 reference. If nonzero for an argument, a copy of that argument is
4477 made in memory and a pointer to the argument is passed instead of
4478 the argument itself. The pointer is passed in whatever way is
4479 appropriate for passing a pointer to that type.
4481 Under V.4, structures and unions are passed by reference.
4483 As an extension to all ABIs, variable sized types are passed by
4487 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4488 enum machine_mode mode ATTRIBUTE_UNUSED,
4489 tree type, int named ATTRIBUTE_UNUSED)
4491 if (DEFAULT_ABI == ABI_V4
4492 && ((type && AGGREGATE_TYPE_P (type))
4495 if (TARGET_DEBUG_ARG)
4496 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4500 return type && int_size_in_bytes (type) < 0;
4504 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4507 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4512 for (i = 0; i < nregs; i++)
4514 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4515 if (reload_completed)
4517 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4520 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4521 i * GET_MODE_SIZE(reg_mode));
4524 tem = replace_equiv_address (tem, XEXP (tem, 0));
4526 if (tem == NULL_RTX)
4529 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4534 /* Perform any needed actions needed for a function that is receiving a
4535 variable number of arguments.
4539 MODE and TYPE are the mode and type of the current parameter.
4541 PRETEND_SIZE is a variable that should be set to the amount of stack
4542 that must be pushed by the prolog to pretend that our caller pushed
4545 Normally, this macro will push all remaining incoming registers on the
4546 stack and set PRETEND_SIZE to the length of the registers pushed. */
4549 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4550 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4552 CUMULATIVE_ARGS next_cum;
4553 int reg_size = TARGET_32BIT ? 4 : 8;
4554 rtx save_area = NULL_RTX, mem;
4555 int first_reg_offset, set;
4557 /* Skip the last named argument. */
4559 function_arg_advance (&next_cum, mode, type, 1);
4561 if (DEFAULT_ABI == ABI_V4)
4563 /* Indicate to allocate space on the stack for varargs save area. */
4564 cfun->machine->sysv_varargs_p = 1;
4566 save_area = plus_constant (virtual_stack_vars_rtx,
4567 - RS6000_VARARGS_SIZE);
4569 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4573 first_reg_offset = next_cum.words;
4574 save_area = virtual_incoming_args_rtx;
4575 cfun->machine->sysv_varargs_p = 0;
4577 if (MUST_PASS_IN_STACK (mode, type))
4578 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4581 set = get_varargs_alias_set ();
4582 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4584 mem = gen_rtx_MEM (BLKmode,
4585 plus_constant (save_area,
4586 first_reg_offset * reg_size)),
4587 set_mem_alias_set (mem, set);
4588 set_mem_align (mem, BITS_PER_WORD);
4590 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4591 GP_ARG_NUM_REG - first_reg_offset);
4594 /* Save FP registers if needed. */
4595 if (DEFAULT_ABI == ABI_V4
4596 && TARGET_HARD_FLOAT && TARGET_FPRS
4598 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4600 int fregno = next_cum.fregno;
4601 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4602 rtx lab = gen_label_rtx ();
4603 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4605 emit_jump_insn (gen_rtx_SET (VOIDmode,
4607 gen_rtx_IF_THEN_ELSE (VOIDmode,
4608 gen_rtx_NE (VOIDmode, cr1,
4610 gen_rtx_LABEL_REF (VOIDmode, lab),
4613 while (fregno <= FP_ARG_V4_MAX_REG)
4615 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4616 set_mem_alias_set (mem, set);
4617 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4626 /* Create the va_list data type. */
4629 rs6000_build_builtin_va_list (void)
4631 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4633 /* For AIX, prefer 'char *' because that's what the system
4634 header files like. */
4635 if (DEFAULT_ABI != ABI_V4)
4636 return build_pointer_type (char_type_node);
4638 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4639 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4641 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4642 unsigned_char_type_node);
4643 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4644 unsigned_char_type_node);
4645 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4647 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4648 short_unsigned_type_node);
4649 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4651 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4654 DECL_FIELD_CONTEXT (f_gpr) = record;
4655 DECL_FIELD_CONTEXT (f_fpr) = record;
4656 DECL_FIELD_CONTEXT (f_res) = record;
4657 DECL_FIELD_CONTEXT (f_ovf) = record;
4658 DECL_FIELD_CONTEXT (f_sav) = record;
4660 TREE_CHAIN (record) = type_decl;
4661 TYPE_NAME (record) = type_decl;
4662 TYPE_FIELDS (record) = f_gpr;
4663 TREE_CHAIN (f_gpr) = f_fpr;
4664 TREE_CHAIN (f_fpr) = f_res;
4665 TREE_CHAIN (f_res) = f_ovf;
4666 TREE_CHAIN (f_ovf) = f_sav;
4668 layout_type (record);
4670 /* The correct type is an array type of one element. */
4671 return build_array_type (record, build_index_type (size_zero_node));
4674 /* Implement va_start. */
4677 rs6000_va_start (tree valist, rtx nextarg)
4679 HOST_WIDE_INT words, n_gpr, n_fpr;
4680 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4681 tree gpr, fpr, ovf, sav, t;
4683 /* Only SVR4 needs something special. */
4684 if (DEFAULT_ABI != ABI_V4)
4686 std_expand_builtin_va_start (valist, nextarg);
4690 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4691 f_fpr = TREE_CHAIN (f_gpr);
4692 f_res = TREE_CHAIN (f_fpr);
4693 f_ovf = TREE_CHAIN (f_res);
4694 f_sav = TREE_CHAIN (f_ovf);
4696 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4697 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4698 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4699 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4700 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4702 /* Count number of gp and fp argument registers used. */
4703 words = current_function_args_info.words;
4704 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4705 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4707 if (TARGET_DEBUG_ARG)
4708 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4709 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4710 words, n_gpr, n_fpr);
4712 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4713 TREE_SIDE_EFFECTS (t) = 1;
4714 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4716 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4717 TREE_SIDE_EFFECTS (t) = 1;
4718 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4720 /* Find the overflow area. */
4721 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4723 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4724 build_int_2 (words * UNITS_PER_WORD, 0));
4725 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4726 TREE_SIDE_EFFECTS (t) = 1;
4727 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4729 /* Find the register save area. */
4730 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4731 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4732 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4733 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4734 TREE_SIDE_EFFECTS (t) = 1;
4735 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4738 /* Implement va_arg. */
4741 rs6000_va_arg (tree valist, tree type)
4743 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4744 tree gpr, fpr, ovf, sav, reg, t, u;
4745 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4746 rtx lab_false, lab_over, addr_rtx, r;
4748 if (DEFAULT_ABI != ABI_V4)
4750 /* Variable sized types are passed by reference. */
4751 if (int_size_in_bytes (type) < 0)
4753 u = build_pointer_type (type);
4755 /* Args grow upward. */
4756 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4757 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4758 TREE_SIDE_EFFECTS (t) = 1;
4760 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4761 TREE_SIDE_EFFECTS (t) = 1;
4763 t = build1 (INDIRECT_REF, u, t);
4764 TREE_SIDE_EFFECTS (t) = 1;
4766 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4769 return std_expand_builtin_va_arg (valist, type);
4772 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4773 f_fpr = TREE_CHAIN (f_gpr);
4774 f_res = TREE_CHAIN (f_fpr);
4775 f_ovf = TREE_CHAIN (f_res);
4776 f_sav = TREE_CHAIN (f_ovf);
4778 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4779 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4780 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4781 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4782 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4784 size = int_size_in_bytes (type);
4785 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4787 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4789 /* Aggregates and long doubles are passed by reference. */
4795 size = UNITS_PER_WORD;
4798 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4800 /* FP args go in FP registers, if present. */
4809 /* Otherwise into GP registers. */
4817 /* Pull the value out of the saved registers.... */
4819 lab_false = gen_label_rtx ();
4820 lab_over = gen_label_rtx ();
4821 addr_rtx = gen_reg_rtx (Pmode);
4823 /* AltiVec vectors never go in registers. */
4824 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4826 TREE_THIS_VOLATILE (reg) = 1;
4827 emit_cmp_and_jump_insns
4828 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4829 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4832 /* Long long is aligned in the registers. */
4835 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4836 build_int_2 (n_reg - 1, 0));
4837 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4838 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4839 TREE_SIDE_EFFECTS (u) = 1;
4840 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4844 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4848 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4849 build_int_2 (n_reg, 0));
4850 TREE_SIDE_EFFECTS (u) = 1;
4852 u = build1 (CONVERT_EXPR, integer_type_node, u);
4853 TREE_SIDE_EFFECTS (u) = 1;
4855 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4856 TREE_SIDE_EFFECTS (u) = 1;
4858 t = build (PLUS_EXPR, ptr_type_node, t, u);
4859 TREE_SIDE_EFFECTS (t) = 1;
4861 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4863 emit_move_insn (addr_rtx, r);
4865 emit_jump_insn (gen_jump (lab_over));
4869 emit_label (lab_false);
4871 /* ... otherwise out of the overflow area. */
4873 /* Make sure we don't find reg 7 for the next int arg.
4875 All AltiVec vectors go in the overflow area. So in the AltiVec
4876 case we need to get the vectors from the overflow area, but
4877 remember where the GPRs and FPRs are. */
4878 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4879 || !TARGET_ALTIVEC))
4881 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4882 TREE_SIDE_EFFECTS (t) = 1;
4883 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4886 /* Care for on-stack alignment if needed. */
4893 /* AltiVec vectors are 16 byte aligned. */
4894 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4899 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4900 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4904 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4906 emit_move_insn (addr_rtx, r);
4908 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4909 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4910 TREE_SIDE_EFFECTS (t) = 1;
4911 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4913 emit_label (lab_over);
4917 r = gen_rtx_MEM (Pmode, addr_rtx);
4918 set_mem_alias_set (r, get_varargs_alias_set ());
4919 emit_move_insn (addr_rtx, r);
4927 #define def_builtin(MASK, NAME, TYPE, CODE) \
4929 if ((MASK) & target_flags) \
4930 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4934 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4936 static const struct builtin_description bdesc_3arg[] =
4938 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4939 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4940 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4941 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4942 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4943 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4944 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4945 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4946 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4947 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4948 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4949 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4950 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4951 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4952 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4953 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4954 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4955 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4956 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4957 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4958 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4959 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4960 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4963 /* DST operations: void foo (void *, const int, const char). */
4965 static const struct builtin_description bdesc_dst[] =
4967 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4968 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4969 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4970 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4973 /* Simple binary operations: VECc = foo (VECa, VECb). */
4975 static struct builtin_description bdesc_2arg[] =
4977 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4978 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4979 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4980 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4981 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4982 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4983 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4984 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4985 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4986 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4987 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4988 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4989 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4990 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4991 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4992 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4993 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4994 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4995 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4996 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4997 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4998 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4999 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5000 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5001 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5002 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5003 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5004 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5005 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5006 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5007 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5008 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5009 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5010 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5011 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5012 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5013 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5014 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5015 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5016 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5017 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5018 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5019 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5020 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5021 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5022 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5023 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5024 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5025 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5026 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5027 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5028 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5029 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5030 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5031 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5032 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5033 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5034 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5035 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5036 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5037 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5038 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5039 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5040 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5041 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5042 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5043 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5044 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5045 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5046 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5047 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5048 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5049 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5050 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5051 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5052 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5053 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5054 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5055 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5056 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5057 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5058 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5059 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5060 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5061 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5062 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5063 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5064 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5065 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5066 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5067 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5068 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5069 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5070 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5071 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5072 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5073 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5074 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5075 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5076 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5077 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5078 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5079 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5080 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5081 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5082 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5083 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5084 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5085 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5086 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5087 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5088 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5089 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5091 /* Place holder, leave as first spe builtin. */
5092 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5093 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5094 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5095 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5096 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5097 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5098 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5099 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5100 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5101 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5102 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5103 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5104 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5105 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5106 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5107 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5108 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5109 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5110 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5111 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5112 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5113 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5114 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5115 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5116 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5117 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5118 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5119 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5120 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5121 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5122 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5123 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5124 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5125 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5126 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5127 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5128 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5129 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5130 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5131 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5132 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5133 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5134 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5135 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5136 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5137 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5138 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5139 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5140 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5141 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5142 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5143 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5144 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5145 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5146 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5147 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5148 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5149 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5150 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5151 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5152 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5153 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5154 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5155 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5156 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5157 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5158 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5159 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5160 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5161 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5162 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5163 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5164 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5165 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5166 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5167 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5168 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5169 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5170 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5171 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5172 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5173 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5174 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5175 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5176 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5177 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5178 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5179 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5180 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5181 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5182 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5183 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5184 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5185 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5186 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5187 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5188 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5189 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5190 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5191 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5192 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5193 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5194 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5195 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5196 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5197 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5198 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5199 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5200 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5202 /* SPE binary operations expecting a 5-bit unsigned literal. */
5203 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5205 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5206 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5207 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5208 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5209 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5210 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5211 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5212 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5213 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5214 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5215 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5216 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5217 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5218 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5219 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5220 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5221 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5222 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5223 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5224 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5225 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5226 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5227 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5228 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5229 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5230 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5232 /* Place-holder. Leave as last binary SPE builtin. */
5233 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5236 /* AltiVec predicates. */
5238 struct builtin_description_predicates
5240 const unsigned int mask;
5241 const enum insn_code icode;
5243 const char *const name;
5244 const enum rs6000_builtins code;
5247 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5249 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5250 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5251 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5252 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5253 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5254 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5255 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5256 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5257 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5258 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5259 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5260 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5261 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5264 /* SPE predicates. */
5265 static struct builtin_description bdesc_spe_predicates[] =
5267 /* Place-holder. Leave as first. */
5268 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5269 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5270 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5271 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5272 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5273 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5274 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5275 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5276 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5277 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5278 /* Place-holder. Leave as last. */
5279 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5282 /* SPE evsel predicates. */
5283 static struct builtin_description bdesc_spe_evsel[] =
5285 /* Place-holder. Leave as first. */
5286 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5287 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5288 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5289 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5290 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5291 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5292 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5293 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5294 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5295 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5296 /* Place-holder. Leave as last. */
5297 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5300 /* ABS* operations. */
5302 static const struct builtin_description bdesc_abs[] =
5304 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5305 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5306 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5307 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5308 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5309 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5310 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5313 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5316 static struct builtin_description bdesc_1arg[] =
5318 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5319 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5320 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5321 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5322 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5323 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5324 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5325 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5326 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5327 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5328 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5329 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5330 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5331 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5332 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5333 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5334 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5336 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5337 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5338 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5339 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5340 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5341 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5342 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5343 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5344 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5345 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5346 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5347 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5348 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5349 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5350 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5351 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5352 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5353 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5354 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5355 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5356 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5357 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5358 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5359 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5360 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5361 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5362 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5363 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5364 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5365 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5367 /* Place-holder. Leave as last unary SPE builtin. */
5368 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5372 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5375 tree arg0 = TREE_VALUE (arglist);
5376 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5377 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5378 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5380 if (icode == CODE_FOR_nothing)
5381 /* Builtin not supported on this processor. */
5384 /* If we got invalid arguments bail out before generating bad rtl. */
5385 if (arg0 == error_mark_node)
5388 if (icode == CODE_FOR_altivec_vspltisb
5389 || icode == CODE_FOR_altivec_vspltish
5390 || icode == CODE_FOR_altivec_vspltisw
5391 || icode == CODE_FOR_spe_evsplatfi
5392 || icode == CODE_FOR_spe_evsplati)
5394 /* Only allow 5-bit *signed* literals. */
5395 if (GET_CODE (op0) != CONST_INT
5396 || INTVAL (op0) > 0x1f
5397 || INTVAL (op0) < -0x1f)
5399 error ("argument 1 must be a 5-bit signed literal");
5405 || GET_MODE (target) != tmode
5406 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5407 target = gen_reg_rtx (tmode);
5409 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5410 op0 = copy_to_mode_reg (mode0, op0);
5412 pat = GEN_FCN (icode) (target, op0);
5421 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5423 rtx pat, scratch1, scratch2;
5424 tree arg0 = TREE_VALUE (arglist);
5425 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5426 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5427 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5429 /* If we have invalid arguments, bail out before generating bad rtl. */
5430 if (arg0 == error_mark_node)
5434 || GET_MODE (target) != tmode
5435 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5436 target = gen_reg_rtx (tmode);
5438 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5439 op0 = copy_to_mode_reg (mode0, op0);
5441 scratch1 = gen_reg_rtx (mode0);
5442 scratch2 = gen_reg_rtx (mode0);
5444 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5453 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5456 tree arg0 = TREE_VALUE (arglist);
5457 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5458 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5459 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5460 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5461 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5462 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5464 if (icode == CODE_FOR_nothing)
5465 /* Builtin not supported on this processor. */
5468 /* If we got invalid arguments bail out before generating bad rtl. */
5469 if (arg0 == error_mark_node || arg1 == error_mark_node)
5472 if (icode == CODE_FOR_altivec_vcfux
5473 || icode == CODE_FOR_altivec_vcfsx
5474 || icode == CODE_FOR_altivec_vctsxs
5475 || icode == CODE_FOR_altivec_vctuxs
5476 || icode == CODE_FOR_altivec_vspltb
5477 || icode == CODE_FOR_altivec_vsplth
5478 || icode == CODE_FOR_altivec_vspltw
5479 || icode == CODE_FOR_spe_evaddiw
5480 || icode == CODE_FOR_spe_evldd
5481 || icode == CODE_FOR_spe_evldh
5482 || icode == CODE_FOR_spe_evldw
5483 || icode == CODE_FOR_spe_evlhhesplat
5484 || icode == CODE_FOR_spe_evlhhossplat
5485 || icode == CODE_FOR_spe_evlhhousplat
5486 || icode == CODE_FOR_spe_evlwhe
5487 || icode == CODE_FOR_spe_evlwhos
5488 || icode == CODE_FOR_spe_evlwhou
5489 || icode == CODE_FOR_spe_evlwhsplat
5490 || icode == CODE_FOR_spe_evlwwsplat
5491 || icode == CODE_FOR_spe_evrlwi
5492 || icode == CODE_FOR_spe_evslwi
5493 || icode == CODE_FOR_spe_evsrwis
5494 || icode == CODE_FOR_spe_evsubifw
5495 || icode == CODE_FOR_spe_evsrwiu)
5497 /* Only allow 5-bit unsigned literals. */
5498 if (TREE_CODE (arg1) != INTEGER_CST
5499 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5501 error ("argument 2 must be a 5-bit unsigned literal");
5507 || GET_MODE (target) != tmode
5508 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5509 target = gen_reg_rtx (tmode);
5511 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5512 op0 = copy_to_mode_reg (mode0, op0);
5513 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5514 op1 = copy_to_mode_reg (mode1, op1);
5516 pat = GEN_FCN (icode) (target, op0, op1);
5525 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5526 tree arglist, rtx target)
5529 tree cr6_form = TREE_VALUE (arglist);
5530 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5531 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5532 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5533 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5534 enum machine_mode tmode = SImode;
5535 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5536 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5539 if (TREE_CODE (cr6_form) != INTEGER_CST)
5541 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5545 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5550 /* If we have invalid arguments, bail out before generating bad rtl. */
5551 if (arg0 == error_mark_node || arg1 == error_mark_node)
5555 || GET_MODE (target) != tmode
5556 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5557 target = gen_reg_rtx (tmode);
5559 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5560 op0 = copy_to_mode_reg (mode0, op0);
5561 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5562 op1 = copy_to_mode_reg (mode1, op1);
5564 scratch = gen_reg_rtx (mode0);
5566 pat = GEN_FCN (icode) (scratch, op0, op1,
5567 gen_rtx (SYMBOL_REF, Pmode, opcode));
5572 /* The vec_any* and vec_all* predicates use the same opcodes for two
5573 different operations, but the bits in CR6 will be different
5574 depending on what information we want. So we have to play tricks
5575 with CR6 to get the right bits out.
5577 If you think this is disgusting, look at the specs for the
5578 AltiVec predicates. */
5580 switch (cr6_form_int)
5583 emit_insn (gen_cr6_test_for_zero (target));
5586 emit_insn (gen_cr6_test_for_zero_reverse (target));
5589 emit_insn (gen_cr6_test_for_lt (target));
5592 emit_insn (gen_cr6_test_for_lt_reverse (target));
5595 error ("argument 1 of __builtin_altivec_predicate is out of range");
5603 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5606 tree arg0 = TREE_VALUE (arglist);
5607 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5608 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5609 enum machine_mode mode0 = Pmode;
5610 enum machine_mode mode1 = Pmode;
5611 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5612 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5614 if (icode == CODE_FOR_nothing)
5615 /* Builtin not supported on this processor. */
5618 /* If we got invalid arguments bail out before generating bad rtl. */
5619 if (arg0 == error_mark_node || arg1 == error_mark_node)
5623 || GET_MODE (target) != tmode
5624 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5625 target = gen_reg_rtx (tmode);
5627 op1 = copy_to_mode_reg (mode1, op1);
5629 if (op0 == const0_rtx)
5631 addr = gen_rtx_MEM (tmode, op1);
5635 op0 = copy_to_mode_reg (mode0, op0);
5636 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5639 pat = GEN_FCN (icode) (target, addr);
5649 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5651 tree arg0 = TREE_VALUE (arglist);
5652 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5653 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5654 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5655 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5656 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5658 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5659 enum machine_mode mode1 = Pmode;
5660 enum machine_mode mode2 = Pmode;
5662 /* Invalid arguments. Bail before doing anything stoopid! */
5663 if (arg0 == error_mark_node
5664 || arg1 == error_mark_node
5665 || arg2 == error_mark_node)
5668 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5669 op0 = copy_to_mode_reg (tmode, op0);
5671 op2 = copy_to_mode_reg (mode2, op2);
5673 if (op1 == const0_rtx)
5675 addr = gen_rtx_MEM (tmode, op2);
5679 op1 = copy_to_mode_reg (mode1, op1);
5680 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5683 pat = GEN_FCN (icode) (addr, op0);
5690 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5693 tree arg0 = TREE_VALUE (arglist);
5694 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5695 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5696 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5697 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5698 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5699 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5700 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5701 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5702 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5704 if (icode == CODE_FOR_nothing)
5705 /* Builtin not supported on this processor. */
5708 /* If we got invalid arguments bail out before generating bad rtl. */
5709 if (arg0 == error_mark_node
5710 || arg1 == error_mark_node
5711 || arg2 == error_mark_node)
5714 if (icode == CODE_FOR_altivec_vsldoi_4sf
5715 || icode == CODE_FOR_altivec_vsldoi_4si
5716 || icode == CODE_FOR_altivec_vsldoi_8hi
5717 || icode == CODE_FOR_altivec_vsldoi_16qi)
5719 /* Only allow 4-bit unsigned literals. */
5720 if (TREE_CODE (arg2) != INTEGER_CST
5721 || TREE_INT_CST_LOW (arg2) & ~0xf)
5723 error ("argument 3 must be a 4-bit unsigned literal");
5729 || GET_MODE (target) != tmode
5730 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5731 target = gen_reg_rtx (tmode);
5733 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5734 op0 = copy_to_mode_reg (mode0, op0);
5735 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5736 op1 = copy_to_mode_reg (mode1, op1);
5737 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5738 op2 = copy_to_mode_reg (mode2, op2);
5740 pat = GEN_FCN (icode) (target, op0, op1, op2);
5748 /* Expand the lvx builtins. */
5750 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5752 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5753 tree arglist = TREE_OPERAND (exp, 1);
5754 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5756 enum machine_mode tmode, mode0;
5758 enum insn_code icode;
5762 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5763 icode = CODE_FOR_altivec_lvx_16qi;
5765 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5766 icode = CODE_FOR_altivec_lvx_8hi;
5768 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5769 icode = CODE_FOR_altivec_lvx_4si;
5771 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5772 icode = CODE_FOR_altivec_lvx_4sf;
5781 arg0 = TREE_VALUE (arglist);
5782 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5783 tmode = insn_data[icode].operand[0].mode;
5784 mode0 = insn_data[icode].operand[1].mode;
5787 || GET_MODE (target) != tmode
5788 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5789 target = gen_reg_rtx (tmode);
5791 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5792 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5794 pat = GEN_FCN (icode) (target, op0);
5801 /* Expand the stvx builtins. */
5803 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5806 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5807 tree arglist = TREE_OPERAND (exp, 1);
5808 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5810 enum machine_mode mode0, mode1;
5812 enum insn_code icode;
5816 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5817 icode = CODE_FOR_altivec_stvx_16qi;
5819 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5820 icode = CODE_FOR_altivec_stvx_8hi;
5822 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5823 icode = CODE_FOR_altivec_stvx_4si;
5825 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5826 icode = CODE_FOR_altivec_stvx_4sf;
5833 arg0 = TREE_VALUE (arglist);
5834 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5835 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5836 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5837 mode0 = insn_data[icode].operand[0].mode;
5838 mode1 = insn_data[icode].operand[1].mode;
5840 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5841 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5842 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5843 op1 = copy_to_mode_reg (mode1, op1);
5845 pat = GEN_FCN (icode) (op0, op1);
5853 /* Expand the dst builtins. */
5855 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5858 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5859 tree arglist = TREE_OPERAND (exp, 1);
5860 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5861 tree arg0, arg1, arg2;
5862 enum machine_mode mode0, mode1, mode2;
5863 rtx pat, op0, op1, op2;
5864 struct builtin_description *d;
5869 /* Handle DST variants. */
5870 d = (struct builtin_description *) bdesc_dst;
5871 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5872 if (d->code == fcode)
5874 arg0 = TREE_VALUE (arglist);
5875 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5876 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5877 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5878 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5879 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5880 mode0 = insn_data[d->icode].operand[0].mode;
5881 mode1 = insn_data[d->icode].operand[1].mode;
5882 mode2 = insn_data[d->icode].operand[2].mode;
5884 /* Invalid arguments, bail out before generating bad rtl. */
5885 if (arg0 == error_mark_node
5886 || arg1 == error_mark_node
5887 || arg2 == error_mark_node)
5890 if (TREE_CODE (arg2) != INTEGER_CST
5891 || TREE_INT_CST_LOW (arg2) & ~0x3)
5893 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5897 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5898 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5899 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5900 op1 = copy_to_mode_reg (mode1, op1);
5902 pat = GEN_FCN (d->icode) (op0, op1, op2);
5913 /* Expand the builtin in EXP and store the result in TARGET. Store
5914 true in *EXPANDEDP if we found a builtin to expand. */
5916 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
5918 struct builtin_description *d;
5919 struct builtin_description_predicates *dp;
5921 enum insn_code icode;
5922 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5923 tree arglist = TREE_OPERAND (exp, 1);
5926 enum machine_mode tmode, mode0;
5927 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5929 target = altivec_expand_ld_builtin (exp, target, expandedp);
5933 target = altivec_expand_st_builtin (exp, target, expandedp);
5937 target = altivec_expand_dst_builtin (exp, target, expandedp);
5945 case ALTIVEC_BUILTIN_STVX:
5946 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5947 case ALTIVEC_BUILTIN_STVEBX:
5948 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5949 case ALTIVEC_BUILTIN_STVEHX:
5950 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5951 case ALTIVEC_BUILTIN_STVEWX:
5952 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5953 case ALTIVEC_BUILTIN_STVXL:
5954 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5956 case ALTIVEC_BUILTIN_MFVSCR:
5957 icode = CODE_FOR_altivec_mfvscr;
5958 tmode = insn_data[icode].operand[0].mode;
5961 || GET_MODE (target) != tmode
5962 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5963 target = gen_reg_rtx (tmode);
5965 pat = GEN_FCN (icode) (target);
5971 case ALTIVEC_BUILTIN_MTVSCR:
5972 icode = CODE_FOR_altivec_mtvscr;
5973 arg0 = TREE_VALUE (arglist);
5974 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5975 mode0 = insn_data[icode].operand[0].mode;
5977 /* If we got invalid arguments bail out before generating bad rtl. */
5978 if (arg0 == error_mark_node)
5981 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5982 op0 = copy_to_mode_reg (mode0, op0);
5984 pat = GEN_FCN (icode) (op0);
5989 case ALTIVEC_BUILTIN_DSSALL:
5990 emit_insn (gen_altivec_dssall ());
5993 case ALTIVEC_BUILTIN_DSS:
5994 icode = CODE_FOR_altivec_dss;
5995 arg0 = TREE_VALUE (arglist);
5996 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5997 mode0 = insn_data[icode].operand[0].mode;
5999 /* If we got invalid arguments bail out before generating bad rtl. */
6000 if (arg0 == error_mark_node)
6003 if (TREE_CODE (arg0) != INTEGER_CST
6004 || TREE_INT_CST_LOW (arg0) & ~0x3)
6006 error ("argument to dss must be a 2-bit unsigned literal");
6010 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6011 op0 = copy_to_mode_reg (mode0, op0);
6013 emit_insn (gen_altivec_dss (op0));
6017 /* Expand abs* operations. */
6018 d = (struct builtin_description *) bdesc_abs;
6019 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6020 if (d->code == fcode)
6021 return altivec_expand_abs_builtin (d->icode, arglist, target);
6023 /* Expand the AltiVec predicates. */
6024 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6025 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6026 if (dp->code == fcode)
6027 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6029 /* LV* are funky. We initialized them differently. */
6032 case ALTIVEC_BUILTIN_LVSL:
6033 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6035 case ALTIVEC_BUILTIN_LVSR:
6036 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6038 case ALTIVEC_BUILTIN_LVEBX:
6039 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6041 case ALTIVEC_BUILTIN_LVEHX:
6042 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6044 case ALTIVEC_BUILTIN_LVEWX:
6045 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6047 case ALTIVEC_BUILTIN_LVXL:
6048 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6050 case ALTIVEC_BUILTIN_LVX:
6051 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6062 /* Binops that need to be initialized manually, but can be expanded
6063 automagically by rs6000_expand_binop_builtin. */
6064 static struct builtin_description bdesc_2arg_spe[] =
6066 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6067 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6068 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6069 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6070 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6071 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6072 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6073 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6074 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6075 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6076 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6077 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6078 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6079 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6080 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6081 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6082 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6083 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6084 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6085 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6086 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6087 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6090 /* Expand the builtin in EXP and store the result in TARGET. Store
6091 true in *EXPANDEDP if we found a builtin to expand.
6093 This expands the SPE builtins that are not simple unary and binary
6096 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6098 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6099 tree arglist = TREE_OPERAND (exp, 1);
6101 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6102 enum insn_code icode;
6103 enum machine_mode tmode, mode0;
6105 struct builtin_description *d;
6110 /* Syntax check for a 5-bit unsigned immediate. */
6113 case SPE_BUILTIN_EVSTDD:
6114 case SPE_BUILTIN_EVSTDH:
6115 case SPE_BUILTIN_EVSTDW:
6116 case SPE_BUILTIN_EVSTWHE:
6117 case SPE_BUILTIN_EVSTWHO:
6118 case SPE_BUILTIN_EVSTWWE:
6119 case SPE_BUILTIN_EVSTWWO:
6120 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6121 if (TREE_CODE (arg1) != INTEGER_CST
6122 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6124 error ("argument 2 must be a 5-bit unsigned literal");
6132 /* The evsplat*i instructions are not quite generic. */
6135 case SPE_BUILTIN_EVSPLATFI:
6136 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6138 case SPE_BUILTIN_EVSPLATI:
6139 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6145 d = (struct builtin_description *) bdesc_2arg_spe;
6146 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6147 if (d->code == fcode)
6148 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6150 d = (struct builtin_description *) bdesc_spe_predicates;
6151 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6152 if (d->code == fcode)
6153 return spe_expand_predicate_builtin (d->icode, arglist, target);
6155 d = (struct builtin_description *) bdesc_spe_evsel;
6156 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6157 if (d->code == fcode)
6158 return spe_expand_evsel_builtin (d->icode, arglist, target);
6162 case SPE_BUILTIN_EVSTDDX:
6163 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6164 case SPE_BUILTIN_EVSTDHX:
6165 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6166 case SPE_BUILTIN_EVSTDWX:
6167 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6168 case SPE_BUILTIN_EVSTWHEX:
6169 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6170 case SPE_BUILTIN_EVSTWHOX:
6171 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6172 case SPE_BUILTIN_EVSTWWEX:
6173 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6174 case SPE_BUILTIN_EVSTWWOX:
6175 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6176 case SPE_BUILTIN_EVSTDD:
6177 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6178 case SPE_BUILTIN_EVSTDH:
6179 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6180 case SPE_BUILTIN_EVSTDW:
6181 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6182 case SPE_BUILTIN_EVSTWHE:
6183 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6184 case SPE_BUILTIN_EVSTWHO:
6185 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6186 case SPE_BUILTIN_EVSTWWE:
6187 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6188 case SPE_BUILTIN_EVSTWWO:
6189 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6190 case SPE_BUILTIN_MFSPEFSCR:
6191 icode = CODE_FOR_spe_mfspefscr;
6192 tmode = insn_data[icode].operand[0].mode;
6195 || GET_MODE (target) != tmode
6196 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6197 target = gen_reg_rtx (tmode);
6199 pat = GEN_FCN (icode) (target);
6204 case SPE_BUILTIN_MTSPEFSCR:
6205 icode = CODE_FOR_spe_mtspefscr;
6206 arg0 = TREE_VALUE (arglist);
6207 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6208 mode0 = insn_data[icode].operand[0].mode;
6210 if (arg0 == error_mark_node)
6213 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6214 op0 = copy_to_mode_reg (mode0, op0);
6216 pat = GEN_FCN (icode) (op0);
6229 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6231 rtx pat, scratch, tmp;
6232 tree form = TREE_VALUE (arglist);
6233 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6234 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6235 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6236 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6237 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6238 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6242 if (TREE_CODE (form) != INTEGER_CST)
6244 error ("argument 1 of __builtin_spe_predicate must be a constant");
6248 form_int = TREE_INT_CST_LOW (form);
6253 if (arg0 == error_mark_node || arg1 == error_mark_node)
6257 || GET_MODE (target) != SImode
6258 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6259 target = gen_reg_rtx (SImode);
6261 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6262 op0 = copy_to_mode_reg (mode0, op0);
6263 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6264 op1 = copy_to_mode_reg (mode1, op1);
6266 scratch = gen_reg_rtx (CCmode);
6268 pat = GEN_FCN (icode) (scratch, op0, op1);
6273 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6274 _lower_. We use one compare, but look in different bits of the
6275 CR for each variant.
6277 There are 2 elements in each SPE simd type (upper/lower). The CR
6278 bits are set as follows:
6280 BIT0 | BIT 1 | BIT 2 | BIT 3
6281 U | L | (U | L) | (U & L)
6283 So, for an "all" relationship, BIT 3 would be set.
6284 For an "any" relationship, BIT 2 would be set. Etc.
6286 Following traditional nomenclature, these bits map to:
6288 BIT0 | BIT 1 | BIT 2 | BIT 3
6291 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6296 /* All variant. OV bit. */
6298 /* We need to get to the OV bit, which is the ORDERED bit. We
6299 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6300 that's ugly and will trigger a validate_condition_mode abort.
6301 So let's just use another pattern. */
6302 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6304 /* Any variant. EQ bit. */
6308 /* Upper variant. LT bit. */
6312 /* Lower variant. GT bit. */
6317 error ("argument 1 of __builtin_spe_predicate is out of range");
6321 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6322 emit_move_insn (target, tmp);
6327 /* The evsel builtins look like this:
6329 e = __builtin_spe_evsel_OP (a, b, c, d);
6333 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6334 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6338 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6341 tree arg0 = TREE_VALUE (arglist);
6342 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6343 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6344 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6345 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6346 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6347 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6348 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6349 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6350 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6355 if (arg0 == error_mark_node || arg1 == error_mark_node
6356 || arg2 == error_mark_node || arg3 == error_mark_node)
6360 || GET_MODE (target) != mode0
6361 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6362 target = gen_reg_rtx (mode0);
6364 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6365 op0 = copy_to_mode_reg (mode0, op0);
6366 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6367 op1 = copy_to_mode_reg (mode0, op1);
6368 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6369 op2 = copy_to_mode_reg (mode0, op2);
6370 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6371 op3 = copy_to_mode_reg (mode0, op3);
6373 /* Generate the compare. */
6374 scratch = gen_reg_rtx (CCmode);
6375 pat = GEN_FCN (icode) (scratch, op0, op1);
6380 if (mode0 == V2SImode)
6381 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6383 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6388 /* Expand an expression EXP that calls a built-in function,
6389 with result going to TARGET if that's convenient
6390 (and in mode MODE if that's convenient).
6391 SUBTARGET may be used as the target for computing one of EXP's operands.
6392 IGNORE is nonzero if the value is to be ignored. */
6395 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6396 enum machine_mode mode ATTRIBUTE_UNUSED,
6397 int ignore ATTRIBUTE_UNUSED)
6399 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6400 tree arglist = TREE_OPERAND (exp, 1);
6401 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6402 struct builtin_description *d;
6409 ret = altivec_expand_builtin (exp, target, &success);
6416 ret = spe_expand_builtin (exp, target, &success);
6422 if (TARGET_ALTIVEC || TARGET_SPE)
6424 /* Handle simple unary operations. */
6425 d = (struct builtin_description *) bdesc_1arg;
6426 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6427 if (d->code == fcode)
6428 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6430 /* Handle simple binary operations. */
6431 d = (struct builtin_description *) bdesc_2arg;
6432 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6433 if (d->code == fcode)
6434 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6436 /* Handle simple ternary operations. */
6437 d = (struct builtin_description *) bdesc_3arg;
6438 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6439 if (d->code == fcode)
6440 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6448 rs6000_init_builtins (void)
6450 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6451 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6452 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6455 spe_init_builtins ();
6457 altivec_init_builtins ();
6458 if (TARGET_ALTIVEC || TARGET_SPE)
6459 rs6000_common_init_builtins ();
6462 /* Search through a set of builtins and enable the mask bits.
6463 DESC is an array of builtins.
6464 SIZE is the total number of builtins.
6465 START is the builtin enum at which to start.
6466 END is the builtin enum at which to end. */
6468 enable_mask_for_builtins (struct builtin_description *desc, int size,
6469 enum rs6000_builtins start,
6470 enum rs6000_builtins end)
6474 for (i = 0; i < size; ++i)
6475 if (desc[i].code == start)
6481 for (; i < size; ++i)
6483 /* Flip all the bits on. */
6484 desc[i].mask = target_flags;
6485 if (desc[i].code == end)
6491 spe_init_builtins (void)
6493 tree endlink = void_list_node;
6494 tree puint_type_node = build_pointer_type (unsigned_type_node);
6495 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6496 struct builtin_description *d;
6499 tree v2si_ftype_4_v2si
6500 = build_function_type
6501 (opaque_V2SI_type_node,
6502 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6503 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6504 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6505 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6508 tree v2sf_ftype_4_v2sf
6509 = build_function_type
6510 (opaque_V2SF_type_node,
6511 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6512 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6513 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6514 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6517 tree int_ftype_int_v2si_v2si
6518 = build_function_type
6520 tree_cons (NULL_TREE, integer_type_node,
6521 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6522 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6525 tree int_ftype_int_v2sf_v2sf
6526 = build_function_type
6528 tree_cons (NULL_TREE, integer_type_node,
6529 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6530 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6533 tree void_ftype_v2si_puint_int
6534 = build_function_type (void_type_node,
6535 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6536 tree_cons (NULL_TREE, puint_type_node,
6537 tree_cons (NULL_TREE,
6541 tree void_ftype_v2si_puint_char
6542 = build_function_type (void_type_node,
6543 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6544 tree_cons (NULL_TREE, puint_type_node,
6545 tree_cons (NULL_TREE,
6549 tree void_ftype_v2si_pv2si_int
6550 = build_function_type (void_type_node,
6551 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6552 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6553 tree_cons (NULL_TREE,
6557 tree void_ftype_v2si_pv2si_char
6558 = build_function_type (void_type_node,
6559 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6560 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6561 tree_cons (NULL_TREE,
6566 = build_function_type (void_type_node,
6567 tree_cons (NULL_TREE, integer_type_node, endlink));
6570 = build_function_type (integer_type_node, endlink);
6572 tree v2si_ftype_pv2si_int
6573 = build_function_type (opaque_V2SI_type_node,
6574 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6575 tree_cons (NULL_TREE, integer_type_node,
6578 tree v2si_ftype_puint_int
6579 = build_function_type (opaque_V2SI_type_node,
6580 tree_cons (NULL_TREE, puint_type_node,
6581 tree_cons (NULL_TREE, integer_type_node,
6584 tree v2si_ftype_pushort_int
6585 = build_function_type (opaque_V2SI_type_node,
6586 tree_cons (NULL_TREE, pushort_type_node,
6587 tree_cons (NULL_TREE, integer_type_node,
6590 tree v2si_ftype_signed_char
6591 = build_function_type (opaque_V2SI_type_node,
6592 tree_cons (NULL_TREE, signed_char_type_node,
6595 /* The initialization of the simple binary and unary builtins is
6596 done in rs6000_common_init_builtins, but we have to enable the
6597 mask bits here manually because we have run out of `target_flags'
6598 bits. We really need to redesign this mask business. */
6600 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6601 ARRAY_SIZE (bdesc_2arg),
6604 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6605 ARRAY_SIZE (bdesc_1arg),
6607 SPE_BUILTIN_EVSUBFUSIAAW);
6608 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6609 ARRAY_SIZE (bdesc_spe_predicates),
6610 SPE_BUILTIN_EVCMPEQ,
6611 SPE_BUILTIN_EVFSTSTLT);
6612 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6613 ARRAY_SIZE (bdesc_spe_evsel),
6614 SPE_BUILTIN_EVSEL_CMPGTS,
6615 SPE_BUILTIN_EVSEL_FSTSTEQ);
6617 (*lang_hooks.decls.pushdecl)
6618 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6619 opaque_V2SI_type_node));
6621 /* Initialize irregular SPE builtins. */
6623 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6624 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6625 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6626 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6627 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6628 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6629 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6630 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6631 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6632 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6633 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6634 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6635 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6636 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6637 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6638 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6639 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6640 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6643 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6644 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6645 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6646 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6647 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6648 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6649 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6650 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6651 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6652 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6653 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6654 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6655 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6656 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6657 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6658 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6659 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6660 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6661 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6662 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6663 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6664 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6667 d = (struct builtin_description *) bdesc_spe_predicates;
6668 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6672 switch (insn_data[d->icode].operand[1].mode)
6675 type = int_ftype_int_v2si_v2si;
6678 type = int_ftype_int_v2sf_v2sf;
6684 def_builtin (d->mask, d->name, type, d->code);
6687 /* Evsel predicates. */
6688 d = (struct builtin_description *) bdesc_spe_evsel;
6689 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6693 switch (insn_data[d->icode].operand[1].mode)
6696 type = v2si_ftype_4_v2si;
6699 type = v2sf_ftype_4_v2sf;
6705 def_builtin (d->mask, d->name, type, d->code);
6710 altivec_init_builtins (void)
6712 struct builtin_description *d;
6713 struct builtin_description_predicates *dp;
6715 tree pfloat_type_node = build_pointer_type (float_type_node);
6716 tree pint_type_node = build_pointer_type (integer_type_node);
6717 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6718 tree pchar_type_node = build_pointer_type (char_type_node);
6720 tree pvoid_type_node = build_pointer_type (void_type_node);
6722 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6723 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6724 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6725 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6727 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6729 tree int_ftype_int_v4si_v4si
6730 = build_function_type_list (integer_type_node,
6731 integer_type_node, V4SI_type_node,
6732 V4SI_type_node, NULL_TREE);
6733 tree v4sf_ftype_pcfloat
6734 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6735 tree void_ftype_pfloat_v4sf
6736 = build_function_type_list (void_type_node,
6737 pfloat_type_node, V4SF_type_node, NULL_TREE);
6738 tree v4si_ftype_pcint
6739 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6740 tree void_ftype_pint_v4si
6741 = build_function_type_list (void_type_node,
6742 pint_type_node, V4SI_type_node, NULL_TREE);
6743 tree v8hi_ftype_pcshort
6744 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6745 tree void_ftype_pshort_v8hi
6746 = build_function_type_list (void_type_node,
6747 pshort_type_node, V8HI_type_node, NULL_TREE);
6748 tree v16qi_ftype_pcchar
6749 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6750 tree void_ftype_pchar_v16qi
6751 = build_function_type_list (void_type_node,
6752 pchar_type_node, V16QI_type_node, NULL_TREE);
6753 tree void_ftype_v4si
6754 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6755 tree v8hi_ftype_void
6756 = build_function_type (V8HI_type_node, void_list_node);
6757 tree void_ftype_void
6758 = build_function_type (void_type_node, void_list_node);
6760 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6762 tree v16qi_ftype_long_pcvoid
6763 = build_function_type_list (V16QI_type_node,
6764 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6765 tree v8hi_ftype_long_pcvoid
6766 = build_function_type_list (V8HI_type_node,
6767 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6768 tree v4si_ftype_long_pcvoid
6769 = build_function_type_list (V4SI_type_node,
6770 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6772 tree void_ftype_v4si_long_pvoid
6773 = build_function_type_list (void_type_node,
6774 V4SI_type_node, long_integer_type_node,
6775 pvoid_type_node, NULL_TREE);
6776 tree void_ftype_v16qi_long_pvoid
6777 = build_function_type_list (void_type_node,
6778 V16QI_type_node, long_integer_type_node,
6779 pvoid_type_node, NULL_TREE);
6780 tree void_ftype_v8hi_long_pvoid
6781 = build_function_type_list (void_type_node,
6782 V8HI_type_node, long_integer_type_node,
6783 pvoid_type_node, NULL_TREE);
6784 tree int_ftype_int_v8hi_v8hi
6785 = build_function_type_list (integer_type_node,
6786 integer_type_node, V8HI_type_node,
6787 V8HI_type_node, NULL_TREE);
6788 tree int_ftype_int_v16qi_v16qi
6789 = build_function_type_list (integer_type_node,
6790 integer_type_node, V16QI_type_node,
6791 V16QI_type_node, NULL_TREE);
6792 tree int_ftype_int_v4sf_v4sf
6793 = build_function_type_list (integer_type_node,
6794 integer_type_node, V4SF_type_node,
6795 V4SF_type_node, NULL_TREE);
6796 tree v4si_ftype_v4si
6797 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6798 tree v8hi_ftype_v8hi
6799 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6800 tree v16qi_ftype_v16qi
6801 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6802 tree v4sf_ftype_v4sf
6803 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6804 tree void_ftype_pcvoid_int_char
6805 = build_function_type_list (void_type_node,
6806 pcvoid_type_node, integer_type_node,
6807 char_type_node, NULL_TREE);
6809 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6810 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6811 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6812 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6813 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6814 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6815 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6816 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6817 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6818 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6819 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6820 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6821 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6822 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6823 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6824 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6825 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6826 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6827 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6828 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6829 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
6830 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
6831 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6832 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6833 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6834 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
6835 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
6836 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
6837 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
6838 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
6839 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
6840 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
6842 /* Add the DST variants. */
6843 d = (struct builtin_description *) bdesc_dst;
6844 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6845 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6847 /* Initialize the predicates. */
6848 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6849 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6851 enum machine_mode mode1;
6854 mode1 = insn_data[dp->icode].operand[1].mode;
6859 type = int_ftype_int_v4si_v4si;
6862 type = int_ftype_int_v8hi_v8hi;
6865 type = int_ftype_int_v16qi_v16qi;
6868 type = int_ftype_int_v4sf_v4sf;
6874 def_builtin (dp->mask, dp->name, type, dp->code);
6877 /* Initialize the abs* operators. */
6878 d = (struct builtin_description *) bdesc_abs;
6879 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6881 enum machine_mode mode0;
6884 mode0 = insn_data[d->icode].operand[0].mode;
6889 type = v4si_ftype_v4si;
6892 type = v8hi_ftype_v8hi;
6895 type = v16qi_ftype_v16qi;
6898 type = v4sf_ftype_v4sf;
6904 def_builtin (d->mask, d->name, type, d->code);
6909 rs6000_common_init_builtins (void)
6911 struct builtin_description *d;
6914 tree v4sf_ftype_v4sf_v4sf_v16qi
6915 = build_function_type_list (V4SF_type_node,
6916 V4SF_type_node, V4SF_type_node,
6917 V16QI_type_node, NULL_TREE);
6918 tree v4si_ftype_v4si_v4si_v16qi
6919 = build_function_type_list (V4SI_type_node,
6920 V4SI_type_node, V4SI_type_node,
6921 V16QI_type_node, NULL_TREE);
6922 tree v8hi_ftype_v8hi_v8hi_v16qi
6923 = build_function_type_list (V8HI_type_node,
6924 V8HI_type_node, V8HI_type_node,
6925 V16QI_type_node, NULL_TREE);
6926 tree v16qi_ftype_v16qi_v16qi_v16qi
6927 = build_function_type_list (V16QI_type_node,
6928 V16QI_type_node, V16QI_type_node,
6929 V16QI_type_node, NULL_TREE);
6930 tree v4si_ftype_char
6931 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6932 tree v8hi_ftype_char
6933 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6934 tree v16qi_ftype_char
6935 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6936 tree v8hi_ftype_v16qi
6937 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6938 tree v4sf_ftype_v4sf
6939 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6941 tree v2si_ftype_v2si_v2si
6942 = build_function_type_list (opaque_V2SI_type_node,
6943 opaque_V2SI_type_node,
6944 opaque_V2SI_type_node, NULL_TREE);
6946 tree v2sf_ftype_v2sf_v2sf
6947 = build_function_type_list (opaque_V2SF_type_node,
6948 opaque_V2SF_type_node,
6949 opaque_V2SF_type_node, NULL_TREE);
6951 tree v2si_ftype_int_int
6952 = build_function_type_list (opaque_V2SI_type_node,
6953 integer_type_node, integer_type_node,
6956 tree v2si_ftype_v2si
6957 = build_function_type_list (opaque_V2SI_type_node,
6958 opaque_V2SI_type_node, NULL_TREE);
6960 tree v2sf_ftype_v2sf
6961 = build_function_type_list (opaque_V2SF_type_node,
6962 opaque_V2SF_type_node, NULL_TREE);
6964 tree v2sf_ftype_v2si
6965 = build_function_type_list (opaque_V2SF_type_node,
6966 opaque_V2SI_type_node, NULL_TREE);
6968 tree v2si_ftype_v2sf
6969 = build_function_type_list (opaque_V2SI_type_node,
6970 opaque_V2SF_type_node, NULL_TREE);
6972 tree v2si_ftype_v2si_char
6973 = build_function_type_list (opaque_V2SI_type_node,
6974 opaque_V2SI_type_node,
6975 char_type_node, NULL_TREE);
6977 tree v2si_ftype_int_char
6978 = build_function_type_list (opaque_V2SI_type_node,
6979 integer_type_node, char_type_node, NULL_TREE);
6981 tree v2si_ftype_char
6982 = build_function_type_list (opaque_V2SI_type_node,
6983 char_type_node, NULL_TREE);
6985 tree int_ftype_int_int
6986 = build_function_type_list (integer_type_node,
6987 integer_type_node, integer_type_node,
6990 tree v4si_ftype_v4si_v4si
6991 = build_function_type_list (V4SI_type_node,
6992 V4SI_type_node, V4SI_type_node, NULL_TREE);
6993 tree v4sf_ftype_v4si_char
6994 = build_function_type_list (V4SF_type_node,
6995 V4SI_type_node, char_type_node, NULL_TREE);
6996 tree v4si_ftype_v4sf_char
6997 = build_function_type_list (V4SI_type_node,
6998 V4SF_type_node, char_type_node, NULL_TREE);
6999 tree v4si_ftype_v4si_char
7000 = build_function_type_list (V4SI_type_node,
7001 V4SI_type_node, char_type_node, NULL_TREE);
7002 tree v8hi_ftype_v8hi_char
7003 = build_function_type_list (V8HI_type_node,
7004 V8HI_type_node, char_type_node, NULL_TREE);
7005 tree v16qi_ftype_v16qi_char
7006 = build_function_type_list (V16QI_type_node,
7007 V16QI_type_node, char_type_node, NULL_TREE);
7008 tree v16qi_ftype_v16qi_v16qi_char
7009 = build_function_type_list (V16QI_type_node,
7010 V16QI_type_node, V16QI_type_node,
7011 char_type_node, NULL_TREE);
7012 tree v8hi_ftype_v8hi_v8hi_char
7013 = build_function_type_list (V8HI_type_node,
7014 V8HI_type_node, V8HI_type_node,
7015 char_type_node, NULL_TREE);
7016 tree v4si_ftype_v4si_v4si_char
7017 = build_function_type_list (V4SI_type_node,
7018 V4SI_type_node, V4SI_type_node,
7019 char_type_node, NULL_TREE);
7020 tree v4sf_ftype_v4sf_v4sf_char
7021 = build_function_type_list (V4SF_type_node,
7022 V4SF_type_node, V4SF_type_node,
7023 char_type_node, NULL_TREE);
7024 tree v4sf_ftype_v4sf_v4sf
7025 = build_function_type_list (V4SF_type_node,
7026 V4SF_type_node, V4SF_type_node, NULL_TREE);
7027 tree v4sf_ftype_v4sf_v4sf_v4si
7028 = build_function_type_list (V4SF_type_node,
7029 V4SF_type_node, V4SF_type_node,
7030 V4SI_type_node, NULL_TREE);
7031 tree v4sf_ftype_v4sf_v4sf_v4sf
7032 = build_function_type_list (V4SF_type_node,
7033 V4SF_type_node, V4SF_type_node,
7034 V4SF_type_node, NULL_TREE);
7035 tree v4si_ftype_v4si_v4si_v4si
7036 = build_function_type_list (V4SI_type_node,
7037 V4SI_type_node, V4SI_type_node,
7038 V4SI_type_node, NULL_TREE);
7039 tree v8hi_ftype_v8hi_v8hi
7040 = build_function_type_list (V8HI_type_node,
7041 V8HI_type_node, V8HI_type_node, NULL_TREE);
7042 tree v8hi_ftype_v8hi_v8hi_v8hi
7043 = build_function_type_list (V8HI_type_node,
7044 V8HI_type_node, V8HI_type_node,
7045 V8HI_type_node, NULL_TREE);
7046 tree v4si_ftype_v8hi_v8hi_v4si
7047 = build_function_type_list (V4SI_type_node,
7048 V8HI_type_node, V8HI_type_node,
7049 V4SI_type_node, NULL_TREE);
7050 tree v4si_ftype_v16qi_v16qi_v4si
7051 = build_function_type_list (V4SI_type_node,
7052 V16QI_type_node, V16QI_type_node,
7053 V4SI_type_node, NULL_TREE);
7054 tree v16qi_ftype_v16qi_v16qi
7055 = build_function_type_list (V16QI_type_node,
7056 V16QI_type_node, V16QI_type_node, NULL_TREE);
7057 tree v4si_ftype_v4sf_v4sf
7058 = build_function_type_list (V4SI_type_node,
7059 V4SF_type_node, V4SF_type_node, NULL_TREE);
7060 tree v8hi_ftype_v16qi_v16qi
7061 = build_function_type_list (V8HI_type_node,
7062 V16QI_type_node, V16QI_type_node, NULL_TREE);
7063 tree v4si_ftype_v8hi_v8hi
7064 = build_function_type_list (V4SI_type_node,
7065 V8HI_type_node, V8HI_type_node, NULL_TREE);
7066 tree v8hi_ftype_v4si_v4si
7067 = build_function_type_list (V8HI_type_node,
7068 V4SI_type_node, V4SI_type_node, NULL_TREE);
7069 tree v16qi_ftype_v8hi_v8hi
7070 = build_function_type_list (V16QI_type_node,
7071 V8HI_type_node, V8HI_type_node, NULL_TREE);
7072 tree v4si_ftype_v16qi_v4si
7073 = build_function_type_list (V4SI_type_node,
7074 V16QI_type_node, V4SI_type_node, NULL_TREE);
7075 tree v4si_ftype_v16qi_v16qi
7076 = build_function_type_list (V4SI_type_node,
7077 V16QI_type_node, V16QI_type_node, NULL_TREE);
7078 tree v4si_ftype_v8hi_v4si
7079 = build_function_type_list (V4SI_type_node,
7080 V8HI_type_node, V4SI_type_node, NULL_TREE);
7081 tree v4si_ftype_v8hi
7082 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7083 tree int_ftype_v4si_v4si
7084 = build_function_type_list (integer_type_node,
7085 V4SI_type_node, V4SI_type_node, NULL_TREE);
7086 tree int_ftype_v4sf_v4sf
7087 = build_function_type_list (integer_type_node,
7088 V4SF_type_node, V4SF_type_node, NULL_TREE);
7089 tree int_ftype_v16qi_v16qi
7090 = build_function_type_list (integer_type_node,
7091 V16QI_type_node, V16QI_type_node, NULL_TREE);
7092 tree int_ftype_v8hi_v8hi
7093 = build_function_type_list (integer_type_node,
7094 V8HI_type_node, V8HI_type_node, NULL_TREE);
7096 /* Add the simple ternary operators. */
7097 d = (struct builtin_description *) bdesc_3arg;
7098 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7101 enum machine_mode mode0, mode1, mode2, mode3;
7104 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7107 mode0 = insn_data[d->icode].operand[0].mode;
7108 mode1 = insn_data[d->icode].operand[1].mode;
7109 mode2 = insn_data[d->icode].operand[2].mode;
7110 mode3 = insn_data[d->icode].operand[3].mode;
7112 /* When all four are of the same mode. */
7113 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7118 type = v4si_ftype_v4si_v4si_v4si;
7121 type = v4sf_ftype_v4sf_v4sf_v4sf;
7124 type = v8hi_ftype_v8hi_v8hi_v8hi;
7127 type = v16qi_ftype_v16qi_v16qi_v16qi;
7133 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7138 type = v4si_ftype_v4si_v4si_v16qi;
7141 type = v4sf_ftype_v4sf_v4sf_v16qi;
7144 type = v8hi_ftype_v8hi_v8hi_v16qi;
7147 type = v16qi_ftype_v16qi_v16qi_v16qi;
7153 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7154 && mode3 == V4SImode)
7155 type = v4si_ftype_v16qi_v16qi_v4si;
7156 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7157 && mode3 == V4SImode)
7158 type = v4si_ftype_v8hi_v8hi_v4si;
7159 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7160 && mode3 == V4SImode)
7161 type = v4sf_ftype_v4sf_v4sf_v4si;
7163 /* vchar, vchar, vchar, 4 bit literal. */
7164 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7166 type = v16qi_ftype_v16qi_v16qi_char;
7168 /* vshort, vshort, vshort, 4 bit literal. */
7169 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7171 type = v8hi_ftype_v8hi_v8hi_char;
7173 /* vint, vint, vint, 4 bit literal. */
7174 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7176 type = v4si_ftype_v4si_v4si_char;
7178 /* vfloat, vfloat, vfloat, 4 bit literal. */
7179 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7181 type = v4sf_ftype_v4sf_v4sf_char;
7186 def_builtin (d->mask, d->name, type, d->code);
7189 /* Add the simple binary operators. */
7190 d = (struct builtin_description *) bdesc_2arg;
7191 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7193 enum machine_mode mode0, mode1, mode2;
7196 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7199 mode0 = insn_data[d->icode].operand[0].mode;
7200 mode1 = insn_data[d->icode].operand[1].mode;
7201 mode2 = insn_data[d->icode].operand[2].mode;
7203 /* When all three operands are of the same mode. */
7204 if (mode0 == mode1 && mode1 == mode2)
7209 type = v4sf_ftype_v4sf_v4sf;
7212 type = v4si_ftype_v4si_v4si;
7215 type = v16qi_ftype_v16qi_v16qi;
7218 type = v8hi_ftype_v8hi_v8hi;
7221 type = v2si_ftype_v2si_v2si;
7224 type = v2sf_ftype_v2sf_v2sf;
7227 type = int_ftype_int_int;
7234 /* A few other combos we really don't want to do manually. */
7236 /* vint, vfloat, vfloat. */
7237 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7238 type = v4si_ftype_v4sf_v4sf;
7240 /* vshort, vchar, vchar. */
7241 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7242 type = v8hi_ftype_v16qi_v16qi;
7244 /* vint, vshort, vshort. */
7245 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7246 type = v4si_ftype_v8hi_v8hi;
7248 /* vshort, vint, vint. */
7249 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7250 type = v8hi_ftype_v4si_v4si;
7252 /* vchar, vshort, vshort. */
7253 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7254 type = v16qi_ftype_v8hi_v8hi;
7256 /* vint, vchar, vint. */
7257 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7258 type = v4si_ftype_v16qi_v4si;
7260 /* vint, vchar, vchar. */
7261 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7262 type = v4si_ftype_v16qi_v16qi;
7264 /* vint, vshort, vint. */
7265 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7266 type = v4si_ftype_v8hi_v4si;
7268 /* vint, vint, 5 bit literal. */
7269 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7270 type = v4si_ftype_v4si_char;
7272 /* vshort, vshort, 5 bit literal. */
7273 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7274 type = v8hi_ftype_v8hi_char;
7276 /* vchar, vchar, 5 bit literal. */
7277 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7278 type = v16qi_ftype_v16qi_char;
7280 /* vfloat, vint, 5 bit literal. */
7281 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7282 type = v4sf_ftype_v4si_char;
7284 /* vint, vfloat, 5 bit literal. */
7285 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7286 type = v4si_ftype_v4sf_char;
7288 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7289 type = v2si_ftype_int_int;
7291 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7292 type = v2si_ftype_v2si_char;
7294 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7295 type = v2si_ftype_int_char;
7298 else if (mode0 == SImode)
7303 type = int_ftype_v4si_v4si;
7306 type = int_ftype_v4sf_v4sf;
7309 type = int_ftype_v16qi_v16qi;
7312 type = int_ftype_v8hi_v8hi;
7322 def_builtin (d->mask, d->name, type, d->code);
7325 /* Add the simple unary operators. */
7326 d = (struct builtin_description *) bdesc_1arg;
7327 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7329 enum machine_mode mode0, mode1;
7332 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7335 mode0 = insn_data[d->icode].operand[0].mode;
7336 mode1 = insn_data[d->icode].operand[1].mode;
7338 if (mode0 == V4SImode && mode1 == QImode)
7339 type = v4si_ftype_char;
7340 else if (mode0 == V8HImode && mode1 == QImode)
7341 type = v8hi_ftype_char;
7342 else if (mode0 == V16QImode && mode1 == QImode)
7343 type = v16qi_ftype_char;
7344 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7345 type = v4sf_ftype_v4sf;
7346 else if (mode0 == V8HImode && mode1 == V16QImode)
7347 type = v8hi_ftype_v16qi;
7348 else if (mode0 == V4SImode && mode1 == V8HImode)
7349 type = v4si_ftype_v8hi;
7350 else if (mode0 == V2SImode && mode1 == V2SImode)
7351 type = v2si_ftype_v2si;
7352 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7353 type = v2sf_ftype_v2sf;
7354 else if (mode0 == V2SFmode && mode1 == V2SImode)
7355 type = v2sf_ftype_v2si;
7356 else if (mode0 == V2SImode && mode1 == V2SFmode)
7357 type = v2si_ftype_v2sf;
7358 else if (mode0 == V2SImode && mode1 == QImode)
7359 type = v2si_ftype_char;
7363 def_builtin (d->mask, d->name, type, d->code);
7368 rs6000_init_libfuncs (void)
7370 if (!TARGET_HARD_FLOAT)
7373 if (DEFAULT_ABI != ABI_V4)
7375 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7377 /* AIX library routines for float->int conversion. */
7378 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7379 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7380 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7381 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7384 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7385 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7386 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7387 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7388 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7392 /* 32-bit SVR4 quad floating point routines. */
7394 set_optab_libfunc (add_optab, TFmode, "_q_add");
7395 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7396 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7397 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7398 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7399 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7400 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7402 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7403 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7404 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7405 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7406 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7407 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7409 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7410 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7411 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7412 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7413 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7414 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7415 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7419 /* Expand a block move operation, and return 1 if successful. Return 0
7420 if we should let the compiler generate normal code.
7422 operands[0] is the destination
7423 operands[1] is the source
7424 operands[2] is the length
7425 operands[3] is the alignment */
7427 #define MAX_MOVE_REG 4
7430 expand_block_move (rtx operands[])
7432 rtx orig_dest = operands[0];
7433 rtx orig_src = operands[1];
7434 rtx bytes_rtx = operands[2];
7435 rtx align_rtx = operands[3];
7436 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7441 rtx stores[MAX_MOVE_REG];
7444 /* If this is not a fixed size move, just call memcpy */
7448 /* If this is not a fixed size alignment, abort */
7449 if (GET_CODE (align_rtx) != CONST_INT)
7451 align = INTVAL (align_rtx);
7453 /* Anything to move? */
7454 bytes = INTVAL (bytes_rtx);
7458 /* store_one_arg depends on expand_block_move to handle at least the size of
7459 reg_parm_stack_space. */
7460 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7463 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7466 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7467 rtx (*mov) (rtx, rtx);
7469 enum machine_mode mode = BLKmode;
7473 && bytes > 24 /* move up to 32 bytes at a time */
7481 && ! fixed_regs[12])
7483 move_bytes = (bytes > 32) ? 32 : bytes;
7484 gen_func.movstrsi = gen_movstrsi_8reg;
7486 else if (TARGET_STRING
7487 && bytes > 16 /* move up to 24 bytes at a time */
7493 && ! fixed_regs[10])
7495 move_bytes = (bytes > 24) ? 24 : bytes;
7496 gen_func.movstrsi = gen_movstrsi_6reg;
7498 else if (TARGET_STRING
7499 && bytes > 8 /* move up to 16 bytes at a time */
7505 move_bytes = (bytes > 16) ? 16 : bytes;
7506 gen_func.movstrsi = gen_movstrsi_4reg;
7508 else if (bytes >= 8 && TARGET_POWERPC64
7509 /* 64-bit loads and stores require word-aligned
7511 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7515 gen_func.mov = gen_movdi;
7517 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7518 { /* move up to 8 bytes at a time */
7519 move_bytes = (bytes > 8) ? 8 : bytes;
7520 gen_func.movstrsi = gen_movstrsi_2reg;
7522 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7523 { /* move 4 bytes */
7526 gen_func.mov = gen_movsi;
7528 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7529 { /* move 2 bytes */
7532 gen_func.mov = gen_movhi;
7534 else if (TARGET_STRING && bytes > 1)
7535 { /* move up to 4 bytes at a time */
7536 move_bytes = (bytes > 4) ? 4 : bytes;
7537 gen_func.movstrsi = gen_movstrsi_1reg;
7539 else /* move 1 byte at a time */
7543 gen_func.mov = gen_movqi;
7546 src = adjust_address (orig_src, mode, offset);
7547 dest = adjust_address (orig_dest, mode, offset);
7549 if (mode != BLKmode)
7551 rtx tmp_reg = gen_reg_rtx (mode);
7553 emit_insn ((*gen_func.mov) (tmp_reg, src));
7554 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7557 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7560 for (i = 0; i < num_reg; i++)
7561 emit_insn (stores[i]);
7565 if (mode == BLKmode)
7567 /* Move the address into scratch registers. The movstrsi
7568 patterns require zero offset. */
7569 if (!REG_P (XEXP (src, 0)))
7571 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7572 src = replace_equiv_address (src, src_reg);
7574 set_mem_size (src, GEN_INT (move_bytes));
7576 if (!REG_P (XEXP (dest, 0)))
7578 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7579 dest = replace_equiv_address (dest, dest_reg);
7581 set_mem_size (dest, GEN_INT (move_bytes));
7583 emit_insn ((*gen_func.movstrsi) (dest, src,
7584 GEN_INT (move_bytes & 31),
7593 /* Return 1 if OP is a load multiple operation. It is known to be a
7594 PARALLEL and the first section will be tested. */
7597 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7599 int count = XVECLEN (op, 0);
7600 unsigned int dest_regno;
7604 /* Perform a quick check so we don't blow up below. */
7606 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7607 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7608 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7611 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7612 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7614 for (i = 1; i < count; i++)
7616 rtx elt = XVECEXP (op, 0, i);
7618 if (GET_CODE (elt) != SET
7619 || GET_CODE (SET_DEST (elt)) != REG
7620 || GET_MODE (SET_DEST (elt)) != SImode
7621 || REGNO (SET_DEST (elt)) != dest_regno + i
7622 || GET_CODE (SET_SRC (elt)) != MEM
7623 || GET_MODE (SET_SRC (elt)) != SImode
7624 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7625 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7626 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7627 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7634 /* Similar, but tests for store multiple. Here, the second vector element
7635 is a CLOBBER. It will be tested later. */
7638 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7640 int count = XVECLEN (op, 0) - 1;
7641 unsigned int src_regno;
7645 /* Perform a quick check so we don't blow up below. */
7647 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7648 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7649 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7652 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7653 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7655 for (i = 1; i < count; i++)
7657 rtx elt = XVECEXP (op, 0, i + 1);
7659 if (GET_CODE (elt) != SET
7660 || GET_CODE (SET_SRC (elt)) != REG
7661 || GET_MODE (SET_SRC (elt)) != SImode
7662 || REGNO (SET_SRC (elt)) != src_regno + i
7663 || GET_CODE (SET_DEST (elt)) != MEM
7664 || GET_MODE (SET_DEST (elt)) != SImode
7665 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7666 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7667 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7668 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7675 /* Return a string to perform a load_multiple operation.
7676 operands[0] is the vector.
7677 operands[1] is the source address.
7678 operands[2] is the first destination register. */
7681 rs6000_output_load_multiple (rtx operands[3])
7683 /* We have to handle the case where the pseudo used to contain the address
7684 is assigned to one of the output registers. */
7686 int words = XVECLEN (operands[0], 0);
7689 if (XVECLEN (operands[0], 0) == 1)
7690 return "{l|lwz} %2,0(%1)";
7692 for (i = 0; i < words; i++)
7693 if (refers_to_regno_p (REGNO (operands[2]) + i,
7694 REGNO (operands[2]) + i + 1, operands[1], 0))
7698 xop[0] = GEN_INT (4 * (words-1));
7699 xop[1] = operands[1];
7700 xop[2] = operands[2];
7701 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7706 xop[0] = GEN_INT (4 * (words-1));
7707 xop[1] = operands[1];
7708 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7709 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7714 for (j = 0; j < words; j++)
7717 xop[0] = GEN_INT (j * 4);
7718 xop[1] = operands[1];
7719 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7720 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7722 xop[0] = GEN_INT (i * 4);
7723 xop[1] = operands[1];
7724 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7729 return "{lsi|lswi} %2,%1,%N0";
7732 /* Return 1 for a parallel vrsave operation. */
7735 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7737 int count = XVECLEN (op, 0);
7738 unsigned int dest_regno, src_regno;
7742 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7743 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7744 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7747 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7748 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7750 if (dest_regno != VRSAVE_REGNO
7751 && src_regno != VRSAVE_REGNO)
7754 for (i = 1; i < count; i++)
7756 rtx elt = XVECEXP (op, 0, i);
7758 if (GET_CODE (elt) != CLOBBER
7759 && GET_CODE (elt) != SET)
7766 /* Return 1 for an PARALLEL suitable for mfcr. */
7769 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7771 int count = XVECLEN (op, 0);
7774 /* Perform a quick check so we don't blow up below. */
7776 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7777 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7778 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7781 for (i = 0; i < count; i++)
7783 rtx exp = XVECEXP (op, 0, i);
7788 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7790 if (GET_CODE (src_reg) != REG
7791 || GET_MODE (src_reg) != CCmode
7792 || ! CR_REGNO_P (REGNO (src_reg)))
7795 if (GET_CODE (exp) != SET
7796 || GET_CODE (SET_DEST (exp)) != REG
7797 || GET_MODE (SET_DEST (exp)) != SImode
7798 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7800 unspec = SET_SRC (exp);
7801 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7803 if (GET_CODE (unspec) != UNSPEC
7804 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7805 || XVECLEN (unspec, 0) != 2
7806 || XVECEXP (unspec, 0, 0) != src_reg
7807 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7808 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7814 /* Return 1 for an PARALLEL suitable for mtcrf. */
7817 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7819 int count = XVECLEN (op, 0);
7823 /* Perform a quick check so we don't blow up below. */
7825 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7826 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7827 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7829 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7831 if (GET_CODE (src_reg) != REG
7832 || GET_MODE (src_reg) != SImode
7833 || ! INT_REGNO_P (REGNO (src_reg)))
7836 for (i = 0; i < count; i++)
7838 rtx exp = XVECEXP (op, 0, i);
7842 if (GET_CODE (exp) != SET
7843 || GET_CODE (SET_DEST (exp)) != REG
7844 || GET_MODE (SET_DEST (exp)) != CCmode
7845 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7847 unspec = SET_SRC (exp);
7848 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7850 if (GET_CODE (unspec) != UNSPEC
7851 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7852 || XVECLEN (unspec, 0) != 2
7853 || XVECEXP (unspec, 0, 0) != src_reg
7854 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7855 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7861 /* Return 1 for an PARALLEL suitable for lmw. */
7864 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7866 int count = XVECLEN (op, 0);
7867 unsigned int dest_regno;
7869 unsigned int base_regno;
7870 HOST_WIDE_INT offset;
7873 /* Perform a quick check so we don't blow up below. */
7875 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7876 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7877 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7880 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7881 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7884 || count != 32 - (int) dest_regno)
7887 if (legitimate_indirect_address_p (src_addr, 0))
7890 base_regno = REGNO (src_addr);
7891 if (base_regno == 0)
7894 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7896 offset = INTVAL (XEXP (src_addr, 1));
7897 base_regno = REGNO (XEXP (src_addr, 0));
7902 for (i = 0; i < count; i++)
7904 rtx elt = XVECEXP (op, 0, i);
7907 HOST_WIDE_INT newoffset;
7909 if (GET_CODE (elt) != SET
7910 || GET_CODE (SET_DEST (elt)) != REG
7911 || GET_MODE (SET_DEST (elt)) != SImode
7912 || REGNO (SET_DEST (elt)) != dest_regno + i
7913 || GET_CODE (SET_SRC (elt)) != MEM
7914 || GET_MODE (SET_SRC (elt)) != SImode)
7916 newaddr = XEXP (SET_SRC (elt), 0);
7917 if (legitimate_indirect_address_p (newaddr, 0))
7922 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7924 addr_reg = XEXP (newaddr, 0);
7925 newoffset = INTVAL (XEXP (newaddr, 1));
7929 if (REGNO (addr_reg) != base_regno
7930 || newoffset != offset + 4 * i)
7937 /* Return 1 for an PARALLEL suitable for stmw. */
7940 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7942 int count = XVECLEN (op, 0);
7943 unsigned int src_regno;
7945 unsigned int base_regno;
7946 HOST_WIDE_INT offset;
7949 /* Perform a quick check so we don't blow up below. */
7951 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7952 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7953 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7956 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7957 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7960 || count != 32 - (int) src_regno)
7963 if (legitimate_indirect_address_p (dest_addr, 0))
7966 base_regno = REGNO (dest_addr);
7967 if (base_regno == 0)
7970 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7972 offset = INTVAL (XEXP (dest_addr, 1));
7973 base_regno = REGNO (XEXP (dest_addr, 0));
7978 for (i = 0; i < count; i++)
7980 rtx elt = XVECEXP (op, 0, i);
7983 HOST_WIDE_INT newoffset;
7985 if (GET_CODE (elt) != SET
7986 || GET_CODE (SET_SRC (elt)) != REG
7987 || GET_MODE (SET_SRC (elt)) != SImode
7988 || REGNO (SET_SRC (elt)) != src_regno + i
7989 || GET_CODE (SET_DEST (elt)) != MEM
7990 || GET_MODE (SET_DEST (elt)) != SImode)
7992 newaddr = XEXP (SET_DEST (elt), 0);
7993 if (legitimate_indirect_address_p (newaddr, 0))
7998 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8000 addr_reg = XEXP (newaddr, 0);
8001 newoffset = INTVAL (XEXP (newaddr, 1));
8005 if (REGNO (addr_reg) != base_regno
8006 || newoffset != offset + 4 * i)
8013 /* A validation routine: say whether CODE, a condition code, and MODE
8014 match. The other alternatives either don't make sense or should
8015 never be generated. */
8018 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8020 if (GET_RTX_CLASS (code) != '<'
8021 || GET_MODE_CLASS (mode) != MODE_CC)
8024 /* These don't make sense. */
8025 if ((code == GT || code == LT || code == GE || code == LE)
8026 && mode == CCUNSmode)
8029 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8030 && mode != CCUNSmode)
8033 if (mode != CCFPmode
8034 && (code == ORDERED || code == UNORDERED
8035 || code == UNEQ || code == LTGT
8036 || code == UNGT || code == UNLT
8037 || code == UNGE || code == UNLE))
8040 /* These should never be generated except for
8041 flag_finite_math_only. */
8042 if (mode == CCFPmode
8043 && ! flag_finite_math_only
8044 && (code == LE || code == GE
8045 || code == UNEQ || code == LTGT
8046 || code == UNGT || code == UNLT))
8049 /* These are invalid; the information is not there. */
8050 if (mode == CCEQmode
8051 && code != EQ && code != NE)
8055 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8056 We only check the opcode against the mode of the CC value here. */
8059 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8061 enum rtx_code code = GET_CODE (op);
8062 enum machine_mode cc_mode;
8064 if (GET_RTX_CLASS (code) != '<')
8067 cc_mode = GET_MODE (XEXP (op, 0));
8068 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8071 validate_condition_mode (code, cc_mode);
8076 /* Return 1 if OP is a comparison operation that is valid for a branch
8077 insn and which is true if the corresponding bit in the CC register
8081 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8085 if (! branch_comparison_operator (op, mode))
8088 code = GET_CODE (op);
8089 return (code == EQ || code == LT || code == GT
8090 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
8091 || code == LTU || code == GTU
8092 || code == UNORDERED);
8095 /* Return 1 if OP is a comparison operation that is valid for an scc
8096 insn: it must be a positive comparison. */
8099 scc_comparison_operator (rtx op, enum machine_mode mode)
8101 return branch_positive_comparison_operator (op, mode);
8105 trap_comparison_operator (rtx op, enum machine_mode mode)
8107 if (mode != VOIDmode && mode != GET_MODE (op))
8109 return GET_RTX_CLASS (GET_CODE (op)) == '<';
8113 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8115 enum rtx_code code = GET_CODE (op);
8116 return (code == AND || code == IOR || code == XOR);
8120 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8122 enum rtx_code code = GET_CODE (op);
8123 return (code == IOR || code == XOR);
8127 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8129 enum rtx_code code = GET_CODE (op);
8130 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8133 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8134 mask required to convert the result of a rotate insn into a shift
8135 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8138 includes_lshift_p (rtx shiftop, rtx andop)
8140 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8142 shift_mask <<= INTVAL (shiftop);
8144 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8147 /* Similar, but for right shift. */
8150 includes_rshift_p (rtx shiftop, rtx andop)
8152 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8154 shift_mask >>= INTVAL (shiftop);
8156 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8159 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8160 to perform a left shift. It must have exactly SHIFTOP least
8161 significant 0's, then one or more 1's, then zero or more 0's. */
8164 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8166 if (GET_CODE (andop) == CONST_INT)
8168 HOST_WIDE_INT c, lsb, shift_mask;
8171 if (c == 0 || c == ~0)
8175 shift_mask <<= INTVAL (shiftop);
8177 /* Find the least significant one bit. */
8180 /* It must coincide with the LSB of the shift mask. */
8181 if (-lsb != shift_mask)
8184 /* Invert to look for the next transition (if any). */
8187 /* Remove the low group of ones (originally low group of zeros). */
8190 /* Again find the lsb, and check we have all 1's above. */
8194 else if (GET_CODE (andop) == CONST_DOUBLE
8195 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8197 HOST_WIDE_INT low, high, lsb;
8198 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8200 low = CONST_DOUBLE_LOW (andop);
8201 if (HOST_BITS_PER_WIDE_INT < 64)
8202 high = CONST_DOUBLE_HIGH (andop);
8204 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8205 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8208 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8210 shift_mask_high = ~0;
8211 if (INTVAL (shiftop) > 32)
8212 shift_mask_high <<= INTVAL (shiftop) - 32;
8216 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8223 return high == -lsb;
8226 shift_mask_low = ~0;
8227 shift_mask_low <<= INTVAL (shiftop);
8231 if (-lsb != shift_mask_low)
8234 if (HOST_BITS_PER_WIDE_INT < 64)
8239 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8242 return high == -lsb;
8246 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8252 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8253 to perform a left shift. It must have SHIFTOP or more least
8254 significant 0's, with the remainder of the word 1's. */
8257 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8259 if (GET_CODE (andop) == CONST_INT)
8261 HOST_WIDE_INT c, lsb, shift_mask;
8264 shift_mask <<= INTVAL (shiftop);
8267 /* Find the least significant one bit. */
8270 /* It must be covered by the shift mask.
8271 This test also rejects c == 0. */
8272 if ((lsb & shift_mask) == 0)
8275 /* Check we have all 1's above the transition, and reject all 1's. */
8276 return c == -lsb && lsb != 1;
8278 else if (GET_CODE (andop) == CONST_DOUBLE
8279 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8281 HOST_WIDE_INT low, lsb, shift_mask_low;
8283 low = CONST_DOUBLE_LOW (andop);
8285 if (HOST_BITS_PER_WIDE_INT < 64)
8287 HOST_WIDE_INT high, shift_mask_high;
8289 high = CONST_DOUBLE_HIGH (andop);
8293 shift_mask_high = ~0;
8294 if (INTVAL (shiftop) > 32)
8295 shift_mask_high <<= INTVAL (shiftop) - 32;
8299 if ((lsb & shift_mask_high) == 0)
8302 return high == -lsb;
8308 shift_mask_low = ~0;
8309 shift_mask_low <<= INTVAL (shiftop);
8313 if ((lsb & shift_mask_low) == 0)
8316 return low == -lsb && lsb != 1;
8322 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8323 for lfq and stfq insns.
8325 Note reg1 and reg2 *must* be hard registers. To be sure we will
8326 abort if we are passed pseudo registers. */
8329 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8331 /* We might have been passed a SUBREG. */
8332 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8335 return (REGNO (reg1) == REGNO (reg2) - 1);
8338 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8339 addr1 and addr2 must be in consecutive memory locations
8340 (addr2 == addr1 + 8). */
8343 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8348 /* Extract an offset (if used) from the first addr. */
8349 if (GET_CODE (addr1) == PLUS)
8351 /* If not a REG, return zero. */
8352 if (GET_CODE (XEXP (addr1, 0)) != REG)
8356 reg1 = REGNO (XEXP (addr1, 0));
8357 /* The offset must be constant! */
8358 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8360 offset1 = INTVAL (XEXP (addr1, 1));
8363 else if (GET_CODE (addr1) != REG)
8367 reg1 = REGNO (addr1);
8368 /* This was a simple (mem (reg)) expression. Offset is 0. */
8372 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8373 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8374 register as addr1. */
8375 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8377 if (GET_CODE (addr2) != PLUS)
8380 if (GET_CODE (XEXP (addr2, 0)) != REG
8381 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8384 if (reg1 != REGNO (XEXP (addr2, 0)))
8387 /* The offset for the second addr must be 8 more than the first addr. */
8388 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8391 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8396 /* Return the register class of a scratch register needed to copy IN into
8397 or out of a register in CLASS in MODE. If it can be done directly,
8398 NO_REGS is returned. */
8401 secondary_reload_class (enum reg_class class,
8402 enum machine_mode mode ATTRIBUTE_UNUSED,
8407 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8409 && MACHOPIC_INDIRECT
8413 /* We cannot copy a symbolic operand directly into anything
8414 other than BASE_REGS for TARGET_ELF. So indicate that a
8415 register from BASE_REGS is needed as an intermediate
8418 On Darwin, pic addresses require a load from memory, which
8419 needs a base register. */
8420 if (class != BASE_REGS
8421 && (GET_CODE (in) == SYMBOL_REF
8422 || GET_CODE (in) == HIGH
8423 || GET_CODE (in) == LABEL_REF
8424 || GET_CODE (in) == CONST))
8428 if (GET_CODE (in) == REG)
8431 if (regno >= FIRST_PSEUDO_REGISTER)
8433 regno = true_regnum (in);
8434 if (regno >= FIRST_PSEUDO_REGISTER)
8438 else if (GET_CODE (in) == SUBREG)
8440 regno = true_regnum (in);
8441 if (regno >= FIRST_PSEUDO_REGISTER)
8447 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8449 if (class == GENERAL_REGS || class == BASE_REGS
8450 || (regno >= 0 && INT_REGNO_P (regno)))
8453 /* Constants, memory, and FP registers can go into FP registers. */
8454 if ((regno == -1 || FP_REGNO_P (regno))
8455 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8458 /* Memory, and AltiVec registers can go into AltiVec registers. */
8459 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8460 && class == ALTIVEC_REGS)
8463 /* We can copy among the CR registers. */
8464 if ((class == CR_REGS || class == CR0_REGS)
8465 && regno >= 0 && CR_REGNO_P (regno))
8468 /* Otherwise, we need GENERAL_REGS. */
8469 return GENERAL_REGS;
8472 /* Given a comparison operation, return the bit number in CCR to test. We
8473 know this is a valid comparison.
8475 SCC_P is 1 if this is for an scc. That means that %D will have been
8476 used instead of %C, so the bits will be in different places.
8478 Return -1 if OP isn't a valid comparison for some reason. */
8481 ccr_bit (rtx op, int scc_p)
8483 enum rtx_code code = GET_CODE (op);
8484 enum machine_mode cc_mode;
8489 if (GET_RTX_CLASS (code) != '<')
8494 if (GET_CODE (reg) != REG
8495 || ! CR_REGNO_P (REGNO (reg)))
8498 cc_mode = GET_MODE (reg);
8499 cc_regnum = REGNO (reg);
8500 base_bit = 4 * (cc_regnum - CR0_REGNO);
8502 validate_condition_mode (code, cc_mode);
8504 /* When generating a sCOND operation, only positive conditions are
8506 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8507 && code != GTU && code != LTU)
8513 if (TARGET_E500 && !TARGET_FPRS
8514 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8515 return base_bit + 1;
8516 return scc_p ? base_bit + 3 : base_bit + 2;
8518 if (TARGET_E500 && !TARGET_FPRS
8519 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8520 return base_bit + 1;
8521 return base_bit + 2;
8522 case GT: case GTU: case UNLE:
8523 return base_bit + 1;
8524 case LT: case LTU: case UNGE:
8526 case ORDERED: case UNORDERED:
8527 return base_bit + 3;
8530 /* If scc, we will have done a cror to put the bit in the
8531 unordered position. So test that bit. For integer, this is ! LT
8532 unless this is an scc insn. */
8533 return scc_p ? base_bit + 3 : base_bit;
8536 return scc_p ? base_bit + 3 : base_bit + 1;
8543 /* Return the GOT register. */
8546 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8548 /* The second flow pass currently (June 1999) can't update
8549 regs_ever_live without disturbing other parts of the compiler, so
8550 update it here to make the prolog/epilogue code happy. */
8551 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8552 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8554 current_function_uses_pic_offset_table = 1;
8556 return pic_offset_table_rtx;
8559 /* Function to init struct machine_function.
8560 This will be called, via a pointer variable,
8561 from push_function_context. */
8563 static struct machine_function *
8564 rs6000_init_machine_status (void)
8566 return ggc_alloc_cleared (sizeof (machine_function));
8569 /* These macros test for integers and extract the low-order bits. */
8571 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8572 && GET_MODE (X) == VOIDmode)
8574 #define INT_LOWPART(X) \
8575 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8581 unsigned long val = INT_LOWPART (op);
8583 /* If the high bit is zero, the value is the first 1 bit we find
8585 if ((val & 0x80000000) == 0)
8587 if ((val & 0xffffffff) == 0)
8591 while (((val <<= 1) & 0x80000000) == 0)
8596 /* If the high bit is set and the low bit is not, or the mask is all
8597 1's, the value is zero. */
8598 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8601 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8604 while (((val >>= 1) & 1) != 0)
8614 unsigned long val = INT_LOWPART (op);
8616 /* If the low bit is zero, the value is the first 1 bit we find from
8620 if ((val & 0xffffffff) == 0)
8624 while (((val >>= 1) & 1) == 0)
8630 /* If the low bit is set and the high bit is not, or the mask is all
8631 1's, the value is 31. */
8632 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8635 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8638 while (((val <<= 1) & 0x80000000) != 0)
8644 /* Locate some local-dynamic symbol still in use by this function
8645 so that we can print its name in some tls_ld pattern. */
8648 rs6000_get_some_local_dynamic_name (void)
8652 if (cfun->machine->some_ld_name)
8653 return cfun->machine->some_ld_name;
8655 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8657 && for_each_rtx (&PATTERN (insn),
8658 rs6000_get_some_local_dynamic_name_1, 0))
8659 return cfun->machine->some_ld_name;
8664 /* Helper function for rs6000_get_some_local_dynamic_name. */
8667 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8671 if (GET_CODE (x) == SYMBOL_REF)
8673 const char *str = XSTR (x, 0);
8674 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8676 cfun->machine->some_ld_name = str;
8684 /* Print an operand. Recognize special options, documented below. */
8687 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8688 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8690 #define SMALL_DATA_RELOC "sda21"
8691 #define SMALL_DATA_REG 0
8695 print_operand (FILE *file, rtx x, int code)
8699 unsigned HOST_WIDE_INT uval;
8704 /* Write out an instruction after the call which may be replaced
8705 with glue code by the loader. This depends on the AIX version. */
8706 asm_fprintf (file, RS6000_CALL_GLUE);
8709 /* %a is output_address. */
8712 /* If X is a constant integer whose low-order 5 bits are zero,
8713 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8714 in the AIX assembler where "sri" with a zero shift count
8715 writes a trash instruction. */
8716 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8723 /* If constant, low-order 16 bits of constant, unsigned.
8724 Otherwise, write normally. */
8726 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8728 print_operand (file, x, 0);
8732 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8733 for 64-bit mask direction. */
8734 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8737 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8741 /* X is a CR register. Print the number of the EQ bit of the CR */
8742 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8743 output_operand_lossage ("invalid %%E value");
8745 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8749 /* X is a CR register. Print the shift count needed to move it
8750 to the high-order four bits. */
8751 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8752 output_operand_lossage ("invalid %%f value");
8754 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8758 /* Similar, but print the count for the rotate in the opposite
8760 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8761 output_operand_lossage ("invalid %%F value");
8763 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8767 /* X is a constant integer. If it is negative, print "m",
8768 otherwise print "z". This is to make an aze or ame insn. */
8769 if (GET_CODE (x) != CONST_INT)
8770 output_operand_lossage ("invalid %%G value");
8771 else if (INTVAL (x) >= 0)
8778 /* If constant, output low-order five bits. Otherwise, write
8781 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8783 print_operand (file, x, 0);
8787 /* If constant, output low-order six bits. Otherwise, write
8790 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8792 print_operand (file, x, 0);
8796 /* Print `i' if this is a constant, else nothing. */
8802 /* Write the bit number in CCR for jump. */
8805 output_operand_lossage ("invalid %%j code");
8807 fprintf (file, "%d", i);
8811 /* Similar, but add one for shift count in rlinm for scc and pass
8812 scc flag to `ccr_bit'. */
8815 output_operand_lossage ("invalid %%J code");
8817 /* If we want bit 31, write a shift count of zero, not 32. */
8818 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8822 /* X must be a constant. Write the 1's complement of the
8825 output_operand_lossage ("invalid %%k value");
8827 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8831 /* X must be a symbolic constant on ELF. Write an
8832 expression suitable for an 'addi' that adds in the low 16
8834 if (GET_CODE (x) != CONST)
8836 print_operand_address (file, x);
8841 if (GET_CODE (XEXP (x, 0)) != PLUS
8842 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8843 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8844 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8845 output_operand_lossage ("invalid %%K value");
8846 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8848 /* For GNU as, there must be a non-alphanumeric character
8849 between 'l' and the number. The '-' is added by
8850 print_operand() already. */
8851 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8853 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8857 /* %l is output_asm_label. */
8860 /* Write second word of DImode or DFmode reference. Works on register
8861 or non-indexed memory only. */
8862 if (GET_CODE (x) == REG)
8863 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8864 else if (GET_CODE (x) == MEM)
8866 /* Handle possible auto-increment. Since it is pre-increment and
8867 we have already done it, we can just use an offset of word. */
8868 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8869 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8870 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8873 output_address (XEXP (adjust_address_nv (x, SImode,
8877 if (small_data_operand (x, GET_MODE (x)))
8878 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8879 reg_names[SMALL_DATA_REG]);
8884 /* MB value for a mask operand. */
8885 if (! mask_operand (x, SImode))
8886 output_operand_lossage ("invalid %%m value");
8888 fprintf (file, "%d", extract_MB (x));
8892 /* ME value for a mask operand. */
8893 if (! mask_operand (x, SImode))
8894 output_operand_lossage ("invalid %%M value");
8896 fprintf (file, "%d", extract_ME (x));
8899 /* %n outputs the negative of its operand. */
8902 /* Write the number of elements in the vector times 4. */
8903 if (GET_CODE (x) != PARALLEL)
8904 output_operand_lossage ("invalid %%N value");
8906 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8910 /* Similar, but subtract 1 first. */
8911 if (GET_CODE (x) != PARALLEL)
8912 output_operand_lossage ("invalid %%O value");
8914 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8918 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8920 || INT_LOWPART (x) < 0
8921 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8922 output_operand_lossage ("invalid %%p value");
8924 fprintf (file, "%d", i);
8928 /* The operand must be an indirect memory reference. The result
8929 is the register number. */
8930 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8931 || REGNO (XEXP (x, 0)) >= 32)
8932 output_operand_lossage ("invalid %%P value");
8934 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8938 /* This outputs the logical code corresponding to a boolean
8939 expression. The expression may have one or both operands
8940 negated (if one, only the first one). For condition register
8941 logical operations, it will also treat the negated
8942 CR codes as NOTs, but not handle NOTs of them. */
8944 const char *const *t = 0;
8946 enum rtx_code code = GET_CODE (x);
8947 static const char * const tbl[3][3] = {
8948 { "and", "andc", "nor" },
8949 { "or", "orc", "nand" },
8950 { "xor", "eqv", "xor" } };
8954 else if (code == IOR)
8956 else if (code == XOR)
8959 output_operand_lossage ("invalid %%q value");
8961 if (GET_CODE (XEXP (x, 0)) != NOT)
8965 if (GET_CODE (XEXP (x, 1)) == NOT)
8983 /* X is a CR register. Print the mask for `mtcrf'. */
8984 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8985 output_operand_lossage ("invalid %%R value");
8987 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8991 /* Low 5 bits of 32 - value */
8993 output_operand_lossage ("invalid %%s value");
8995 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8999 /* PowerPC64 mask position. All 0's is excluded.
9000 CONST_INT 32-bit mask is considered sign-extended so any
9001 transition must occur within the CONST_INT, not on the boundary. */
9002 if (! mask64_operand (x, DImode))
9003 output_operand_lossage ("invalid %%S value");
9005 uval = INT_LOWPART (x);
9007 if (uval & 1) /* Clear Left */
9009 #if HOST_BITS_PER_WIDE_INT > 64
9010 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9014 else /* Clear Right */
9017 #if HOST_BITS_PER_WIDE_INT > 64
9018 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9026 fprintf (file, "%d", i);
9030 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9031 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9034 /* Bit 3 is OV bit. */
9035 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9037 /* If we want bit 31, write a shift count of zero, not 32. */
9038 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9042 /* Print the symbolic name of a branch target register. */
9043 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9044 && REGNO (x) != COUNT_REGISTER_REGNUM))
9045 output_operand_lossage ("invalid %%T value");
9046 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9047 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9049 fputs ("ctr", file);
9053 /* High-order 16 bits of constant for use in unsigned operand. */
9055 output_operand_lossage ("invalid %%u value");
9057 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9058 (INT_LOWPART (x) >> 16) & 0xffff);
9062 /* High-order 16 bits of constant for use in signed operand. */
9064 output_operand_lossage ("invalid %%v value");
9066 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9067 (INT_LOWPART (x) >> 16) & 0xffff);
9071 /* Print `u' if this has an auto-increment or auto-decrement. */
9072 if (GET_CODE (x) == MEM
9073 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9074 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9079 /* Print the trap code for this operand. */
9080 switch (GET_CODE (x))
9083 fputs ("eq", file); /* 4 */
9086 fputs ("ne", file); /* 24 */
9089 fputs ("lt", file); /* 16 */
9092 fputs ("le", file); /* 20 */
9095 fputs ("gt", file); /* 8 */
9098 fputs ("ge", file); /* 12 */
9101 fputs ("llt", file); /* 2 */
9104 fputs ("lle", file); /* 6 */
9107 fputs ("lgt", file); /* 1 */
9110 fputs ("lge", file); /* 5 */
9118 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9121 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9122 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9124 print_operand (file, x, 0);
9128 /* MB value for a PowerPC64 rldic operand. */
9129 val = (GET_CODE (x) == CONST_INT
9130 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9135 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9136 if ((val <<= 1) < 0)
9139 #if HOST_BITS_PER_WIDE_INT == 32
9140 if (GET_CODE (x) == CONST_INT && i >= 0)
9141 i += 32; /* zero-extend high-part was all 0's */
9142 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9144 val = CONST_DOUBLE_LOW (x);
9151 for ( ; i < 64; i++)
9152 if ((val <<= 1) < 0)
9157 fprintf (file, "%d", i + 1);
9161 if (GET_CODE (x) == MEM
9162 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9167 /* Like 'L', for third word of TImode */
9168 if (GET_CODE (x) == REG)
9169 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9170 else if (GET_CODE (x) == MEM)
9172 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9173 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9174 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9176 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9177 if (small_data_operand (x, GET_MODE (x)))
9178 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9179 reg_names[SMALL_DATA_REG]);
9184 /* X is a SYMBOL_REF. Write out the name preceded by a
9185 period and without any trailing data in brackets. Used for function
9186 names. If we are configured for System V (or the embedded ABI) on
9187 the PowerPC, do not emit the period, since those systems do not use
9188 TOCs and the like. */
9189 if (GET_CODE (x) != SYMBOL_REF)
9192 if (XSTR (x, 0)[0] != '.')
9194 switch (DEFAULT_ABI)
9209 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9211 assemble_name (file, XSTR (x, 0));
9215 /* Like 'L', for last word of TImode. */
9216 if (GET_CODE (x) == REG)
9217 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9218 else if (GET_CODE (x) == MEM)
9220 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9221 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9222 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9224 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9225 if (small_data_operand (x, GET_MODE (x)))
9226 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9227 reg_names[SMALL_DATA_REG]);
9231 /* Print AltiVec or SPE memory operand. */
9236 if (GET_CODE (x) != MEM)
9244 if (GET_CODE (tmp) == REG)
9246 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9249 /* Handle [reg+UIMM]. */
9250 else if (GET_CODE (tmp) == PLUS &&
9251 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9255 if (GET_CODE (XEXP (tmp, 0)) != REG)
9258 x = INTVAL (XEXP (tmp, 1));
9259 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9263 /* Fall through. Must be [reg+reg]. */
9265 if (GET_CODE (tmp) == REG)
9266 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9267 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9269 if (REGNO (XEXP (tmp, 0)) == 0)
9270 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9271 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9273 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9274 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9282 if (GET_CODE (x) == REG)
9283 fprintf (file, "%s", reg_names[REGNO (x)]);
9284 else if (GET_CODE (x) == MEM)
9286 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9287 know the width from the mode. */
9288 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9289 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9290 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9291 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9292 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9293 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9295 output_address (XEXP (x, 0));
9298 output_addr_const (file, x);
9302 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9306 output_operand_lossage ("invalid %%xn code");
9310 /* Print the address of an operand. */
9313 print_operand_address (FILE *file, rtx x)
9315 if (GET_CODE (x) == REG)
9316 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9317 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9318 || GET_CODE (x) == LABEL_REF)
9320 output_addr_const (file, x);
9321 if (small_data_operand (x, GET_MODE (x)))
9322 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9323 reg_names[SMALL_DATA_REG]);
9324 else if (TARGET_TOC)
9327 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9329 if (REGNO (XEXP (x, 0)) == 0)
9330 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9331 reg_names[ REGNO (XEXP (x, 0)) ]);
9333 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9334 reg_names[ REGNO (XEXP (x, 1)) ]);
9336 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9337 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9338 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9340 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9341 && CONSTANT_P (XEXP (x, 1)))
9343 output_addr_const (file, XEXP (x, 1));
9344 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9348 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9349 && CONSTANT_P (XEXP (x, 1)))
9351 fprintf (file, "lo16(");
9352 output_addr_const (file, XEXP (x, 1));
9353 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9356 else if (legitimate_constant_pool_address_p (x))
9358 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9360 rtx contains_minus = XEXP (x, 1);
9364 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9365 turn it into (sym) for output_addr_const. */
9366 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9367 contains_minus = XEXP (contains_minus, 0);
9369 minus = XEXP (contains_minus, 0);
9370 symref = XEXP (minus, 0);
9371 XEXP (contains_minus, 0) = symref;
9376 name = XSTR (symref, 0);
9377 newname = alloca (strlen (name) + sizeof ("@toc"));
9378 strcpy (newname, name);
9379 strcat (newname, "@toc");
9380 XSTR (symref, 0) = newname;
9382 output_addr_const (file, XEXP (x, 1));
9384 XSTR (symref, 0) = name;
9385 XEXP (contains_minus, 0) = minus;
9388 output_addr_const (file, XEXP (x, 1));
9390 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9396 /* Target hook for assembling integer objects. The PowerPC version has
9397 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9398 is defined. It also needs to handle DI-mode objects on 64-bit
9402 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9404 #ifdef RELOCATABLE_NEEDS_FIXUP
9405 /* Special handling for SI values. */
9406 if (size == 4 && aligned_p)
9408 extern int in_toc_section (void);
9409 static int recurse = 0;
9411 /* For -mrelocatable, we mark all addresses that need to be fixed up
9412 in the .fixup section. */
9413 if (TARGET_RELOCATABLE
9414 && !in_toc_section ()
9415 && !in_text_section ()
9417 && GET_CODE (x) != CONST_INT
9418 && GET_CODE (x) != CONST_DOUBLE
9424 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9426 ASM_OUTPUT_LABEL (asm_out_file, buf);
9427 fprintf (asm_out_file, "\t.long\t(");
9428 output_addr_const (asm_out_file, x);
9429 fprintf (asm_out_file, ")@fixup\n");
9430 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9431 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9432 fprintf (asm_out_file, "\t.long\t");
9433 assemble_name (asm_out_file, buf);
9434 fprintf (asm_out_file, "\n\t.previous\n");
9438 /* Remove initial .'s to turn a -mcall-aixdesc function
9439 address into the address of the descriptor, not the function
9441 else if (GET_CODE (x) == SYMBOL_REF
9442 && XSTR (x, 0)[0] == '.'
9443 && DEFAULT_ABI == ABI_AIX)
9445 const char *name = XSTR (x, 0);
9446 while (*name == '.')
9449 fprintf (asm_out_file, "\t.long\t%s\n", name);
9453 #endif /* RELOCATABLE_NEEDS_FIXUP */
9454 return default_assemble_integer (x, size, aligned_p);
9457 #ifdef HAVE_GAS_HIDDEN
9458 /* Emit an assembler directive to set symbol visibility for DECL to
9462 rs6000_assemble_visibility (tree decl, int vis)
9464 /* Functions need to have their entry point symbol visibility set as
9465 well as their descriptor symbol visibility. */
9466 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9468 static const char * const visibility_types[] = {
9469 NULL, "internal", "hidden", "protected"
9472 const char *name, *type;
9474 name = ((* targetm.strip_name_encoding)
9475 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9476 type = visibility_types[vis];
9478 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9479 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9482 default_assemble_visibility (decl, vis);
9487 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9489 /* Reversal of FP compares takes care -- an ordered compare
9490 becomes an unordered compare and vice versa. */
9491 if (mode == CCFPmode
9492 && (!flag_finite_math_only
9493 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9494 || code == UNEQ || code == LTGT))
9495 return reverse_condition_maybe_unordered (code);
9497 return reverse_condition (code);
9500 /* Generate a compare for CODE. Return a brand-new rtx that
9501 represents the result of the compare. */
9504 rs6000_generate_compare (enum rtx_code code)
9506 enum machine_mode comp_mode;
9509 if (rs6000_compare_fp_p)
9510 comp_mode = CCFPmode;
9511 else if (code == GTU || code == LTU
9512 || code == GEU || code == LEU)
9513 comp_mode = CCUNSmode;
9517 /* First, the compare. */
9518 compare_result = gen_reg_rtx (comp_mode);
9520 /* SPE FP compare instructions on the GPRs. Yuck! */
9521 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9522 && rs6000_compare_fp_p)
9524 rtx cmp, or1, or2, or_result, compare_result2;
9532 cmp = flag_finite_math_only
9533 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9535 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9536 rs6000_compare_op1);
9544 cmp = flag_finite_math_only
9545 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9547 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9548 rs6000_compare_op1);
9556 cmp = flag_finite_math_only
9557 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9559 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9560 rs6000_compare_op1);
9566 /* Synthesize LE and GE from LT/GT || EQ. */
9567 if (code == LE || code == GE || code == LEU || code == GEU)
9569 /* Synthesize GE/LE frome GT/LT || EQ. */
9575 case LE: code = LT; break;
9576 case GE: code = GT; break;
9577 case LEU: code = LT; break;
9578 case GEU: code = GT; break;
9582 or1 = gen_reg_rtx (SImode);
9583 or2 = gen_reg_rtx (SImode);
9584 or_result = gen_reg_rtx (CCEQmode);
9585 compare_result2 = gen_reg_rtx (CCFPmode);
9588 cmp = flag_finite_math_only
9589 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9591 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9592 rs6000_compare_op1);
9595 /* The MC8540 FP compare instructions set the CR bits
9596 differently than other PPC compare instructions. For
9597 that matter, there is no generic test instruction, but a
9598 testgt, testlt, and testeq. For a true condition, bit 2
9599 is set (x1xx) in the CR. Following the traditional CR
9605 ... bit 2 would be a GT CR alias, so later on we
9606 look in the GT bits for the branch instructions.
9607 However, we must be careful to emit correct RTL in
9608 the meantime, so optimizations don't get confused. */
9610 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9611 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9613 /* OR them together. */
9614 cmp = gen_rtx_SET (VOIDmode, or_result,
9615 gen_rtx_COMPARE (CCEQmode,
9616 gen_rtx_IOR (SImode, or1, or2),
9618 compare_result = or_result;
9623 /* We only care about 1 bit (x1xx), so map everything to NE to
9624 maintain rtl sanity. We'll get to the right bit (x1xx) at
9625 code output time. */
9626 if (code == NE || code == LTGT)
9627 /* Do the inverse here because we have no cmpne
9628 instruction. We use the cmpeq instruction and expect
9629 to get a 0 instead. */
9638 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9639 gen_rtx_COMPARE (comp_mode,
9641 rs6000_compare_op1)));
9643 /* Some kinds of FP comparisons need an OR operation;
9644 under flag_finite_math_only we don't bother. */
9645 if (rs6000_compare_fp_p
9646 && ! flag_finite_math_only
9647 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9648 && (code == LE || code == GE
9649 || code == UNEQ || code == LTGT
9650 || code == UNGT || code == UNLT))
9652 enum rtx_code or1, or2;
9653 rtx or1_rtx, or2_rtx, compare2_rtx;
9654 rtx or_result = gen_reg_rtx (CCEQmode);
9658 case LE: or1 = LT; or2 = EQ; break;
9659 case GE: or1 = GT; or2 = EQ; break;
9660 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9661 case LTGT: or1 = LT; or2 = GT; break;
9662 case UNGT: or1 = UNORDERED; or2 = GT; break;
9663 case UNLT: or1 = UNORDERED; or2 = LT; break;
9666 validate_condition_mode (or1, comp_mode);
9667 validate_condition_mode (or2, comp_mode);
9668 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9669 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9670 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9671 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9673 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9675 compare_result = or_result;
9679 validate_condition_mode (code, GET_MODE (compare_result));
9681 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9685 /* Emit the RTL for an sCOND pattern. */
9688 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9691 enum machine_mode op_mode;
9692 enum rtx_code cond_code;
9694 condition_rtx = rs6000_generate_compare (code);
9695 cond_code = GET_CODE (condition_rtx);
9698 || cond_code == GE || cond_code == LE
9699 || cond_code == GEU || cond_code == LEU
9700 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9702 rtx not_result = gen_reg_rtx (CCEQmode);
9703 rtx not_op, rev_cond_rtx;
9704 enum machine_mode cc_mode;
9706 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9708 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9709 SImode, XEXP (condition_rtx, 0), const0_rtx);
9710 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9711 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9712 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9715 op_mode = GET_MODE (rs6000_compare_op0);
9716 if (op_mode == VOIDmode)
9717 op_mode = GET_MODE (rs6000_compare_op1);
9719 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9721 PUT_MODE (condition_rtx, DImode);
9722 convert_move (result, condition_rtx, 0);
9726 PUT_MODE (condition_rtx, SImode);
9727 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9731 /* Emit a branch of kind CODE to location LOC. */
9734 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9736 rtx condition_rtx, loc_ref;
9738 condition_rtx = rs6000_generate_compare (code);
9739 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9740 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9741 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9745 /* Return the string to output a conditional branch to LABEL, which is
9746 the operand number of the label, or -1 if the branch is really a
9749 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9750 condition code register and its mode specifies what kind of
9753 REVERSED is nonzero if we should reverse the sense of the comparison.
9755 INSN is the insn. */
9758 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
9760 static char string[64];
9761 enum rtx_code code = GET_CODE (op);
9762 rtx cc_reg = XEXP (op, 0);
9763 enum machine_mode mode = GET_MODE (cc_reg);
9764 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9765 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9766 int really_reversed = reversed ^ need_longbranch;
9772 validate_condition_mode (code, mode);
9774 /* Work out which way this really branches. We could use
9775 reverse_condition_maybe_unordered here always but this
9776 makes the resulting assembler clearer. */
9777 if (really_reversed)
9779 /* Reversal of FP compares takes care -- an ordered compare
9780 becomes an unordered compare and vice versa. */
9781 if (mode == CCFPmode)
9782 code = reverse_condition_maybe_unordered (code);
9784 code = reverse_condition (code);
9787 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9789 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9792 /* Opposite of GT. */
9794 else if (code == NE)
9802 /* Not all of these are actually distinct opcodes, but
9803 we distinguish them for clarity of the resulting assembler. */
9805 ccode = "ne"; break;
9807 ccode = "eq"; break;
9809 ccode = "ge"; break;
9810 case GT: case GTU: case UNGT:
9811 ccode = "gt"; break;
9813 ccode = "le"; break;
9814 case LT: case LTU: case UNLT:
9815 ccode = "lt"; break;
9816 case UNORDERED: ccode = "un"; break;
9817 case ORDERED: ccode = "nu"; break;
9818 case UNGE: ccode = "nl"; break;
9819 case UNLE: ccode = "ng"; break;
9824 /* Maybe we have a guess as to how likely the branch is.
9825 The old mnemonics don't have a way to specify this information. */
9827 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9828 if (note != NULL_RTX)
9830 /* PROB is the difference from 50%. */
9831 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9832 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9834 /* Only hint for highly probable/improbable branches on newer
9835 cpus as static prediction overrides processor dynamic
9836 prediction. For older cpus we may as well always hint, but
9837 assume not taken for branches that are very close to 50% as a
9838 mispredicted taken branch is more expensive than a
9839 mispredicted not-taken branch. */
9841 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9843 if (abs (prob) > REG_BR_PROB_BASE / 20
9844 && ((prob > 0) ^ need_longbranch))
9852 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9854 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9856 /* We need to escape any '%' characters in the reg_names string.
9857 Assume they'd only be the first character.... */
9858 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9860 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9864 /* If the branch distance was too far, we may have to use an
9865 unconditional branch to go the distance. */
9866 if (need_longbranch)
9867 s += sprintf (s, ",$+8\n\tb %s", label);
9869 s += sprintf (s, ",%s", label);
9875 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9876 operands of the last comparison is nonzero/true, FALSE_COND if it
9877 is zero/false. Return 0 if the hardware has no such operation. */
9880 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9882 enum rtx_code code = GET_CODE (op);
9883 rtx op0 = rs6000_compare_op0;
9884 rtx op1 = rs6000_compare_op1;
9886 enum machine_mode compare_mode = GET_MODE (op0);
9887 enum machine_mode result_mode = GET_MODE (dest);
9890 /* These modes should always match. */
9891 if (GET_MODE (op1) != compare_mode
9892 /* In the isel case however, we can use a compare immediate, so
9893 op1 may be a small constant. */
9894 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9896 if (GET_MODE (true_cond) != result_mode)
9898 if (GET_MODE (false_cond) != result_mode)
9901 /* First, work out if the hardware can do this at all, or
9902 if it's too slow.... */
9903 if (! rs6000_compare_fp_p)
9906 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9910 /* Eliminate half of the comparisons by switching operands, this
9911 makes the remaining code simpler. */
9912 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9913 || code == LTGT || code == LT || code == UNLE)
9915 code = reverse_condition_maybe_unordered (code);
9917 true_cond = false_cond;
9921 /* UNEQ and LTGT take four instructions for a comparison with zero,
9922 it'll probably be faster to use a branch here too. */
9923 if (code == UNEQ && HONOR_NANS (compare_mode))
9926 if (GET_CODE (op1) == CONST_DOUBLE)
9927 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9929 /* We're going to try to implement comparisons by performing
9930 a subtract, then comparing against zero. Unfortunately,
9931 Inf - Inf is NaN which is not zero, and so if we don't
9932 know that the operand is finite and the comparison
9933 would treat EQ different to UNORDERED, we can't do it. */
9934 if (HONOR_INFINITIES (compare_mode)
9935 && code != GT && code != UNGE
9936 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9937 /* Constructs of the form (a OP b ? a : b) are safe. */
9938 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9939 || (! rtx_equal_p (op0, true_cond)
9940 && ! rtx_equal_p (op1, true_cond))))
9942 /* At this point we know we can use fsel. */
9944 /* Reduce the comparison to a comparison against zero. */
9945 temp = gen_reg_rtx (compare_mode);
9946 emit_insn (gen_rtx_SET (VOIDmode, temp,
9947 gen_rtx_MINUS (compare_mode, op0, op1)));
9949 op1 = CONST0_RTX (compare_mode);
9951 /* If we don't care about NaNs we can reduce some of the comparisons
9952 down to faster ones. */
9953 if (! HONOR_NANS (compare_mode))
9959 true_cond = false_cond;
9972 /* Now, reduce everything down to a GE. */
9979 temp = gen_reg_rtx (compare_mode);
9980 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9985 temp = gen_reg_rtx (compare_mode);
9986 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9991 temp = gen_reg_rtx (compare_mode);
9992 emit_insn (gen_rtx_SET (VOIDmode, temp,
9993 gen_rtx_NEG (compare_mode,
9994 gen_rtx_ABS (compare_mode, op0))));
9999 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10000 temp = gen_reg_rtx (result_mode);
10001 emit_insn (gen_rtx_SET (VOIDmode, temp,
10002 gen_rtx_IF_THEN_ELSE (result_mode,
10003 gen_rtx_GE (VOIDmode,
10005 true_cond, false_cond)));
10006 false_cond = true_cond;
10009 temp = gen_reg_rtx (compare_mode);
10010 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10015 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10016 temp = gen_reg_rtx (result_mode);
10017 emit_insn (gen_rtx_SET (VOIDmode, temp,
10018 gen_rtx_IF_THEN_ELSE (result_mode,
10019 gen_rtx_GE (VOIDmode,
10021 true_cond, false_cond)));
10022 true_cond = false_cond;
10025 temp = gen_reg_rtx (compare_mode);
10026 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10034 emit_insn (gen_rtx_SET (VOIDmode, dest,
10035 gen_rtx_IF_THEN_ELSE (result_mode,
10036 gen_rtx_GE (VOIDmode,
10038 true_cond, false_cond)));
10042 /* Same as above, but for ints (isel). */
10045 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10047 rtx condition_rtx, cr;
10049 /* All isel implementations thus far are 32-bits. */
10050 if (GET_MODE (rs6000_compare_op0) != SImode)
10053 /* We still have to do the compare, because isel doesn't do a
10054 compare, it just looks at the CRx bits set by a previous compare
10056 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10057 cr = XEXP (condition_rtx, 0);
10059 if (GET_MODE (cr) == CCmode)
10060 emit_insn (gen_isel_signed (dest, condition_rtx,
10061 true_cond, false_cond, cr));
10063 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10064 true_cond, false_cond, cr));
10070 output_isel (rtx *operands)
10072 enum rtx_code code;
10074 code = GET_CODE (operands[1]);
10075 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10077 PUT_CODE (operands[1], reverse_condition (code));
10078 return "isel %0,%3,%2,%j1";
10081 return "isel %0,%2,%3,%j1";
10085 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10087 enum machine_mode mode = GET_MODE (op0);
10091 if (code == SMAX || code == SMIN)
10096 if (code == SMAX || code == UMAX)
10097 target = emit_conditional_move (dest, c, op0, op1, mode,
10098 op0, op1, mode, 0);
10100 target = emit_conditional_move (dest, c, op0, op1, mode,
10101 op1, op0, mode, 0);
10102 if (target == NULL_RTX)
10104 if (target != dest)
10105 emit_move_insn (dest, target);
10108 /* Emit instructions to move SRC to DST. Called by splitters for
10109 multi-register moves. It will emit at most one instruction for
10110 each register that is accessed; that is, it won't emit li/lis pairs
10111 (or equivalent for 64-bit code). One of SRC or DST must be a hard
10115 rs6000_split_multireg_move (rtx dst, rtx src)
10117 /* The register number of the first register being moved. */
10119 /* The mode that is to be moved. */
10120 enum machine_mode mode;
10121 /* The mode that the move is being done in, and its size. */
10122 enum machine_mode reg_mode;
10124 /* The number of registers that will be moved. */
10127 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10128 mode = GET_MODE (dst);
10129 nregs = HARD_REGNO_NREGS (reg, mode);
10130 if (FP_REGNO_P (reg))
10132 else if (ALTIVEC_REGNO_P (reg))
10133 reg_mode = V16QImode;
10135 reg_mode = word_mode;
10136 reg_mode_size = GET_MODE_SIZE (reg_mode);
10138 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10141 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10143 /* Move register range backwards, if we might have destructive
10146 for (i = nregs - 1; i >= 0; i--)
10147 emit_insn (gen_rtx_SET (VOIDmode,
10148 simplify_gen_subreg (reg_mode, dst, mode,
10149 i * reg_mode_size),
10150 simplify_gen_subreg (reg_mode, src, mode,
10151 i * reg_mode_size)));
10157 bool used_update = false;
10159 if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10163 if (GET_CODE (XEXP (src, 0)) == PRE_INC
10164 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10167 breg = XEXP (XEXP (src, 0), 0);
10168 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
10169 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10170 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10171 emit_insn (TARGET_32BIT
10172 ? gen_addsi3 (breg, breg, delta_rtx)
10173 : gen_adddi3 (breg, breg, delta_rtx));
10174 src = gen_rtx_MEM (mode, breg);
10177 /* We have now address involving an base register only.
10178 If we use one of the registers to address memory,
10179 we have change that register last. */
10181 breg = (GET_CODE (XEXP (src, 0)) == PLUS
10182 ? XEXP (XEXP (src, 0), 0)
10188 if (REGNO (breg) >= REGNO (dst)
10189 && REGNO (breg) < REGNO (dst) + nregs)
10190 j = REGNO (breg) - REGNO (dst);
10193 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10197 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10198 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10201 breg = XEXP (XEXP (dst, 0), 0);
10202 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10203 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10204 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10206 /* We have to update the breg before doing the store.
10207 Use store with update, if available. */
10211 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10212 emit_insn (TARGET_32BIT
10213 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10214 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10215 used_update = true;
10218 emit_insn (TARGET_32BIT
10219 ? gen_addsi3 (breg, breg, delta_rtx)
10220 : gen_adddi3 (breg, breg, delta_rtx));
10221 dst = gen_rtx_MEM (mode, breg);
10225 for (i = 0; i < nregs; i++)
10227 /* Calculate index to next subword. */
10232 /* If compiler already emited move of first word by
10233 store with update, no need to do anything. */
10234 if (j == 0 && used_update)
10237 emit_insn (gen_rtx_SET (VOIDmode,
10238 simplify_gen_subreg (reg_mode, dst, mode,
10239 j * reg_mode_size),
10240 simplify_gen_subreg (reg_mode, src, mode,
10241 j * reg_mode_size)));
10247 /* This page contains routines that are used to determine what the
10248 function prologue and epilogue code will do and write them out. */
10250 /* Return the first fixed-point register that is required to be
10251 saved. 32 if none. */
10254 first_reg_to_save (void)
10258 /* Find lowest numbered live register. */
10259 for (first_reg = 13; first_reg <= 31; first_reg++)
10260 if (regs_ever_live[first_reg]
10261 && (! call_used_regs[first_reg]
10262 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10263 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10264 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10269 && current_function_uses_pic_offset_table
10270 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10271 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10277 /* Similar, for FP regs. */
10280 first_fp_reg_to_save (void)
10284 /* Find lowest numbered live register. */
10285 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10286 if (regs_ever_live[first_reg])
10292 /* Similar, for AltiVec regs. */
10295 first_altivec_reg_to_save (void)
10299 /* Stack frame remains as is unless we are in AltiVec ABI. */
10300 if (! TARGET_ALTIVEC_ABI)
10301 return LAST_ALTIVEC_REGNO + 1;
10303 /* Find lowest numbered live register. */
10304 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10305 if (regs_ever_live[i])
10311 /* Return a 32-bit mask of the AltiVec registers we need to set in
10312 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10313 the 32-bit word is 0. */
10315 static unsigned int
10316 compute_vrsave_mask (void)
10318 unsigned int i, mask = 0;
10320 /* First, find out if we use _any_ altivec registers. */
10321 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10322 if (regs_ever_live[i])
10323 mask |= ALTIVEC_REG_BIT (i);
10328 /* Next, remove the argument registers from the set. These must
10329 be in the VRSAVE mask set by the caller, so we don't need to add
10330 them in again. More importantly, the mask we compute here is
10331 used to generate CLOBBERs in the set_vrsave insn, and we do not
10332 wish the argument registers to die. */
10333 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10334 mask &= ~ALTIVEC_REG_BIT (i);
10336 /* Similarly, remove the return value from the set. */
10339 diddle_return_value (is_altivec_return_reg, &yes);
10341 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10348 is_altivec_return_reg (rtx reg, void *xyes)
10350 bool *yes = (bool *) xyes;
10351 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10356 /* Calculate the stack information for the current function. This is
10357 complicated by having two separate calling sequences, the AIX calling
10358 sequence and the V.4 calling sequence.
10360 AIX (and Darwin/Mac OS X) stack frames look like:
10362 SP----> +---------------------------------------+
10363 | back chain to caller | 0 0
10364 +---------------------------------------+
10365 | saved CR | 4 8 (8-11)
10366 +---------------------------------------+
10368 +---------------------------------------+
10369 | reserved for compilers | 12 24
10370 +---------------------------------------+
10371 | reserved for binders | 16 32
10372 +---------------------------------------+
10373 | saved TOC pointer | 20 40
10374 +---------------------------------------+
10375 | Parameter save area (P) | 24 48
10376 +---------------------------------------+
10377 | Alloca space (A) | 24+P etc.
10378 +---------------------------------------+
10379 | Local variable space (L) | 24+P+A
10380 +---------------------------------------+
10381 | Float/int conversion temporary (X) | 24+P+A+L
10382 +---------------------------------------+
10383 | Save area for AltiVec registers (W) | 24+P+A+L+X
10384 +---------------------------------------+
10385 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10386 +---------------------------------------+
10387 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10388 +---------------------------------------+
10389 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10390 +---------------------------------------+
10391 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10392 +---------------------------------------+
10393 old SP->| back chain to caller's caller |
10394 +---------------------------------------+
10396 The required alignment for AIX configurations is two words (i.e., 8
10400 V.4 stack frames look like:
10402 SP----> +---------------------------------------+
10403 | back chain to caller | 0
10404 +---------------------------------------+
10405 | caller's saved LR | 4
10406 +---------------------------------------+
10407 | Parameter save area (P) | 8
10408 +---------------------------------------+
10409 | Alloca space (A) | 8+P
10410 +---------------------------------------+
10411 | Varargs save area (V) | 8+P+A
10412 +---------------------------------------+
10413 | Local variable space (L) | 8+P+A+V
10414 +---------------------------------------+
10415 | Float/int conversion temporary (X) | 8+P+A+V+L
10416 +---------------------------------------+
10417 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10418 +---------------------------------------+
10419 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10420 +---------------------------------------+
10421 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10422 +---------------------------------------+
10423 | SPE: area for 64-bit GP registers |
10424 +---------------------------------------+
10425 | SPE alignment padding |
10426 +---------------------------------------+
10427 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10428 +---------------------------------------+
10429 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10430 +---------------------------------------+
10431 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10432 +---------------------------------------+
10433 old SP->| back chain to caller's caller |
10434 +---------------------------------------+
10436 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10437 given. (But note below and in sysv4.h that we require only 8 and
10438 may round up the size of our stack frame anyways. The historical
10439 reason is early versions of powerpc-linux which didn't properly
10440 align the stack at program startup. A happy side-effect is that
10441 -mno-eabi libraries can be used with -meabi programs.)
10443 The EABI configuration defaults to the V.4 layout. However,
10444 the stack alignment requirements may differ. If -mno-eabi is not
10445 given, the required stack alignment is 8 bytes; if -mno-eabi is
10446 given, the required alignment is 16 bytes. (But see V.4 comment
10449 #ifndef ABI_STACK_BOUNDARY
10450 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10453 static rs6000_stack_t *
10454 rs6000_stack_info (void)
10456 static rs6000_stack_t info, zero_info;
10457 rs6000_stack_t *info_ptr = &info;
10458 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10460 HOST_WIDE_INT total_raw_size;
10462 /* Zero all fields portably. */
10467 /* Cache value so we don't rescan instruction chain over and over. */
10468 if (cfun->machine->insn_chain_scanned_p == 0)
10470 cfun->machine->insn_chain_scanned_p = 1;
10471 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10475 /* Select which calling sequence. */
10476 info_ptr->abi = DEFAULT_ABI;
10478 /* Calculate which registers need to be saved & save area size. */
10479 info_ptr->first_gp_reg_save = first_reg_to_save ();
10480 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10481 even if it currently looks like we won't. */
10482 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10483 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10484 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10485 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10486 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10488 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10490 /* For the SPE, we have an additional upper 32-bits on each GPR.
10491 Ideally we should save the entire 64-bits only when the upper
10492 half is used in SIMD instructions. Since we only record
10493 registers live (not the size they are used in), this proves
10494 difficult because we'd have to traverse the instruction chain at
10495 the right time, taking reload into account. This is a real pain,
10496 so we opt to save the GPRs in 64-bits always if but one register
10497 gets used in 64-bits. Otherwise, all the registers in the frame
10498 get saved in 32-bits.
10500 So... since when we save all GPRs (except the SP) in 64-bits, the
10501 traditional GP save area will be empty. */
10502 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10503 info_ptr->gp_size = 0;
10505 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10506 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10508 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10509 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10510 - info_ptr->first_altivec_reg_save);
10512 /* Does this function call anything? */
10513 info_ptr->calls_p = (! current_function_is_leaf
10514 || cfun->machine->ra_needs_full_frame);
10516 /* Determine if we need to save the link register. */
10517 if (rs6000_ra_ever_killed ()
10518 || (DEFAULT_ABI == ABI_AIX
10519 && current_function_profile
10520 && !TARGET_PROFILE_KERNEL)
10521 #ifdef TARGET_RELOCATABLE
10522 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10524 || (info_ptr->first_fp_reg_save != 64
10525 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10526 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10527 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10528 || (DEFAULT_ABI == ABI_DARWIN
10530 && current_function_uses_pic_offset_table)
10531 || info_ptr->calls_p)
10533 info_ptr->lr_save_p = 1;
10534 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10537 /* Determine if we need to save the condition code registers. */
10538 if (regs_ever_live[CR2_REGNO]
10539 || regs_ever_live[CR3_REGNO]
10540 || regs_ever_live[CR4_REGNO])
10542 info_ptr->cr_save_p = 1;
10543 if (DEFAULT_ABI == ABI_V4)
10544 info_ptr->cr_size = reg_size;
10547 /* If the current function calls __builtin_eh_return, then we need
10548 to allocate stack space for registers that will hold data for
10549 the exception handler. */
10550 if (current_function_calls_eh_return)
10553 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10556 /* SPE saves EH registers in 64-bits. */
10557 ehrd_size = i * (TARGET_SPE_ABI
10558 && info_ptr->spe_64bit_regs_used != 0
10559 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10564 /* Determine various sizes. */
10565 info_ptr->reg_size = reg_size;
10566 info_ptr->fixed_size = RS6000_SAVE_AREA;
10567 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10568 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10569 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10572 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10573 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10575 info_ptr->spe_gp_size = 0;
10577 if (TARGET_ALTIVEC_ABI)
10578 info_ptr->vrsave_mask = compute_vrsave_mask ();
10580 info_ptr->vrsave_mask = 0;
10582 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
10583 info_ptr->vrsave_size = 4;
10585 info_ptr->vrsave_size = 0;
10587 /* Calculate the offsets. */
10588 switch (DEFAULT_ABI)
10596 info_ptr->fp_save_offset = - info_ptr->fp_size;
10597 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10599 if (TARGET_ALTIVEC_ABI)
10601 info_ptr->vrsave_save_offset
10602 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10604 /* Align stack so vector save area is on a quadword boundary. */
10605 if (info_ptr->altivec_size != 0)
10606 info_ptr->altivec_padding_size
10607 = 16 - (-info_ptr->vrsave_save_offset % 16);
10609 info_ptr->altivec_padding_size = 0;
10611 info_ptr->altivec_save_offset
10612 = info_ptr->vrsave_save_offset
10613 - info_ptr->altivec_padding_size
10614 - info_ptr->altivec_size;
10616 /* Adjust for AltiVec case. */
10617 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10620 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10621 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10622 info_ptr->lr_save_offset = 2*reg_size;
10626 info_ptr->fp_save_offset = - info_ptr->fp_size;
10627 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10628 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10630 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10632 /* Align stack so SPE GPR save area is aligned on a
10633 double-word boundary. */
10634 if (info_ptr->spe_gp_size != 0)
10635 info_ptr->spe_padding_size
10636 = 8 - (-info_ptr->cr_save_offset % 8);
10638 info_ptr->spe_padding_size = 0;
10640 info_ptr->spe_gp_save_offset
10641 = info_ptr->cr_save_offset
10642 - info_ptr->spe_padding_size
10643 - info_ptr->spe_gp_size;
10645 /* Adjust for SPE case. */
10646 info_ptr->toc_save_offset
10647 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10649 else if (TARGET_ALTIVEC_ABI)
10651 info_ptr->vrsave_save_offset
10652 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10654 /* Align stack so vector save area is on a quadword boundary. */
10655 if (info_ptr->altivec_size != 0)
10656 info_ptr->altivec_padding_size
10657 = 16 - (-info_ptr->vrsave_save_offset % 16);
10659 info_ptr->altivec_padding_size = 0;
10661 info_ptr->altivec_save_offset
10662 = info_ptr->vrsave_save_offset
10663 - info_ptr->altivec_padding_size
10664 - info_ptr->altivec_size;
10666 /* Adjust for AltiVec case. */
10667 info_ptr->toc_save_offset
10668 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10671 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10672 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10673 info_ptr->lr_save_offset = reg_size;
10677 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10678 + info_ptr->gp_size
10679 + info_ptr->altivec_size
10680 + info_ptr->altivec_padding_size
10681 + info_ptr->spe_gp_size
10682 + info_ptr->spe_padding_size
10684 + info_ptr->cr_size
10685 + info_ptr->lr_size
10686 + info_ptr->vrsave_size
10687 + info_ptr->toc_size,
10688 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10691 total_raw_size = (info_ptr->vars_size
10692 + info_ptr->parm_size
10693 + info_ptr->save_size
10694 + info_ptr->varargs_size
10695 + info_ptr->fixed_size);
10697 info_ptr->total_size =
10698 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10700 /* Determine if we need to allocate any stack frame:
10702 For AIX we need to push the stack if a frame pointer is needed
10703 (because the stack might be dynamically adjusted), if we are
10704 debugging, if we make calls, or if the sum of fp_save, gp_save,
10705 and local variables are more than the space needed to save all
10706 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10707 + 18*8 = 288 (GPR13 reserved).
10709 For V.4 we don't have the stack cushion that AIX uses, but assume
10710 that the debugger can handle stackless frames. */
10712 if (info_ptr->calls_p)
10713 info_ptr->push_p = 1;
10715 else if (DEFAULT_ABI == ABI_V4)
10716 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10718 else if (frame_pointer_needed)
10719 info_ptr->push_p = 1;
10721 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10722 info_ptr->push_p = 1;
10726 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10728 /* Zero offsets if we're not saving those registers. */
10729 if (info_ptr->fp_size == 0)
10730 info_ptr->fp_save_offset = 0;
10732 if (info_ptr->gp_size == 0)
10733 info_ptr->gp_save_offset = 0;
10735 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10736 info_ptr->altivec_save_offset = 0;
10738 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10739 info_ptr->vrsave_save_offset = 0;
10741 if (! TARGET_SPE_ABI
10742 || info_ptr->spe_64bit_regs_used == 0
10743 || info_ptr->spe_gp_size == 0)
10744 info_ptr->spe_gp_save_offset = 0;
10746 if (! info_ptr->lr_save_p)
10747 info_ptr->lr_save_offset = 0;
10749 if (! info_ptr->cr_save_p)
10750 info_ptr->cr_save_offset = 0;
10752 if (! info_ptr->toc_save_p)
10753 info_ptr->toc_save_offset = 0;
10758 /* Return true if the current function uses any GPRs in 64-bit SIMD
10762 spe_func_has_64bit_regs_p (void)
10766 /* Functions that save and restore all the call-saved registers will
10767 need to save/restore the registers in 64-bits. */
10768 if (current_function_calls_eh_return
10769 || current_function_calls_setjmp
10770 || current_function_has_nonlocal_goto)
10773 insns = get_insns ();
10775 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10781 i = PATTERN (insn);
10782 if (GET_CODE (i) == SET
10783 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10792 debug_stack_info (rs6000_stack_t *info)
10794 const char *abi_string;
10797 info = rs6000_stack_info ();
10799 fprintf (stderr, "\nStack information for function %s:\n",
10800 ((current_function_decl && DECL_NAME (current_function_decl))
10801 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10806 default: abi_string = "Unknown"; break;
10807 case ABI_NONE: abi_string = "NONE"; break;
10808 case ABI_AIX: abi_string = "AIX"; break;
10809 case ABI_DARWIN: abi_string = "Darwin"; break;
10810 case ABI_V4: abi_string = "V.4"; break;
10813 fprintf (stderr, "\tABI = %5s\n", abi_string);
10815 if (TARGET_ALTIVEC_ABI)
10816 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10818 if (TARGET_SPE_ABI)
10819 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10821 if (info->first_gp_reg_save != 32)
10822 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10824 if (info->first_fp_reg_save != 64)
10825 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10827 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10828 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10829 info->first_altivec_reg_save);
10831 if (info->lr_save_p)
10832 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10834 if (info->cr_save_p)
10835 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10837 if (info->toc_save_p)
10838 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10840 if (info->vrsave_mask)
10841 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10844 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10847 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10849 if (info->gp_save_offset)
10850 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10852 if (info->fp_save_offset)
10853 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10855 if (info->altivec_save_offset)
10856 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10857 info->altivec_save_offset);
10859 if (info->spe_gp_save_offset)
10860 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10861 info->spe_gp_save_offset);
10863 if (info->vrsave_save_offset)
10864 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10865 info->vrsave_save_offset);
10867 if (info->lr_save_offset)
10868 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10870 if (info->cr_save_offset)
10871 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10873 if (info->toc_save_offset)
10874 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10876 if (info->varargs_save_offset)
10877 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10879 if (info->total_size)
10880 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
10883 if (info->varargs_size)
10884 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10886 if (info->vars_size)
10887 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
10890 if (info->parm_size)
10891 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10893 if (info->fixed_size)
10894 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10897 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10899 if (info->spe_gp_size)
10900 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10903 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10905 if (info->altivec_size)
10906 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10908 if (info->vrsave_size)
10909 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10911 if (info->altivec_padding_size)
10912 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10913 info->altivec_padding_size);
10915 if (info->spe_padding_size)
10916 fprintf (stderr, "\tspe_padding_size = %5d\n",
10917 info->spe_padding_size);
10920 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10923 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10925 if (info->toc_size)
10926 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10928 if (info->save_size)
10929 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10931 if (info->reg_size != 4)
10932 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10934 fprintf (stderr, "\n");
10938 rs6000_return_addr (int count, rtx frame)
10940 /* Currently we don't optimize very well between prolog and body
10941 code and for PIC code the code can be actually quite bad, so
10942 don't try to be too clever here. */
10943 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10945 cfun->machine->ra_needs_full_frame = 1;
10952 plus_constant (copy_to_reg
10953 (gen_rtx_MEM (Pmode,
10954 memory_address (Pmode, frame))),
10955 RETURN_ADDRESS_OFFSET)));
10958 cfun->machine->ra_need_lr = 1;
10959 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10962 /* Say whether a function is a candidate for sibcall handling or not.
10963 We do not allow indirect calls to be optimized into sibling calls.
10964 Also, we can't do it if there are any vector parameters; there's
10965 nowhere to put the VRsave code so it works; note that functions with
10966 vector parameters are required to have a prototype, so the argument
10967 type info must be available here. (The tail recursion case can work
10968 with vector parameters, but there's no way to distinguish here.) */
10970 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
10975 if (TARGET_ALTIVEC_VRSAVE)
10977 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10978 type; type = TREE_CHAIN (type))
10980 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10984 if (DEFAULT_ABI == ABI_DARWIN
10985 || (*targetm.binds_local_p) (decl))
10987 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10989 if (!lookup_attribute ("longcall", attr_list)
10990 || lookup_attribute ("shortcall", attr_list))
10998 rs6000_ra_ever_killed (void)
11004 /* Irritatingly, there are two kinds of thunks -- those created with
11005 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
11006 through the regular part of the compiler. This is a very hacky
11007 way to tell them apart. */
11008 if (current_function_is_thunk && !no_new_pseudos)
11011 /* regs_ever_live has LR marked as used if any sibcalls are present,
11012 but this should not force saving and restoring in the
11013 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11014 clobbers LR, so that is inappropriate. */
11016 /* Also, the prologue can generate a store into LR that
11017 doesn't really count, like this:
11020 bcl to set PIC register
11024 When we're called from the epilogue, we need to avoid counting
11025 this as a store. */
11027 push_topmost_sequence ();
11028 top = get_insns ();
11029 pop_topmost_sequence ();
11030 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11032 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11036 if (FIND_REG_INC_NOTE (insn, reg))
11038 else if (GET_CODE (insn) == CALL_INSN
11039 && !SIBLING_CALL_P (insn))
11041 else if (set_of (reg, insn) != NULL_RTX
11042 && !prologue_epilogue_contains (insn))
11049 /* Add a REG_MAYBE_DEAD note to the insn. */
11051 rs6000_maybe_dead (rtx insn)
11053 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11058 /* Emit instructions needed to load the TOC register.
11059 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11060 a constant pool; or for SVR4 -fpic. */
11063 rs6000_emit_load_toc_table (int fromprolog)
11066 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11068 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11070 rtx temp = (fromprolog
11071 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11072 : gen_reg_rtx (Pmode));
11073 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11075 rs6000_maybe_dead (insn);
11076 insn = emit_move_insn (dest, temp);
11078 rs6000_maybe_dead (insn);
11080 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
11083 rtx tempLR = (fromprolog
11084 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11085 : gen_reg_rtx (Pmode));
11086 rtx temp0 = (fromprolog
11087 ? gen_rtx_REG (Pmode, 0)
11088 : gen_reg_rtx (Pmode));
11091 /* possibly create the toc section */
11092 if (! toc_initialized)
11095 function_section (current_function_decl);
11102 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
11103 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11105 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
11106 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11108 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
11110 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
11111 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
11118 static int reload_toc_labelno = 0;
11120 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
11122 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
11123 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11125 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
11126 emit_move_insn (dest, tempLR);
11127 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
11129 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
11131 rs6000_maybe_dead (insn);
11133 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11135 /* This is for AIX code running in non-PIC ELF32. */
11138 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11139 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11141 insn = emit_insn (gen_elf_high (dest, realsym));
11143 rs6000_maybe_dead (insn);
11144 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11146 rs6000_maybe_dead (insn);
11148 else if (DEFAULT_ABI == ABI_AIX)
11151 insn = emit_insn (gen_load_toc_aix_si (dest));
11153 insn = emit_insn (gen_load_toc_aix_di (dest));
11155 rs6000_maybe_dead (insn);
11161 /* Emit instructions to restore the link register after determining where
11162 its value has been stored. */
11165 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11167 rs6000_stack_t *info = rs6000_stack_info ();
11170 operands[0] = source;
11171 operands[1] = scratch;
11173 if (info->lr_save_p)
11175 rtx frame_rtx = stack_pointer_rtx;
11176 HOST_WIDE_INT sp_offset = 0;
11179 if (frame_pointer_needed
11180 || current_function_calls_alloca
11181 || info->total_size > 32767)
11183 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11184 frame_rtx = operands[1];
11186 else if (info->push_p)
11187 sp_offset = info->total_size;
11189 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11190 tmp = gen_rtx_MEM (Pmode, tmp);
11191 emit_move_insn (tmp, operands[0]);
11194 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11197 static GTY(()) int set = -1;
11200 get_TOC_alias_set (void)
11203 set = new_alias_set ();
11207 /* This returns nonzero if the current function uses the TOC. This is
11208 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11209 is generated by the ABI_V4 load_toc_* patterns. */
11216 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11219 rtx pat = PATTERN (insn);
11222 if (GET_CODE (pat) == PARALLEL)
11223 for (i = 0; i < XVECLEN (pat, 0); i++)
11225 rtx sub = XVECEXP (pat, 0, i);
11226 if (GET_CODE (sub) == USE)
11228 sub = XEXP (sub, 0);
11229 if (GET_CODE (sub) == UNSPEC
11230 && XINT (sub, 1) == UNSPEC_TOC)
11240 create_TOC_reference (rtx symbol)
11242 return gen_rtx_PLUS (Pmode,
11243 gen_rtx_REG (Pmode, TOC_REGISTER),
11244 gen_rtx_CONST (Pmode,
11245 gen_rtx_MINUS (Pmode, symbol,
11246 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11249 /* If _Unwind_* has been called from within the same module,
11250 toc register is not guaranteed to be saved to 40(1) on function
11251 entry. Save it there in that case. */
11254 rs6000_aix_emit_builtin_unwind_init (void)
11257 rtx stack_top = gen_reg_rtx (Pmode);
11258 rtx opcode_addr = gen_reg_rtx (Pmode);
11259 rtx opcode = gen_reg_rtx (SImode);
11260 rtx tocompare = gen_reg_rtx (SImode);
11261 rtx no_toc_save_needed = gen_label_rtx ();
11263 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11264 emit_move_insn (stack_top, mem);
11266 mem = gen_rtx_MEM (Pmode,
11267 gen_rtx_PLUS (Pmode, stack_top,
11268 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11269 emit_move_insn (opcode_addr, mem);
11270 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11271 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11272 : 0xE8410028, SImode));
11274 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11275 SImode, NULL_RTX, NULL_RTX,
11276 no_toc_save_needed);
11278 mem = gen_rtx_MEM (Pmode,
11279 gen_rtx_PLUS (Pmode, stack_top,
11280 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11281 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11282 emit_label (no_toc_save_needed);
11285 /* This ties together stack memory (MEM with an alias set of
11286 rs6000_sr_alias_set) and the change to the stack pointer. */
11289 rs6000_emit_stack_tie (void)
11291 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11293 set_mem_alias_set (mem, rs6000_sr_alias_set);
11294 emit_insn (gen_stack_tie (mem));
11297 /* Emit the correct code for allocating stack space, as insns.
11298 If COPY_R12, make sure a copy of the old frame is left in r12.
11299 The generated code may use hard register 0 as a temporary. */
11302 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11305 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11306 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11307 rtx todec = GEN_INT (-size);
11309 if (current_function_limit_stack)
11311 if (REG_P (stack_limit_rtx)
11312 && REGNO (stack_limit_rtx) > 1
11313 && REGNO (stack_limit_rtx) <= 31)
11315 emit_insn (TARGET_32BIT
11316 ? gen_addsi3 (tmp_reg,
11319 : gen_adddi3 (tmp_reg,
11323 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11326 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11328 && DEFAULT_ABI == ABI_V4)
11330 rtx toload = gen_rtx_CONST (VOIDmode,
11331 gen_rtx_PLUS (Pmode,
11335 emit_insn (gen_elf_high (tmp_reg, toload));
11336 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11337 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11341 warning ("stack limit expression is not supported");
11344 if (copy_r12 || ! TARGET_UPDATE)
11345 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11351 /* Need a note here so that try_split doesn't get confused. */
11352 if (get_last_insn() == NULL_RTX)
11353 emit_note (NOTE_INSN_DELETED);
11354 insn = emit_move_insn (tmp_reg, todec);
11355 try_split (PATTERN (insn), insn, 0);
11359 insn = emit_insn (TARGET_32BIT
11360 ? gen_movsi_update (stack_reg, stack_reg,
11362 : gen_movdi_update (stack_reg, stack_reg,
11363 todec, stack_reg));
11367 insn = emit_insn (TARGET_32BIT
11368 ? gen_addsi3 (stack_reg, stack_reg, todec)
11369 : gen_adddi3 (stack_reg, stack_reg, todec));
11370 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11371 gen_rtx_REG (Pmode, 12));
11374 RTX_FRAME_RELATED_P (insn) = 1;
11376 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11377 gen_rtx_SET (VOIDmode, stack_reg,
11378 gen_rtx_PLUS (Pmode, stack_reg,
11383 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11384 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11385 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11386 deduce these equivalences by itself so it wasn't necessary to hold
11387 its hand so much. */
11390 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11391 rtx reg2, rtx rreg)
11395 /* copy_rtx will not make unique copies of registers, so we need to
11396 ensure we don't have unwanted sharing here. */
11398 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11401 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11403 real = copy_rtx (PATTERN (insn));
11405 if (reg2 != NULL_RTX)
11406 real = replace_rtx (real, reg2, rreg);
11408 real = replace_rtx (real, reg,
11409 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11410 STACK_POINTER_REGNUM),
11413 /* We expect that 'real' is either a SET or a PARALLEL containing
11414 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11415 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11417 if (GET_CODE (real) == SET)
11421 temp = simplify_rtx (SET_SRC (set));
11423 SET_SRC (set) = temp;
11424 temp = simplify_rtx (SET_DEST (set));
11426 SET_DEST (set) = temp;
11427 if (GET_CODE (SET_DEST (set)) == MEM)
11429 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11431 XEXP (SET_DEST (set), 0) = temp;
11434 else if (GET_CODE (real) == PARALLEL)
11437 for (i = 0; i < XVECLEN (real, 0); i++)
11438 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11440 rtx set = XVECEXP (real, 0, i);
11442 temp = simplify_rtx (SET_SRC (set));
11444 SET_SRC (set) = temp;
11445 temp = simplify_rtx (SET_DEST (set));
11447 SET_DEST (set) = temp;
11448 if (GET_CODE (SET_DEST (set)) == MEM)
11450 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11452 XEXP (SET_DEST (set), 0) = temp;
11454 RTX_FRAME_RELATED_P (set) = 1;
11461 real = spe_synthesize_frame_save (real);
11463 RTX_FRAME_RELATED_P (insn) = 1;
11464 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11469 /* Given an SPE frame note, return a PARALLEL of SETs with the
11470 original note, plus a synthetic register save. */
11473 spe_synthesize_frame_save (rtx real)
11475 rtx synth, offset, reg, real2;
11477 if (GET_CODE (real) != SET
11478 || GET_MODE (SET_SRC (real)) != V2SImode)
11481 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11482 frame related note. The parallel contains a set of the register
11483 being saved, and another set to a synthetic register (n+1200).
11484 This is so we can differentiate between 64-bit and 32-bit saves.
11485 Words cannot describe this nastiness. */
11487 if (GET_CODE (SET_DEST (real)) != MEM
11488 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11489 || GET_CODE (SET_SRC (real)) != REG)
11493 (set (mem (plus (reg x) (const y)))
11496 (set (mem (plus (reg x) (const y+4)))
11500 real2 = copy_rtx (real);
11501 PUT_MODE (SET_DEST (real2), SImode);
11502 reg = SET_SRC (real2);
11503 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11504 synth = copy_rtx (real2);
11506 if (BYTES_BIG_ENDIAN)
11508 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11509 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11512 reg = SET_SRC (synth);
11514 synth = replace_rtx (synth, reg,
11515 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11517 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11518 synth = replace_rtx (synth, offset,
11519 GEN_INT (INTVAL (offset)
11520 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11522 RTX_FRAME_RELATED_P (synth) = 1;
11523 RTX_FRAME_RELATED_P (real2) = 1;
11524 if (BYTES_BIG_ENDIAN)
11525 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11527 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11532 /* Returns an insn that has a vrsave set operation with the
11533 appropriate CLOBBERs. */
11536 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11539 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11540 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11543 = gen_rtx_SET (VOIDmode,
11545 gen_rtx_UNSPEC_VOLATILE (SImode,
11546 gen_rtvec (2, reg, vrsave),
11551 /* We need to clobber the registers in the mask so the scheduler
11552 does not move sets to VRSAVE before sets of AltiVec registers.
11554 However, if the function receives nonlocal gotos, reload will set
11555 all call saved registers live. We will end up with:
11557 (set (reg 999) (mem))
11558 (parallel [ (set (reg vrsave) (unspec blah))
11559 (clobber (reg 999))])
11561 The clobber will cause the store into reg 999 to be dead, and
11562 flow will attempt to delete an epilogue insn. In this case, we
11563 need an unspec use/set of the register. */
11565 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11566 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11568 if (!epiloguep || call_used_regs [i])
11569 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11570 gen_rtx_REG (V4SImode, i));
11573 rtx reg = gen_rtx_REG (V4SImode, i);
11576 = gen_rtx_SET (VOIDmode,
11578 gen_rtx_UNSPEC (V4SImode,
11579 gen_rtvec (1, reg), 27));
11583 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11585 for (i = 0; i < nclobs; ++i)
11586 XVECEXP (insn, 0, i) = clobs[i];
11591 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11592 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11595 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11596 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11598 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11599 rtx replacea, replaceb;
11601 int_rtx = GEN_INT (offset);
11603 /* Some cases that need register indexed addressing. */
11604 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11606 && SPE_VECTOR_MODE (mode)
11607 && !SPE_CONST_OFFSET_OK (offset)))
11609 /* Whomever calls us must make sure r11 is available in the
11610 flow path of instructions in the prologue. */
11611 offset_rtx = gen_rtx_REG (Pmode, 11);
11612 emit_move_insn (offset_rtx, int_rtx);
11614 replacea = offset_rtx;
11615 replaceb = int_rtx;
11619 offset_rtx = int_rtx;
11620 replacea = NULL_RTX;
11621 replaceb = NULL_RTX;
11624 reg = gen_rtx_REG (mode, regno);
11625 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11626 mem = gen_rtx_MEM (mode, addr);
11627 set_mem_alias_set (mem, rs6000_sr_alias_set);
11629 insn = emit_move_insn (mem, reg);
11631 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11634 /* Emit an offset memory reference suitable for a frame store, while
11635 converting to a valid addressing mode. */
11638 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11640 rtx int_rtx, offset_rtx;
11642 int_rtx = GEN_INT (offset);
11644 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11646 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11647 emit_move_insn (offset_rtx, int_rtx);
11650 offset_rtx = int_rtx;
11652 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11655 /* Emit function prologue as insns. */
11658 rs6000_emit_prologue (void)
11660 rs6000_stack_t *info = rs6000_stack_info ();
11661 enum machine_mode reg_mode = Pmode;
11662 int reg_size = UNITS_PER_WORD;
11663 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11664 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11665 rtx frame_reg_rtx = sp_reg_rtx;
11666 rtx cr_save_rtx = NULL_RTX;
11668 int saving_FPRs_inline;
11669 int using_store_multiple;
11670 HOST_WIDE_INT sp_offset = 0;
11672 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11674 reg_mode = V2SImode;
11678 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11679 && (!TARGET_SPE_ABI
11680 || info->spe_64bit_regs_used == 0)
11681 && info->first_gp_reg_save < 31);
11682 saving_FPRs_inline = (info->first_fp_reg_save == 64
11683 || FP_SAVE_INLINE (info->first_fp_reg_save)
11684 || current_function_calls_eh_return
11685 || cfun->machine->ra_need_lr);
11687 /* For V.4, update stack before we do any saving and set back pointer. */
11689 && (DEFAULT_ABI == ABI_V4
11690 || current_function_calls_eh_return))
11692 if (info->total_size < 32767)
11693 sp_offset = info->total_size;
11695 frame_reg_rtx = frame_ptr_rtx;
11696 rs6000_emit_allocate_stack (info->total_size,
11697 (frame_reg_rtx != sp_reg_rtx
11698 && (info->cr_save_p
11700 || info->first_fp_reg_save < 64
11701 || info->first_gp_reg_save < 32
11703 if (frame_reg_rtx != sp_reg_rtx)
11704 rs6000_emit_stack_tie ();
11707 /* Save AltiVec registers if needed. */
11708 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11712 /* There should be a non inline version of this, for when we
11713 are saving lots of vector registers. */
11714 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11715 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11717 rtx areg, savereg, mem;
11720 offset = info->altivec_save_offset + sp_offset
11721 + 16 * (i - info->first_altivec_reg_save);
11723 savereg = gen_rtx_REG (V4SImode, i);
11725 areg = gen_rtx_REG (Pmode, 0);
11726 emit_move_insn (areg, GEN_INT (offset));
11728 /* AltiVec addressing mode is [reg+reg]. */
11729 mem = gen_rtx_MEM (V4SImode,
11730 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11732 set_mem_alias_set (mem, rs6000_sr_alias_set);
11734 insn = emit_move_insn (mem, savereg);
11736 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11737 areg, GEN_INT (offset));
11741 /* VRSAVE is a bit vector representing which AltiVec registers
11742 are used. The OS uses this to determine which vector
11743 registers to save on a context switch. We need to save
11744 VRSAVE on the stack frame, add whatever AltiVec registers we
11745 used in this function, and do the corresponding magic in the
11748 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
11749 && info->vrsave_mask != 0)
11751 rtx reg, mem, vrsave;
11754 /* Get VRSAVE onto a GPR. */
11755 reg = gen_rtx_REG (SImode, 12);
11756 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11758 emit_insn (gen_get_vrsave_internal (reg));
11760 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11763 offset = info->vrsave_save_offset + sp_offset;
11765 = gen_rtx_MEM (SImode,
11766 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11767 set_mem_alias_set (mem, rs6000_sr_alias_set);
11768 insn = emit_move_insn (mem, reg);
11770 /* Include the registers in the mask. */
11771 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11773 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11776 /* If we use the link register, get it into r0. */
11777 if (info->lr_save_p)
11778 emit_move_insn (gen_rtx_REG (Pmode, 0),
11779 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11781 /* If we need to save CR, put it into r12. */
11782 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11784 cr_save_rtx = gen_rtx_REG (SImode, 12);
11785 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11788 /* Do any required saving of fpr's. If only one or two to save, do
11789 it ourselves. Otherwise, call function. */
11790 if (saving_FPRs_inline)
11793 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11794 if ((regs_ever_live[info->first_fp_reg_save+i]
11795 && ! call_used_regs[info->first_fp_reg_save+i]))
11796 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11797 info->first_fp_reg_save + i,
11798 info->fp_save_offset + sp_offset + 8 * i,
11801 else if (info->first_fp_reg_save != 64)
11805 const char *alloc_rname;
11807 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11809 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11810 gen_rtx_REG (Pmode,
11811 LINK_REGISTER_REGNUM));
11812 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11813 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11814 alloc_rname = ggc_strdup (rname);
11815 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11816 gen_rtx_SYMBOL_REF (Pmode,
11818 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11820 rtx addr, reg, mem;
11821 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11822 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11823 GEN_INT (info->fp_save_offset
11824 + sp_offset + 8*i));
11825 mem = gen_rtx_MEM (DFmode, addr);
11826 set_mem_alias_set (mem, rs6000_sr_alias_set);
11828 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11830 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11831 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11832 NULL_RTX, NULL_RTX);
11835 /* Save GPRs. This is done as a PARALLEL if we are using
11836 the store-multiple instructions. */
11837 if (using_store_multiple)
11841 p = rtvec_alloc (32 - info->first_gp_reg_save);
11842 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11844 rtx addr, reg, mem;
11845 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11846 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11847 GEN_INT (info->gp_save_offset
11850 mem = gen_rtx_MEM (reg_mode, addr);
11851 set_mem_alias_set (mem, rs6000_sr_alias_set);
11853 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11855 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11856 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11857 NULL_RTX, NULL_RTX);
11862 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11863 if ((regs_ever_live[info->first_gp_reg_save+i]
11864 && ! call_used_regs[info->first_gp_reg_save+i])
11865 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11866 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11867 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11869 rtx addr, reg, mem;
11870 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11872 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11874 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11877 if (!SPE_CONST_OFFSET_OK (offset))
11879 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11880 emit_move_insn (b, GEN_INT (offset));
11883 b = GEN_INT (offset);
11885 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11886 mem = gen_rtx_MEM (V2SImode, addr);
11887 set_mem_alias_set (mem, rs6000_sr_alias_set);
11888 insn = emit_move_insn (mem, reg);
11890 if (GET_CODE (b) == CONST_INT)
11891 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11892 NULL_RTX, NULL_RTX);
11894 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11895 b, GEN_INT (offset));
11899 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11900 GEN_INT (info->gp_save_offset
11903 mem = gen_rtx_MEM (reg_mode, addr);
11904 set_mem_alias_set (mem, rs6000_sr_alias_set);
11906 insn = emit_move_insn (mem, reg);
11907 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11908 NULL_RTX, NULL_RTX);
11913 /* ??? There's no need to emit actual instructions here, but it's the
11914 easiest way to get the frame unwind information emitted. */
11915 if (current_function_calls_eh_return)
11917 unsigned int i, regno;
11919 /* In AIX ABI we need to pretend we save r2 here. */
11922 rtx addr, reg, mem;
11924 reg = gen_rtx_REG (reg_mode, 2);
11925 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11926 GEN_INT (sp_offset + 5 * reg_size));
11927 mem = gen_rtx_MEM (reg_mode, addr);
11928 set_mem_alias_set (mem, rs6000_sr_alias_set);
11930 insn = emit_move_insn (mem, reg);
11931 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11932 NULL_RTX, NULL_RTX);
11933 PATTERN (insn) = gen_blockage ();
11938 regno = EH_RETURN_DATA_REGNO (i);
11939 if (regno == INVALID_REGNUM)
11942 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11943 info->ehrd_offset + sp_offset
11944 + reg_size * (int) i,
11949 /* Save lr if we used it. */
11950 if (info->lr_save_p)
11952 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11953 GEN_INT (info->lr_save_offset + sp_offset));
11954 rtx reg = gen_rtx_REG (Pmode, 0);
11955 rtx mem = gen_rtx_MEM (Pmode, addr);
11956 /* This should not be of rs6000_sr_alias_set, because of
11957 __builtin_return_address. */
11959 insn = emit_move_insn (mem, reg);
11960 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11961 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11964 /* Save CR if we use any that must be preserved. */
11965 if (info->cr_save_p)
11967 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11968 GEN_INT (info->cr_save_offset + sp_offset));
11969 rtx mem = gen_rtx_MEM (SImode, addr);
11971 set_mem_alias_set (mem, rs6000_sr_alias_set);
11973 /* If r12 was used to hold the original sp, copy cr into r0 now
11975 if (REGNO (frame_reg_rtx) == 12)
11977 cr_save_rtx = gen_rtx_REG (SImode, 0);
11978 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11980 insn = emit_move_insn (mem, cr_save_rtx);
11982 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11983 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11984 But that's OK. All we have to do is specify that _one_ condition
11985 code register is saved in this stack slot. The thrower's epilogue
11986 will then restore all the call-saved registers.
11987 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11988 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11989 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11992 /* Update stack and set back pointer unless this is V.4,
11993 for which it was done previously. */
11995 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
11996 rs6000_emit_allocate_stack (info->total_size, FALSE);
11998 /* Set frame pointer, if needed. */
11999 if (frame_pointer_needed)
12001 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12003 RTX_FRAME_RELATED_P (insn) = 1;
12006 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12007 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12008 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12009 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12011 /* If emit_load_toc_table will use the link register, we need to save
12012 it. We use R12 for this purpose because emit_load_toc_table
12013 can use register 0. This allows us to use a plain 'blr' to return
12014 from the procedure more often. */
12015 int save_LR_around_toc_setup = (TARGET_ELF
12016 && DEFAULT_ABI != ABI_AIX
12018 && ! info->lr_save_p
12019 && EXIT_BLOCK_PTR->pred != NULL);
12020 if (save_LR_around_toc_setup)
12022 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12023 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
12024 rs6000_emit_load_toc_table (TRUE);
12025 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
12028 rs6000_emit_load_toc_table (TRUE);
12032 if (DEFAULT_ABI == ABI_DARWIN
12033 && flag_pic && current_function_uses_pic_offset_table)
12035 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12036 const char *picbase = machopic_function_base_name ();
12037 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
12039 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
12041 rs6000_maybe_dead (
12042 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
12043 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
12048 /* Write function prologue. */
12051 rs6000_output_function_prologue (FILE *file,
12052 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12054 rs6000_stack_t *info = rs6000_stack_info ();
12056 if (TARGET_DEBUG_STACK)
12057 debug_stack_info (info);
12059 /* Write .extern for any function we will call to save and restore
12061 if (info->first_fp_reg_save < 64
12062 && !FP_SAVE_INLINE (info->first_fp_reg_save))
12063 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12064 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
12065 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
12066 RESTORE_FP_SUFFIX);
12068 /* Write .extern for AIX common mode routines, if needed. */
12069 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
12071 fputs ("\t.extern __mulh\n", file);
12072 fputs ("\t.extern __mull\n", file);
12073 fputs ("\t.extern __divss\n", file);
12074 fputs ("\t.extern __divus\n", file);
12075 fputs ("\t.extern __quoss\n", file);
12076 fputs ("\t.extern __quous\n", file);
12077 common_mode_defined = 1;
12080 if (! HAVE_prologue)
12084 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12085 the "toplevel" insn chain. */
12086 emit_note (NOTE_INSN_DELETED);
12087 rs6000_emit_prologue ();
12088 emit_note (NOTE_INSN_DELETED);
12090 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12094 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12096 INSN_ADDRESSES_NEW (insn, addr);
12101 if (TARGET_DEBUG_STACK)
12102 debug_rtx_list (get_insns (), 100);
12103 final (get_insns (), file, FALSE, FALSE);
12107 rs6000_pic_labelno++;
12110 /* Emit function epilogue as insns.
12112 At present, dwarf2out_frame_debug_expr doesn't understand
12113 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12114 anywhere in the epilogue. Most of the insns below would in any case
12115 need special notes to explain where r11 is in relation to the stack. */
12118 rs6000_emit_epilogue (int sibcall)
12120 rs6000_stack_t *info;
12121 int restoring_FPRs_inline;
12122 int using_load_multiple;
12123 int using_mfcr_multiple;
12124 int use_backchain_to_restore_sp;
12126 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
12127 rtx frame_reg_rtx = sp_reg_rtx;
12128 enum machine_mode reg_mode = Pmode;
12129 int reg_size = UNITS_PER_WORD;
12132 info = rs6000_stack_info ();
12134 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12136 reg_mode = V2SImode;
12140 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12141 && (!TARGET_SPE_ABI
12142 || info->spe_64bit_regs_used == 0)
12143 && info->first_gp_reg_save < 31);
12144 restoring_FPRs_inline = (sibcall
12145 || current_function_calls_eh_return
12146 || info->first_fp_reg_save == 64
12147 || FP_SAVE_INLINE (info->first_fp_reg_save));
12148 use_backchain_to_restore_sp = (frame_pointer_needed
12149 || current_function_calls_alloca
12150 || info->total_size > 32767);
12151 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12152 || rs6000_cpu == PROCESSOR_PPC603
12153 || rs6000_cpu == PROCESSOR_PPC750
12156 /* If we have a frame pointer, a call to alloca, or a large stack
12157 frame, restore the old stack pointer using the backchain. Otherwise,
12158 we know what size to update it with. */
12159 if (use_backchain_to_restore_sp)
12161 /* Under V.4, don't reset the stack pointer until after we're done
12162 loading the saved registers. */
12163 if (DEFAULT_ABI == ABI_V4)
12164 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12166 emit_move_insn (frame_reg_rtx,
12167 gen_rtx_MEM (Pmode, sp_reg_rtx));
12170 else if (info->push_p)
12172 if (DEFAULT_ABI == ABI_V4
12173 || current_function_calls_eh_return)
12174 sp_offset = info->total_size;
12177 emit_insn (TARGET_32BIT
12178 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12179 GEN_INT (info->total_size))
12180 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12181 GEN_INT (info->total_size)));
12185 /* Restore AltiVec registers if needed. */
12186 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12190 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12191 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12193 rtx addr, areg, mem;
12195 areg = gen_rtx_REG (Pmode, 0);
12197 (areg, GEN_INT (info->altivec_save_offset
12199 + 16 * (i - info->first_altivec_reg_save)));
12201 /* AltiVec addressing mode is [reg+reg]. */
12202 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12203 mem = gen_rtx_MEM (V4SImode, addr);
12204 set_mem_alias_set (mem, rs6000_sr_alias_set);
12206 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12210 /* Restore VRSAVE if needed. */
12211 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE
12212 && info->vrsave_mask != 0)
12214 rtx addr, mem, reg;
12216 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12217 GEN_INT (info->vrsave_save_offset + sp_offset));
12218 mem = gen_rtx_MEM (SImode, addr);
12219 set_mem_alias_set (mem, rs6000_sr_alias_set);
12220 reg = gen_rtx_REG (SImode, 12);
12221 emit_move_insn (reg, mem);
12223 emit_insn (generate_set_vrsave (reg, info, 1));
12226 /* Get the old lr if we saved it. */
12227 if (info->lr_save_p)
12229 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12230 info->lr_save_offset + sp_offset);
12232 set_mem_alias_set (mem, rs6000_sr_alias_set);
12234 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12237 /* Get the old cr if we saved it. */
12238 if (info->cr_save_p)
12240 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12241 GEN_INT (info->cr_save_offset + sp_offset));
12242 rtx mem = gen_rtx_MEM (SImode, addr);
12244 set_mem_alias_set (mem, rs6000_sr_alias_set);
12246 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12249 /* Set LR here to try to overlap restores below. */
12250 if (info->lr_save_p)
12251 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12252 gen_rtx_REG (Pmode, 0));
12254 /* Load exception handler data registers, if needed. */
12255 if (current_function_calls_eh_return)
12257 unsigned int i, regno;
12261 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12262 GEN_INT (sp_offset + 5 * reg_size));
12263 rtx mem = gen_rtx_MEM (reg_mode, addr);
12265 set_mem_alias_set (mem, rs6000_sr_alias_set);
12267 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12274 regno = EH_RETURN_DATA_REGNO (i);
12275 if (regno == INVALID_REGNUM)
12278 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12279 info->ehrd_offset + sp_offset
12280 + reg_size * (int) i);
12281 set_mem_alias_set (mem, rs6000_sr_alias_set);
12283 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12287 /* Restore GPRs. This is done as a PARALLEL if we are using
12288 the load-multiple instructions. */
12289 if (using_load_multiple)
12292 p = rtvec_alloc (32 - info->first_gp_reg_save);
12293 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12295 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12296 GEN_INT (info->gp_save_offset
12299 rtx mem = gen_rtx_MEM (reg_mode, addr);
12301 set_mem_alias_set (mem, rs6000_sr_alias_set);
12304 gen_rtx_SET (VOIDmode,
12305 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12308 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12311 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12312 if ((regs_ever_live[info->first_gp_reg_save+i]
12313 && ! call_used_regs[info->first_gp_reg_save+i])
12314 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12315 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12316 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12318 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12319 GEN_INT (info->gp_save_offset
12322 rtx mem = gen_rtx_MEM (reg_mode, addr);
12324 /* Restore 64-bit quantities for SPE. */
12325 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12327 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12330 if (!SPE_CONST_OFFSET_OK (offset))
12332 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12333 emit_move_insn (b, GEN_INT (offset));
12336 b = GEN_INT (offset);
12338 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12339 mem = gen_rtx_MEM (V2SImode, addr);
12342 set_mem_alias_set (mem, rs6000_sr_alias_set);
12344 emit_move_insn (gen_rtx_REG (reg_mode,
12345 info->first_gp_reg_save + i), mem);
12348 /* Restore fpr's if we need to do it without calling a function. */
12349 if (restoring_FPRs_inline)
12350 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12351 if ((regs_ever_live[info->first_fp_reg_save+i]
12352 && ! call_used_regs[info->first_fp_reg_save+i]))
12355 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12356 GEN_INT (info->fp_save_offset
12359 mem = gen_rtx_MEM (DFmode, addr);
12360 set_mem_alias_set (mem, rs6000_sr_alias_set);
12362 emit_move_insn (gen_rtx_REG (DFmode,
12363 info->first_fp_reg_save + i),
12367 /* If we saved cr, restore it here. Just those that were used. */
12368 if (info->cr_save_p)
12370 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12373 if (using_mfcr_multiple)
12375 for (i = 0; i < 8; i++)
12376 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12382 if (using_mfcr_multiple && count > 1)
12387 p = rtvec_alloc (count);
12390 for (i = 0; i < 8; i++)
12391 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12393 rtvec r = rtvec_alloc (2);
12394 RTVEC_ELT (r, 0) = r12_rtx;
12395 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12396 RTVEC_ELT (p, ndx) =
12397 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12398 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12401 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12406 for (i = 0; i < 8; i++)
12407 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12409 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12415 /* If this is V.4, unwind the stack pointer after all of the loads
12416 have been done. We need to emit a block here so that sched
12417 doesn't decide to move the sp change before the register restores
12418 (which may not have any obvious dependency on the stack). This
12419 doesn't hurt performance, because there is no scheduling that can
12420 be done after this point. */
12421 if (DEFAULT_ABI == ABI_V4
12422 || current_function_calls_eh_return)
12424 if (frame_reg_rtx != sp_reg_rtx)
12425 rs6000_emit_stack_tie ();
12427 if (use_backchain_to_restore_sp)
12429 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12431 else if (sp_offset != 0)
12433 emit_insn (TARGET_32BIT
12434 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12435 GEN_INT (sp_offset))
12436 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12437 GEN_INT (sp_offset)));
12441 if (current_function_calls_eh_return)
12443 rtx sa = EH_RETURN_STACKADJ_RTX;
12444 emit_insn (TARGET_32BIT
12445 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12446 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12452 if (! restoring_FPRs_inline)
12453 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12455 p = rtvec_alloc (2);
12457 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12458 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12459 gen_rtx_REG (Pmode,
12460 LINK_REGISTER_REGNUM));
12462 /* If we have to restore more than two FP registers, branch to the
12463 restore function. It will return to our caller. */
12464 if (! restoring_FPRs_inline)
12468 const char *alloc_rname;
12470 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12471 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12472 alloc_rname = ggc_strdup (rname);
12473 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12474 gen_rtx_SYMBOL_REF (Pmode,
12477 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12480 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12481 GEN_INT (info->fp_save_offset + 8*i));
12482 mem = gen_rtx_MEM (DFmode, addr);
12483 set_mem_alias_set (mem, rs6000_sr_alias_set);
12485 RTVEC_ELT (p, i+3) =
12486 gen_rtx_SET (VOIDmode,
12487 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12492 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12496 /* Write function epilogue. */
12499 rs6000_output_function_epilogue (FILE *file,
12500 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12502 rs6000_stack_t *info = rs6000_stack_info ();
12504 if (! HAVE_epilogue)
12506 rtx insn = get_last_insn ();
12507 /* If the last insn was a BARRIER, we don't have to write anything except
12508 the trace table. */
12509 if (GET_CODE (insn) == NOTE)
12510 insn = prev_nonnote_insn (insn);
12511 if (insn == 0 || GET_CODE (insn) != BARRIER)
12513 /* This is slightly ugly, but at least we don't have two
12514 copies of the epilogue-emitting code. */
12517 /* A NOTE_INSN_DELETED is supposed to be at the start
12518 and end of the "toplevel" insn chain. */
12519 emit_note (NOTE_INSN_DELETED);
12520 rs6000_emit_epilogue (FALSE);
12521 emit_note (NOTE_INSN_DELETED);
12523 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12527 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12529 INSN_ADDRESSES_NEW (insn, addr);
12534 if (TARGET_DEBUG_STACK)
12535 debug_rtx_list (get_insns (), 100);
12536 final (get_insns (), file, FALSE, FALSE);
12542 macho_branch_islands ();
12543 /* Mach-O doesn't support labels at the end of objects, so if
12544 it looks like we might want one, insert a NOP. */
12546 rtx insn = get_last_insn ();
12549 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12550 insn = PREV_INSN (insn);
12554 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12555 fputs ("\tnop\n", file);
12559 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12562 We don't output a traceback table if -finhibit-size-directive was
12563 used. The documentation for -finhibit-size-directive reads
12564 ``don't output a @code{.size} assembler directive, or anything
12565 else that would cause trouble if the function is split in the
12566 middle, and the two halves are placed at locations far apart in
12567 memory.'' The traceback table has this property, since it
12568 includes the offset from the start of the function to the
12569 traceback table itself.
12571 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12572 different traceback table. */
12573 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12574 && rs6000_traceback != traceback_none)
12576 const char *fname = NULL;
12577 const char *language_string = lang_hooks.name;
12578 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12580 int optional_tbtab;
12582 if (rs6000_traceback == traceback_full)
12583 optional_tbtab = 1;
12584 else if (rs6000_traceback == traceback_part)
12585 optional_tbtab = 0;
12587 optional_tbtab = !optimize_size && !TARGET_ELF;
12589 if (optional_tbtab)
12591 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12592 while (*fname == '.') /* V.4 encodes . in the name */
12595 /* Need label immediately before tbtab, so we can compute
12596 its offset from the function start. */
12597 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12598 ASM_OUTPUT_LABEL (file, fname);
12601 /* The .tbtab pseudo-op can only be used for the first eight
12602 expressions, since it can't handle the possibly variable
12603 length fields that follow. However, if you omit the optional
12604 fields, the assembler outputs zeros for all optional fields
12605 anyways, giving each variable length field is minimum length
12606 (as defined in sys/debug.h). Thus we can not use the .tbtab
12607 pseudo-op at all. */
12609 /* An all-zero word flags the start of the tbtab, for debuggers
12610 that have to find it by searching forward from the entry
12611 point or from the current pc. */
12612 fputs ("\t.long 0\n", file);
12614 /* Tbtab format type. Use format type 0. */
12615 fputs ("\t.byte 0,", file);
12617 /* Language type. Unfortunately, there does not seem to be any
12618 official way to discover the language being compiled, so we
12619 use language_string.
12620 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
12621 Java is 13. Objective-C is 14. */
12622 if (! strcmp (language_string, "GNU C"))
12624 else if (! strcmp (language_string, "GNU F77"))
12626 else if (! strcmp (language_string, "GNU Pascal"))
12628 else if (! strcmp (language_string, "GNU Ada"))
12630 else if (! strcmp (language_string, "GNU C++"))
12632 else if (! strcmp (language_string, "GNU Java"))
12634 else if (! strcmp (language_string, "GNU Objective-C"))
12638 fprintf (file, "%d,", i);
12640 /* 8 single bit fields: global linkage (not set for C extern linkage,
12641 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12642 from start of procedure stored in tbtab, internal function, function
12643 has controlled storage, function has no toc, function uses fp,
12644 function logs/aborts fp operations. */
12645 /* Assume that fp operations are used if any fp reg must be saved. */
12646 fprintf (file, "%d,",
12647 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12649 /* 6 bitfields: function is interrupt handler, name present in
12650 proc table, function calls alloca, on condition directives
12651 (controls stack walks, 3 bits), saves condition reg, saves
12653 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12654 set up as a frame pointer, even when there is no alloca call. */
12655 fprintf (file, "%d,",
12656 ((optional_tbtab << 6)
12657 | ((optional_tbtab & frame_pointer_needed) << 5)
12658 | (info->cr_save_p << 1)
12659 | (info->lr_save_p)));
12661 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12663 fprintf (file, "%d,",
12664 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12666 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12667 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12669 if (optional_tbtab)
12671 /* Compute the parameter info from the function decl argument
12674 int next_parm_info_bit = 31;
12676 for (decl = DECL_ARGUMENTS (current_function_decl);
12677 decl; decl = TREE_CHAIN (decl))
12679 rtx parameter = DECL_INCOMING_RTL (decl);
12680 enum machine_mode mode = GET_MODE (parameter);
12682 if (GET_CODE (parameter) == REG)
12684 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12690 if (mode == SFmode)
12692 else if (mode == DFmode || mode == TFmode)
12697 /* If only one bit will fit, don't or in this entry. */
12698 if (next_parm_info_bit > 0)
12699 parm_info |= (bits << (next_parm_info_bit - 1));
12700 next_parm_info_bit -= 2;
12704 fixed_parms += ((GET_MODE_SIZE (mode)
12705 + (UNITS_PER_WORD - 1))
12707 next_parm_info_bit -= 1;
12713 /* Number of fixed point parameters. */
12714 /* This is actually the number of words of fixed point parameters; thus
12715 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12716 fprintf (file, "%d,", fixed_parms);
12718 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12720 /* This is actually the number of fp registers that hold parameters;
12721 and thus the maximum value is 13. */
12722 /* Set parameters on stack bit if parameters are not in their original
12723 registers, regardless of whether they are on the stack? Xlc
12724 seems to set the bit when not optimizing. */
12725 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12727 if (! optional_tbtab)
12730 /* Optional fields follow. Some are variable length. */
12732 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12733 11 double float. */
12734 /* There is an entry for each parameter in a register, in the order that
12735 they occur in the parameter list. Any intervening arguments on the
12736 stack are ignored. If the list overflows a long (max possible length
12737 34 bits) then completely leave off all elements that don't fit. */
12738 /* Only emit this long if there was at least one parameter. */
12739 if (fixed_parms || float_parms)
12740 fprintf (file, "\t.long %d\n", parm_info);
12742 /* Offset from start of code to tb table. */
12743 fputs ("\t.long ", file);
12744 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12746 RS6000_OUTPUT_BASENAME (file, fname);
12748 assemble_name (file, fname);
12750 fputs ("-.", file);
12752 RS6000_OUTPUT_BASENAME (file, fname);
12754 assemble_name (file, fname);
12758 /* Interrupt handler mask. */
12759 /* Omit this long, since we never set the interrupt handler bit
12762 /* Number of CTL (controlled storage) anchors. */
12763 /* Omit this long, since the has_ctl bit is never set above. */
12765 /* Displacement into stack of each CTL anchor. */
12766 /* Omit this list of longs, because there are no CTL anchors. */
12768 /* Length of function name. */
12771 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12773 /* Function name. */
12774 assemble_string (fname, strlen (fname));
12776 /* Register for alloca automatic storage; this is always reg 31.
12777 Only emit this if the alloca bit was set above. */
12778 if (frame_pointer_needed)
12779 fputs ("\t.byte 31\n", file);
12781 fputs ("\t.align 2\n", file);
12785 /* A C compound statement that outputs the assembler code for a thunk
12786 function, used to implement C++ virtual function calls with
12787 multiple inheritance. The thunk acts as a wrapper around a virtual
12788 function, adjusting the implicit object parameter before handing
12789 control off to the real function.
12791 First, emit code to add the integer DELTA to the location that
12792 contains the incoming first argument. Assume that this argument
12793 contains a pointer, and is the one used to pass the `this' pointer
12794 in C++. This is the incoming argument *before* the function
12795 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12796 values of all other incoming arguments.
12798 After the addition, emit code to jump to FUNCTION, which is a
12799 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12800 not touch the return address. Hence returning from FUNCTION will
12801 return to whoever called the current `thunk'.
12803 The effect must be as if FUNCTION had been called directly with the
12804 adjusted first argument. This macro is responsible for emitting
12805 all of the code for a thunk function; output_function_prologue()
12806 and output_function_epilogue() are not invoked.
12808 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12809 been extracted from it.) It might possibly be useful on some
12810 targets, but probably not.
12812 If you do not define this macro, the target-independent code in the
12813 C++ frontend will generate a less efficient heavyweight thunk that
12814 calls FUNCTION instead of jumping to it. The generic approach does
12815 not support varargs. */
12818 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
12819 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
12822 rtx this, insn, funexp;
12824 reload_completed = 1;
12825 epilogue_completed = 1;
12826 no_new_pseudos = 1;
12828 /* Mark the end of the (empty) prologue. */
12829 emit_note (NOTE_INSN_PROLOGUE_END);
12831 /* Find the "this" pointer. If the function returns a structure,
12832 the structure return pointer is in r3. */
12833 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
12834 this = gen_rtx_REG (Pmode, 4);
12836 this = gen_rtx_REG (Pmode, 3);
12838 /* Apply the constant offset, if required. */
12841 rtx delta_rtx = GEN_INT (delta);
12842 emit_insn (TARGET_32BIT
12843 ? gen_addsi3 (this, this, delta_rtx)
12844 : gen_adddi3 (this, this, delta_rtx));
12847 /* Apply the offset from the vtable, if required. */
12850 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12851 rtx tmp = gen_rtx_REG (Pmode, 12);
12853 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12854 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
12856 emit_insn (TARGET_32BIT
12857 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12858 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12859 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12863 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
12865 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
12867 emit_insn (TARGET_32BIT
12868 ? gen_addsi3 (this, this, tmp)
12869 : gen_adddi3 (this, this, tmp));
12872 /* Generate a tail call to the target function. */
12873 if (!TREE_USED (function))
12875 assemble_external (function);
12876 TREE_USED (function) = 1;
12878 funexp = XEXP (DECL_RTL (function), 0);
12879 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12882 if (MACHOPIC_INDIRECT)
12883 funexp = machopic_indirect_call_target (funexp);
12886 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12887 generate sibcall RTL explicitly to avoid constraint abort. */
12888 insn = emit_call_insn (
12889 gen_rtx_PARALLEL (VOIDmode,
12891 gen_rtx_CALL (VOIDmode,
12892 funexp, const0_rtx),
12893 gen_rtx_USE (VOIDmode, const0_rtx),
12894 gen_rtx_USE (VOIDmode,
12895 gen_rtx_REG (SImode,
12896 LINK_REGISTER_REGNUM)),
12897 gen_rtx_RETURN (VOIDmode))));
12898 SIBLING_CALL_P (insn) = 1;
12901 /* Run just enough of rest_of_compilation to get the insns emitted.
12902 There's not really enough bulk here to make other passes such as
12903 instruction scheduling worth while. Note that use_thunk calls
12904 assemble_start_function and assemble_end_function. */
12905 insn = get_insns ();
12906 insn_locators_initialize ();
12907 shorten_branches (insn);
12908 final_start_function (insn, file, 1);
12909 final (insn, file, 1, 0);
12910 final_end_function ();
12912 reload_completed = 0;
12913 epilogue_completed = 0;
12914 no_new_pseudos = 0;
12917 /* A quick summary of the various types of 'constant-pool tables'
12920 Target Flags Name One table per
12921 AIX (none) AIX TOC object file
12922 AIX -mfull-toc AIX TOC object file
12923 AIX -mminimal-toc AIX minimal TOC translation unit
12924 SVR4/EABI (none) SVR4 SDATA object file
12925 SVR4/EABI -fpic SVR4 pic object file
12926 SVR4/EABI -fPIC SVR4 PIC translation unit
12927 SVR4/EABI -mrelocatable EABI TOC function
12928 SVR4/EABI -maix AIX TOC object file
12929 SVR4/EABI -maix -mminimal-toc
12930 AIX minimal TOC translation unit
12932 Name Reg. Set by entries contains:
12933 made by addrs? fp? sum?
12935 AIX TOC 2 crt0 as Y option option
12936 AIX minimal TOC 30 prolog gcc Y Y option
12937 SVR4 SDATA 13 crt0 gcc N Y N
12938 SVR4 pic 30 prolog ld Y not yet N
12939 SVR4 PIC 30 prolog gcc Y option option
12940 EABI TOC 30 prolog gcc Y option option
12944 /* Hash functions for the hash table. */
12947 rs6000_hash_constant (rtx k)
12949 enum rtx_code code = GET_CODE (k);
12950 enum machine_mode mode = GET_MODE (k);
12951 unsigned result = (code << 3) ^ mode;
12952 const char *format;
12955 format = GET_RTX_FORMAT (code);
12956 flen = strlen (format);
12962 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12965 if (mode != VOIDmode)
12966 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12978 for (; fidx < flen; fidx++)
12979 switch (format[fidx])
12984 const char *str = XSTR (k, fidx);
12985 len = strlen (str);
12986 result = result * 613 + len;
12987 for (i = 0; i < len; i++)
12988 result = result * 613 + (unsigned) str[i];
12993 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12997 result = result * 613 + (unsigned) XINT (k, fidx);
13000 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13001 result = result * 613 + (unsigned) XWINT (k, fidx);
13005 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13006 result = result * 613 + (unsigned) (XWINT (k, fidx)
13020 toc_hash_function (const void *hash_entry)
13022 const struct toc_hash_struct *thc =
13023 (const struct toc_hash_struct *) hash_entry;
13024 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13027 /* Compare H1 and H2 for equivalence. */
13030 toc_hash_eq (const void *h1, const void *h2)
13032 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13033 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13035 if (((const struct toc_hash_struct *) h1)->key_mode
13036 != ((const struct toc_hash_struct *) h2)->key_mode)
13039 return rtx_equal_p (r1, r2);
13042 /* These are the names given by the C++ front-end to vtables, and
13043 vtable-like objects. Ideally, this logic should not be here;
13044 instead, there should be some programmatic way of inquiring as
13045 to whether or not an object is a vtable. */
13047 #define VTABLE_NAME_P(NAME) \
13048 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
13049 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
13050 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
13051 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
13054 rs6000_output_symbol_ref (FILE *file, rtx x)
13056 /* Currently C++ toc references to vtables can be emitted before it
13057 is decided whether the vtable is public or private. If this is
13058 the case, then the linker will eventually complain that there is
13059 a reference to an unknown section. Thus, for vtables only,
13060 we emit the TOC reference to reference the symbol and not the
13062 const char *name = XSTR (x, 0);
13064 if (VTABLE_NAME_P (name))
13066 RS6000_OUTPUT_BASENAME (file, name);
13069 assemble_name (file, name);
13072 /* Output a TOC entry. We derive the entry name from what is being
13076 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
13079 const char *name = buf;
13080 const char *real_name;
13087 /* When the linker won't eliminate them, don't output duplicate
13088 TOC entries (this happens on AIX if there is any kind of TOC,
13089 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
13091 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
13093 struct toc_hash_struct *h;
13096 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
13097 time because GGC is not initialized at that point. */
13098 if (toc_hash_table == NULL)
13099 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
13100 toc_hash_eq, NULL);
13102 h = ggc_alloc (sizeof (*h));
13104 h->key_mode = mode;
13105 h->labelno = labelno;
13107 found = htab_find_slot (toc_hash_table, h, 1);
13108 if (*found == NULL)
13110 else /* This is indeed a duplicate.
13111 Set this label equal to that label. */
13113 fputs ("\t.set ", file);
13114 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13115 fprintf (file, "%d,", labelno);
13116 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13117 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
13123 /* If we're going to put a double constant in the TOC, make sure it's
13124 aligned properly when strict alignment is on. */
13125 if (GET_CODE (x) == CONST_DOUBLE
13126 && STRICT_ALIGNMENT
13127 && GET_MODE_BITSIZE (mode) >= 64
13128 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
13129 ASM_OUTPUT_ALIGN (file, 3);
13132 (*targetm.asm_out.internal_label) (file, "LC", labelno);
13134 /* Handle FP constants specially. Note that if we have a minimal
13135 TOC, things we put here aren't actually in the TOC, so we can allow
13137 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13139 REAL_VALUE_TYPE rv;
13142 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13143 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13147 if (TARGET_MINIMAL_TOC)
13148 fputs (DOUBLE_INT_ASM_OP, file);
13150 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13151 k[0] & 0xffffffff, k[1] & 0xffffffff,
13152 k[2] & 0xffffffff, k[3] & 0xffffffff);
13153 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13154 k[0] & 0xffffffff, k[1] & 0xffffffff,
13155 k[2] & 0xffffffff, k[3] & 0xffffffff);
13160 if (TARGET_MINIMAL_TOC)
13161 fputs ("\t.long ", file);
13163 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13164 k[0] & 0xffffffff, k[1] & 0xffffffff,
13165 k[2] & 0xffffffff, k[3] & 0xffffffff);
13166 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13167 k[0] & 0xffffffff, k[1] & 0xffffffff,
13168 k[2] & 0xffffffff, k[3] & 0xffffffff);
13172 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13174 REAL_VALUE_TYPE rv;
13177 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13178 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13182 if (TARGET_MINIMAL_TOC)
13183 fputs (DOUBLE_INT_ASM_OP, file);
13185 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13186 k[0] & 0xffffffff, k[1] & 0xffffffff);
13187 fprintf (file, "0x%lx%08lx\n",
13188 k[0] & 0xffffffff, k[1] & 0xffffffff);
13193 if (TARGET_MINIMAL_TOC)
13194 fputs ("\t.long ", file);
13196 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13197 k[0] & 0xffffffff, k[1] & 0xffffffff);
13198 fprintf (file, "0x%lx,0x%lx\n",
13199 k[0] & 0xffffffff, k[1] & 0xffffffff);
13203 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13205 REAL_VALUE_TYPE rv;
13208 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13209 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13213 if (TARGET_MINIMAL_TOC)
13214 fputs (DOUBLE_INT_ASM_OP, file);
13216 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13217 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13222 if (TARGET_MINIMAL_TOC)
13223 fputs ("\t.long ", file);
13225 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13226 fprintf (file, "0x%lx\n", l & 0xffffffff);
13230 else if (GET_MODE (x) == VOIDmode
13231 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13233 unsigned HOST_WIDE_INT low;
13234 HOST_WIDE_INT high;
13236 if (GET_CODE (x) == CONST_DOUBLE)
13238 low = CONST_DOUBLE_LOW (x);
13239 high = CONST_DOUBLE_HIGH (x);
13242 #if HOST_BITS_PER_WIDE_INT == 32
13245 high = (low & 0x80000000) ? ~0 : 0;
13249 low = INTVAL (x) & 0xffffffff;
13250 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13254 /* TOC entries are always Pmode-sized, but since this
13255 is a bigendian machine then if we're putting smaller
13256 integer constants in the TOC we have to pad them.
13257 (This is still a win over putting the constants in
13258 a separate constant pool, because then we'd have
13259 to have both a TOC entry _and_ the actual constant.)
13261 For a 32-bit target, CONST_INT values are loaded and shifted
13262 entirely within `low' and can be stored in one TOC entry. */
13264 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13265 abort ();/* It would be easy to make this work, but it doesn't now. */
13267 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13269 #if HOST_BITS_PER_WIDE_INT == 32
13270 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13271 POINTER_SIZE, &low, &high, 0);
13274 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13275 high = (HOST_WIDE_INT) low >> 32;
13282 if (TARGET_MINIMAL_TOC)
13283 fputs (DOUBLE_INT_ASM_OP, file);
13285 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13286 (long) high & 0xffffffff, (long) low & 0xffffffff);
13287 fprintf (file, "0x%lx%08lx\n",
13288 (long) high & 0xffffffff, (long) low & 0xffffffff);
13293 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13295 if (TARGET_MINIMAL_TOC)
13296 fputs ("\t.long ", file);
13298 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13299 (long) high & 0xffffffff, (long) low & 0xffffffff);
13300 fprintf (file, "0x%lx,0x%lx\n",
13301 (long) high & 0xffffffff, (long) low & 0xffffffff);
13305 if (TARGET_MINIMAL_TOC)
13306 fputs ("\t.long ", file);
13308 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13309 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13315 if (GET_CODE (x) == CONST)
13317 if (GET_CODE (XEXP (x, 0)) != PLUS)
13320 base = XEXP (XEXP (x, 0), 0);
13321 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13324 if (GET_CODE (base) == SYMBOL_REF)
13325 name = XSTR (base, 0);
13326 else if (GET_CODE (base) == LABEL_REF)
13327 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13328 else if (GET_CODE (base) == CODE_LABEL)
13329 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13333 real_name = (*targetm.strip_name_encoding) (name);
13334 if (TARGET_MINIMAL_TOC)
13335 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13338 fprintf (file, "\t.tc %s", real_name);
13341 fprintf (file, ".N%d", - offset);
13343 fprintf (file, ".P%d", offset);
13345 fputs ("[TC],", file);
13348 /* Currently C++ toc references to vtables can be emitted before it
13349 is decided whether the vtable is public or private. If this is
13350 the case, then the linker will eventually complain that there is
13351 a TOC reference to an unknown section. Thus, for vtables only,
13352 we emit the TOC reference to reference the symbol and not the
13354 if (VTABLE_NAME_P (name))
13356 RS6000_OUTPUT_BASENAME (file, name);
13358 fprintf (file, "%d", offset);
13359 else if (offset > 0)
13360 fprintf (file, "+%d", offset);
13363 output_addr_const (file, x);
13367 /* Output an assembler pseudo-op to write an ASCII string of N characters
13368 starting at P to FILE.
13370 On the RS/6000, we have to do this using the .byte operation and
13371 write out special characters outside the quoted string.
13372 Also, the assembler is broken; very long strings are truncated,
13373 so we must artificially break them up early. */
13376 output_ascii (FILE *file, const char *p, int n)
13379 int i, count_string;
13380 const char *for_string = "\t.byte \"";
13381 const char *for_decimal = "\t.byte ";
13382 const char *to_close = NULL;
13385 for (i = 0; i < n; i++)
13388 if (c >= ' ' && c < 0177)
13391 fputs (for_string, file);
13394 /* Write two quotes to get one. */
13402 for_decimal = "\"\n\t.byte ";
13406 if (count_string >= 512)
13408 fputs (to_close, file);
13410 for_string = "\t.byte \"";
13411 for_decimal = "\t.byte ";
13419 fputs (for_decimal, file);
13420 fprintf (file, "%d", c);
13422 for_string = "\n\t.byte \"";
13423 for_decimal = ", ";
13429 /* Now close the string if we have written one. Then end the line. */
13431 fputs (to_close, file);
13434 /* Generate a unique section name for FILENAME for a section type
13435 represented by SECTION_DESC. Output goes into BUF.
13437 SECTION_DESC can be any string, as long as it is different for each
13438 possible section type.
13440 We name the section in the same manner as xlc. The name begins with an
13441 underscore followed by the filename (after stripping any leading directory
13442 names) with the last period replaced by the string SECTION_DESC. If
13443 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13447 rs6000_gen_section_name (char **buf, const char *filename,
13448 const char *section_desc)
13450 const char *q, *after_last_slash, *last_period = 0;
13454 after_last_slash = filename;
13455 for (q = filename; *q; q++)
13458 after_last_slash = q + 1;
13459 else if (*q == '.')
13463 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13464 *buf = (char *) xmalloc (len);
13469 for (q = after_last_slash; *q; q++)
13471 if (q == last_period)
13473 strcpy (p, section_desc);
13474 p += strlen (section_desc);
13478 else if (ISALNUM (*q))
13482 if (last_period == 0)
13483 strcpy (p, section_desc);
13488 /* Emit profile function. */
13491 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13493 if (TARGET_PROFILE_KERNEL)
13496 if (DEFAULT_ABI == ABI_AIX)
13498 #ifndef NO_PROFILE_COUNTERS
13499 # define NO_PROFILE_COUNTERS 0
13501 if (NO_PROFILE_COUNTERS)
13502 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13506 const char *label_name;
13509 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13510 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13511 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13513 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13517 else if (DEFAULT_ABI == ABI_DARWIN)
13519 const char *mcount_name = RS6000_MCOUNT;
13520 int caller_addr_regno = LINK_REGISTER_REGNUM;
13522 /* Be conservative and always set this, at least for now. */
13523 current_function_uses_pic_offset_table = 1;
13526 /* For PIC code, set up a stub and collect the caller's address
13527 from r0, which is where the prologue puts it. */
13528 if (MACHOPIC_INDIRECT)
13530 mcount_name = machopic_stub_name (mcount_name);
13531 if (current_function_uses_pic_offset_table)
13532 caller_addr_regno = 0;
13535 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13537 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13541 /* Write function profiler code. */
13544 output_function_profiler (FILE *file, int labelno)
13549 switch (DEFAULT_ABI)
13558 warning ("no profiling of 64-bit code for this ABI");
13561 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13562 fprintf (file, "\tmflr %s\n", reg_names[0]);
13565 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13566 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13567 reg_names[0], save_lr, reg_names[1]);
13568 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13569 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13570 assemble_name (file, buf);
13571 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13573 else if (flag_pic > 1)
13575 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13576 reg_names[0], save_lr, reg_names[1]);
13577 /* Now, we need to get the address of the label. */
13578 fputs ("\tbl 1f\n\t.long ", file);
13579 assemble_name (file, buf);
13580 fputs ("-.\n1:", file);
13581 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13582 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13583 reg_names[0], reg_names[11]);
13584 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13585 reg_names[0], reg_names[0], reg_names[11]);
13589 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13590 assemble_name (file, buf);
13591 fputs ("@ha\n", file);
13592 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13593 reg_names[0], save_lr, reg_names[1]);
13594 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13595 assemble_name (file, buf);
13596 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13599 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13600 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13605 if (!TARGET_PROFILE_KERNEL)
13607 /* Don't do anything, done in output_profile_hook (). */
13614 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13615 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13617 if (current_function_needs_context)
13619 asm_fprintf (file, "\tstd %s,24(%s)\n",
13620 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13621 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13622 asm_fprintf (file, "\tld %s,24(%s)\n",
13623 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13626 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13634 rs6000_use_dfa_pipeline_interface (void)
13639 /* Power4 load update and store update instructions are cracked into a
13640 load or store and an integer insn which are executed in the same cycle.
13641 Branches have their own dispatch slot which does not count against the
13642 GCC issue rate, but it changes the program flow so there are no other
13643 instructions to issue in this cycle. */
13646 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13647 int verbose ATTRIBUTE_UNUSED,
13648 rtx insn, int more)
13650 if (GET_CODE (PATTERN (insn)) == USE
13651 || GET_CODE (PATTERN (insn)) == CLOBBER)
13654 if (rs6000_cpu == PROCESSOR_POWER4)
13656 if (is_microcoded_insn (insn))
13658 else if (is_cracked_insn (insn))
13659 return more > 2 ? more - 2 : 0;
13665 /* Adjust the cost of a scheduling dependency. Return the new cost of
13666 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13669 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13672 if (! recog_memoized (insn))
13675 if (REG_NOTE_KIND (link) != 0)
13678 if (REG_NOTE_KIND (link) == 0)
13680 /* Data dependency; DEP_INSN writes a register that INSN reads
13681 some cycles later. */
13682 switch (get_attr_type (insn))
13685 /* Tell the first scheduling pass about the latency between
13686 a mtctr and bctr (and mtlr and br/blr). The first
13687 scheduling pass will not know about this latency since
13688 the mtctr instruction, which has the latency associated
13689 to it, will be generated by reload. */
13690 return TARGET_POWER ? 5 : 4;
13692 /* Leave some extra cycles between a compare and its
13693 dependent branch, to inhibit expensive mispredicts. */
13694 if ((rs6000_cpu_attr == CPU_PPC603
13695 || rs6000_cpu_attr == CPU_PPC604
13696 || rs6000_cpu_attr == CPU_PPC604E
13697 || rs6000_cpu_attr == CPU_PPC620
13698 || rs6000_cpu_attr == CPU_PPC630
13699 || rs6000_cpu_attr == CPU_PPC750
13700 || rs6000_cpu_attr == CPU_PPC7400
13701 || rs6000_cpu_attr == CPU_PPC7450
13702 || rs6000_cpu_attr == CPU_POWER4)
13703 && recog_memoized (dep_insn)
13704 && (INSN_CODE (dep_insn) >= 0)
13705 && (get_attr_type (dep_insn) == TYPE_CMP
13706 || get_attr_type (dep_insn) == TYPE_COMPARE
13707 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13708 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13709 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13710 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13711 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13712 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13717 /* Fall out to return default cost. */
13723 /* The function returns a true if INSN is microcoded.
13724 Return false otherwise. */
13727 is_microcoded_insn (rtx insn)
13729 if (!insn || !INSN_P (insn)
13730 || GET_CODE (PATTERN (insn)) == USE
13731 || GET_CODE (PATTERN (insn)) == CLOBBER)
13734 if (rs6000_cpu == PROCESSOR_POWER4)
13736 enum attr_type type = get_attr_type (insn);
13737 if (type == TYPE_LOAD_EXT_U
13738 || type == TYPE_LOAD_EXT_UX
13739 || type == TYPE_LOAD_UX
13740 || type == TYPE_STORE_UX
13741 || type == TYPE_MFCR)
13748 /* The function returns a nonzero value if INSN can be scheduled only
13749 as the first insn in a dispatch group ("dispatch-slot restricted").
13750 In this case, the returned value indicates how many dispatch slots
13751 the insn occupies (at the beginning of the group).
13752 Return 0 otherwise. */
13755 is_dispatch_slot_restricted (rtx insn)
13757 enum attr_type type;
13759 if (rs6000_cpu != PROCESSOR_POWER4)
13763 || insn == NULL_RTX
13764 || GET_CODE (insn) == NOTE
13765 || GET_CODE (PATTERN (insn)) == USE
13766 || GET_CODE (PATTERN (insn)) == CLOBBER)
13769 type = get_attr_type (insn);
13775 case TYPE_DELAYED_CR:
13776 case TYPE_CR_LOGICAL:
13788 /* The function returns true if INSN is cracked into 2 instructions
13789 by the processor (and therefore occupies 2 issue slots). */
13792 is_cracked_insn (rtx insn)
13794 if (!insn || !INSN_P (insn)
13795 || GET_CODE (PATTERN (insn)) == USE
13796 || GET_CODE (PATTERN (insn)) == CLOBBER)
13799 if (rs6000_cpu == PROCESSOR_POWER4)
13801 enum attr_type type = get_attr_type (insn);
13802 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13803 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13804 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13805 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13806 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13807 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13808 || type == TYPE_IDIV || type == TYPE_LDIV
13809 || type == TYPE_INSERT_WORD)
13816 /* The function returns true if INSN can be issued only from
13817 the branch slot. */
13820 is_branch_slot_insn (rtx insn)
13822 if (!insn || !INSN_P (insn)
13823 || GET_CODE (PATTERN (insn)) == USE
13824 || GET_CODE (PATTERN (insn)) == CLOBBER)
13827 if (rs6000_cpu == PROCESSOR_POWER4)
13829 enum attr_type type = get_attr_type (insn);
13830 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
13838 /* A C statement (sans semicolon) to update the integer scheduling
13839 priority INSN_PRIORITY (INSN). Increase the priority to execute the
13840 INSN earlier, reduce the priority to execute INSN later. Do not
13841 define this macro if you do not need to adjust the scheduling
13842 priorities of insns. */
13845 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
13847 /* On machines (like the 750) which have asymmetric integer units,
13848 where one integer unit can do multiply and divides and the other
13849 can't, reduce the priority of multiply/divide so it is scheduled
13850 before other integer operations. */
13853 if (! INSN_P (insn))
13856 if (GET_CODE (PATTERN (insn)) == USE)
13859 switch (rs6000_cpu_attr) {
13861 switch (get_attr_type (insn))
13868 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13869 priority, priority);
13870 if (priority >= 0 && priority < 0x01000000)
13877 if (is_dispatch_slot_restricted (insn)
13878 && reload_completed
13879 && current_sched_info->sched_max_insns_priority
13880 && rs6000_sched_restricted_insns_priority)
13883 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
13884 if (rs6000_sched_restricted_insns_priority == 1)
13885 /* Attach highest priority to insn. This means that in
13886 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
13887 precede 'priority' (critical path) considerations. */
13888 return current_sched_info->sched_max_insns_priority;
13889 else if (rs6000_sched_restricted_insns_priority == 2)
13890 /* Increase priority of insn by a minimal amount. This means that in
13891 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
13892 precede dispatch-slot restriction considerations. */
13893 return (priority + 1);
13899 /* Return how many instructions the machine can issue per cycle. */
13902 rs6000_issue_rate (void)
13904 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13905 if (!reload_completed)
13908 switch (rs6000_cpu_attr) {
13909 case CPU_RIOS1: /* ? */
13911 case CPU_PPC601: /* ? */
13933 /* Return how many instructions to look ahead for better insn
13937 rs6000_use_sched_lookahead (void)
13939 if (rs6000_cpu_attr == CPU_PPC8540)
13944 /* Determine is PAT refers to memory. */
13947 is_mem_ref (rtx pat)
13953 if (GET_CODE (pat) == MEM)
13956 /* Recursively process the pattern. */
13957 fmt = GET_RTX_FORMAT (GET_CODE (pat));
13959 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
13962 ret |= is_mem_ref (XEXP (pat, i));
13963 else if (fmt[i] == 'E')
13964 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
13965 ret |= is_mem_ref (XVECEXP (pat, i, j));
13971 /* Determine if PAT is a PATTERN of a load insn. */
13974 is_load_insn1 (rtx pat)
13976 if (!pat || pat == NULL_RTX)
13979 if (GET_CODE (pat) == SET)
13980 return is_mem_ref (SET_SRC (pat));
13982 if (GET_CODE (pat) == PARALLEL)
13986 for (i = 0; i < XVECLEN (pat, 0); i++)
13987 if (is_load_insn1 (XVECEXP (pat, 0, i)))
13994 /* Determine if INSN loads from memory. */
13997 is_load_insn (rtx insn)
13999 if (!insn || !INSN_P (insn))
14002 if (GET_CODE (insn) == CALL_INSN)
14005 return is_load_insn1 (PATTERN (insn));
14008 /* Determine if PAT is a PATTERN of a store insn. */
14011 is_store_insn1 (rtx pat)
14013 if (!pat || pat == NULL_RTX)
14016 if (GET_CODE (pat) == SET)
14017 return is_mem_ref (SET_DEST (pat));
14019 if (GET_CODE (pat) == PARALLEL)
14023 for (i = 0; i < XVECLEN (pat, 0); i++)
14024 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14031 /* Determine if INSN stores to memory. */
14034 is_store_insn (rtx insn)
14036 if (!insn || !INSN_P (insn))
14039 return is_store_insn1 (PATTERN (insn));
14042 /* Returns whether the dependence between INSN and NEXT is considered
14043 costly by the given target. */
14046 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14048 /* If the flag is not enbled - no dependence is considered costly;
14049 allow all dependent insns in the same group.
14050 This is the most aggressive option. */
14051 if (rs6000_sched_costly_dep == no_dep_costly)
14054 /* If the flag is set to 1 - a dependence is always considered costly;
14055 do not allow dependent instructions in the same group.
14056 This is the most conservative option. */
14057 if (rs6000_sched_costly_dep == all_deps_costly)
14060 if (rs6000_sched_costly_dep == store_to_load_dep_costly
14061 && is_load_insn (next)
14062 && is_store_insn (insn))
14063 /* Prevent load after store in the same group. */
14066 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
14067 && is_load_insn (next)
14068 && is_store_insn (insn)
14069 && (!link || (int) REG_NOTE_KIND (link) == 0))
14070 /* Prevent load after store in the same group if it is a true dependence. */
14073 /* The flag is set to X; dependences with latency >= X are considered costly,
14074 and will not be scheduled in the same group. */
14075 if (rs6000_sched_costly_dep <= max_dep_latency
14076 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
14082 /* Return the next insn after INSN that is found before TAIL is reached,
14083 skipping any "non-active" insns - insns that will not actually occupy
14084 an issue slot. Return NULL_RTX if such an insn is not found. */
14087 get_next_active_insn (rtx insn, rtx tail)
14091 if (!insn || insn == tail)
14094 next_insn = NEXT_INSN (insn);
14097 && next_insn != tail
14098 && (GET_CODE(next_insn) == NOTE
14099 || GET_CODE (PATTERN (next_insn)) == USE
14100 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14102 next_insn = NEXT_INSN (next_insn);
14105 if (!next_insn || next_insn == tail)
14111 /* Return whether the presence of INSN causes a dispatch group termination
14112 of group WHICH_GROUP.
14114 If WHICH_GROUP == current_group, this function will return true if INSN
14115 causes the termination of the current group (i.e, the dispatch group to
14116 which INSN belongs). This means that INSN will be the last insn in the
14117 group it belongs to.
14119 If WHICH_GROUP == previous_group, this function will return true if INSN
14120 causes the termination of the previous group (i.e, the dispatch group that
14121 precedes the group to which INSN belongs). This means that INSN will be
14122 the first insn in the group it belongs to). */
14125 insn_terminates_group_p (rtx insn, enum group_termination which_group)
14127 enum attr_type type;
14132 type = get_attr_type (insn);
14134 if (is_microcoded_insn (insn))
14137 if (which_group == current_group)
14139 if (is_branch_slot_insn (insn))
14143 else if (which_group == previous_group)
14145 if (is_dispatch_slot_restricted (insn))
14153 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14154 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14157 is_costly_group (rtx *group_insns, rtx next_insn)
14162 int issue_rate = rs6000_issue_rate ();
14164 for (i = 0; i < issue_rate; i++)
14166 rtx insn = group_insns[i];
14169 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14171 rtx next = XEXP (link, 0);
14172 if (next == next_insn)
14174 cost = insn_cost (insn, link, next_insn);
14175 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14184 /* Utility of the function redefine_groups.
14185 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14186 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14187 to keep it "far" (in a separate group) from GROUP_INSNS, following
14188 one of the following schemes, depending on the value of the flag
14189 -minsert_sched_nops = X:
14190 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14191 in order to force NEXT_INSN into a separate group.
14192 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14193 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14194 insertion (has a group just ended, how many vacant issue slots remain in the
14195 last group, and how many dispatch groups were encountered so far). */
14198 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
14199 bool *group_end, int can_issue_more, int *group_count)
14203 int issue_rate = rs6000_issue_rate ();
14204 bool end = *group_end;
14207 if (next_insn == NULL_RTX)
14208 return can_issue_more;
14210 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
14211 return can_issue_more;
14213 force = is_costly_group (group_insns, next_insn);
14215 return can_issue_more;
14217 if (sched_verbose > 6)
14218 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
14219 *group_count ,can_issue_more);
14221 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14224 can_issue_more = 0;
14226 /* Since only a branch can be issued in the last issue_slot, it is
14227 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14228 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14229 in this case the last nop will start a new group and the branch will be
14230 forced to the new group. */
14231 if (can_issue_more && !is_branch_slot_insn (next_insn))
14234 while (can_issue_more > 0)
14237 emit_insn_before (nop, next_insn);
14245 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14247 int n_nops = rs6000_sched_insert_nops;
14249 /* Nops can't be issued from the branch slot, so the effective
14250 issue_rate for nops is 'issue_rate - 1'. */
14251 if (can_issue_more == 0)
14252 can_issue_more = issue_rate;
14254 if (can_issue_more == 0)
14256 can_issue_more = issue_rate - 1;
14259 for (i = 0; i < issue_rate; i++)
14261 group_insns[i] = 0;
14268 emit_insn_before (nop, next_insn);
14269 if (can_issue_more == issue_rate - 1) /* new group begins */
14272 if (can_issue_more == 0)
14274 can_issue_more = issue_rate - 1;
14277 for (i = 0; i < issue_rate; i++)
14279 group_insns[i] = 0;
14285 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14288 *group_end = /* Is next_insn going to start a new group? */
14290 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14291 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14292 || (can_issue_more < issue_rate &&
14293 insn_terminates_group_p (next_insn, previous_group)));
14294 if (*group_end && end)
14297 if (sched_verbose > 6)
14298 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14299 *group_count, can_issue_more);
14300 return can_issue_more;
14303 return can_issue_more;
14306 /* This function tries to synch the dispatch groups that the compiler "sees"
14307 with the dispatch groups that the processor dispatcher is expected to
14308 form in practice. It tries to achieve this synchronization by forcing the
14309 estimated processor grouping on the compiler (as opposed to the function
14310 'pad_goups' which tries to force the scheduler's grouping on the processor).
14312 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14313 examines the (estimated) dispatch groups that will be formed by the processor
14314 dispatcher. It marks these group boundaries to reflect the estimated
14315 processor grouping, overriding the grouping that the scheduler had marked.
14316 Depending on the value of the flag '-minsert-sched-nops' this function can
14317 force certain insns into separate groups or force a certain distance between
14318 them by inserting nops, for example, if there exists a "costly dependence"
14321 The function estimates the group boundaries that the processor will form as
14322 folllows: It keeps track of how many vacant issue slots are available after
14323 each insn. A subsequent insn will start a new group if one of the following
14325 - no more vacant issue slots remain in the current dispatch group.
14326 - only the last issue slot, which is the branch slot, is vacant, but the next
14327 insn is not a branch.
14328 - only the last 2 or less issue slots, including the branch slot, are vacant,
14329 which means that a cracked insn (which occupies two issue slots) can't be
14330 issued in this group.
14331 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14332 start a new group. */
14335 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14337 rtx insn, next_insn;
14339 int can_issue_more;
14342 int group_count = 0;
14346 issue_rate = rs6000_issue_rate ();
14347 group_insns = alloca (issue_rate * sizeof (rtx));
14348 for (i = 0; i < issue_rate; i++)
14350 group_insns[i] = 0;
14352 can_issue_more = issue_rate;
14354 insn = get_next_active_insn (prev_head_insn, tail);
14357 while (insn != NULL_RTX)
14359 slot = (issue_rate - can_issue_more);
14360 group_insns[slot] = insn;
14362 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14363 if (insn_terminates_group_p (insn, current_group))
14364 can_issue_more = 0;
14366 next_insn = get_next_active_insn (insn, tail);
14367 if (next_insn == NULL_RTX)
14368 return group_count + 1;
14370 group_end = /* Is next_insn going to start a new group? */
14371 (can_issue_more == 0
14372 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14373 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14374 || (can_issue_more < issue_rate &&
14375 insn_terminates_group_p (next_insn, previous_group)));
14377 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14378 next_insn, &group_end, can_issue_more, &group_count);
14383 can_issue_more = 0;
14384 for (i = 0; i < issue_rate; i++)
14386 group_insns[i] = 0;
14390 if (GET_MODE (next_insn) == TImode && can_issue_more)
14391 PUT_MODE(next_insn, VOIDmode);
14392 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14393 PUT_MODE (next_insn, TImode);
14396 if (can_issue_more == 0)
14397 can_issue_more = issue_rate;
14400 return group_count;
14403 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14404 dispatch group boundaries that the scheduler had marked. Pad with nops
14405 any dispatch groups which have vacant issue slots, in order to force the
14406 scheduler's grouping on the processor dispatcher. The function
14407 returns the number of dispatch groups found. */
14410 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14412 rtx insn, next_insn;
14415 int can_issue_more;
14417 int group_count = 0;
14419 /* Initialize issue_rate. */
14420 issue_rate = rs6000_issue_rate ();
14421 can_issue_more = issue_rate;
14423 insn = get_next_active_insn (prev_head_insn, tail);
14424 next_insn = get_next_active_insn (insn, tail);
14426 while (insn != NULL_RTX)
14429 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14431 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14433 if (next_insn == NULL_RTX)
14438 /* If the scheduler had marked group termination at this location
14439 (between insn and next_indn), and neither insn nor next_insn will
14440 force group termination, pad the group with nops to force group
14443 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14444 && !insn_terminates_group_p (insn, current_group)
14445 && !insn_terminates_group_p (next_insn, previous_group))
14447 if (!is_branch_slot_insn(next_insn))
14450 while (can_issue_more)
14453 emit_insn_before (nop, next_insn);
14458 can_issue_more = issue_rate;
14463 next_insn = get_next_active_insn (insn, tail);
14466 return group_count;
14469 /* The following function is called at the end of scheduling BB.
14470 After reload, it inserts nops at insn group bundling. */
14473 rs6000_sched_finish (FILE *dump, int sched_verbose)
14478 fprintf (dump, "=== Finishing schedule.\n");
14480 if (reload_completed && rs6000_cpu == PROCESSOR_POWER4)
14482 if (rs6000_sched_insert_nops == sched_finish_none)
14485 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14486 n_groups = pad_groups (dump, sched_verbose,
14487 current_sched_info->prev_head,
14488 current_sched_info->next_tail);
14490 n_groups = redefine_groups (dump, sched_verbose,
14491 current_sched_info->prev_head,
14492 current_sched_info->next_tail);
14494 if (sched_verbose >= 6)
14496 fprintf (dump, "ngroups = %d\n", n_groups);
14497 print_rtl (dump, current_sched_info->prev_head);
14498 fprintf (dump, "Done finish_sched\n");
14503 /* Length in units of the trampoline for entering a nested function. */
14506 rs6000_trampoline_size (void)
14510 switch (DEFAULT_ABI)
14516 ret = (TARGET_32BIT) ? 12 : 24;
14521 ret = (TARGET_32BIT) ? 40 : 48;
14528 /* Emit RTL insns to initialize the variable parts of a trampoline.
14529 FNADDR is an RTX for the address of the function's pure code.
14530 CXT is an RTX for the static chain value for the function. */
14533 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
14535 enum machine_mode pmode = Pmode;
14536 int regsize = (TARGET_32BIT) ? 4 : 8;
14537 rtx ctx_reg = force_reg (pmode, cxt);
14539 switch (DEFAULT_ABI)
14544 /* Macros to shorten the code expansions below. */
14545 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
14546 #define MEM_PLUS(addr,offset) \
14547 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
14549 /* Under AIX, just build the 3 word function descriptor */
14552 rtx fn_reg = gen_reg_rtx (pmode);
14553 rtx toc_reg = gen_reg_rtx (pmode);
14554 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
14555 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
14556 emit_move_insn (MEM_DEREF (addr), fn_reg);
14557 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
14558 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
14562 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
14565 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
14566 FALSE, VOIDmode, 4,
14568 GEN_INT (rs6000_trampoline_size ()), SImode,
14578 /* Table of valid machine attributes. */
14580 const struct attribute_spec rs6000_attribute_table[] =
14582 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
14583 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14584 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14585 { NULL, 0, 0, false, false, false, NULL }
14588 /* Handle a "longcall" or "shortcall" attribute; arguments as in
14589 struct attribute_spec.handler. */
14592 rs6000_handle_longcall_attribute (tree *node, tree name,
14593 tree args ATTRIBUTE_UNUSED,
14594 int flags ATTRIBUTE_UNUSED,
14595 bool *no_add_attrs)
14597 if (TREE_CODE (*node) != FUNCTION_TYPE
14598 && TREE_CODE (*node) != FIELD_DECL
14599 && TREE_CODE (*node) != TYPE_DECL)
14601 warning ("`%s' attribute only applies to functions",
14602 IDENTIFIER_POINTER (name));
14603 *no_add_attrs = true;
14609 /* Set longcall attributes on all functions declared when
14610 rs6000_default_long_calls is true. */
14612 rs6000_set_default_type_attributes (tree type)
14614 if (rs6000_default_long_calls
14615 && (TREE_CODE (type) == FUNCTION_TYPE
14616 || TREE_CODE (type) == METHOD_TYPE))
14617 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
14619 TYPE_ATTRIBUTES (type));
14622 /* Return a reference suitable for calling a function with the
14623 longcall attribute. */
14626 rs6000_longcall_ref (rtx call_ref)
14628 const char *call_name;
14631 if (GET_CODE (call_ref) != SYMBOL_REF)
14634 /* System V adds '.' to the internal name, so skip them. */
14635 call_name = XSTR (call_ref, 0);
14636 if (*call_name == '.')
14638 while (*call_name == '.')
14641 node = get_identifier (call_name);
14642 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
14645 return force_reg (Pmode, call_ref);
14648 #ifdef USING_ELFOS_H
14650 /* A C statement or statements to switch to the appropriate section
14651 for output of RTX in mode MODE. You can assume that RTX is some
14652 kind of constant in RTL. The argument MODE is redundant except in
14653 the case of a `const_int' rtx. Select the section by calling
14654 `text_section' or one of the alternatives for other sections.
14656 Do not define this macro if you put all constants in the read-only
14660 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
14661 unsigned HOST_WIDE_INT align)
14663 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14666 default_elf_select_rtx_section (mode, x, align);
14669 /* A C statement or statements to switch to the appropriate
14670 section for output of DECL. DECL is either a `VAR_DECL' node
14671 or a constant of some sort. RELOC indicates whether forming
14672 the initial value of DECL requires link-time relocations. */
14675 rs6000_elf_select_section (tree decl, int reloc,
14676 unsigned HOST_WIDE_INT align)
14678 /* Pretend that we're always building for a shared library when
14679 ABI_AIX, because otherwise we end up with dynamic relocations
14680 in read-only sections. This happens for function pointers,
14681 references to vtables in typeinfo, and probably other cases. */
14682 default_elf_select_section_1 (decl, reloc, align,
14683 flag_pic || DEFAULT_ABI == ABI_AIX);
14686 /* A C statement to build up a unique section name, expressed as a
14687 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
14688 RELOC indicates whether the initial value of EXP requires
14689 link-time relocations. If you do not define this macro, GCC will use
14690 the symbol name prefixed by `.' as the section name. Note - this
14691 macro can now be called for uninitialized data items as well as
14692 initialized data and functions. */
14695 rs6000_elf_unique_section (tree decl, int reloc)
14697 /* As above, pretend that we're always building for a shared library
14698 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
14699 default_unique_section_1 (decl, reloc,
14700 flag_pic || DEFAULT_ABI == ABI_AIX);
14703 /* For a SYMBOL_REF, set generic flags and then perform some
14704 target-specific processing.
14706 When the AIX ABI is requested on a non-AIX system, replace the
14707 function name with the real name (with a leading .) rather than the
14708 function descriptor name. This saves a lot of overriding code to
14709 read the prefixes. */
14712 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
14714 default_encode_section_info (decl, rtl, first);
14717 && TREE_CODE (decl) == FUNCTION_DECL
14719 && DEFAULT_ABI == ABI_AIX)
14721 rtx sym_ref = XEXP (rtl, 0);
14722 size_t len = strlen (XSTR (sym_ref, 0));
14723 char *str = alloca (len + 2);
14725 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
14726 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
14731 rs6000_elf_in_small_data_p (tree decl)
14733 if (rs6000_sdata == SDATA_NONE)
14736 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
14738 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
14739 if (strcmp (section, ".sdata") == 0
14740 || strcmp (section, ".sdata2") == 0
14741 || strcmp (section, ".sbss") == 0
14742 || strcmp (section, ".sbss2") == 0
14743 || strcmp (section, ".PPC.EMB.sdata0") == 0
14744 || strcmp (section, ".PPC.EMB.sbss0") == 0)
14749 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
14752 && (unsigned HOST_WIDE_INT) size <= g_switch_value
14753 /* If it's not public, and we're not going to reference it there,
14754 there's no need to put it in the small data section. */
14755 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
14762 #endif /* USING_ELFOS_H */
14765 /* Return a REG that occurs in ADDR with coefficient 1.
14766 ADDR can be effectively incremented by incrementing REG.
14768 r0 is special and we must not select it as an address
14769 register by this routine since our caller will try to
14770 increment the returned register via an "la" instruction. */
14773 find_addr_reg (rtx addr)
14775 while (GET_CODE (addr) == PLUS)
14777 if (GET_CODE (XEXP (addr, 0)) == REG
14778 && REGNO (XEXP (addr, 0)) != 0)
14779 addr = XEXP (addr, 0);
14780 else if (GET_CODE (XEXP (addr, 1)) == REG
14781 && REGNO (XEXP (addr, 1)) != 0)
14782 addr = XEXP (addr, 1);
14783 else if (CONSTANT_P (XEXP (addr, 0)))
14784 addr = XEXP (addr, 1);
14785 else if (CONSTANT_P (XEXP (addr, 1)))
14786 addr = XEXP (addr, 0);
14790 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
14796 rs6000_fatal_bad_address (rtx op)
14798 fatal_insn ("bad address", op);
14804 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
14805 reference and a constant. */
14808 symbolic_operand (rtx op)
14810 switch (GET_CODE (op))
14817 return (GET_CODE (op) == SYMBOL_REF ||
14818 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
14819 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
14820 && GET_CODE (XEXP (op, 1)) == CONST_INT);
14829 static tree branch_island_list = 0;
14831 /* Remember to generate a branch island for far calls to the given
14835 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
14837 tree branch_island = build_tree_list (function_name, label_name);
14838 TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
14839 TREE_CHAIN (branch_island) = branch_island_list;
14840 branch_island_list = branch_island;
14843 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
14844 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
14845 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
14846 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
14848 /* Generate far-jump branch islands for everything on the
14849 branch_island_list. Invoked immediately after the last instruction
14850 of the epilogue has been emitted; the branch-islands must be
14851 appended to, and contiguous with, the function body. Mach-O stubs
14852 are generated in machopic_output_stub(). */
14855 macho_branch_islands (void)
14858 tree branch_island;
14860 for (branch_island = branch_island_list;
14862 branch_island = TREE_CHAIN (branch_island))
14864 const char *label =
14865 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
14867 darwin_strip_name_encoding (
14868 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
14869 char name_buf[512];
14870 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
14871 if (name[0] == '*' || name[0] == '&')
14872 strcpy (name_buf, name+1);
14876 strcpy (name_buf+1, name);
14878 strcpy (tmp_buf, "\n");
14879 strcat (tmp_buf, label);
14880 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14881 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14882 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
14883 BRANCH_ISLAND_LINE_NUMBER(branch_island));
14884 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14887 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
14888 strcat (tmp_buf, label);
14889 strcat (tmp_buf, "_pic\n");
14890 strcat (tmp_buf, label);
14891 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
14893 strcat (tmp_buf, "\taddis r11,r11,ha16(");
14894 strcat (tmp_buf, name_buf);
14895 strcat (tmp_buf, " - ");
14896 strcat (tmp_buf, label);
14897 strcat (tmp_buf, "_pic)\n");
14899 strcat (tmp_buf, "\tmtlr r0\n");
14901 strcat (tmp_buf, "\taddi r12,r11,lo16(");
14902 strcat (tmp_buf, name_buf);
14903 strcat (tmp_buf, " - ");
14904 strcat (tmp_buf, label);
14905 strcat (tmp_buf, "_pic)\n");
14907 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
14911 strcat (tmp_buf, ":\nlis r12,hi16(");
14912 strcat (tmp_buf, name_buf);
14913 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
14914 strcat (tmp_buf, name_buf);
14915 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
14917 output_asm_insn (tmp_buf, 0);
14918 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14919 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14920 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
14921 BRANCH_ISLAND_LINE_NUMBER (branch_island));
14922 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14925 branch_island_list = 0;
14928 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
14929 already there or not. */
14932 no_previous_def (tree function_name)
14934 tree branch_island;
14935 for (branch_island = branch_island_list;
14937 branch_island = TREE_CHAIN (branch_island))
14938 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
14943 /* GET_PREV_LABEL gets the label name from the previous definition of
14947 get_prev_label (tree function_name)
14949 tree branch_island;
14950 for (branch_island = branch_island_list;
14952 branch_island = TREE_CHAIN (branch_island))
14953 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
14954 return BRANCH_ISLAND_LABEL_NAME (branch_island);
14958 /* INSN is either a function call or a millicode call. It may have an
14959 unconditional jump in its delay slot.
14961 CALL_DEST is the routine we are calling. */
14964 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
14966 static char buf[256];
14967 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
14968 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
14971 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
14973 if (no_previous_def (funname))
14975 int line_number = 0;
14976 rtx label_rtx = gen_label_rtx ();
14977 char *label_buf, temp_buf[256];
14978 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
14979 CODE_LABEL_NUMBER (label_rtx));
14980 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
14981 labelname = get_identifier (label_buf);
14982 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
14984 line_number = NOTE_LINE_NUMBER (insn);
14985 add_compiler_branch_island (labelname, funname, line_number);
14988 labelname = get_prev_label (funname);
14990 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
14991 instruction will reach 'foo', otherwise link as 'bl L42'".
14992 "L42" should be a 'branch island', that will do a far jump to
14993 'foo'. Branch islands are generated in
14994 macho_branch_islands(). */
14995 sprintf (buf, "jbsr %%z%d,%.246s",
14996 dest_operand_number, IDENTIFIER_POINTER (labelname));
14999 sprintf (buf, "bl %%z%d", dest_operand_number);
15003 #endif /* TARGET_MACHO */
15005 /* Generate PIC and indirect symbol stubs. */
15008 machopic_output_stub (FILE *file, const char *symb, const char *stub)
15010 unsigned int length;
15011 char *symbol_name, *lazy_ptr_name;
15012 char *local_label_0;
15013 static int label = 0;
15015 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
15016 symb = (*targetm.strip_name_encoding) (symb);
15019 length = strlen (symb);
15020 symbol_name = alloca (length + 32);
15021 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
15023 lazy_ptr_name = alloca (length + 32);
15024 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
15027 machopic_picsymbol_stub1_section ();
15029 machopic_symbol_stub1_section ();
15030 fprintf (file, "\t.align 2\n");
15032 fprintf (file, "%s:\n", stub);
15033 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15038 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
15039 sprintf (local_label_0, "\"L%011d$spb\"", label);
15041 fprintf (file, "\tmflr r0\n");
15042 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
15043 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
15044 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
15045 lazy_ptr_name, local_label_0);
15046 fprintf (file, "\tmtlr r0\n");
15047 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
15048 lazy_ptr_name, local_label_0);
15049 fprintf (file, "\tmtctr r12\n");
15050 fprintf (file, "\tbctr\n");
15054 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
15055 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
15056 fprintf (file, "\tmtctr r12\n");
15057 fprintf (file, "\tbctr\n");
15060 machopic_lazy_symbol_ptr_section ();
15061 fprintf (file, "%s:\n", lazy_ptr_name);
15062 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15063 fprintf (file, "\t.long dyld_stub_binding_helper\n");
15066 /* Legitimize PIC addresses. If the address is already
15067 position-independent, we return ORIG. Newly generated
15068 position-independent addresses go into a reg. This is REG if non
15069 zero, otherwise we allocate register(s) as necessary. */
15071 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15074 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
15079 if (reg == NULL && ! reload_in_progress && ! reload_completed)
15080 reg = gen_reg_rtx (Pmode);
15082 if (GET_CODE (orig) == CONST)
15084 if (GET_CODE (XEXP (orig, 0)) == PLUS
15085 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
15088 if (GET_CODE (XEXP (orig, 0)) == PLUS)
15090 /* Use a different reg for the intermediate value, as
15091 it will be marked UNCHANGING. */
15092 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
15095 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
15098 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
15104 if (GET_CODE (offset) == CONST_INT)
15106 if (SMALL_INT (offset))
15107 return plus_constant (base, INTVAL (offset));
15108 else if (! reload_in_progress && ! reload_completed)
15109 offset = force_reg (Pmode, offset);
15112 rtx mem = force_const_mem (Pmode, orig);
15113 return machopic_legitimize_pic_address (mem, Pmode, reg);
15116 return gen_rtx (PLUS, Pmode, base, offset);
15119 /* Fall back on generic machopic code. */
15120 return machopic_legitimize_pic_address (orig, mode, reg);
15123 /* This is just a placeholder to make linking work without having to
15124 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
15125 ever needed for Darwin (not too likely!) this would have to get a
15126 real definition. */
15133 #endif /* TARGET_MACHO */
15136 static unsigned int
15137 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15139 return default_section_type_flags_1 (decl, name, reloc,
15140 flag_pic || DEFAULT_ABI == ABI_AIX);
15143 /* Record an element in the table of global constructors. SYMBOL is
15144 a SYMBOL_REF of the function to be called; PRIORITY is a number
15145 between 0 and MAX_INIT_PRIORITY.
15147 This differs from default_named_section_asm_out_constructor in
15148 that we have special handling for -mrelocatable. */
15151 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15153 const char *section = ".ctors";
15156 if (priority != DEFAULT_INIT_PRIORITY)
15158 sprintf (buf, ".ctors.%.5u",
15159 /* Invert the numbering so the linker puts us in the proper
15160 order; constructors are run from right to left, and the
15161 linker sorts in increasing order. */
15162 MAX_INIT_PRIORITY - priority);
15166 named_section_flags (section, SECTION_WRITE);
15167 assemble_align (POINTER_SIZE);
15169 if (TARGET_RELOCATABLE)
15171 fputs ("\t.long (", asm_out_file);
15172 output_addr_const (asm_out_file, symbol);
15173 fputs (")@fixup\n", asm_out_file);
15176 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15180 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15182 const char *section = ".dtors";
15185 if (priority != DEFAULT_INIT_PRIORITY)
15187 sprintf (buf, ".dtors.%.5u",
15188 /* Invert the numbering so the linker puts us in the proper
15189 order; constructors are run from right to left, and the
15190 linker sorts in increasing order. */
15191 MAX_INIT_PRIORITY - priority);
15195 named_section_flags (section, SECTION_WRITE);
15196 assemble_align (POINTER_SIZE);
15198 if (TARGET_RELOCATABLE)
15200 fputs ("\t.long (", asm_out_file);
15201 output_addr_const (asm_out_file, symbol);
15202 fputs (")@fixup\n", asm_out_file);
15205 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15209 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15213 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15214 ASM_OUTPUT_LABEL (file, name);
15215 fputs (DOUBLE_INT_ASM_OP, file);
15217 assemble_name (file, name);
15218 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15219 assemble_name (file, name);
15220 fputs (",24\n\t.type\t.", file);
15221 assemble_name (file, name);
15222 fputs (",@function\n", file);
15223 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15225 fputs ("\t.globl\t.", file);
15226 assemble_name (file, name);
15229 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15231 ASM_OUTPUT_LABEL (file, name);
15235 if (TARGET_RELOCATABLE
15236 && (get_pool_size () != 0 || current_function_profile)
15241 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15243 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15244 fprintf (file, "\t.long ");
15245 assemble_name (file, buf);
15247 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15248 assemble_name (file, buf);
15252 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15253 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15255 if (DEFAULT_ABI == ABI_AIX)
15257 const char *desc_name, *orig_name;
15259 orig_name = (*targetm.strip_name_encoding) (name);
15260 desc_name = orig_name;
15261 while (*desc_name == '.')
15264 if (TREE_PUBLIC (decl))
15265 fprintf (file, "\t.globl %s\n", desc_name);
15267 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15268 fprintf (file, "%s:\n", desc_name);
15269 fprintf (file, "\t.long %s\n", orig_name);
15270 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15271 if (DEFAULT_ABI == ABI_AIX)
15272 fputs ("\t.long 0\n", file);
15273 fprintf (file, "\t.previous\n");
15275 ASM_OUTPUT_LABEL (file, name);
15281 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15283 fputs (GLOBAL_ASM_OP, stream);
15284 RS6000_OUTPUT_BASENAME (stream, name);
15285 putc ('\n', stream);
15289 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15292 static const char * const suffix[3] = { "PR", "RO", "RW" };
15294 if (flags & SECTION_CODE)
15296 else if (flags & SECTION_WRITE)
15301 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15302 (flags & SECTION_CODE) ? "." : "",
15303 name, suffix[smclass], flags & SECTION_ENTSIZE);
15307 rs6000_xcoff_select_section (tree decl, int reloc,
15308 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15310 if (decl_readonly_section_1 (decl, reloc, 1))
15312 if (TREE_PUBLIC (decl))
15313 read_only_data_section ();
15315 read_only_private_data_section ();
15319 if (TREE_PUBLIC (decl))
15322 private_data_section ();
15327 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15331 /* Use select_section for private and uninitialized data. */
15332 if (!TREE_PUBLIC (decl)
15333 || DECL_COMMON (decl)
15334 || DECL_INITIAL (decl) == NULL_TREE
15335 || DECL_INITIAL (decl) == error_mark_node
15336 || (flag_zero_initialized_in_bss
15337 && initializer_zerop (DECL_INITIAL (decl))))
15340 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15341 name = (*targetm.strip_name_encoding) (name);
15342 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15345 /* Select section for constant in constant pool.
15347 On RS/6000, all constants are in the private read-only data area.
15348 However, if this is being placed in the TOC it must be output as a
15352 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15353 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15355 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15358 read_only_private_data_section ();
15361 /* Remove any trailing [DS] or the like from the symbol name. */
15363 static const char *
15364 rs6000_xcoff_strip_name_encoding (const char *name)
15369 len = strlen (name);
15370 if (name[len - 1] == ']')
15371 return ggc_alloc_string (name, len - 4);
15376 /* Section attributes. AIX is always PIC. */
15378 static unsigned int
15379 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15381 unsigned int align;
15382 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15384 /* Align to at least UNIT size. */
15385 if (flags & SECTION_CODE)
15386 align = MIN_UNITS_PER_WORD;
15388 /* Increase alignment of large objects if not already stricter. */
15389 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15390 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15391 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15393 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15396 /* Output at beginning of assembler file.
15398 Initialize the section names for the RS/6000 at this point.
15400 Specify filename, including full path, to assembler.
15402 We want to go into the TOC section so at least one .toc will be emitted.
15403 Also, in order to output proper .bs/.es pairs, we need at least one static
15404 [RW] section emitted.
15406 Finally, declare mcount when profiling to make the assembler happy. */
15409 rs6000_xcoff_file_start (void)
15411 rs6000_gen_section_name (&xcoff_bss_section_name,
15412 main_input_filename, ".bss_");
15413 rs6000_gen_section_name (&xcoff_private_data_section_name,
15414 main_input_filename, ".rw_");
15415 rs6000_gen_section_name (&xcoff_read_only_section_name,
15416 main_input_filename, ".ro_");
15418 fputs ("\t.file\t", asm_out_file);
15419 output_quoted_string (asm_out_file, main_input_filename);
15420 fputc ('\n', asm_out_file);
15422 if (write_symbols != NO_DEBUG)
15423 private_data_section ();
15426 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15427 rs6000_file_start ();
15430 /* Output at end of assembler file.
15431 On the RS/6000, referencing data should automatically pull in text. */
15434 rs6000_xcoff_file_end (void)
15437 fputs ("_section_.text:\n", asm_out_file);
15439 fputs (TARGET_32BIT
15440 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15443 #endif /* TARGET_XCOFF */
15446 /* Cross-module name binding. Darwin does not support overriding
15447 functions at dynamic-link time. */
15450 rs6000_binds_local_p (tree decl)
15452 return default_binds_local_p_1 (decl, 0);
15456 /* Compute a (partial) cost for rtx X. Return true if the complete
15457 cost has been computed, and false if subexpressions should be
15458 scanned. In either case, *TOTAL contains the cost result. */
15461 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15466 /* On the RS/6000, if it is valid in the insn, it is free.
15467 So this always returns 0. */
15478 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15479 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15480 + 0x8000) >= 0x10000)
15481 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15482 ? COSTS_N_INSNS (2)
15483 : COSTS_N_INSNS (1));
15489 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15490 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15491 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15492 ? COSTS_N_INSNS (2)
15493 : COSTS_N_INSNS (1));
15499 *total = COSTS_N_INSNS (2);
15502 switch (rs6000_cpu)
15504 case PROCESSOR_RIOS1:
15505 case PROCESSOR_PPC405:
15506 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15507 ? COSTS_N_INSNS (5)
15508 : (INTVAL (XEXP (x, 1)) >= -256
15509 && INTVAL (XEXP (x, 1)) <= 255)
15510 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15513 case PROCESSOR_PPC440:
15514 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15515 ? COSTS_N_INSNS (3)
15516 : COSTS_N_INSNS (2));
15519 case PROCESSOR_RS64A:
15520 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15521 ? GET_MODE (XEXP (x, 1)) != DImode
15522 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15523 : (INTVAL (XEXP (x, 1)) >= -256
15524 && INTVAL (XEXP (x, 1)) <= 255)
15525 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15528 case PROCESSOR_RIOS2:
15529 case PROCESSOR_MPCCORE:
15530 case PROCESSOR_PPC604e:
15531 *total = COSTS_N_INSNS (2);
15534 case PROCESSOR_PPC601:
15535 *total = COSTS_N_INSNS (5);
15538 case PROCESSOR_PPC603:
15539 case PROCESSOR_PPC7400:
15540 case PROCESSOR_PPC750:
15541 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15542 ? COSTS_N_INSNS (5)
15543 : (INTVAL (XEXP (x, 1)) >= -256
15544 && INTVAL (XEXP (x, 1)) <= 255)
15545 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
15548 case PROCESSOR_PPC7450:
15549 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15550 ? COSTS_N_INSNS (4)
15551 : COSTS_N_INSNS (3));
15554 case PROCESSOR_PPC403:
15555 case PROCESSOR_PPC604:
15556 case PROCESSOR_PPC8540:
15557 *total = COSTS_N_INSNS (4);
15560 case PROCESSOR_PPC620:
15561 case PROCESSOR_PPC630:
15562 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15563 ? GET_MODE (XEXP (x, 1)) != DImode
15564 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
15565 : (INTVAL (XEXP (x, 1)) >= -256
15566 && INTVAL (XEXP (x, 1)) <= 255)
15567 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15570 case PROCESSOR_POWER4:
15571 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15572 ? GET_MODE (XEXP (x, 1)) != DImode
15573 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
15574 : COSTS_N_INSNS (2));
15583 if (GET_CODE (XEXP (x, 1)) == CONST_INT
15584 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
15586 *total = COSTS_N_INSNS (2);
15593 switch (rs6000_cpu)
15595 case PROCESSOR_RIOS1:
15596 *total = COSTS_N_INSNS (19);
15599 case PROCESSOR_RIOS2:
15600 *total = COSTS_N_INSNS (13);
15603 case PROCESSOR_RS64A:
15604 *total = (GET_MODE (XEXP (x, 1)) != DImode
15605 ? COSTS_N_INSNS (65)
15606 : COSTS_N_INSNS (67));
15609 case PROCESSOR_MPCCORE:
15610 *total = COSTS_N_INSNS (6);
15613 case PROCESSOR_PPC403:
15614 *total = COSTS_N_INSNS (33);
15617 case PROCESSOR_PPC405:
15618 *total = COSTS_N_INSNS (35);
15621 case PROCESSOR_PPC440:
15622 *total = COSTS_N_INSNS (34);
15625 case PROCESSOR_PPC601:
15626 *total = COSTS_N_INSNS (36);
15629 case PROCESSOR_PPC603:
15630 *total = COSTS_N_INSNS (37);
15633 case PROCESSOR_PPC604:
15634 case PROCESSOR_PPC604e:
15635 *total = COSTS_N_INSNS (20);
15638 case PROCESSOR_PPC620:
15639 case PROCESSOR_PPC630:
15640 *total = (GET_MODE (XEXP (x, 1)) != DImode
15641 ? COSTS_N_INSNS (21)
15642 : COSTS_N_INSNS (37));
15645 case PROCESSOR_PPC750:
15646 case PROCESSOR_PPC8540:
15647 case PROCESSOR_PPC7400:
15648 *total = COSTS_N_INSNS (19);
15651 case PROCESSOR_PPC7450:
15652 *total = COSTS_N_INSNS (23);
15655 case PROCESSOR_POWER4:
15656 *total = (GET_MODE (XEXP (x, 1)) != DImode
15657 ? COSTS_N_INSNS (18)
15658 : COSTS_N_INSNS (34));
15666 *total = COSTS_N_INSNS (4);
15670 /* MEM should be slightly more expensive than (plus (reg) (const)). */
15679 /* A C expression returning the cost of moving data from a register of class
15680 CLASS1 to one of CLASS2. */
15683 rs6000_register_move_cost (enum machine_mode mode,
15684 enum reg_class from, enum reg_class to)
15686 /* Moves from/to GENERAL_REGS. */
15687 if (reg_classes_intersect_p (to, GENERAL_REGS)
15688 || reg_classes_intersect_p (from, GENERAL_REGS))
15690 if (! reg_classes_intersect_p (to, GENERAL_REGS))
15693 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
15694 return (rs6000_memory_move_cost (mode, from, 0)
15695 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
15697 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
15698 else if (from == CR_REGS)
15702 /* A move will cost one instruction per GPR moved. */
15703 return 2 * HARD_REGNO_NREGS (0, mode);
15706 /* Moving between two similar registers is just one instruction. */
15707 else if (reg_classes_intersect_p (to, from))
15708 return mode == TFmode ? 4 : 2;
15710 /* Everything else has to go through GENERAL_REGS. */
15712 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
15713 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
15716 /* A C expressions returning the cost of moving data of MODE from a register to
15720 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
15721 int in ATTRIBUTE_UNUSED)
15723 if (reg_classes_intersect_p (class, GENERAL_REGS))
15724 return 4 * HARD_REGNO_NREGS (0, mode);
15725 else if (reg_classes_intersect_p (class, FLOAT_REGS))
15726 return 4 * HARD_REGNO_NREGS (32, mode);
15727 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
15728 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
15730 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
15733 /* Return an RTX representing where to find the function value of a
15734 function returning MODE. */
15736 rs6000_complex_function_value (enum machine_mode mode)
15738 unsigned int regno;
15740 enum machine_mode inner = GET_MODE_INNER (mode);
15742 if (FLOAT_MODE_P (mode))
15743 regno = FP_ARG_RETURN;
15746 regno = GP_ARG_RETURN;
15748 /* 32-bit is OK since it'll go in r3/r4. */
15750 && GET_MODE_BITSIZE (inner) >= 32)
15751 return gen_rtx_REG (mode, regno);
15754 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
15756 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
15757 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
15758 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
15761 /* Define how to find the value returned by a function.
15762 VALTYPE is the data type of the value (as a tree).
15763 If the precise function being called is known, FUNC is its FUNCTION_DECL;
15764 otherwise, FUNC is 0.
15766 On the SPE, both FPs and vectors are returned in r3.
15768 On RS/6000 an integer value is in r3 and a floating-point value is in
15769 fp1, unless -msoft-float. */
15772 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
15774 enum machine_mode mode;
15775 unsigned int regno;
15777 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
15779 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
15780 return gen_rtx_PARALLEL (DImode,
15782 gen_rtx_EXPR_LIST (VOIDmode,
15783 gen_rtx_REG (SImode, GP_ARG_RETURN),
15785 gen_rtx_EXPR_LIST (VOIDmode,
15786 gen_rtx_REG (SImode,
15787 GP_ARG_RETURN + 1),
15791 if ((INTEGRAL_TYPE_P (valtype)
15792 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
15793 || POINTER_TYPE_P (valtype))
15794 mode = TARGET_32BIT ? SImode : DImode;
15796 mode = TYPE_MODE (valtype);
15798 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
15799 regno = FP_ARG_RETURN;
15800 else if (TREE_CODE (valtype) == COMPLEX_TYPE
15801 && TARGET_HARD_FLOAT
15802 && SPLIT_COMPLEX_ARGS)
15803 return rs6000_complex_function_value (mode);
15804 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
15805 regno = ALTIVEC_ARG_RETURN;
15807 regno = GP_ARG_RETURN;
15809 return gen_rtx_REG (mode, regno);
15812 /* Define how to find the value returned by a library function
15813 assuming the value has mode MODE. */
15815 rs6000_libcall_value (enum machine_mode mode)
15817 unsigned int regno;
15819 if (GET_MODE_CLASS (mode) == MODE_FLOAT
15820 && TARGET_HARD_FLOAT && TARGET_FPRS)
15821 regno = FP_ARG_RETURN;
15822 else if (ALTIVEC_VECTOR_MODE (mode))
15823 regno = ALTIVEC_ARG_RETURN;
15824 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
15825 return rs6000_complex_function_value (mode);
15827 regno = GP_ARG_RETURN;
15829 return gen_rtx_REG (mode, regno);
15832 /* Define the offset between two registers, FROM to be eliminated and its
15833 replacement TO, at the start of a routine. */
15835 rs6000_initial_elimination_offset (int from, int to)
15837 rs6000_stack_t *info = rs6000_stack_info ();
15838 HOST_WIDE_INT offset;
15840 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
15841 offset = info->push_p ? 0 : -info->total_size;
15842 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
15843 offset = info->total_size;
15844 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
15845 offset = info->push_p ? info->total_size : 0;
15846 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
15854 /* Return true if TYPE is of type __ev64_opaque__. */
15857 is_ev64_opaque_type (tree type)
15860 && (type == opaque_V2SI_type_node
15861 || type == opaque_V2SF_type_node
15862 || type == opaque_p_V2SI_type_node));
15866 rs6000_dwarf_register_span (rtx reg)
15870 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
15873 regno = REGNO (reg);
15875 /* The duality of the SPE register size wreaks all kinds of havoc.
15876 This is a way of distinguishing r0 in 32-bits from r0 in
15879 gen_rtx_PARALLEL (VOIDmode,
15882 gen_rtx_REG (SImode, regno + 1200),
15883 gen_rtx_REG (SImode, regno))
15885 gen_rtx_REG (SImode, regno),
15886 gen_rtx_REG (SImode, regno + 1200)));
15889 /* Map internal gcc register numbers to DWARF2 register numbers. */
15892 rs6000_dbx_register_number (unsigned int regno)
15894 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
15896 if (regno == MQ_REGNO)
15898 if (regno == LINK_REGISTER_REGNUM)
15900 if (regno == COUNT_REGISTER_REGNUM)
15902 if (CR_REGNO_P (regno))
15903 return regno - CR0_REGNO + 86;
15904 if (regno == XER_REGNO)
15906 if (ALTIVEC_REGNO_P (regno))
15907 return regno - FIRST_ALTIVEC_REGNO + 1124;
15908 if (regno == VRSAVE_REGNO)
15910 if (regno == VSCR_REGNO)
15912 if (regno == SPE_ACC_REGNO)
15914 if (regno == SPEFSCR_REGNO)
15916 /* SPE high reg number. We get these values of regno from
15917 rs6000_dwarf_register_span. */
15918 if (regno >= 1200 && regno < 1232)
15924 #include "gt-rs6000.h"