1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
64 && easy_vector_same (x, y))
66 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
68 && easy_vector_same (x, y))
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
73 /* Structure used to define the rs6000 stack */
74 typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
81 int toc_save_p; /* true if the TOC needs to be saved */
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Support adjust_priority scheduler hook
128 and -mprioritize-restricted-insns= option. */
129 const char *rs6000_sched_restricted_insns_priority_str;
130 int rs6000_sched_restricted_insns_priority;
132 /* Support for -msched-costly-dep option. */
133 const char *rs6000_sched_costly_dep_str;
134 enum rs6000_dependence_cost rs6000_sched_costly_dep;
136 /* Support for -minsert-sched-nops option. */
137 const char *rs6000_sched_insert_nops_str;
138 enum rs6000_nop_insertion rs6000_sched_insert_nops;
140 /* Size of long double */
141 const char *rs6000_long_double_size_string;
142 int rs6000_long_double_type_size;
144 /* Whether -mabi=altivec has appeared */
145 int rs6000_altivec_abi;
147 /* Whether VRSAVE instructions should be generated. */
148 int rs6000_altivec_vrsave;
150 /* String from -mvrsave= option. */
151 const char *rs6000_altivec_vrsave_string;
153 /* Nonzero if we want SPE ABI extensions. */
156 /* Whether isel instructions should be generated. */
159 /* Whether SPE simd instructions should be generated. */
162 /* Nonzero if floating point operations are done in the GPRs. */
163 int rs6000_float_gprs = 0;
165 /* String from -mfloat-gprs=. */
166 const char *rs6000_float_gprs_string;
168 /* String from -misel=. */
169 const char *rs6000_isel_string;
171 /* String from -mspe=. */
172 const char *rs6000_spe_string;
174 /* Set to nonzero once AIX common-mode calls have been defined. */
175 static GTY(()) int common_mode_defined;
177 /* Save information from a "cmpxx" operation until the branch or scc is
179 rtx rs6000_compare_op0, rs6000_compare_op1;
180 int rs6000_compare_fp_p;
182 /* Label number of label created for -mrelocatable, to call to so we can
183 get the address of the GOT section */
184 int rs6000_pic_labelno;
187 /* Which abi to adhere to */
188 const char *rs6000_abi_name;
190 /* Semantics of the small data area */
191 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
193 /* Which small data model to use */
194 const char *rs6000_sdata_name = (char *)0;
196 /* Counter for labels which are to be placed in .fixup. */
197 int fixuplabelno = 0;
200 /* Bit size of immediate TLS offsets and string from which it is decoded. */
201 int rs6000_tls_size = 32;
202 const char *rs6000_tls_size_string;
204 /* ABI enumeration available for subtarget to use. */
205 enum rs6000_abi rs6000_current_abi;
207 /* ABI string from -mabi= option. */
208 const char *rs6000_abi_string;
211 const char *rs6000_debug_name;
212 int rs6000_debug_stack; /* debug stack applications */
213 int rs6000_debug_arg; /* debug argument handling */
216 static GTY(()) tree opaque_V2SI_type_node;
217 static GTY(()) tree opaque_V2SF_type_node;
218 static GTY(()) tree opaque_p_V2SI_type_node;
220 const char *rs6000_traceback_name;
222 traceback_default = 0,
228 /* Flag to say the TOC is initialized */
230 char toc_label_name[10];
232 /* Alias set for saves and restores from the rs6000 stack. */
233 static int rs6000_sr_alias_set;
235 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
236 The only place that looks at this is rs6000_set_default_type_attributes;
237 everywhere else should rely on the presence or absence of a longcall
238 attribute on the function declaration. */
239 int rs6000_default_long_calls;
240 const char *rs6000_longcall_switch;
242 /* Control alignment for fields within structures. */
243 /* String from -malign-XXXXX. */
244 const char *rs6000_alignment_string;
245 int rs6000_alignment_flags;
247 struct builtin_description
249 /* mask is not const because we're going to alter it below. This
250 nonsense will go away when we rewrite the -march infrastructure
251 to give us more target flag bits. */
253 const enum insn_code icode;
254 const char *const name;
255 const enum rs6000_builtins code;
258 static bool rs6000_function_ok_for_sibcall (tree, tree);
259 static int num_insns_constant_wide (HOST_WIDE_INT);
260 static void validate_condition_mode (enum rtx_code, enum machine_mode);
261 static rtx rs6000_generate_compare (enum rtx_code);
262 static void rs6000_maybe_dead (rtx);
263 static void rs6000_emit_stack_tie (void);
264 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
265 static rtx spe_synthesize_frame_save (rtx);
266 static bool spe_func_has_64bit_regs_p (void);
267 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
269 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
270 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
271 static unsigned rs6000_hash_constant (rtx);
272 static unsigned toc_hash_function (const void *);
273 static int toc_hash_eq (const void *, const void *);
274 static int constant_pool_expr_1 (rtx, int *, int *);
275 static bool constant_pool_expr_p (rtx);
276 static bool toc_relative_expr_p (rtx);
277 static bool legitimate_small_data_p (enum machine_mode, rtx);
278 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
279 static bool legitimate_indexed_address_p (rtx, int);
280 static bool legitimate_indirect_address_p (rtx, int);
281 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
282 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
283 static struct machine_function * rs6000_init_machine_status (void);
284 static bool rs6000_assemble_integer (rtx, unsigned int, int);
285 #ifdef HAVE_GAS_HIDDEN
286 static void rs6000_assemble_visibility (tree, int);
288 static int rs6000_ra_ever_killed (void);
289 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
290 extern const struct attribute_spec rs6000_attribute_table[];
291 static void rs6000_set_default_type_attributes (tree);
292 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
293 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
294 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
296 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
297 static bool rs6000_return_in_memory (tree, tree);
298 static void rs6000_file_start (void);
300 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
301 static void rs6000_elf_asm_out_constructor (rtx, int);
302 static void rs6000_elf_asm_out_destructor (rtx, int);
303 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
304 static void rs6000_elf_unique_section (tree, int);
305 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
306 unsigned HOST_WIDE_INT);
307 static void rs6000_elf_encode_section_info (tree, rtx, int)
309 static bool rs6000_elf_in_small_data_p (tree);
312 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
313 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
314 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
315 static void rs6000_xcoff_unique_section (tree, int);
316 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
317 unsigned HOST_WIDE_INT);
318 static const char * rs6000_xcoff_strip_name_encoding (const char *);
319 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
320 static void rs6000_xcoff_file_start (void);
321 static void rs6000_xcoff_file_end (void);
324 static bool rs6000_binds_local_p (tree);
326 static int rs6000_use_dfa_pipeline_interface (void);
327 static int rs6000_variable_issue (FILE *, int, rtx, int);
328 static bool rs6000_rtx_costs (rtx, int, int, int *);
329 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
330 static bool is_microcoded_insn (rtx);
331 static int is_dispatch_slot_restricted (rtx);
332 static bool is_cracked_insn (rtx);
333 static bool is_branch_slot_insn (rtx);
334 static int rs6000_adjust_priority (rtx, int);
335 static int rs6000_issue_rate (void);
336 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
337 static rtx get_next_active_insn (rtx, rtx);
338 static bool insn_terminates_group_p (rtx , enum group_termination);
339 static bool is_costly_group (rtx *, rtx);
340 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
341 static int redefine_groups (FILE *, int, rtx, rtx);
342 static int pad_groups (FILE *, int, rtx, rtx);
343 static void rs6000_sched_finish (FILE *, int);
344 static int rs6000_use_sched_lookahead (void);
346 static void rs6000_init_builtins (void);
347 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
348 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
349 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
350 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
351 static void altivec_init_builtins (void);
352 static void rs6000_common_init_builtins (void);
353 static void rs6000_init_libfuncs (void);
355 static void enable_mask_for_builtins (struct builtin_description *, int,
356 enum rs6000_builtins,
357 enum rs6000_builtins);
358 static void spe_init_builtins (void);
359 static rtx spe_expand_builtin (tree, rtx, bool *);
360 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
361 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
362 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
363 static rs6000_stack_t *rs6000_stack_info (void);
364 static void debug_stack_info (rs6000_stack_t *);
366 static rtx altivec_expand_builtin (tree, rtx, bool *);
367 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
368 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
369 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
370 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
371 static rtx altivec_expand_predicate_builtin (enum insn_code,
372 const char *, tree, rtx);
373 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
374 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
375 static void rs6000_parse_abi_options (void);
376 static void rs6000_parse_alignment_option (void);
377 static void rs6000_parse_tls_size_option (void);
378 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
379 static int first_altivec_reg_to_save (void);
380 static unsigned int compute_vrsave_mask (void);
381 static void is_altivec_return_reg (rtx, void *);
382 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
383 int easy_vector_constant (rtx, enum machine_mode);
384 static int easy_vector_same (rtx, enum machine_mode);
385 static bool is_ev64_opaque_type (tree);
386 static rtx rs6000_dwarf_register_span (rtx);
387 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
388 static rtx rs6000_tls_get_addr (void);
389 static rtx rs6000_got_sym (void);
390 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
391 static const char *rs6000_get_some_local_dynamic_name (void);
392 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
393 static rtx rs6000_complex_function_value (enum machine_mode);
394 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
395 enum machine_mode, tree);
396 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
397 enum machine_mode, tree, int);
398 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
399 enum machine_mode, tree,
401 static tree rs6000_build_builtin_va_list (void);
403 /* Hash table stuff for keeping track of TOC entries. */
405 struct toc_hash_struct GTY(())
407 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
408 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
410 enum machine_mode key_mode;
414 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
416 /* Default register names. */
417 char rs6000_reg_names[][8] =
419 "0", "1", "2", "3", "4", "5", "6", "7",
420 "8", "9", "10", "11", "12", "13", "14", "15",
421 "16", "17", "18", "19", "20", "21", "22", "23",
422 "24", "25", "26", "27", "28", "29", "30", "31",
423 "0", "1", "2", "3", "4", "5", "6", "7",
424 "8", "9", "10", "11", "12", "13", "14", "15",
425 "16", "17", "18", "19", "20", "21", "22", "23",
426 "24", "25", "26", "27", "28", "29", "30", "31",
427 "mq", "lr", "ctr","ap",
428 "0", "1", "2", "3", "4", "5", "6", "7",
430 /* AltiVec registers. */
431 "0", "1", "2", "3", "4", "5", "6", "7",
432 "8", "9", "10", "11", "12", "13", "14", "15",
433 "16", "17", "18", "19", "20", "21", "22", "23",
434 "24", "25", "26", "27", "28", "29", "30", "31",
440 #ifdef TARGET_REGNAMES
441 static const char alt_reg_names[][8] =
443 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
444 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
445 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
446 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
447 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
448 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
449 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
450 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
451 "mq", "lr", "ctr", "ap",
452 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
454 /* AltiVec registers. */
455 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
456 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
457 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
458 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
465 #ifndef MASK_STRICT_ALIGN
466 #define MASK_STRICT_ALIGN 0
468 #ifndef TARGET_PROFILE_KERNEL
469 #define TARGET_PROFILE_KERNEL 0
472 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
473 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
475 /* Return 1 for a symbol ref for a thread-local storage symbol. */
476 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
477 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
479 /* Initialize the GCC target structure. */
480 #undef TARGET_ATTRIBUTE_TABLE
481 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
482 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
483 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
485 #undef TARGET_ASM_ALIGNED_DI_OP
486 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
488 /* Default unaligned ops are only provided for ELF. Find the ops needed
489 for non-ELF systems. */
490 #ifndef OBJECT_FORMAT_ELF
492 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
494 #undef TARGET_ASM_UNALIGNED_HI_OP
495 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
496 #undef TARGET_ASM_UNALIGNED_SI_OP
497 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
498 #undef TARGET_ASM_UNALIGNED_DI_OP
499 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
502 #undef TARGET_ASM_UNALIGNED_HI_OP
503 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
504 #undef TARGET_ASM_UNALIGNED_SI_OP
505 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
509 /* This hook deals with fixups for relocatable code and DI-mode objects
511 #undef TARGET_ASM_INTEGER
512 #define TARGET_ASM_INTEGER rs6000_assemble_integer
514 #ifdef HAVE_GAS_HIDDEN
515 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
516 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
519 #undef TARGET_HAVE_TLS
520 #define TARGET_HAVE_TLS HAVE_AS_TLS
522 #undef TARGET_CANNOT_FORCE_CONST_MEM
523 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
525 #undef TARGET_ASM_FUNCTION_PROLOGUE
526 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
527 #undef TARGET_ASM_FUNCTION_EPILOGUE
528 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
530 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
531 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
532 #undef TARGET_SCHED_VARIABLE_ISSUE
533 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
535 #undef TARGET_SCHED_ISSUE_RATE
536 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
537 #undef TARGET_SCHED_ADJUST_COST
538 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
539 #undef TARGET_SCHED_ADJUST_PRIORITY
540 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
541 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
542 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
543 #undef TARGET_SCHED_FINISH
544 #define TARGET_SCHED_FINISH rs6000_sched_finish
546 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
547 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
549 #undef TARGET_INIT_BUILTINS
550 #define TARGET_INIT_BUILTINS rs6000_init_builtins
552 #undef TARGET_EXPAND_BUILTIN
553 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
555 #undef TARGET_INIT_LIBFUNCS
556 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
559 #undef TARGET_BINDS_LOCAL_P
560 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
563 #undef TARGET_ASM_OUTPUT_MI_THUNK
564 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
566 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
567 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
569 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
570 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
572 #undef TARGET_RTX_COSTS
573 #define TARGET_RTX_COSTS rs6000_rtx_costs
574 #undef TARGET_ADDRESS_COST
575 #define TARGET_ADDRESS_COST hook_int_rtx_0
577 #undef TARGET_VECTOR_OPAQUE_P
578 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
580 #undef TARGET_DWARF_REGISTER_SPAN
581 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
583 /* On rs6000, function arguments are promoted, as are function return
585 #undef TARGET_PROMOTE_FUNCTION_ARGS
586 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
587 #undef TARGET_PROMOTE_FUNCTION_RETURN
588 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
590 /* Structure return values are passed as an extra parameter. */
591 #undef TARGET_STRUCT_VALUE_RTX
592 #define TARGET_STRUCT_VALUE_RTX hook_rtx_tree_int_null
594 #undef TARGET_RETURN_IN_MEMORY
595 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
597 #undef TARGET_SETUP_INCOMING_VARARGS
598 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
600 /* Always strict argument naming on rs6000. */
601 #undef TARGET_STRICT_ARGUMENT_NAMING
602 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
603 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
604 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
606 #undef TARGET_BUILD_BUILTIN_VA_LIST
607 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
609 struct gcc_target targetm = TARGET_INITIALIZER;
611 /* Override command line options. Mostly we process the processor
612 type and sometimes adjust other TARGET_ options. */
615 rs6000_override_options (const char *default_cpu)
618 struct rs6000_cpu_select *ptr;
620 /* Simplify the entries below by making a mask for any POWER
621 variant and any PowerPC variant. */
623 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
624 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
625 | MASK_PPC_GFXOPT | MASK_POWERPC64)
626 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
630 const char *const name; /* Canonical processor name. */
631 const enum processor_type processor; /* Processor type enum value. */
632 const int target_enable; /* Target flags to enable. */
633 const int target_disable; /* Target flags to disable. */
634 } const processor_target_table[]
635 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
636 POWER_MASKS | POWERPC_MASKS},
637 {"power", PROCESSOR_POWER,
638 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
639 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
640 {"power2", PROCESSOR_POWER,
641 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
642 POWERPC_MASKS | MASK_NEW_MNEMONICS},
643 {"power3", PROCESSOR_PPC630,
644 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
646 {"power4", PROCESSOR_POWER4,
647 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
649 {"powerpc", PROCESSOR_POWERPC,
650 MASK_POWERPC | MASK_NEW_MNEMONICS,
651 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
652 {"powerpc64", PROCESSOR_POWERPC64,
653 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
654 POWER_MASKS | POWERPC_OPT_MASKS},
655 {"rios", PROCESSOR_RIOS1,
656 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
657 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
658 {"rios1", PROCESSOR_RIOS1,
659 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
660 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
661 {"rsc", PROCESSOR_PPC601,
662 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
663 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
664 {"rsc1", PROCESSOR_PPC601,
665 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
666 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
667 {"rios2", PROCESSOR_RIOS2,
668 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
669 POWERPC_MASKS | MASK_NEW_MNEMONICS},
670 {"rs64a", PROCESSOR_RS64A,
671 MASK_POWERPC | MASK_NEW_MNEMONICS,
672 POWER_MASKS | POWERPC_OPT_MASKS},
673 {"401", PROCESSOR_PPC403,
674 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
675 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
676 {"403", PROCESSOR_PPC403,
677 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
678 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
679 {"405", PROCESSOR_PPC405,
680 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
681 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
682 {"405fp", PROCESSOR_PPC405,
683 MASK_POWERPC | MASK_NEW_MNEMONICS,
684 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
685 {"440", PROCESSOR_PPC440,
686 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
687 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
688 {"440fp", PROCESSOR_PPC440,
689 MASK_POWERPC | MASK_NEW_MNEMONICS,
690 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
691 {"505", PROCESSOR_MPCCORE,
692 MASK_POWERPC | MASK_NEW_MNEMONICS,
693 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
694 {"601", PROCESSOR_PPC601,
695 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
696 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
697 {"602", PROCESSOR_PPC603,
698 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
699 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
700 {"603", PROCESSOR_PPC603,
701 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
702 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
703 {"603e", PROCESSOR_PPC603,
704 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
705 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
706 {"ec603e", PROCESSOR_PPC603,
707 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
708 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
709 {"604", PROCESSOR_PPC604,
710 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
711 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
712 {"604e", PROCESSOR_PPC604e,
713 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
714 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
715 {"620", PROCESSOR_PPC620,
716 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
718 {"630", PROCESSOR_PPC630,
719 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
721 {"740", PROCESSOR_PPC750,
722 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
723 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
724 {"750", PROCESSOR_PPC750,
725 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
726 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
727 {"G3", PROCESSOR_PPC750,
728 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
729 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
730 {"7400", PROCESSOR_PPC7400,
731 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
732 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
733 {"7450", PROCESSOR_PPC7450,
734 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
735 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
736 {"G4", PROCESSOR_PPC7450,
737 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
738 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
739 {"8540", PROCESSOR_PPC8540,
740 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
741 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
742 {"801", PROCESSOR_MPCCORE,
743 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
744 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
745 {"821", PROCESSOR_MPCCORE,
746 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
747 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
748 {"823", PROCESSOR_MPCCORE,
749 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
750 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
751 {"860", PROCESSOR_MPCCORE,
752 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
753 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
754 {"970", PROCESSOR_POWER4,
755 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
757 {"G5", PROCESSOR_POWER4,
758 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
761 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
763 /* Save current -mmultiple/-mno-multiple status. */
764 int multiple = TARGET_MULTIPLE;
765 /* Save current -mstring/-mno-string status. */
766 int string = TARGET_STRING;
768 /* Identify the processor type. */
769 rs6000_select[0].string = default_cpu;
770 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
772 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
774 ptr = &rs6000_select[i];
775 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
777 for (j = 0; j < ptt_size; j++)
778 if (! strcmp (ptr->string, processor_target_table[j].name))
781 rs6000_cpu = processor_target_table[j].processor;
785 target_flags |= processor_target_table[j].target_enable;
786 target_flags &= ~processor_target_table[j].target_disable;
792 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
799 /* If we are optimizing big endian systems for space, use the load/store
800 multiple and string instructions. */
801 if (BYTES_BIG_ENDIAN && optimize_size)
802 target_flags |= MASK_MULTIPLE | MASK_STRING;
804 /* If -mmultiple or -mno-multiple was explicitly used, don't
805 override with the processor default */
806 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
807 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
809 /* If -mstring or -mno-string was explicitly used, don't override
810 with the processor default. */
811 if ((target_flags_explicit & MASK_STRING) != 0)
812 target_flags = (target_flags & ~MASK_STRING) | string;
814 /* Don't allow -mmultiple or -mstring on little endian systems
815 unless the cpu is a 750, because the hardware doesn't support the
816 instructions used in little endian mode, and causes an alignment
817 trap. The 750 does not cause an alignment trap (except when the
818 target is unaligned). */
820 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
824 target_flags &= ~MASK_MULTIPLE;
825 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
826 warning ("-mmultiple is not supported on little endian systems");
831 target_flags &= ~MASK_STRING;
832 if ((target_flags_explicit & MASK_STRING) != 0)
833 warning ("-mstring is not supported on little endian systems");
837 /* Set debug flags */
838 if (rs6000_debug_name)
840 if (! strcmp (rs6000_debug_name, "all"))
841 rs6000_debug_stack = rs6000_debug_arg = 1;
842 else if (! strcmp (rs6000_debug_name, "stack"))
843 rs6000_debug_stack = 1;
844 else if (! strcmp (rs6000_debug_name, "arg"))
845 rs6000_debug_arg = 1;
847 error ("unknown -mdebug-%s switch", rs6000_debug_name);
850 if (rs6000_traceback_name)
852 if (! strncmp (rs6000_traceback_name, "full", 4))
853 rs6000_traceback = traceback_full;
854 else if (! strncmp (rs6000_traceback_name, "part", 4))
855 rs6000_traceback = traceback_part;
856 else if (! strncmp (rs6000_traceback_name, "no", 2))
857 rs6000_traceback = traceback_none;
859 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
860 rs6000_traceback_name);
863 /* Set size of long double */
864 rs6000_long_double_type_size = 64;
865 if (rs6000_long_double_size_string)
868 int size = strtol (rs6000_long_double_size_string, &tail, 10);
869 if (*tail != '\0' || (size != 64 && size != 128))
870 error ("Unknown switch -mlong-double-%s",
871 rs6000_long_double_size_string);
873 rs6000_long_double_type_size = size;
876 /* Handle -mabi= options. */
877 rs6000_parse_abi_options ();
879 /* Handle -malign-XXXXX option. */
880 rs6000_parse_alignment_option ();
882 /* Handle generic -mFOO=YES/NO options. */
883 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
884 &rs6000_altivec_vrsave);
885 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
887 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
888 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
891 /* Handle -mtls-size option. */
892 rs6000_parse_tls_size_option ();
894 #ifdef SUBTARGET_OVERRIDE_OPTIONS
895 SUBTARGET_OVERRIDE_OPTIONS;
897 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
898 SUBSUBTARGET_OVERRIDE_OPTIONS;
903 /* The e500 does not have string instructions, and we set
904 MASK_STRING above when optimizing for size. */
905 if ((target_flags & MASK_STRING) != 0)
906 target_flags = target_flags & ~MASK_STRING;
908 /* No SPE means 64-bit long doubles, even if an E500. */
909 if (rs6000_spe_string != 0
910 && !strcmp (rs6000_spe_string, "no"))
911 rs6000_long_double_type_size = 64;
913 else if (rs6000_select[1].string != NULL)
915 /* For the powerpc-eabispe configuration, we set all these by
916 default, so let's unset them if we manually set another
917 CPU that is not the E500. */
918 if (rs6000_abi_string == 0)
920 if (rs6000_spe_string == 0)
922 if (rs6000_float_gprs_string == 0)
923 rs6000_float_gprs = 0;
924 if (rs6000_isel_string == 0)
926 if (rs6000_long_double_size_string == 0)
927 rs6000_long_double_type_size = 64;
930 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
931 using TARGET_OPTIONS to handle a toggle switch, but we're out of
932 bits in target_flags so TARGET_SWITCHES cannot be used.
933 Assumption here is that rs6000_longcall_switch points into the
934 text of the complete option, rather than being a copy, so we can
935 scan back for the presence or absence of the no- modifier. */
936 if (rs6000_longcall_switch)
938 const char *base = rs6000_longcall_switch;
939 while (base[-1] != 'm') base--;
941 if (*rs6000_longcall_switch != '\0')
942 error ("invalid option `%s'", base);
943 rs6000_default_long_calls = (base[0] != 'n');
946 /* Handle -mprioritize-restricted-insns option. */
947 rs6000_sched_restricted_insns_priority = DEFAULT_RESTRICTED_INSNS_PRIORITY;
948 if (rs6000_sched_restricted_insns_priority_str)
949 rs6000_sched_restricted_insns_priority =
950 atoi (rs6000_sched_restricted_insns_priority_str);
952 /* Handle -msched-costly-dep option. */
953 rs6000_sched_costly_dep = DEFAULT_SCHED_COSTLY_DEP;
954 if (rs6000_sched_costly_dep_str)
956 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
957 rs6000_sched_costly_dep = no_dep_costly;
958 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
959 rs6000_sched_costly_dep = all_deps_costly;
960 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
961 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
962 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
963 rs6000_sched_costly_dep = store_to_load_dep_costly;
965 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
968 /* Handle -minsert-sched-nops option. */
969 rs6000_sched_insert_nops = DEFAULT_SCHED_FINISH_NOP_INSERTION_SCHEME;
970 if (rs6000_sched_insert_nops_str)
972 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
973 rs6000_sched_insert_nops = sched_finish_none;
974 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
975 rs6000_sched_insert_nops = sched_finish_pad_groups;
976 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
977 rs6000_sched_insert_nops = sched_finish_regroup_exact;
979 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
982 #ifdef TARGET_REGNAMES
983 /* If the user desires alternate register names, copy in the
984 alternate names now. */
986 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
989 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
990 If -maix-struct-return or -msvr4-struct-return was explicitly
991 used, don't override with the ABI default. */
992 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
994 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
995 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
997 target_flags |= MASK_AIX_STRUCT_RET;
1000 if (TARGET_LONG_DOUBLE_128
1001 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1002 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1004 /* Allocate an alias set for register saves & restores from stack. */
1005 rs6000_sr_alias_set = new_alias_set ();
1008 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1010 /* We can only guarantee the availability of DI pseudo-ops when
1011 assembling for 64-bit targets. */
1014 targetm.asm_out.aligned_op.di = NULL;
1015 targetm.asm_out.unaligned_op.di = NULL;
1018 /* Set maximum branch target alignment at two instructions, eight bytes. */
1019 align_jumps_max_skip = 8;
1020 align_loops_max_skip = 8;
1022 /* Arrange to save and restore machine status around nested functions. */
1023 init_machine_status = rs6000_init_machine_status;
1026 /* Handle generic options of the form -mfoo=yes/no.
1027 NAME is the option name.
1028 VALUE is the option value.
1029 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1030 whether the option value is 'yes' or 'no' respectively. */
1032 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1036 else if (!strcmp (value, "yes"))
1038 else if (!strcmp (value, "no"))
1041 error ("unknown -m%s= option specified: '%s'", name, value);
1044 /* Handle -mabi= options. */
1046 rs6000_parse_abi_options (void)
1048 if (rs6000_abi_string == 0)
1050 else if (! strcmp (rs6000_abi_string, "altivec"))
1051 rs6000_altivec_abi = 1;
1052 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1053 rs6000_altivec_abi = 0;
1054 else if (! strcmp (rs6000_abi_string, "spe"))
1057 if (!TARGET_SPE_ABI)
1058 error ("not configured for ABI: '%s'", rs6000_abi_string);
1061 else if (! strcmp (rs6000_abi_string, "no-spe"))
1064 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1067 /* Handle -malign-XXXXXX options. */
1069 rs6000_parse_alignment_option (void)
1071 if (rs6000_alignment_string == 0)
1073 else if (! strcmp (rs6000_alignment_string, "power"))
1074 rs6000_alignment_flags = MASK_ALIGN_POWER;
1075 else if (! strcmp (rs6000_alignment_string, "natural"))
1076 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1078 error ("unknown -malign-XXXXX option specified: '%s'",
1079 rs6000_alignment_string);
1082 /* Validate and record the size specified with the -mtls-size option. */
1085 rs6000_parse_tls_size_option (void)
1087 if (rs6000_tls_size_string == 0)
1089 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1090 rs6000_tls_size = 16;
1091 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1092 rs6000_tls_size = 32;
1093 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1094 rs6000_tls_size = 64;
1096 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1100 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1104 /* Do anything needed at the start of the asm file. */
1107 rs6000_file_start (void)
1111 const char *start = buffer;
1112 struct rs6000_cpu_select *ptr;
1113 const char *default_cpu = TARGET_CPU_DEFAULT;
1114 FILE *file = asm_out_file;
1116 default_file_start ();
1118 #ifdef TARGET_BI_ARCH
1119 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1123 if (flag_verbose_asm)
1125 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1126 rs6000_select[0].string = default_cpu;
1128 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1130 ptr = &rs6000_select[i];
1131 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1133 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1138 #ifdef USING_ELFOS_H
1139 switch (rs6000_sdata)
1141 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1142 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1143 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1144 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1147 if (rs6000_sdata && g_switch_value)
1149 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1160 /* Return nonzero if this function is known to have a null epilogue. */
1163 direct_return (void)
1165 if (reload_completed)
1167 rs6000_stack_t *info = rs6000_stack_info ();
1169 if (info->first_gp_reg_save == 32
1170 && info->first_fp_reg_save == 64
1171 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1172 && ! info->lr_save_p
1173 && ! info->cr_save_p
1174 && info->vrsave_mask == 0
1182 /* Returns 1 always. */
1185 any_operand (rtx op ATTRIBUTE_UNUSED,
1186 enum machine_mode mode ATTRIBUTE_UNUSED)
1191 /* Returns 1 if op is the count register. */
1193 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1195 if (GET_CODE (op) != REG)
1198 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1201 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1207 /* Returns 1 if op is an altivec register. */
1209 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1212 return (register_operand (op, mode)
1213 && (GET_CODE (op) != REG
1214 || REGNO (op) > FIRST_PSEUDO_REGISTER
1215 || ALTIVEC_REGNO_P (REGNO (op))));
1219 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1221 if (GET_CODE (op) != REG)
1224 if (XER_REGNO_P (REGNO (op)))
1230 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1231 by such constants completes more quickly. */
1234 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1236 return ( GET_CODE (op) == CONST_INT
1237 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1240 /* Return 1 if OP is a constant that can fit in a D field. */
1243 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1245 return (GET_CODE (op) == CONST_INT
1246 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1249 /* Similar for an unsigned D field. */
1252 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1254 return (GET_CODE (op) == CONST_INT
1255 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1258 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1261 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1263 return (GET_CODE (op) == CONST_INT
1264 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1267 /* Returns 1 if OP is a CONST_INT that is a positive value
1268 and an exact power of 2. */
1271 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1273 return (GET_CODE (op) == CONST_INT
1275 && exact_log2 (INTVAL (op)) >= 0);
1278 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1282 gpc_reg_operand (rtx op, enum machine_mode mode)
1284 return (register_operand (op, mode)
1285 && (GET_CODE (op) != REG
1286 || (REGNO (op) >= ARG_POINTER_REGNUM
1287 && !XER_REGNO_P (REGNO (op)))
1288 || REGNO (op) < MQ_REGNO));
1291 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1295 cc_reg_operand (rtx op, enum machine_mode mode)
1297 return (register_operand (op, mode)
1298 && (GET_CODE (op) != REG
1299 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1300 || CR_REGNO_P (REGNO (op))));
1303 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1304 CR field that isn't CR0. */
1307 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1309 return (register_operand (op, mode)
1310 && (GET_CODE (op) != REG
1311 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1312 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1315 /* Returns 1 if OP is either a constant integer valid for a D-field or
1316 a non-special register. If a register, it must be in the proper
1317 mode unless MODE is VOIDmode. */
1320 reg_or_short_operand (rtx op, enum machine_mode mode)
1322 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1325 /* Similar, except check if the negation of the constant would be
1326 valid for a D-field. */
1329 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1331 if (GET_CODE (op) == CONST_INT)
1332 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1334 return gpc_reg_operand (op, mode);
1337 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1338 a non-special register. If a register, it must be in the proper
1339 mode unless MODE is VOIDmode. */
1342 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1344 if (gpc_reg_operand (op, mode))
1346 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1353 /* Return 1 if the operand is either a register or an integer whose
1354 high-order 16 bits are zero. */
1357 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1359 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1362 /* Return 1 is the operand is either a non-special register or ANY
1363 constant integer. */
1366 reg_or_cint_operand (rtx op, enum machine_mode mode)
1368 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1371 /* Return 1 is the operand is either a non-special register or ANY
1372 32-bit signed constant integer. */
1375 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1377 return (gpc_reg_operand (op, mode)
1378 || (GET_CODE (op) == CONST_INT
1379 #if HOST_BITS_PER_WIDE_INT != 32
1380 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1381 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1386 /* Return 1 is the operand is either a non-special register or a 32-bit
1387 signed constant integer valid for 64-bit addition. */
1390 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1392 return (gpc_reg_operand (op, mode)
1393 || (GET_CODE (op) == CONST_INT
1394 #if HOST_BITS_PER_WIDE_INT == 32
1395 && INTVAL (op) < 0x7fff8000
1397 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1403 /* Return 1 is the operand is either a non-special register or a 32-bit
1404 signed constant integer valid for 64-bit subtraction. */
1407 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1409 return (gpc_reg_operand (op, mode)
1410 || (GET_CODE (op) == CONST_INT
1411 #if HOST_BITS_PER_WIDE_INT == 32
1412 && (- INTVAL (op)) < 0x7fff8000
1414 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1420 /* Return 1 is the operand is either a non-special register or ANY
1421 32-bit unsigned constant integer. */
1424 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1426 if (GET_CODE (op) == CONST_INT)
1428 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1430 if (GET_MODE_BITSIZE (mode) <= 32)
1433 if (INTVAL (op) < 0)
1437 return ((INTVAL (op) & GET_MODE_MASK (mode)
1438 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1440 else if (GET_CODE (op) == CONST_DOUBLE)
1442 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1446 return CONST_DOUBLE_HIGH (op) == 0;
1449 return gpc_reg_operand (op, mode);
1452 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1455 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1457 return (GET_CODE (op) == SYMBOL_REF
1458 || GET_CODE (op) == CONST
1459 || GET_CODE (op) == LABEL_REF);
1462 /* Return 1 if the operand is a simple references that can be loaded via
1463 the GOT (labels involving addition aren't allowed). */
1466 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1468 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1471 /* Return the number of instructions it takes to form a constant in an
1472 integer register. */
1475 num_insns_constant_wide (HOST_WIDE_INT value)
1477 /* signed constant loadable with {cal|addi} */
1478 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1481 /* constant loadable with {cau|addis} */
1482 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1485 #if HOST_BITS_PER_WIDE_INT == 64
1486 else if (TARGET_POWERPC64)
1488 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1489 HOST_WIDE_INT high = value >> 31;
1491 if (high == 0 || high == -1)
1497 return num_insns_constant_wide (high) + 1;
1499 return (num_insns_constant_wide (high)
1500 + num_insns_constant_wide (low) + 1);
1509 num_insns_constant (rtx op, enum machine_mode mode)
1511 if (GET_CODE (op) == CONST_INT)
1513 #if HOST_BITS_PER_WIDE_INT == 64
1514 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1515 && mask64_operand (op, mode))
1519 return num_insns_constant_wide (INTVAL (op));
1522 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1527 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1528 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1529 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1532 else if (GET_CODE (op) == CONST_DOUBLE)
1538 int endian = (WORDS_BIG_ENDIAN == 0);
1540 if (mode == VOIDmode || mode == DImode)
1542 high = CONST_DOUBLE_HIGH (op);
1543 low = CONST_DOUBLE_LOW (op);
1547 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1548 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1550 low = l[1 - endian];
1554 return (num_insns_constant_wide (low)
1555 + num_insns_constant_wide (high));
1559 if (high == 0 && low >= 0)
1560 return num_insns_constant_wide (low);
1562 else if (high == -1 && low < 0)
1563 return num_insns_constant_wide (low);
1565 else if (mask64_operand (op, mode))
1569 return num_insns_constant_wide (high) + 1;
1572 return (num_insns_constant_wide (high)
1573 + num_insns_constant_wide (low) + 1);
1581 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1582 register with one instruction per word. We only do this if we can
1583 safely read CONST_DOUBLE_{LOW,HIGH}. */
1586 easy_fp_constant (rtx op, enum machine_mode mode)
1588 if (GET_CODE (op) != CONST_DOUBLE
1589 || GET_MODE (op) != mode
1590 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1593 /* Consider all constants with -msoft-float to be easy. */
1594 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1598 /* If we are using V.4 style PIC, consider all constants to be hard. */
1599 if (flag_pic && DEFAULT_ABI == ABI_V4)
1602 #ifdef TARGET_RELOCATABLE
1603 /* Similarly if we are using -mrelocatable, consider all constants
1605 if (TARGET_RELOCATABLE)
1614 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1615 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1617 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1618 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1619 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1620 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1623 else if (mode == DFmode)
1628 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1629 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1631 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1632 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1635 else if (mode == SFmode)
1640 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1641 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1643 return num_insns_constant_wide (l) == 1;
1646 else if (mode == DImode)
1647 return ((TARGET_POWERPC64
1648 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1649 || (num_insns_constant (op, DImode) <= 2));
1651 else if (mode == SImode)
1657 /* Return nonzero if all elements of a vector have the same value. */
1660 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1664 units = CONST_VECTOR_NUNITS (op);
1666 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1667 for (i = 1; i < units; ++i)
1668 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1675 /* Return 1 if the operand is a CONST_INT and can be put into a
1676 register without using memory. */
1679 easy_vector_constant (rtx op, enum machine_mode mode)
1683 if (GET_CODE (op) != CONST_VECTOR
1688 if (zero_constant (op, mode)
1689 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1690 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1693 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1696 if (TARGET_SPE && mode == V1DImode)
1699 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1700 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1702 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1704 evmergelo r0, r0, r0
1707 I don't know how efficient it would be to allow bigger constants,
1708 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1709 instructions is better than a 64-bit memory load, but I don't
1710 have the e500 timing specs. */
1711 if (TARGET_SPE && mode == V2SImode
1712 && cst >= -0x7fff && cst <= 0x7fff
1713 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1716 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1719 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1725 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1728 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1732 if (!easy_vector_constant (op, mode))
1735 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1737 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1741 output_vec_const_move (rtx *operands)
1744 enum machine_mode mode;
1750 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1751 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1752 mode = GET_MODE (dest);
1756 if (zero_constant (vec, mode))
1757 return "vxor %0,%0,%0";
1758 else if (EASY_VECTOR_15 (cst, vec, mode))
1760 operands[1] = GEN_INT (cst);
1764 return "vspltisw %0,%1";
1766 return "vspltish %0,%1";
1768 return "vspltisb %0,%1";
1773 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1781 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1782 pattern of V1DI, V4HI, and V2SF.
1784 FIXME: We should probably return # and add post reload
1785 splitters for these, but this way is so easy ;-).
1787 operands[1] = GEN_INT (cst);
1788 operands[2] = GEN_INT (cst2);
1790 return "li %0,%1\n\tevmergelo %0,%0,%0";
1792 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1798 /* Return 1 if the operand is the constant 0. This works for scalars
1799 as well as vectors. */
1801 zero_constant (rtx op, enum machine_mode mode)
1803 return op == CONST0_RTX (mode);
1806 /* Return 1 if the operand is 0.0. */
1808 zero_fp_constant (rtx op, enum machine_mode mode)
1810 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1813 /* Return 1 if the operand is in volatile memory. Note that during
1814 the RTL generation phase, memory_operand does not return TRUE for
1815 volatile memory references. So this function allows us to
1816 recognize volatile references where its safe. */
1819 volatile_mem_operand (rtx op, enum machine_mode mode)
1821 if (GET_CODE (op) != MEM)
1824 if (!MEM_VOLATILE_P (op))
1827 if (mode != GET_MODE (op))
1830 if (reload_completed)
1831 return memory_operand (op, mode);
1833 if (reload_in_progress)
1834 return strict_memory_address_p (mode, XEXP (op, 0));
1836 return memory_address_p (mode, XEXP (op, 0));
1839 /* Return 1 if the operand is an offsettable memory operand. */
1842 offsettable_mem_operand (rtx op, enum machine_mode mode)
1844 return ((GET_CODE (op) == MEM)
1845 && offsettable_address_p (reload_completed || reload_in_progress,
1846 mode, XEXP (op, 0)));
1849 /* Return 1 if the operand is either an easy FP constant (see above) or
1853 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1855 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1858 /* Return 1 if the operand is either a non-special register or an item
1859 that can be used as the operand of a `mode' add insn. */
1862 add_operand (rtx op, enum machine_mode mode)
1864 if (GET_CODE (op) == CONST_INT)
1865 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1866 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1868 return gpc_reg_operand (op, mode);
1871 /* Return 1 if OP is a constant but not a valid add_operand. */
1874 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1876 return (GET_CODE (op) == CONST_INT
1877 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1878 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1881 /* Return 1 if the operand is a non-special register or a constant that
1882 can be used as the operand of an OR or XOR insn on the RS/6000. */
1885 logical_operand (rtx op, enum machine_mode mode)
1887 HOST_WIDE_INT opl, oph;
1889 if (gpc_reg_operand (op, mode))
1892 if (GET_CODE (op) == CONST_INT)
1894 opl = INTVAL (op) & GET_MODE_MASK (mode);
1896 #if HOST_BITS_PER_WIDE_INT <= 32
1897 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1901 else if (GET_CODE (op) == CONST_DOUBLE)
1903 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1906 opl = CONST_DOUBLE_LOW (op);
1907 oph = CONST_DOUBLE_HIGH (op);
1914 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1915 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1918 /* Return 1 if C is a constant that is not a logical operand (as
1919 above), but could be split into one. */
1922 non_logical_cint_operand (rtx op, enum machine_mode mode)
1924 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1925 && ! logical_operand (op, mode)
1926 && reg_or_logical_cint_operand (op, mode));
1929 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1930 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1931 Reject all ones and all zeros, since these should have been optimized
1932 away and confuse the making of MB and ME. */
1935 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1937 HOST_WIDE_INT c, lsb;
1939 if (GET_CODE (op) != CONST_INT)
1944 /* Fail in 64-bit mode if the mask wraps around because the upper
1945 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1946 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1949 /* We don't change the number of transitions by inverting,
1950 so make sure we start with the LS bit zero. */
1954 /* Reject all zeros or all ones. */
1958 /* Find the first transition. */
1961 /* Invert to look for a second transition. */
1964 /* Erase first transition. */
1967 /* Find the second transition (if any). */
1970 /* Match if all the bits above are 1's (or c is zero). */
1974 /* Return 1 for the PowerPC64 rlwinm corner case. */
1977 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1979 HOST_WIDE_INT c, lsb;
1981 if (GET_CODE (op) != CONST_INT)
1986 if ((c & 0x80000001) != 0x80000001)
2000 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2001 It is if there are no more than one 1->0 or 0->1 transitions.
2002 Reject all zeros, since zero should have been optimized away and
2003 confuses the making of MB and ME. */
2006 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2008 if (GET_CODE (op) == CONST_INT)
2010 HOST_WIDE_INT c, lsb;
2014 /* Reject all zeros. */
2018 /* We don't change the number of transitions by inverting,
2019 so make sure we start with the LS bit zero. */
2023 /* Find the transition, and check that all bits above are 1's. */
2026 /* Match if all the bits above are 1's (or c is zero). */
2032 /* Like mask64_operand, but allow up to three transitions. This
2033 predicate is used by insn patterns that generate two rldicl or
2034 rldicr machine insns. */
2037 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2039 if (GET_CODE (op) == CONST_INT)
2041 HOST_WIDE_INT c, lsb;
2045 /* Disallow all zeros. */
2049 /* We don't change the number of transitions by inverting,
2050 so make sure we start with the LS bit zero. */
2054 /* Find the first transition. */
2057 /* Invert to look for a second transition. */
2060 /* Erase first transition. */
2063 /* Find the second transition. */
2066 /* Invert to look for a third transition. */
2069 /* Erase second transition. */
2072 /* Find the third transition (if any). */
2075 /* Match if all the bits above are 1's (or c is zero). */
2081 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2082 implement ANDing by the mask IN. */
2084 build_mask64_2_operands (rtx in, rtx *out)
2086 #if HOST_BITS_PER_WIDE_INT >= 64
2087 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2090 if (GET_CODE (in) != CONST_INT)
2096 /* Assume c initially something like 0x00fff000000fffff. The idea
2097 is to rotate the word so that the middle ^^^^^^ group of zeros
2098 is at the MS end and can be cleared with an rldicl mask. We then
2099 rotate back and clear off the MS ^^ group of zeros with a
2101 c = ~c; /* c == 0xff000ffffff00000 */
2102 lsb = c & -c; /* lsb == 0x0000000000100000 */
2103 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2104 c = ~c; /* c == 0x00fff000000fffff */
2105 c &= -lsb; /* c == 0x00fff00000000000 */
2106 lsb = c & -c; /* lsb == 0x0000100000000000 */
2107 c = ~c; /* c == 0xff000fffffffffff */
2108 c &= -lsb; /* c == 0xff00000000000000 */
2110 while ((lsb >>= 1) != 0)
2111 shift++; /* shift == 44 on exit from loop */
2112 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2113 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2114 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2118 /* Assume c initially something like 0xff000f0000000000. The idea
2119 is to rotate the word so that the ^^^ middle group of zeros
2120 is at the LS end and can be cleared with an rldicr mask. We then
2121 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2123 lsb = c & -c; /* lsb == 0x0000010000000000 */
2124 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2125 c = ~c; /* c == 0x00fff0ffffffffff */
2126 c &= -lsb; /* c == 0x00fff00000000000 */
2127 lsb = c & -c; /* lsb == 0x0000100000000000 */
2128 c = ~c; /* c == 0xff000fffffffffff */
2129 c &= -lsb; /* c == 0xff00000000000000 */
2131 while ((lsb >>= 1) != 0)
2132 shift++; /* shift == 44 on exit from loop */
2133 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2134 m1 >>= shift; /* m1 == 0x0000000000000fff */
2135 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2138 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2139 masks will be all 1's. We are guaranteed more than one transition. */
2140 out[0] = GEN_INT (64 - shift);
2141 out[1] = GEN_INT (m1);
2142 out[2] = GEN_INT (shift);
2143 out[3] = GEN_INT (m2);
2151 /* Return 1 if the operand is either a non-special register or a constant
2152 that can be used as the operand of a PowerPC64 logical AND insn. */
2155 and64_operand (rtx op, enum machine_mode mode)
2157 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2158 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2160 return (logical_operand (op, mode) || mask64_operand (op, mode));
2163 /* Like the above, but also match constants that can be implemented
2164 with two rldicl or rldicr insns. */
2167 and64_2_operand (rtx op, enum machine_mode mode)
2169 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2170 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2172 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2175 /* Return 1 if the operand is either a non-special register or a
2176 constant that can be used as the operand of an RS/6000 logical AND insn. */
2179 and_operand (rtx op, enum machine_mode mode)
2181 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2182 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2184 return (logical_operand (op, mode) || mask_operand (op, mode));
2187 /* Return 1 if the operand is a general register or memory operand. */
2190 reg_or_mem_operand (rtx op, enum machine_mode mode)
2192 return (gpc_reg_operand (op, mode)
2193 || memory_operand (op, mode)
2194 || macho_lo_sum_memory_operand (op, mode)
2195 || volatile_mem_operand (op, mode));
2198 /* Return 1 if the operand is a general register or memory operand without
2199 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2203 lwa_operand (rtx op, enum machine_mode mode)
2207 if (reload_completed && GET_CODE (inner) == SUBREG)
2208 inner = SUBREG_REG (inner);
2210 return gpc_reg_operand (inner, mode)
2211 || (memory_operand (inner, mode)
2212 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2213 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2214 && (GET_CODE (XEXP (inner, 0)) != PLUS
2215 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2216 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2219 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2222 symbol_ref_operand (rtx op, enum machine_mode mode)
2224 if (mode != VOIDmode && GET_MODE (op) != mode)
2227 return (GET_CODE (op) == SYMBOL_REF
2228 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2231 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2232 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2235 call_operand (rtx op, enum machine_mode mode)
2237 if (mode != VOIDmode && GET_MODE (op) != mode)
2240 return (GET_CODE (op) == SYMBOL_REF
2241 || (GET_CODE (op) == REG
2242 && (REGNO (op) == LINK_REGISTER_REGNUM
2243 || REGNO (op) == COUNT_REGISTER_REGNUM
2244 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2247 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2251 current_file_function_operand (rtx op,
2252 enum machine_mode mode ATTRIBUTE_UNUSED)
2254 return (GET_CODE (op) == SYMBOL_REF
2255 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2256 && (SYMBOL_REF_LOCAL_P (op)
2257 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2260 /* Return 1 if this operand is a valid input for a move insn. */
2263 input_operand (rtx op, enum machine_mode mode)
2265 /* Memory is always valid. */
2266 if (memory_operand (op, mode))
2269 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2270 if (GET_CODE (op) == CONSTANT_P_RTX)
2273 /* For floating-point, easy constants are valid. */
2274 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2276 && easy_fp_constant (op, mode))
2279 /* Allow any integer constant. */
2280 if (GET_MODE_CLASS (mode) == MODE_INT
2281 && (GET_CODE (op) == CONST_INT
2282 || GET_CODE (op) == CONST_DOUBLE))
2285 /* Allow easy vector constants. */
2286 if (GET_CODE (op) == CONST_VECTOR
2287 && easy_vector_constant (op, mode))
2290 /* For floating-point or multi-word mode, the only remaining valid type
2292 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2293 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2294 return register_operand (op, mode);
2296 /* The only cases left are integral modes one word or smaller (we
2297 do not get called for MODE_CC values). These can be in any
2299 if (register_operand (op, mode))
2302 /* A SYMBOL_REF referring to the TOC is valid. */
2303 if (legitimate_constant_pool_address_p (op))
2306 /* A constant pool expression (relative to the TOC) is valid */
2307 if (toc_relative_expr_p (op))
2310 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2312 if (DEFAULT_ABI == ABI_V4
2313 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2314 && small_data_operand (op, Pmode))
2320 /* Return 1 for an operand in small memory on V.4/eabi. */
2323 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2324 enum machine_mode mode ATTRIBUTE_UNUSED)
2329 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2332 if (DEFAULT_ABI != ABI_V4)
2335 if (GET_CODE (op) == SYMBOL_REF)
2338 else if (GET_CODE (op) != CONST
2339 || GET_CODE (XEXP (op, 0)) != PLUS
2340 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2341 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2346 rtx sum = XEXP (op, 0);
2347 HOST_WIDE_INT summand;
2349 /* We have to be careful here, because it is the referenced address
2350 that must be 32k from _SDA_BASE_, not just the symbol. */
2351 summand = INTVAL (XEXP (sum, 1));
2352 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2355 sym_ref = XEXP (sum, 0);
2358 return SYMBOL_REF_SMALL_P (sym_ref);
2364 /* Return true if either operand is a general purpose register. */
2367 gpr_or_gpr_p (rtx op0, rtx op1)
2369 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2370 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2374 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2377 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2379 switch (GET_CODE(op))
2382 if (RS6000_SYMBOL_REF_TLS_P (op))
2384 else if (CONSTANT_POOL_ADDRESS_P (op))
2386 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2394 else if (! strcmp (XSTR (op, 0), toc_label_name))
2403 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2404 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2406 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2415 constant_pool_expr_p (rtx op)
2419 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2423 toc_relative_expr_p (rtx op)
2427 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2430 /* SPE offset addressing is limited to 5-bits worth of double words. */
2431 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2434 legitimate_constant_pool_address_p (rtx x)
2437 && GET_CODE (x) == PLUS
2438 && GET_CODE (XEXP (x, 0)) == REG
2439 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2440 && constant_pool_expr_p (XEXP (x, 1)));
2444 legitimate_small_data_p (enum machine_mode mode, rtx x)
2446 return (DEFAULT_ABI == ABI_V4
2447 && !flag_pic && !TARGET_TOC
2448 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2449 && small_data_operand (x, mode));
2453 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2455 unsigned HOST_WIDE_INT offset, extra;
2457 if (GET_CODE (x) != PLUS)
2459 if (GET_CODE (XEXP (x, 0)) != REG)
2461 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2463 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2466 offset = INTVAL (XEXP (x, 1));
2474 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2475 which leaves the only valid constant offset of zero, which by
2476 canonicalization rules is also invalid. */
2483 /* SPE vector modes. */
2484 return SPE_CONST_OFFSET_OK (offset);
2490 else if (offset & 3)
2498 else if (offset & 3)
2508 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2512 legitimate_indexed_address_p (rtx x, int strict)
2516 if (GET_CODE (x) != PLUS)
2521 if (!REG_P (op0) || !REG_P (op1))
2524 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2525 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2526 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2527 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2531 legitimate_indirect_address_p (rtx x, int strict)
2533 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2537 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2539 if (!TARGET_MACHO || !flag_pic
2540 || mode != SImode || GET_CODE(x) != MEM)
2544 if (GET_CODE (x) != LO_SUM)
2546 if (GET_CODE (XEXP (x, 0)) != REG)
2548 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2552 return CONSTANT_P (x);
2556 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2558 if (GET_CODE (x) != LO_SUM)
2560 if (GET_CODE (XEXP (x, 0)) != REG)
2562 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2568 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2572 if (GET_MODE_NUNITS (mode) != 1)
2574 if (GET_MODE_BITSIZE (mode) > 32
2575 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2578 return CONSTANT_P (x);
2585 /* Try machine-dependent ways of modifying an illegitimate address
2586 to be legitimate. If we find one, return the new, valid address.
2587 This is used from only one place: `memory_address' in explow.c.
2589 OLDX is the address as it was before break_out_memory_refs was
2590 called. In some cases it is useful to look at this to decide what
2593 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2595 It is always safe for this function to do nothing. It exists to
2596 recognize opportunities to optimize the output.
2598 On RS/6000, first check for the sum of a register with a constant
2599 integer that is out of range. If so, generate code to add the
2600 constant with the low-order 16 bits masked to the register and force
2601 this result into another register (this can be done with `cau').
2602 Then generate an address of REG+(CONST&0xffff), allowing for the
2603 possibility of bit 16 being a one.
2605 Then check for the sum of a register and something not constant, try to
2606 load the other things into a register and return the sum. */
2609 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2610 enum machine_mode mode)
2612 if (GET_CODE (x) == SYMBOL_REF)
2614 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2616 return rs6000_legitimize_tls_address (x, model);
2619 if (GET_CODE (x) == PLUS
2620 && GET_CODE (XEXP (x, 0)) == REG
2621 && GET_CODE (XEXP (x, 1)) == CONST_INT
2622 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2624 HOST_WIDE_INT high_int, low_int;
2626 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2627 high_int = INTVAL (XEXP (x, 1)) - low_int;
2628 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2629 GEN_INT (high_int)), 0);
2630 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2632 else if (GET_CODE (x) == PLUS
2633 && GET_CODE (XEXP (x, 0)) == REG
2634 && GET_CODE (XEXP (x, 1)) != CONST_INT
2635 && GET_MODE_NUNITS (mode) == 1
2636 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2638 || (mode != DFmode && mode != TFmode))
2639 && (TARGET_POWERPC64 || mode != DImode)
2642 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2643 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2645 else if (ALTIVEC_VECTOR_MODE (mode))
2649 /* Make sure both operands are registers. */
2650 if (GET_CODE (x) == PLUS)
2651 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2652 force_reg (Pmode, XEXP (x, 1)));
2654 reg = force_reg (Pmode, x);
2657 else if (SPE_VECTOR_MODE (mode))
2659 /* We accept [reg + reg] and [reg + OFFSET]. */
2661 if (GET_CODE (x) == PLUS)
2663 rtx op1 = XEXP (x, 0);
2664 rtx op2 = XEXP (x, 1);
2666 op1 = force_reg (Pmode, op1);
2668 if (GET_CODE (op2) != REG
2669 && (GET_CODE (op2) != CONST_INT
2670 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2671 op2 = force_reg (Pmode, op2);
2673 return gen_rtx_PLUS (Pmode, op1, op2);
2676 return force_reg (Pmode, x);
2682 && GET_CODE (x) != CONST_INT
2683 && GET_CODE (x) != CONST_DOUBLE
2685 && GET_MODE_NUNITS (mode) == 1
2686 && (GET_MODE_BITSIZE (mode) <= 32
2687 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2689 rtx reg = gen_reg_rtx (Pmode);
2690 emit_insn (gen_elf_high (reg, x));
2691 return gen_rtx_LO_SUM (Pmode, reg, x);
2693 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2696 && ! MACHO_DYNAMIC_NO_PIC_P
2698 && GET_CODE (x) != CONST_INT
2699 && GET_CODE (x) != CONST_DOUBLE
2701 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2705 rtx reg = gen_reg_rtx (Pmode);
2706 emit_insn (gen_macho_high (reg, x));
2707 return gen_rtx_LO_SUM (Pmode, reg, x);
2710 && constant_pool_expr_p (x)
2711 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2713 return create_TOC_reference (x);
2719 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2721 static GTY(()) rtx rs6000_tls_symbol;
2723 rs6000_tls_get_addr (void)
2725 if (!rs6000_tls_symbol)
2726 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2728 return rs6000_tls_symbol;
2731 /* Construct the SYMBOL_REF for TLS GOT references. */
2733 static GTY(()) rtx rs6000_got_symbol;
2735 rs6000_got_sym (void)
2737 if (!rs6000_got_symbol)
2739 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2740 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2741 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2744 return rs6000_got_symbol;
2747 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2748 this (thread-local) address. */
2751 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2755 dest = gen_reg_rtx (Pmode);
2756 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2762 tlsreg = gen_rtx_REG (Pmode, 13);
2763 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2767 tlsreg = gen_rtx_REG (Pmode, 2);
2768 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2772 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2776 tmp = gen_reg_rtx (Pmode);
2779 tlsreg = gen_rtx_REG (Pmode, 13);
2780 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2784 tlsreg = gen_rtx_REG (Pmode, 2);
2785 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2789 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2791 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2796 rtx r3, got, tga, tmp1, tmp2, eqv;
2799 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2803 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2806 rtx gsym = rs6000_got_sym ();
2807 got = gen_reg_rtx (Pmode);
2809 rs6000_emit_move (got, gsym, Pmode);
2813 static int tls_got_labelno = 0;
2814 rtx tempLR, lab, tmp3, mem;
2817 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2818 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2819 tempLR = gen_reg_rtx (Pmode);
2820 tmp1 = gen_reg_rtx (Pmode);
2821 tmp2 = gen_reg_rtx (Pmode);
2822 tmp3 = gen_reg_rtx (Pmode);
2823 mem = gen_rtx_MEM (Pmode, tmp1);
2824 RTX_UNCHANGING_P (mem) = 1;
2826 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2828 emit_move_insn (tmp1, tempLR);
2829 emit_move_insn (tmp2, mem);
2830 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2831 last = emit_move_insn (got, tmp3);
2832 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2834 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2836 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2842 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2844 r3 = gen_rtx_REG (Pmode, 3);
2846 insn = gen_tls_gd_64 (r3, got, addr);
2848 insn = gen_tls_gd_32 (r3, got, addr);
2851 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2852 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2853 insn = emit_call_insn (insn);
2854 CONST_OR_PURE_CALL_P (insn) = 1;
2855 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2856 insn = get_insns ();
2858 emit_libcall_block (insn, dest, r3, addr);
2860 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2862 r3 = gen_rtx_REG (Pmode, 3);
2864 insn = gen_tls_ld_64 (r3, got);
2866 insn = gen_tls_ld_32 (r3, got);
2869 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2870 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2871 insn = emit_call_insn (insn);
2872 CONST_OR_PURE_CALL_P (insn) = 1;
2873 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2874 insn = get_insns ();
2876 tmp1 = gen_reg_rtx (Pmode);
2877 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2879 emit_libcall_block (insn, tmp1, r3, eqv);
2880 if (rs6000_tls_size == 16)
2883 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2885 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2887 else if (rs6000_tls_size == 32)
2889 tmp2 = gen_reg_rtx (Pmode);
2891 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2893 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2896 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2898 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2902 tmp2 = gen_reg_rtx (Pmode);
2904 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2906 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2908 insn = gen_rtx_SET (Pmode, dest,
2909 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2915 /* IE, or 64 bit offset LE. */
2916 tmp2 = gen_reg_rtx (Pmode);
2918 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2920 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2923 insn = gen_tls_tls_64 (dest, tmp2, addr);
2925 insn = gen_tls_tls_32 (dest, tmp2, addr);
2933 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2934 instruction definitions. */
2937 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
2939 return RS6000_SYMBOL_REF_TLS_P (x);
2942 /* Return 1 if X contains a thread-local symbol. */
2945 rs6000_tls_referenced_p (rtx x)
2947 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2950 /* Return 1 if *X is a thread-local symbol. This is the same as
2951 rs6000_tls_symbol_ref except for the type of the unused argument. */
2954 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
2956 return RS6000_SYMBOL_REF_TLS_P (*x);
2959 /* The convention appears to be to define this wherever it is used.
2960 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2961 is now used here. */
2962 #ifndef REG_MODE_OK_FOR_BASE_P
2963 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2966 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2967 replace the input X, or the original X if no replacement is called for.
2968 The output parameter *WIN is 1 if the calling macro should goto WIN,
2971 For RS/6000, we wish to handle large displacements off a base
2972 register by splitting the addend across an addiu/addis and the mem insn.
2973 This cuts number of extra insns needed from 3 to 1.
2975 On Darwin, we use this to generate code for floating point constants.
2976 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2977 The Darwin code is inside #if TARGET_MACHO because only then is
2978 machopic_function_base_name() defined. */
2980 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
2981 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
2983 /* We must recognize output that we have already generated ourselves. */
2984 if (GET_CODE (x) == PLUS
2985 && GET_CODE (XEXP (x, 0)) == PLUS
2986 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2987 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2988 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2990 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2991 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2992 opnum, (enum reload_type)type);
2998 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2999 && GET_CODE (x) == LO_SUM
3000 && GET_CODE (XEXP (x, 0)) == PLUS
3001 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3002 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3003 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3004 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3005 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3006 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3007 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3009 /* Result of previous invocation of this function on Darwin
3010 floating point constant. */
3011 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3012 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3013 opnum, (enum reload_type)type);
3018 if (GET_CODE (x) == PLUS
3019 && GET_CODE (XEXP (x, 0)) == REG
3020 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3021 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3022 && GET_CODE (XEXP (x, 1)) == CONST_INT
3023 && !SPE_VECTOR_MODE (mode)
3024 && !ALTIVEC_VECTOR_MODE (mode))
3026 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3027 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3029 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3031 /* Check for 32-bit overflow. */
3032 if (high + low != val)
3038 /* Reload the high part into a base reg; leave the low part
3039 in the mem directly. */
3041 x = gen_rtx_PLUS (GET_MODE (x),
3042 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3046 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3047 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3048 opnum, (enum reload_type)type);
3053 if (GET_CODE (x) == SYMBOL_REF
3054 && DEFAULT_ABI == ABI_DARWIN
3055 && !ALTIVEC_VECTOR_MODE (mode)
3058 /* Darwin load of floating point constant. */
3059 rtx offset = gen_rtx (CONST, Pmode,
3060 gen_rtx (MINUS, Pmode, x,
3061 gen_rtx (SYMBOL_REF, Pmode,
3062 machopic_function_base_name ())));
3063 x = gen_rtx (LO_SUM, GET_MODE (x),
3064 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
3065 gen_rtx (HIGH, Pmode, offset)), offset);
3066 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3067 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3068 opnum, (enum reload_type)type);
3072 if (GET_CODE (x) == SYMBOL_REF
3073 && DEFAULT_ABI == ABI_DARWIN
3074 && !ALTIVEC_VECTOR_MODE (mode)
3075 && MACHO_DYNAMIC_NO_PIC_P)
3077 /* Darwin load of floating point constant. */
3078 x = gen_rtx (LO_SUM, GET_MODE (x),
3079 gen_rtx (HIGH, Pmode, x), x);
3080 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3081 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3082 opnum, (enum reload_type)type);
3088 && constant_pool_expr_p (x)
3089 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3091 (x) = create_TOC_reference (x);
3099 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3100 that is a valid memory address for an instruction.
3101 The MODE argument is the machine mode for the MEM expression
3102 that wants to use this address.
3104 On the RS/6000, there are four valid address: a SYMBOL_REF that
3105 refers to a constant pool entry of an address (or the sum of it
3106 plus a constant), a short (16-bit signed) constant plus a register,
3107 the sum of two registers, or a register indirect, possibly with an
3108 auto-increment. For DFmode and DImode with a constant plus register,
3109 we must ensure that both words are addressable or PowerPC64 with offset
3112 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3113 32-bit DImode, TImode), indexed addressing cannot be used because
3114 adjacent memory cells are accessed by adding word-sized offsets
3115 during assembly output. */
3117 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3119 if (RS6000_SYMBOL_REF_TLS_P (x))
3121 if (legitimate_indirect_address_p (x, reg_ok_strict))
3123 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3124 && !ALTIVEC_VECTOR_MODE (mode)
3125 && !SPE_VECTOR_MODE (mode)
3127 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3129 if (legitimate_small_data_p (mode, x))
3131 if (legitimate_constant_pool_address_p (x))
3133 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3135 && GET_CODE (x) == PLUS
3136 && GET_CODE (XEXP (x, 0)) == REG
3137 && XEXP (x, 0) == virtual_stack_vars_rtx
3138 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3140 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3143 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3145 || (mode != DFmode && mode != TFmode))
3146 && (TARGET_POWERPC64 || mode != DImode)
3147 && legitimate_indexed_address_p (x, reg_ok_strict))
3149 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3154 /* Go to LABEL if ADDR (a legitimate address expression)
3155 has an effect that depends on the machine mode it is used for.
3157 On the RS/6000 this is true of all integral offsets (since AltiVec
3158 modes don't allow them) or is a pre-increment or decrement.
3160 ??? Except that due to conceptual problems in offsettable_address_p
3161 we can't really report the problems of integral offsets. So leave
3162 this assuming that the adjustable offset must be valid for the
3163 sub-words of a TFmode operand, which is what we had before. */
3166 rs6000_mode_dependent_address (rtx addr)
3168 switch (GET_CODE (addr))
3171 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3173 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3174 return val + 12 + 0x8000 >= 0x10000;
3183 return TARGET_UPDATE;
3192 /* Try to output insns to set TARGET equal to the constant C if it can
3193 be done in less than N insns. Do all computations in MODE.
3194 Returns the place where the output has been placed if it can be
3195 done and the insns have been emitted. If it would take more than N
3196 insns, zero is returned and no insns and emitted. */
3199 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3200 rtx source, int n ATTRIBUTE_UNUSED)
3202 rtx result, insn, set;
3203 HOST_WIDE_INT c0, c1;
3205 if (mode == QImode || mode == HImode)
3208 dest = gen_reg_rtx (mode);
3209 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3212 else if (mode == SImode)
3214 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3216 emit_insn (gen_rtx_SET (VOIDmode, result,
3217 GEN_INT (INTVAL (source)
3218 & (~ (HOST_WIDE_INT) 0xffff))));
3219 emit_insn (gen_rtx_SET (VOIDmode, dest,
3220 gen_rtx_IOR (SImode, result,
3221 GEN_INT (INTVAL (source) & 0xffff))));
3224 else if (mode == DImode)
3226 if (GET_CODE (source) == CONST_INT)
3228 c0 = INTVAL (source);
3231 else if (GET_CODE (source) == CONST_DOUBLE)
3233 #if HOST_BITS_PER_WIDE_INT >= 64
3234 c0 = CONST_DOUBLE_LOW (source);
3237 c0 = CONST_DOUBLE_LOW (source);
3238 c1 = CONST_DOUBLE_HIGH (source);
3244 result = rs6000_emit_set_long_const (dest, c0, c1);
3249 insn = get_last_insn ();
3250 set = single_set (insn);
3251 if (! CONSTANT_P (SET_SRC (set)))
3252 set_unique_reg_note (insn, REG_EQUAL, source);
3257 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3258 fall back to a straight forward decomposition. We do this to avoid
3259 exponential run times encountered when looking for longer sequences
3260 with rs6000_emit_set_const. */
3262 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3264 if (!TARGET_POWERPC64)
3266 rtx operand1, operand2;
3268 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3270 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3272 emit_move_insn (operand1, GEN_INT (c1));
3273 emit_move_insn (operand2, GEN_INT (c2));
3277 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3280 ud2 = (c1 & 0xffff0000) >> 16;
3281 #if HOST_BITS_PER_WIDE_INT >= 64
3285 ud4 = (c2 & 0xffff0000) >> 16;
3287 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3288 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3291 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3293 emit_move_insn (dest, GEN_INT (ud1));
3296 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3297 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3300 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3303 emit_move_insn (dest, GEN_INT (ud2 << 16));
3305 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3307 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3308 || (ud4 == 0 && ! (ud3 & 0x8000)))
3311 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3314 emit_move_insn (dest, GEN_INT (ud3 << 16));
3317 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3318 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3320 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3325 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3328 emit_move_insn (dest, GEN_INT (ud4 << 16));
3331 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3333 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3335 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3336 GEN_INT (ud2 << 16)));
3338 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3344 /* Emit a move from SOURCE to DEST in mode MODE. */
3346 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3350 operands[1] = source;
3352 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3353 if (GET_CODE (operands[1]) == CONST_DOUBLE
3354 && ! FLOAT_MODE_P (mode)
3355 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3357 /* FIXME. This should never happen. */
3358 /* Since it seems that it does, do the safe thing and convert
3360 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3362 if (GET_CODE (operands[1]) == CONST_DOUBLE
3363 && ! FLOAT_MODE_P (mode)
3364 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3365 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3366 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3367 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3370 /* Check if GCC is setting up a block move that will end up using FP
3371 registers as temporaries. We must make sure this is acceptable. */
3372 if (GET_CODE (operands[0]) == MEM
3373 && GET_CODE (operands[1]) == MEM
3375 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3376 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3377 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3378 ? 32 : MEM_ALIGN (operands[0])))
3379 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3381 : MEM_ALIGN (operands[1]))))
3382 && ! MEM_VOLATILE_P (operands [0])
3383 && ! MEM_VOLATILE_P (operands [1]))
3385 emit_move_insn (adjust_address (operands[0], SImode, 0),
3386 adjust_address (operands[1], SImode, 0));
3387 emit_move_insn (adjust_address (operands[0], SImode, 4),
3388 adjust_address (operands[1], SImode, 4));
3392 if (!no_new_pseudos)
3394 if (GET_CODE (operands[1]) == MEM && optimize > 0
3395 && (mode == QImode || mode == HImode || mode == SImode)
3396 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3398 rtx reg = gen_reg_rtx (word_mode);
3400 emit_insn (gen_rtx_SET (word_mode, reg,
3401 gen_rtx_ZERO_EXTEND (word_mode,
3403 operands[1] = gen_lowpart (mode, reg);
3405 if (GET_CODE (operands[0]) != REG)
3406 operands[1] = force_reg (mode, operands[1]);
3409 if (mode == SFmode && ! TARGET_POWERPC
3410 && TARGET_HARD_FLOAT && TARGET_FPRS
3411 && GET_CODE (operands[0]) == MEM)
3415 if (reload_in_progress || reload_completed)
3416 regnum = true_regnum (operands[1]);
3417 else if (GET_CODE (operands[1]) == REG)
3418 regnum = REGNO (operands[1]);
3422 /* If operands[1] is a register, on POWER it may have
3423 double-precision data in it, so truncate it to single
3425 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3428 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3429 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3430 operands[1] = newreg;
3434 /* Recognize the case where operand[1] is a reference to thread-local
3435 data and load its address to a register. */
3436 if (GET_CODE (operands[1]) == SYMBOL_REF)
3438 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3440 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3443 /* Handle the case where reload calls us with an invalid address. */
3444 if (reload_in_progress && mode == Pmode
3445 && (! general_operand (operands[1], mode)
3446 || ! nonimmediate_operand (operands[0], mode)))
3449 /* Handle the case of CONSTANT_P_RTX. */
3450 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3453 /* FIXME: In the long term, this switch statement should go away
3454 and be replaced by a sequence of tests based on things like
3460 if (CONSTANT_P (operands[1])
3461 && GET_CODE (operands[1]) != CONST_INT)
3462 operands[1] = force_const_mem (mode, operands[1]);
3468 if (CONSTANT_P (operands[1])
3469 && ! easy_fp_constant (operands[1], mode))
3470 operands[1] = force_const_mem (mode, operands[1]);
3481 if (CONSTANT_P (operands[1])
3482 && !easy_vector_constant (operands[1], mode))
3483 operands[1] = force_const_mem (mode, operands[1]);
3488 /* Use default pattern for address of ELF small data */
3491 && DEFAULT_ABI == ABI_V4
3492 && (GET_CODE (operands[1]) == SYMBOL_REF
3493 || GET_CODE (operands[1]) == CONST)
3494 && small_data_operand (operands[1], mode))
3496 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3500 if (DEFAULT_ABI == ABI_V4
3501 && mode == Pmode && mode == SImode
3502 && flag_pic == 1 && got_operand (operands[1], mode))
3504 emit_insn (gen_movsi_got (operands[0], operands[1]));
3508 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3512 && CONSTANT_P (operands[1])
3513 && GET_CODE (operands[1]) != HIGH
3514 && GET_CODE (operands[1]) != CONST_INT)
3516 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3518 /* If this is a function address on -mcall-aixdesc,
3519 convert it to the address of the descriptor. */
3520 if (DEFAULT_ABI == ABI_AIX
3521 && GET_CODE (operands[1]) == SYMBOL_REF
3522 && XSTR (operands[1], 0)[0] == '.')
3524 const char *name = XSTR (operands[1], 0);
3526 while (*name == '.')
3528 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3529 CONSTANT_POOL_ADDRESS_P (new_ref)
3530 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3531 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3532 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3533 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3534 operands[1] = new_ref;
3537 if (DEFAULT_ABI == ABI_DARWIN)
3540 if (MACHO_DYNAMIC_NO_PIC_P)
3542 /* Take care of any required data indirection. */
3543 operands[1] = rs6000_machopic_legitimize_pic_address (
3544 operands[1], mode, operands[0]);
3545 if (operands[0] != operands[1])
3546 emit_insn (gen_rtx_SET (VOIDmode,
3547 operands[0], operands[1]));
3551 emit_insn (gen_macho_high (target, operands[1]));
3552 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3556 emit_insn (gen_elf_high (target, operands[1]));
3557 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3561 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3562 and we have put it in the TOC, we just need to make a TOC-relative
3565 && GET_CODE (operands[1]) == SYMBOL_REF
3566 && constant_pool_expr_p (operands[1])
3567 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3568 get_pool_mode (operands[1])))
3570 operands[1] = create_TOC_reference (operands[1]);
3572 else if (mode == Pmode
3573 && CONSTANT_P (operands[1])
3574 && ((GET_CODE (operands[1]) != CONST_INT
3575 && ! easy_fp_constant (operands[1], mode))
3576 || (GET_CODE (operands[1]) == CONST_INT
3577 && num_insns_constant (operands[1], mode) > 2)
3578 || (GET_CODE (operands[0]) == REG
3579 && FP_REGNO_P (REGNO (operands[0]))))
3580 && GET_CODE (operands[1]) != HIGH
3581 && ! legitimate_constant_pool_address_p (operands[1])
3582 && ! toc_relative_expr_p (operands[1]))
3584 /* Emit a USE operation so that the constant isn't deleted if
3585 expensive optimizations are turned on because nobody
3586 references it. This should only be done for operands that
3587 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3588 This should not be done for operands that contain LABEL_REFs.
3589 For now, we just handle the obvious case. */
3590 if (GET_CODE (operands[1]) != LABEL_REF)
3591 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3594 /* Darwin uses a special PIC legitimizer. */
3595 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3598 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3600 if (operands[0] != operands[1])
3601 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3606 /* If we are to limit the number of things we put in the TOC and
3607 this is a symbol plus a constant we can add in one insn,
3608 just put the symbol in the TOC and add the constant. Don't do
3609 this if reload is in progress. */
3610 if (GET_CODE (operands[1]) == CONST
3611 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3612 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3613 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3614 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3615 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3616 && ! side_effects_p (operands[0]))
3619 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3620 rtx other = XEXP (XEXP (operands[1], 0), 1);
3622 sym = force_reg (mode, sym);
3624 emit_insn (gen_addsi3 (operands[0], sym, other));
3626 emit_insn (gen_adddi3 (operands[0], sym, other));
3630 operands[1] = force_const_mem (mode, operands[1]);
3633 && constant_pool_expr_p (XEXP (operands[1], 0))
3634 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3635 get_pool_constant (XEXP (operands[1], 0)),
3636 get_pool_mode (XEXP (operands[1], 0))))
3639 = gen_rtx_MEM (mode,
3640 create_TOC_reference (XEXP (operands[1], 0)));
3641 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3642 RTX_UNCHANGING_P (operands[1]) = 1;
3648 if (GET_CODE (operands[0]) == MEM
3649 && GET_CODE (XEXP (operands[0], 0)) != REG
3650 && ! reload_in_progress)
3652 = replace_equiv_address (operands[0],
3653 copy_addr_to_reg (XEXP (operands[0], 0)));
3655 if (GET_CODE (operands[1]) == MEM
3656 && GET_CODE (XEXP (operands[1], 0)) != REG
3657 && ! reload_in_progress)
3659 = replace_equiv_address (operands[1],
3660 copy_addr_to_reg (XEXP (operands[1], 0)));
3663 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3665 gen_rtx_SET (VOIDmode,
3666 operands[0], operands[1]),
3667 gen_rtx_CLOBBER (VOIDmode,
3668 gen_rtx_SCRATCH (SImode)))));
3677 /* Above, we may have called force_const_mem which may have returned
3678 an invalid address. If we can, fix this up; otherwise, reload will
3679 have to deal with it. */
3680 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3681 operands[1] = validize_mem (operands[1]);
3684 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3687 /* Nonzero if we can use a floating-point register to pass this arg. */
3688 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3689 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3690 && (CUM)->fregno <= FP_ARG_MAX_REG \
3691 && TARGET_HARD_FLOAT && TARGET_FPRS)
3693 /* Nonzero if we can use an AltiVec register to pass this arg. */
3694 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3695 (ALTIVEC_VECTOR_MODE (MODE) \
3696 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3697 && TARGET_ALTIVEC_ABI \
3698 && (DEFAULT_ABI == ABI_V4 || (NAMED)))
3700 /* Return a nonzero value to say to return the function value in
3701 memory, just as large structures are always returned. TYPE will be
3702 the data type of the value, and FNTYPE will be the type of the
3703 function doing the returning, or @code{NULL} for libcalls.
3705 The AIX ABI for the RS/6000 specifies that all structures are
3706 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3707 specifies that structures <= 8 bytes are returned in r3/r4, but a
3708 draft put them in memory, and GCC used to implement the draft
3709 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3710 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3711 compatibility can change DRAFT_V4_STRUCT_RET to override the
3712 default, and -m switches get the final word. See
3713 rs6000_override_options for more details.
3715 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3716 long double support is enabled. These values are returned in memory.
3718 int_size_in_bytes returns -1 for variable size objects, which go in
3719 memory always. The cast to unsigned makes -1 > 8. */
3722 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3724 if (AGGREGATE_TYPE_P (type)
3725 && (TARGET_AIX_STRUCT_RET
3726 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3728 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3733 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3734 for a call to a function whose data type is FNTYPE.
3735 For a library call, FNTYPE is 0.
3737 For incoming args we set the number of arguments in the prototype large
3738 so we never return a PARALLEL. */
3741 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3742 rtx libname ATTRIBUTE_UNUSED, int incoming, int libcall)
3744 static CUMULATIVE_ARGS zero_cumulative;
3746 *cum = zero_cumulative;
3748 cum->fregno = FP_ARG_MIN_REG;
3749 cum->vregno = ALTIVEC_ARG_MIN_REG;
3750 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3751 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3752 ? CALL_LIBCALL : CALL_NORMAL);
3753 cum->sysv_gregno = GP_ARG_MIN_REG;
3754 cum->stdarg = fntype
3755 && (TYPE_ARG_TYPES (fntype) != 0
3756 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3757 != void_type_node));
3760 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3762 else if (cum->prototype)
3763 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3764 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3765 || rs6000_return_in_memory (TREE_TYPE (fntype),
3769 cum->nargs_prototype = 0;
3771 /* Check for a longcall attribute. */
3773 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3774 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3775 cum->call_cookie = CALL_LONG;
3777 if (TARGET_DEBUG_ARG)
3779 fprintf (stderr, "\ninit_cumulative_args:");
3782 tree ret_type = TREE_TYPE (fntype);
3783 fprintf (stderr, " ret code = %s,",
3784 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3787 if (cum->call_cookie & CALL_LONG)
3788 fprintf (stderr, " longcall,");
3790 fprintf (stderr, " proto = %d, nargs = %d\n",
3791 cum->prototype, cum->nargs_prototype);
3795 /* If defined, a C expression which determines whether, and in which
3796 direction, to pad out an argument with extra space. The value
3797 should be of type `enum direction': either `upward' to pad above
3798 the argument, `downward' to pad below, or `none' to inhibit
3801 For the AIX ABI structs are always stored left shifted in their
3805 function_arg_padding (enum machine_mode mode, tree type)
3807 #ifndef AGGREGATE_PADDING_FIXED
3808 #define AGGREGATE_PADDING_FIXED 0
3810 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3811 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3814 if (!AGGREGATE_PADDING_FIXED)
3816 /* GCC used to pass structures of the same size as integer types as
3817 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3818 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3819 passed padded downward, except that -mstrict-align further
3820 muddied the water in that multi-component structures of 2 and 4
3821 bytes in size were passed padded upward.
3823 The following arranges for best compatibility with previous
3824 versions of gcc, but removes the -mstrict-align dependency. */
3825 if (BYTES_BIG_ENDIAN)
3827 HOST_WIDE_INT size = 0;
3829 if (mode == BLKmode)
3831 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3832 size = int_size_in_bytes (type);
3835 size = GET_MODE_SIZE (mode);
3837 if (size == 1 || size == 2 || size == 4)
3843 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3845 if (type != 0 && AGGREGATE_TYPE_P (type))
3849 /* Fall back to the default. */
3850 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
3853 /* If defined, a C expression that gives the alignment boundary, in bits,
3854 of an argument with the specified mode and type. If it is not defined,
3855 PARM_BOUNDARY is used for all arguments.
3857 V.4 wants long longs to be double word aligned. */
3860 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
3862 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3864 else if (SPE_VECTOR_MODE (mode))
3866 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3869 return PARM_BOUNDARY;
3872 /* Update the data in CUM to advance over an argument
3873 of mode MODE and data type TYPE.
3874 (TYPE is null for libcalls where that information may not be available.) */
3877 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3878 tree type, int named)
3880 cum->nargs_prototype--;
3882 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3884 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
3887 /* In variable-argument functions, vector arguments get GPRs allocated
3888 even if they are going to be passed in a vector register. */
3889 if (cum->stdarg && DEFAULT_ABI != ABI_V4)
3893 /* Vector parameters must be 16-byte aligned. This places
3894 them at 2 mod 4 in terms of words in 32-bit mode, since
3895 the parameter save area starts at offset 24 from the
3896 stack. In 64-bit mode, they just have to start on an
3897 even word, since the parameter save area is 16-byte
3898 aligned. Space for GPRs is reserved even if the argument
3899 will be passed in memory. */
3901 align = ((6 - (cum->words & 3)) & 3);
3903 align = cum->words & 1;
3904 cum->words += align + RS6000_ARG_SIZE (mode, type);
3906 if (TARGET_DEBUG_ARG)
3908 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
3910 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
3911 cum->nargs_prototype, cum->prototype,
3912 GET_MODE_NAME (mode));
3916 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3918 && cum->sysv_gregno <= GP_ARG_MAX_REG)
3920 else if (DEFAULT_ABI == ABI_V4)
3922 if (TARGET_HARD_FLOAT && TARGET_FPRS
3923 && (mode == SFmode || mode == DFmode))
3925 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3930 cum->words += cum->words & 1;
3931 cum->words += RS6000_ARG_SIZE (mode, type);
3937 int gregno = cum->sysv_gregno;
3939 /* Aggregates and IEEE quad get passed by reference. */
3940 if ((type && AGGREGATE_TYPE_P (type))
3944 n_words = RS6000_ARG_SIZE (mode, type);
3946 /* Long long and SPE vectors are put in odd registers. */
3947 if (n_words == 2 && (gregno & 1) == 0)
3950 /* Long long and SPE vectors are not split between registers
3952 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3954 /* Long long is aligned on the stack. */
3956 cum->words += cum->words & 1;
3957 cum->words += n_words;
3960 /* Note: continuing to accumulate gregno past when we've started
3961 spilling to the stack indicates the fact that we've started
3962 spilling to the stack to expand_builtin_saveregs. */
3963 cum->sysv_gregno = gregno + n_words;
3966 if (TARGET_DEBUG_ARG)
3968 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3969 cum->words, cum->fregno);
3970 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3971 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3972 fprintf (stderr, "mode = %4s, named = %d\n",
3973 GET_MODE_NAME (mode), named);
3978 int align = (TARGET_32BIT && (cum->words & 1) != 0
3979 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3981 cum->words += align + RS6000_ARG_SIZE (mode, type);
3983 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3984 && TARGET_HARD_FLOAT && TARGET_FPRS)
3985 cum->fregno += (mode == TFmode ? 2 : 1);
3987 if (TARGET_DEBUG_ARG)
3989 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3990 cum->words, cum->fregno);
3991 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3992 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3993 fprintf (stderr, "named = %d, align = %d\n", named, align);
3998 /* Determine where to put a SIMD argument on the SPE. */
4001 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4006 int gregno = cum->sysv_gregno;
4007 int n_words = RS6000_ARG_SIZE (mode, type);
4009 /* SPE vectors are put in odd registers. */
4010 if (n_words == 2 && (gregno & 1) == 0)
4013 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4016 enum machine_mode m = SImode;
4018 r1 = gen_rtx_REG (m, gregno);
4019 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4020 r2 = gen_rtx_REG (m, gregno + 1);
4021 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4022 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4029 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4030 return gen_rtx_REG (mode, cum->sysv_gregno);
4036 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4039 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4040 tree type, int align_words)
4044 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4045 in vararg list into zero, one or two GPRs */
4046 if (align_words >= GP_ARG_NUM_REG)
4047 return gen_rtx_PARALLEL (DFmode,
4049 gen_rtx_EXPR_LIST (VOIDmode,
4050 NULL_RTX, const0_rtx),
4051 gen_rtx_EXPR_LIST (VOIDmode,
4055 else if (align_words + RS6000_ARG_SIZE (mode, type)
4057 /* If this is partially on the stack, then we only
4058 include the portion actually in registers here. */
4059 return gen_rtx_PARALLEL (DFmode,
4061 gen_rtx_EXPR_LIST (VOIDmode,
4062 gen_rtx_REG (SImode,
4066 gen_rtx_EXPR_LIST (VOIDmode,
4071 /* split a DFmode arg into two GPRs */
4072 return gen_rtx_PARALLEL (DFmode,
4074 gen_rtx_EXPR_LIST (VOIDmode,
4075 gen_rtx_REG (SImode,
4079 gen_rtx_EXPR_LIST (VOIDmode,
4080 gen_rtx_REG (SImode,
4084 gen_rtx_EXPR_LIST (VOIDmode,
4085 gen_rtx_REG (mode, cum->fregno),
4088 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4090 else if (mode == DImode)
4092 if (align_words < GP_ARG_NUM_REG - 1)
4093 return gen_rtx_PARALLEL (DImode,
4095 gen_rtx_EXPR_LIST (VOIDmode,
4096 gen_rtx_REG (SImode,
4100 gen_rtx_EXPR_LIST (VOIDmode,
4101 gen_rtx_REG (SImode,
4105 else if (align_words == GP_ARG_NUM_REG - 1)
4106 return gen_rtx_PARALLEL (DImode,
4108 gen_rtx_EXPR_LIST (VOIDmode,
4109 NULL_RTX, const0_rtx),
4110 gen_rtx_EXPR_LIST (VOIDmode,
4111 gen_rtx_REG (SImode,
4116 else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
4119 int size = int_size_in_bytes (type);
4120 int no_units = ((size - 1) / 4) + 1;
4121 int max_no_words = GP_ARG_NUM_REG - align_words;
4122 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4123 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4125 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4127 for (k=0; k < rtlvec_len; k++)
4128 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4129 gen_rtx_REG (SImode,
4132 k == 0 ? const0_rtx : GEN_INT (k*4));
4134 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
4140 /* Determine where to put an argument to a function.
4141 Value is zero to push the argument on the stack,
4142 or a hard register in which to store the argument.
4144 MODE is the argument's machine mode.
4145 TYPE is the data type of the argument (as a tree).
4146 This is null for libcalls where that information may
4148 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4149 the preceding args and about the function being called.
4150 NAMED is nonzero if this argument is a named parameter
4151 (otherwise it is an extra parameter matching an ellipsis).
4153 On RS/6000 the first eight words of non-FP are normally in registers
4154 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4155 Under V.4, the first 8 FP args are in registers.
4157 If this is floating-point and no prototype is specified, we use
4158 both an FP and integer register (or possibly FP reg and stack). Library
4159 functions (when CALL_LIBCALL is set) always have the proper types for args,
4160 so we can pass the FP value just in one register. emit_library_function
4161 doesn't support PARALLEL anyway. */
4164 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4165 tree type, int named)
4167 enum rs6000_abi abi = DEFAULT_ABI;
4169 /* Return a marker to indicate whether CR1 needs to set or clear the
4170 bit that V.4 uses to say fp args were passed in registers.
4171 Assume that we don't need the marker for software floating point,
4172 or compiler generated library calls. */
4173 if (mode == VOIDmode)
4176 && cum->nargs_prototype < 0
4177 && (cum->call_cookie & CALL_LIBCALL) == 0
4178 && (cum->prototype || TARGET_NO_PROTOTYPE))
4180 /* For the SPE, we need to crxor CR6 always. */
4182 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4183 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4184 return GEN_INT (cum->call_cookie
4185 | ((cum->fregno == FP_ARG_MIN_REG)
4186 ? CALL_V4_SET_FP_ARGS
4187 : CALL_V4_CLEAR_FP_ARGS));
4190 return GEN_INT (cum->call_cookie);
4193 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4194 return gen_rtx_REG (mode, cum->vregno);
4195 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4197 if (named || abi == ABI_V4)
4201 /* Vector parameters to varargs functions under AIX or Darwin
4202 get passed in memory and possibly also in GPRs. */
4203 int align, align_words;
4204 enum machine_mode part_mode = mode;
4206 /* Vector parameters must be 16-byte aligned. This places them at
4207 2 mod 4 in terms of words in 32-bit mode, since the parameter
4208 save area starts at offset 24 from the stack. In 64-bit mode,
4209 they just have to start on an even word, since the parameter
4210 save area is 16-byte aligned. */
4212 align = ((6 - (cum->words & 3)) & 3);
4214 align = cum->words & 1;
4215 align_words = cum->words + align;
4217 /* Out of registers? Memory, then. */
4218 if (align_words >= GP_ARG_NUM_REG)
4221 /* The vector value goes in GPRs. Only the part of the
4222 value in GPRs is reported here. */
4223 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4225 /* Fortunately, there are only two possibilites, the value
4226 is either wholly in GPRs or half in GPRs and half not. */
4229 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4232 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4233 return rs6000_spe_function_arg (cum, mode, type);
4234 else if (abi == ABI_V4)
4236 if (TARGET_HARD_FLOAT && TARGET_FPRS
4237 && (mode == SFmode || mode == DFmode))
4239 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4240 return gen_rtx_REG (mode, cum->fregno);
4247 int gregno = cum->sysv_gregno;
4249 /* Aggregates and IEEE quad get passed by reference. */
4250 if ((type && AGGREGATE_TYPE_P (type))
4254 n_words = RS6000_ARG_SIZE (mode, type);
4256 /* Long long and SPE vectors are put in odd registers. */
4257 if (n_words == 2 && (gregno & 1) == 0)
4260 /* Long long do not split between registers and stack. */
4261 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4262 return gen_rtx_REG (mode, gregno);
4269 int align = (TARGET_32BIT && (cum->words & 1) != 0
4270 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4271 int align_words = cum->words + align;
4273 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4276 if (TARGET_32BIT && TARGET_POWERPC64
4277 && (mode == DFmode || mode == DImode || mode == BLKmode))
4278 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4280 if (USE_FP_FOR_ARG_P (cum, mode, type))
4283 || ((cum->nargs_prototype > 0)
4284 /* IBM AIX extended its linkage convention definition always
4285 to require FP args after register save area hole on the
4287 && (DEFAULT_ABI != ABI_AIX
4289 || (align_words < GP_ARG_NUM_REG))))
4290 return gen_rtx_REG (mode, cum->fregno);
4292 return gen_rtx_PARALLEL (mode,
4294 gen_rtx_EXPR_LIST (VOIDmode,
4295 ((align_words >= GP_ARG_NUM_REG)
4298 + RS6000_ARG_SIZE (mode, type)
4300 /* If this is partially on the stack, then
4301 we only include the portion actually
4302 in registers here. */
4303 ? gen_rtx_REG (SImode,
4304 GP_ARG_MIN_REG + align_words)
4305 : gen_rtx_REG (mode,
4306 GP_ARG_MIN_REG + align_words))),
4308 gen_rtx_EXPR_LIST (VOIDmode,
4309 gen_rtx_REG (mode, cum->fregno),
4312 else if (align_words < GP_ARG_NUM_REG)
4313 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4319 /* For an arg passed partly in registers and partly in memory,
4320 this is the number of registers used.
4321 For args passed entirely in registers or entirely in memory, zero. */
4324 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4325 tree type, int named)
4327 if (DEFAULT_ABI == ABI_V4)
4330 if (USE_FP_FOR_ARG_P (cum, mode, type)
4331 || USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4333 if (cum->nargs_prototype >= 0)
4337 if (cum->words < GP_ARG_NUM_REG
4338 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4340 int ret = GP_ARG_NUM_REG - cum->words;
4341 if (ret && TARGET_DEBUG_ARG)
4342 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4350 /* A C expression that indicates when an argument must be passed by
4351 reference. If nonzero for an argument, a copy of that argument is
4352 made in memory and a pointer to the argument is passed instead of
4353 the argument itself. The pointer is passed in whatever way is
4354 appropriate for passing a pointer to that type.
4356 Under V.4, structures and unions are passed by reference.
4358 As an extension to all ABIs, variable sized types are passed by
4362 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4363 enum machine_mode mode ATTRIBUTE_UNUSED,
4364 tree type, int named ATTRIBUTE_UNUSED)
4366 if (DEFAULT_ABI == ABI_V4
4367 && ((type && AGGREGATE_TYPE_P (type))
4370 if (TARGET_DEBUG_ARG)
4371 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4375 return type && int_size_in_bytes (type) < 0;
4378 /* Perform any needed actions needed for a function that is receiving a
4379 variable number of arguments.
4383 MODE and TYPE are the mode and type of the current parameter.
4385 PRETEND_SIZE is a variable that should be set to the amount of stack
4386 that must be pushed by the prolog to pretend that our caller pushed
4389 Normally, this macro will push all remaining incoming registers on the
4390 stack and set PRETEND_SIZE to the length of the registers pushed. */
4393 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4394 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4396 CUMULATIVE_ARGS next_cum;
4397 int reg_size = TARGET_32BIT ? 4 : 8;
4398 rtx save_area = NULL_RTX, mem;
4399 int first_reg_offset, set;
4401 /* Skip the last named argument. */
4403 function_arg_advance (&next_cum, mode, type, 1);
4405 if (DEFAULT_ABI == ABI_V4)
4407 /* Indicate to allocate space on the stack for varargs save area. */
4408 cfun->machine->sysv_varargs_p = 1;
4410 save_area = plus_constant (virtual_stack_vars_rtx,
4411 - RS6000_VARARGS_SIZE);
4413 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4417 first_reg_offset = next_cum.words;
4418 save_area = virtual_incoming_args_rtx;
4419 cfun->machine->sysv_varargs_p = 0;
4421 if (MUST_PASS_IN_STACK (mode, type))
4422 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4425 set = get_varargs_alias_set ();
4426 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4428 mem = gen_rtx_MEM (BLKmode,
4429 plus_constant (save_area,
4430 first_reg_offset * reg_size)),
4431 set_mem_alias_set (mem, set);
4432 set_mem_align (mem, BITS_PER_WORD);
4434 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4435 GP_ARG_NUM_REG - first_reg_offset);
4438 /* Save FP registers if needed. */
4439 if (DEFAULT_ABI == ABI_V4
4440 && TARGET_HARD_FLOAT && TARGET_FPRS
4442 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4444 int fregno = next_cum.fregno;
4445 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4446 rtx lab = gen_label_rtx ();
4447 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4449 emit_jump_insn (gen_rtx_SET (VOIDmode,
4451 gen_rtx_IF_THEN_ELSE (VOIDmode,
4452 gen_rtx_NE (VOIDmode, cr1,
4454 gen_rtx_LABEL_REF (VOIDmode, lab),
4457 while (fregno <= FP_ARG_V4_MAX_REG)
4459 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4460 set_mem_alias_set (mem, set);
4461 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4470 /* Create the va_list data type. */
4473 rs6000_build_builtin_va_list (void)
4475 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4477 /* For AIX, prefer 'char *' because that's what the system
4478 header files like. */
4479 if (DEFAULT_ABI != ABI_V4)
4480 return build_pointer_type (char_type_node);
4482 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4483 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4485 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4486 unsigned_char_type_node);
4487 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4488 unsigned_char_type_node);
4489 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4491 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4492 short_unsigned_type_node);
4493 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4495 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4498 DECL_FIELD_CONTEXT (f_gpr) = record;
4499 DECL_FIELD_CONTEXT (f_fpr) = record;
4500 DECL_FIELD_CONTEXT (f_res) = record;
4501 DECL_FIELD_CONTEXT (f_ovf) = record;
4502 DECL_FIELD_CONTEXT (f_sav) = record;
4504 TREE_CHAIN (record) = type_decl;
4505 TYPE_NAME (record) = type_decl;
4506 TYPE_FIELDS (record) = f_gpr;
4507 TREE_CHAIN (f_gpr) = f_fpr;
4508 TREE_CHAIN (f_fpr) = f_res;
4509 TREE_CHAIN (f_res) = f_ovf;
4510 TREE_CHAIN (f_ovf) = f_sav;
4512 layout_type (record);
4514 /* The correct type is an array type of one element. */
4515 return build_array_type (record, build_index_type (size_zero_node));
4518 /* Implement va_start. */
4521 rs6000_va_start (tree valist, rtx nextarg)
4523 HOST_WIDE_INT words, n_gpr, n_fpr;
4524 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4525 tree gpr, fpr, ovf, sav, t;
4527 /* Only SVR4 needs something special. */
4528 if (DEFAULT_ABI != ABI_V4)
4530 std_expand_builtin_va_start (valist, nextarg);
4534 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4535 f_fpr = TREE_CHAIN (f_gpr);
4536 f_res = TREE_CHAIN (f_fpr);
4537 f_ovf = TREE_CHAIN (f_res);
4538 f_sav = TREE_CHAIN (f_ovf);
4540 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4541 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4542 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4543 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4544 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4546 /* Count number of gp and fp argument registers used. */
4547 words = current_function_args_info.words;
4548 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4549 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4551 if (TARGET_DEBUG_ARG)
4552 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4553 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4554 words, n_gpr, n_fpr);
4556 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4557 TREE_SIDE_EFFECTS (t) = 1;
4558 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4560 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4561 TREE_SIDE_EFFECTS (t) = 1;
4562 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4564 /* Find the overflow area. */
4565 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4567 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4568 build_int_2 (words * UNITS_PER_WORD, 0));
4569 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4570 TREE_SIDE_EFFECTS (t) = 1;
4571 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4573 /* Find the register save area. */
4574 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4575 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4576 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4577 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4578 TREE_SIDE_EFFECTS (t) = 1;
4579 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4582 /* Implement va_arg. */
4585 rs6000_va_arg (tree valist, tree type)
4587 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4588 tree gpr, fpr, ovf, sav, reg, t, u;
4589 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4590 rtx lab_false, lab_over, addr_rtx, r;
4592 if (DEFAULT_ABI != ABI_V4)
4594 /* Variable sized types are passed by reference. */
4595 if (int_size_in_bytes (type) < 0)
4597 u = build_pointer_type (type);
4599 /* Args grow upward. */
4600 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4601 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4602 TREE_SIDE_EFFECTS (t) = 1;
4604 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4605 TREE_SIDE_EFFECTS (t) = 1;
4607 t = build1 (INDIRECT_REF, u, t);
4608 TREE_SIDE_EFFECTS (t) = 1;
4610 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4614 /* Altivec arguments must be aligned to a 128-bit boundary. */
4615 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (TYPE_MODE (type)))
4617 tree vtype = TREE_TYPE (valist);
4618 tree new_valist, modify;
4620 /* Round address up to multiple of 16. Computes
4622 new_valist = fold (build (BIT_AND_EXPR, vtype,
4623 fold (build (PLUS_EXPR, vtype, valist,
4624 build_int_2 (15, 0))),
4625 build_int_2 (~15, -1)));
4627 /* Update valist. */
4628 modify = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
4630 TREE_SIDE_EFFECTS (modify) = 1;
4631 expand_expr (modify, const0_rtx, VOIDmode, EXPAND_NORMAL);
4634 return std_expand_builtin_va_arg (valist, type);
4638 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4639 f_fpr = TREE_CHAIN (f_gpr);
4640 f_res = TREE_CHAIN (f_fpr);
4641 f_ovf = TREE_CHAIN (f_res);
4642 f_sav = TREE_CHAIN (f_ovf);
4644 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4645 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4646 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4647 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4648 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4650 size = int_size_in_bytes (type);
4651 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4653 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4655 /* Aggregates and long doubles are passed by reference. */
4661 size = UNITS_PER_WORD;
4664 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4666 /* FP args go in FP registers, if present. */
4675 /* Otherwise into GP registers. */
4683 /* Pull the value out of the saved registers ... */
4685 lab_false = gen_label_rtx ();
4686 lab_over = gen_label_rtx ();
4687 addr_rtx = gen_reg_rtx (Pmode);
4689 /* AltiVec vectors never go in registers. */
4690 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4692 TREE_THIS_VOLATILE (reg) = 1;
4693 emit_cmp_and_jump_insns
4694 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4695 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4698 /* Long long is aligned in the registers. */
4701 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4702 build_int_2 (n_reg - 1, 0));
4703 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4704 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4705 TREE_SIDE_EFFECTS (u) = 1;
4706 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4710 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4714 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4715 build_int_2 (n_reg, 0));
4716 TREE_SIDE_EFFECTS (u) = 1;
4718 u = build1 (CONVERT_EXPR, integer_type_node, u);
4719 TREE_SIDE_EFFECTS (u) = 1;
4721 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4722 TREE_SIDE_EFFECTS (u) = 1;
4724 t = build (PLUS_EXPR, ptr_type_node, t, u);
4725 TREE_SIDE_EFFECTS (t) = 1;
4727 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4729 emit_move_insn (addr_rtx, r);
4731 emit_jump_insn (gen_jump (lab_over));
4735 emit_label (lab_false);
4737 /* ... otherwise out of the overflow area. */
4739 /* Make sure we don't find reg 7 for the next int arg.
4741 All AltiVec vectors go in the overflow area. So in the AltiVec
4742 case we need to get the vectors from the overflow area, but
4743 remember where the GPRs and FPRs are. */
4744 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4745 || !TARGET_ALTIVEC))
4747 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4748 TREE_SIDE_EFFECTS (t) = 1;
4749 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4752 /* Care for on-stack alignment if needed. */
4759 /* AltiVec vectors are 16 byte aligned. */
4760 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4765 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4766 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4770 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4772 emit_move_insn (addr_rtx, r);
4774 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4775 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4776 TREE_SIDE_EFFECTS (t) = 1;
4777 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4779 emit_label (lab_over);
4783 r = gen_rtx_MEM (Pmode, addr_rtx);
4784 set_mem_alias_set (r, get_varargs_alias_set ());
4785 emit_move_insn (addr_rtx, r);
4793 #define def_builtin(MASK, NAME, TYPE, CODE) \
4795 if ((MASK) & target_flags) \
4796 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4800 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4802 static const struct builtin_description bdesc_3arg[] =
4804 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4805 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4806 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4807 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4808 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4809 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4810 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4811 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4812 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4813 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4814 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4815 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4816 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4817 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4818 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4819 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4820 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4821 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4822 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4823 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4824 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4825 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4826 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4829 /* DST operations: void foo (void *, const int, const char). */
4831 static const struct builtin_description bdesc_dst[] =
4833 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4834 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4835 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4836 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4839 /* Simple binary operations: VECc = foo (VECa, VECb). */
4841 static struct builtin_description bdesc_2arg[] =
4843 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4844 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4845 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4846 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4847 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4848 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4849 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4850 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4851 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4852 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4853 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4854 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4855 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4856 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4857 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4858 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4859 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4860 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4861 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4862 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4863 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4864 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4865 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4866 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4867 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4868 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4869 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4870 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4871 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4872 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4873 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4874 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4875 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4876 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4877 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4878 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4879 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4880 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4881 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4882 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4883 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4884 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4885 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4886 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4887 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4888 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4889 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4890 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4891 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4892 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4893 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4894 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4895 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4896 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4897 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4898 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4899 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4900 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4901 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4902 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4903 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4904 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4905 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4906 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4907 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4908 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4909 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4910 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4911 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4912 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4913 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4914 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4915 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4916 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4917 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4918 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4919 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4920 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4921 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4922 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4923 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4924 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4925 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4926 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4927 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4928 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4929 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4930 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4931 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4932 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4933 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4934 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4935 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4936 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4937 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4938 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4939 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4940 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4941 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4942 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4943 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4944 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4945 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4946 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4947 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4948 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4949 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4950 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4951 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4952 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4953 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4954 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4955 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4957 /* Place holder, leave as first spe builtin. */
4958 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4959 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4960 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4961 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4962 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4963 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4964 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4965 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4966 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4967 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4968 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4969 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4970 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4971 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4972 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4973 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4974 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4975 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4976 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4977 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4978 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4979 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4980 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4981 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4982 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4983 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4984 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4985 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4986 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4987 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4988 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4989 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4990 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4991 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4992 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4993 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4994 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4995 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4996 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4997 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4998 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4999 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5000 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5001 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5002 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5003 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5004 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5005 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5006 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5007 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5008 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5009 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5010 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5011 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5012 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5013 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5014 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5015 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5016 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5017 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5018 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5019 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5020 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5021 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5022 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5023 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5024 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5025 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5026 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5027 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5028 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5029 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5030 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5031 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5032 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5033 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5034 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5035 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5036 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5037 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5038 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5039 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5040 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5041 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5042 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5043 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5044 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5045 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5046 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5047 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5048 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5049 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5050 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5051 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5052 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5053 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5054 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5055 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5056 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5057 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5058 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5059 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5060 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5061 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5062 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5063 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5064 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5065 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5066 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5068 /* SPE binary operations expecting a 5-bit unsigned literal. */
5069 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5071 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5072 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5073 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5074 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5075 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5076 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5077 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5078 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5079 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5080 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5081 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5082 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5083 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5084 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5085 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5086 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5087 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5088 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5089 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5090 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5091 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5092 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5093 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5094 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5095 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5096 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5098 /* Place-holder. Leave as last binary SPE builtin. */
5099 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5102 /* AltiVec predicates. */
5104 struct builtin_description_predicates
5106 const unsigned int mask;
5107 const enum insn_code icode;
5109 const char *const name;
5110 const enum rs6000_builtins code;
5113 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5115 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5116 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5117 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5118 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5119 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5120 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5121 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5122 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5123 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5124 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5125 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5126 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5127 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5130 /* SPE predicates. */
5131 static struct builtin_description bdesc_spe_predicates[] =
5133 /* Place-holder. Leave as first. */
5134 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5135 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5136 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5137 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5138 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5139 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5140 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5141 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5142 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5143 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5144 /* Place-holder. Leave as last. */
5145 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5148 /* SPE evsel predicates. */
5149 static struct builtin_description bdesc_spe_evsel[] =
5151 /* Place-holder. Leave as first. */
5152 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5153 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5154 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5155 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5156 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5157 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5158 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5159 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5160 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5161 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5162 /* Place-holder. Leave as last. */
5163 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5166 /* ABS* operations. */
5168 static const struct builtin_description bdesc_abs[] =
5170 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5171 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5172 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5173 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5174 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5175 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5176 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5179 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5182 static struct builtin_description bdesc_1arg[] =
5184 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5185 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5186 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5187 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5188 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5189 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5190 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5191 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5192 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5193 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5194 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5195 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5196 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5197 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5198 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5199 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5200 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5202 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5203 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5204 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5205 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5206 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5207 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5208 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5209 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5210 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5211 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5212 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5213 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5214 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5215 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5216 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5217 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5218 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5219 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5220 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5221 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5222 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5223 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5224 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5225 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5226 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5227 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5228 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5229 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5230 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5231 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5233 /* Place-holder. Leave as last unary SPE builtin. */
5234 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5238 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5241 tree arg0 = TREE_VALUE (arglist);
5242 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5243 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5244 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5246 if (icode == CODE_FOR_nothing)
5247 /* Builtin not supported on this processor. */
5250 /* If we got invalid arguments bail out before generating bad rtl. */
5251 if (arg0 == error_mark_node)
5254 if (icode == CODE_FOR_altivec_vspltisb
5255 || icode == CODE_FOR_altivec_vspltish
5256 || icode == CODE_FOR_altivec_vspltisw
5257 || icode == CODE_FOR_spe_evsplatfi
5258 || icode == CODE_FOR_spe_evsplati)
5260 /* Only allow 5-bit *signed* literals. */
5261 if (GET_CODE (op0) != CONST_INT
5262 || INTVAL (op0) > 0x1f
5263 || INTVAL (op0) < -0x1f)
5265 error ("argument 1 must be a 5-bit signed literal");
5271 || GET_MODE (target) != tmode
5272 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5273 target = gen_reg_rtx (tmode);
5275 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5276 op0 = copy_to_mode_reg (mode0, op0);
5278 pat = GEN_FCN (icode) (target, op0);
5287 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5289 rtx pat, scratch1, scratch2;
5290 tree arg0 = TREE_VALUE (arglist);
5291 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5292 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5293 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5295 /* If we have invalid arguments, bail out before generating bad rtl. */
5296 if (arg0 == error_mark_node)
5300 || GET_MODE (target) != tmode
5301 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5302 target = gen_reg_rtx (tmode);
5304 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5305 op0 = copy_to_mode_reg (mode0, op0);
5307 scratch1 = gen_reg_rtx (mode0);
5308 scratch2 = gen_reg_rtx (mode0);
5310 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5319 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5322 tree arg0 = TREE_VALUE (arglist);
5323 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5324 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5325 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5326 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5327 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5328 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5330 if (icode == CODE_FOR_nothing)
5331 /* Builtin not supported on this processor. */
5334 /* If we got invalid arguments bail out before generating bad rtl. */
5335 if (arg0 == error_mark_node || arg1 == error_mark_node)
5338 if (icode == CODE_FOR_altivec_vcfux
5339 || icode == CODE_FOR_altivec_vcfsx
5340 || icode == CODE_FOR_altivec_vctsxs
5341 || icode == CODE_FOR_altivec_vctuxs
5342 || icode == CODE_FOR_altivec_vspltb
5343 || icode == CODE_FOR_altivec_vsplth
5344 || icode == CODE_FOR_altivec_vspltw
5345 || icode == CODE_FOR_spe_evaddiw
5346 || icode == CODE_FOR_spe_evldd
5347 || icode == CODE_FOR_spe_evldh
5348 || icode == CODE_FOR_spe_evldw
5349 || icode == CODE_FOR_spe_evlhhesplat
5350 || icode == CODE_FOR_spe_evlhhossplat
5351 || icode == CODE_FOR_spe_evlhhousplat
5352 || icode == CODE_FOR_spe_evlwhe
5353 || icode == CODE_FOR_spe_evlwhos
5354 || icode == CODE_FOR_spe_evlwhou
5355 || icode == CODE_FOR_spe_evlwhsplat
5356 || icode == CODE_FOR_spe_evlwwsplat
5357 || icode == CODE_FOR_spe_evrlwi
5358 || icode == CODE_FOR_spe_evslwi
5359 || icode == CODE_FOR_spe_evsrwis
5360 || icode == CODE_FOR_spe_evsubifw
5361 || icode == CODE_FOR_spe_evsrwiu)
5363 /* Only allow 5-bit unsigned literals. */
5364 if (TREE_CODE (arg1) != INTEGER_CST
5365 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5367 error ("argument 2 must be a 5-bit unsigned literal");
5373 || GET_MODE (target) != tmode
5374 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5375 target = gen_reg_rtx (tmode);
5377 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5378 op0 = copy_to_mode_reg (mode0, op0);
5379 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5380 op1 = copy_to_mode_reg (mode1, op1);
5382 pat = GEN_FCN (icode) (target, op0, op1);
5391 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5392 tree arglist, rtx target)
5395 tree cr6_form = TREE_VALUE (arglist);
5396 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5397 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5398 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5399 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5400 enum machine_mode tmode = SImode;
5401 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5402 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5405 if (TREE_CODE (cr6_form) != INTEGER_CST)
5407 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5411 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5416 /* If we have invalid arguments, bail out before generating bad rtl. */
5417 if (arg0 == error_mark_node || arg1 == error_mark_node)
5421 || GET_MODE (target) != tmode
5422 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5423 target = gen_reg_rtx (tmode);
5425 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5426 op0 = copy_to_mode_reg (mode0, op0);
5427 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5428 op1 = copy_to_mode_reg (mode1, op1);
5430 scratch = gen_reg_rtx (mode0);
5432 pat = GEN_FCN (icode) (scratch, op0, op1,
5433 gen_rtx (SYMBOL_REF, Pmode, opcode));
5438 /* The vec_any* and vec_all* predicates use the same opcodes for two
5439 different operations, but the bits in CR6 will be different
5440 depending on what information we want. So we have to play tricks
5441 with CR6 to get the right bits out.
5443 If you think this is disgusting, look at the specs for the
5444 AltiVec predicates. */
5446 switch (cr6_form_int)
5449 emit_insn (gen_cr6_test_for_zero (target));
5452 emit_insn (gen_cr6_test_for_zero_reverse (target));
5455 emit_insn (gen_cr6_test_for_lt (target));
5458 emit_insn (gen_cr6_test_for_lt_reverse (target));
5461 error ("argument 1 of __builtin_altivec_predicate is out of range");
5469 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5472 tree arg0 = TREE_VALUE (arglist);
5473 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5474 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5475 enum machine_mode mode0 = Pmode;
5476 enum machine_mode mode1 = Pmode;
5477 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5478 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5480 if (icode == CODE_FOR_nothing)
5481 /* Builtin not supported on this processor. */
5484 /* If we got invalid arguments bail out before generating bad rtl. */
5485 if (arg0 == error_mark_node || arg1 == error_mark_node)
5489 || GET_MODE (target) != tmode
5490 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5491 target = gen_reg_rtx (tmode);
5493 op1 = copy_to_mode_reg (mode1, op1);
5495 if (op0 == const0_rtx)
5497 addr = gen_rtx_MEM (tmode, op1);
5501 op0 = copy_to_mode_reg (mode0, op0);
5502 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5505 pat = GEN_FCN (icode) (target, addr);
5515 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5517 tree arg0 = TREE_VALUE (arglist);
5518 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5519 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5520 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5521 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5522 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5524 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5525 enum machine_mode mode1 = Pmode;
5526 enum machine_mode mode2 = Pmode;
5528 /* Invalid arguments. Bail before doing anything stoopid! */
5529 if (arg0 == error_mark_node
5530 || arg1 == error_mark_node
5531 || arg2 == error_mark_node)
5534 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5535 op0 = copy_to_mode_reg (tmode, op0);
5537 op2 = copy_to_mode_reg (mode2, op2);
5539 if (op1 == const0_rtx)
5541 addr = gen_rtx_MEM (tmode, op2);
5545 op1 = copy_to_mode_reg (mode1, op1);
5546 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5549 pat = GEN_FCN (icode) (addr, op0);
5556 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5559 tree arg0 = TREE_VALUE (arglist);
5560 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5561 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5562 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5563 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5564 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5565 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5566 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5567 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5568 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5570 if (icode == CODE_FOR_nothing)
5571 /* Builtin not supported on this processor. */
5574 /* If we got invalid arguments bail out before generating bad rtl. */
5575 if (arg0 == error_mark_node
5576 || arg1 == error_mark_node
5577 || arg2 == error_mark_node)
5580 if (icode == CODE_FOR_altivec_vsldoi_4sf
5581 || icode == CODE_FOR_altivec_vsldoi_4si
5582 || icode == CODE_FOR_altivec_vsldoi_8hi
5583 || icode == CODE_FOR_altivec_vsldoi_16qi)
5585 /* Only allow 4-bit unsigned literals. */
5586 if (TREE_CODE (arg2) != INTEGER_CST
5587 || TREE_INT_CST_LOW (arg2) & ~0xf)
5589 error ("argument 3 must be a 4-bit unsigned literal");
5595 || GET_MODE (target) != tmode
5596 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5597 target = gen_reg_rtx (tmode);
5599 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5600 op0 = copy_to_mode_reg (mode0, op0);
5601 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5602 op1 = copy_to_mode_reg (mode1, op1);
5603 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5604 op2 = copy_to_mode_reg (mode2, op2);
5606 pat = GEN_FCN (icode) (target, op0, op1, op2);
5614 /* Expand the lvx builtins. */
5616 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5618 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5619 tree arglist = TREE_OPERAND (exp, 1);
5620 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5622 enum machine_mode tmode, mode0;
5624 enum insn_code icode;
5628 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5629 icode = CODE_FOR_altivec_lvx_16qi;
5631 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5632 icode = CODE_FOR_altivec_lvx_8hi;
5634 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5635 icode = CODE_FOR_altivec_lvx_4si;
5637 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5638 icode = CODE_FOR_altivec_lvx_4sf;
5647 arg0 = TREE_VALUE (arglist);
5648 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5649 tmode = insn_data[icode].operand[0].mode;
5650 mode0 = insn_data[icode].operand[1].mode;
5653 || GET_MODE (target) != tmode
5654 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5655 target = gen_reg_rtx (tmode);
5657 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5658 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5660 pat = GEN_FCN (icode) (target, op0);
5667 /* Expand the stvx builtins. */
5669 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5672 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5673 tree arglist = TREE_OPERAND (exp, 1);
5674 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5676 enum machine_mode mode0, mode1;
5678 enum insn_code icode;
5682 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5683 icode = CODE_FOR_altivec_stvx_16qi;
5685 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5686 icode = CODE_FOR_altivec_stvx_8hi;
5688 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5689 icode = CODE_FOR_altivec_stvx_4si;
5691 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5692 icode = CODE_FOR_altivec_stvx_4sf;
5699 arg0 = TREE_VALUE (arglist);
5700 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5701 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5702 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5703 mode0 = insn_data[icode].operand[0].mode;
5704 mode1 = insn_data[icode].operand[1].mode;
5706 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5707 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5708 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5709 op1 = copy_to_mode_reg (mode1, op1);
5711 pat = GEN_FCN (icode) (op0, op1);
5719 /* Expand the dst builtins. */
5721 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5724 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5725 tree arglist = TREE_OPERAND (exp, 1);
5726 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5727 tree arg0, arg1, arg2;
5728 enum machine_mode mode0, mode1, mode2;
5729 rtx pat, op0, op1, op2;
5730 struct builtin_description *d;
5735 /* Handle DST variants. */
5736 d = (struct builtin_description *) bdesc_dst;
5737 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5738 if (d->code == fcode)
5740 arg0 = TREE_VALUE (arglist);
5741 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5742 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5743 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5744 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5745 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5746 mode0 = insn_data[d->icode].operand[0].mode;
5747 mode1 = insn_data[d->icode].operand[1].mode;
5748 mode2 = insn_data[d->icode].operand[2].mode;
5750 /* Invalid arguments, bail out before generating bad rtl. */
5751 if (arg0 == error_mark_node
5752 || arg1 == error_mark_node
5753 || arg2 == error_mark_node)
5756 if (TREE_CODE (arg2) != INTEGER_CST
5757 || TREE_INT_CST_LOW (arg2) & ~0x3)
5759 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5763 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5764 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5765 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5766 op1 = copy_to_mode_reg (mode1, op1);
5768 pat = GEN_FCN (d->icode) (op0, op1, op2);
5779 /* Expand the builtin in EXP and store the result in TARGET. Store
5780 true in *EXPANDEDP if we found a builtin to expand. */
5782 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
5784 struct builtin_description *d;
5785 struct builtin_description_predicates *dp;
5787 enum insn_code icode;
5788 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5789 tree arglist = TREE_OPERAND (exp, 1);
5792 enum machine_mode tmode, mode0;
5793 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5795 target = altivec_expand_ld_builtin (exp, target, expandedp);
5799 target = altivec_expand_st_builtin (exp, target, expandedp);
5803 target = altivec_expand_dst_builtin (exp, target, expandedp);
5811 case ALTIVEC_BUILTIN_STVX:
5812 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5813 case ALTIVEC_BUILTIN_STVEBX:
5814 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5815 case ALTIVEC_BUILTIN_STVEHX:
5816 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5817 case ALTIVEC_BUILTIN_STVEWX:
5818 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5819 case ALTIVEC_BUILTIN_STVXL:
5820 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5822 case ALTIVEC_BUILTIN_MFVSCR:
5823 icode = CODE_FOR_altivec_mfvscr;
5824 tmode = insn_data[icode].operand[0].mode;
5827 || GET_MODE (target) != tmode
5828 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5829 target = gen_reg_rtx (tmode);
5831 pat = GEN_FCN (icode) (target);
5837 case ALTIVEC_BUILTIN_MTVSCR:
5838 icode = CODE_FOR_altivec_mtvscr;
5839 arg0 = TREE_VALUE (arglist);
5840 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5841 mode0 = insn_data[icode].operand[0].mode;
5843 /* If we got invalid arguments bail out before generating bad rtl. */
5844 if (arg0 == error_mark_node)
5847 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5848 op0 = copy_to_mode_reg (mode0, op0);
5850 pat = GEN_FCN (icode) (op0);
5855 case ALTIVEC_BUILTIN_DSSALL:
5856 emit_insn (gen_altivec_dssall ());
5859 case ALTIVEC_BUILTIN_DSS:
5860 icode = CODE_FOR_altivec_dss;
5861 arg0 = TREE_VALUE (arglist);
5862 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5863 mode0 = insn_data[icode].operand[0].mode;
5865 /* If we got invalid arguments bail out before generating bad rtl. */
5866 if (arg0 == error_mark_node)
5869 if (TREE_CODE (arg0) != INTEGER_CST
5870 || TREE_INT_CST_LOW (arg0) & ~0x3)
5872 error ("argument to dss must be a 2-bit unsigned literal");
5876 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5877 op0 = copy_to_mode_reg (mode0, op0);
5879 emit_insn (gen_altivec_dss (op0));
5883 /* Expand abs* operations. */
5884 d = (struct builtin_description *) bdesc_abs;
5885 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5886 if (d->code == fcode)
5887 return altivec_expand_abs_builtin (d->icode, arglist, target);
5889 /* Expand the AltiVec predicates. */
5890 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5891 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5892 if (dp->code == fcode)
5893 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5895 /* LV* are funky. We initialized them differently. */
5898 case ALTIVEC_BUILTIN_LVSL:
5899 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
5901 case ALTIVEC_BUILTIN_LVSR:
5902 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
5904 case ALTIVEC_BUILTIN_LVEBX:
5905 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
5907 case ALTIVEC_BUILTIN_LVEHX:
5908 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
5910 case ALTIVEC_BUILTIN_LVEWX:
5911 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
5913 case ALTIVEC_BUILTIN_LVXL:
5914 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
5916 case ALTIVEC_BUILTIN_LVX:
5917 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
5928 /* Binops that need to be initialized manually, but can be expanded
5929 automagically by rs6000_expand_binop_builtin. */
5930 static struct builtin_description bdesc_2arg_spe[] =
5932 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5933 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5934 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5935 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5936 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5937 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5938 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5939 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5940 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5941 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5942 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5943 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5944 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5945 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5946 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5947 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5948 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5949 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5950 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5951 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5952 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5953 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5956 /* Expand the builtin in EXP and store the result in TARGET. Store
5957 true in *EXPANDEDP if we found a builtin to expand.
5959 This expands the SPE builtins that are not simple unary and binary
5962 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
5964 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5965 tree arglist = TREE_OPERAND (exp, 1);
5967 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5968 enum insn_code icode;
5969 enum machine_mode tmode, mode0;
5971 struct builtin_description *d;
5976 /* Syntax check for a 5-bit unsigned immediate. */
5979 case SPE_BUILTIN_EVSTDD:
5980 case SPE_BUILTIN_EVSTDH:
5981 case SPE_BUILTIN_EVSTDW:
5982 case SPE_BUILTIN_EVSTWHE:
5983 case SPE_BUILTIN_EVSTWHO:
5984 case SPE_BUILTIN_EVSTWWE:
5985 case SPE_BUILTIN_EVSTWWO:
5986 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5987 if (TREE_CODE (arg1) != INTEGER_CST
5988 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5990 error ("argument 2 must be a 5-bit unsigned literal");
5998 /* The evsplat*i instructions are not quite generic. */
6001 case SPE_BUILTIN_EVSPLATFI:
6002 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6004 case SPE_BUILTIN_EVSPLATI:
6005 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6011 d = (struct builtin_description *) bdesc_2arg_spe;
6012 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6013 if (d->code == fcode)
6014 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6016 d = (struct builtin_description *) bdesc_spe_predicates;
6017 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6018 if (d->code == fcode)
6019 return spe_expand_predicate_builtin (d->icode, arglist, target);
6021 d = (struct builtin_description *) bdesc_spe_evsel;
6022 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6023 if (d->code == fcode)
6024 return spe_expand_evsel_builtin (d->icode, arglist, target);
6028 case SPE_BUILTIN_EVSTDDX:
6029 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6030 case SPE_BUILTIN_EVSTDHX:
6031 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6032 case SPE_BUILTIN_EVSTDWX:
6033 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6034 case SPE_BUILTIN_EVSTWHEX:
6035 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6036 case SPE_BUILTIN_EVSTWHOX:
6037 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6038 case SPE_BUILTIN_EVSTWWEX:
6039 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6040 case SPE_BUILTIN_EVSTWWOX:
6041 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6042 case SPE_BUILTIN_EVSTDD:
6043 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6044 case SPE_BUILTIN_EVSTDH:
6045 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6046 case SPE_BUILTIN_EVSTDW:
6047 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6048 case SPE_BUILTIN_EVSTWHE:
6049 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6050 case SPE_BUILTIN_EVSTWHO:
6051 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6052 case SPE_BUILTIN_EVSTWWE:
6053 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6054 case SPE_BUILTIN_EVSTWWO:
6055 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6056 case SPE_BUILTIN_MFSPEFSCR:
6057 icode = CODE_FOR_spe_mfspefscr;
6058 tmode = insn_data[icode].operand[0].mode;
6061 || GET_MODE (target) != tmode
6062 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6063 target = gen_reg_rtx (tmode);
6065 pat = GEN_FCN (icode) (target);
6070 case SPE_BUILTIN_MTSPEFSCR:
6071 icode = CODE_FOR_spe_mtspefscr;
6072 arg0 = TREE_VALUE (arglist);
6073 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6074 mode0 = insn_data[icode].operand[0].mode;
6076 if (arg0 == error_mark_node)
6079 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6080 op0 = copy_to_mode_reg (mode0, op0);
6082 pat = GEN_FCN (icode) (op0);
6095 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6097 rtx pat, scratch, tmp;
6098 tree form = TREE_VALUE (arglist);
6099 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6100 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6101 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6102 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6103 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6104 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6108 if (TREE_CODE (form) != INTEGER_CST)
6110 error ("argument 1 of __builtin_spe_predicate must be a constant");
6114 form_int = TREE_INT_CST_LOW (form);
6119 if (arg0 == error_mark_node || arg1 == error_mark_node)
6123 || GET_MODE (target) != SImode
6124 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6125 target = gen_reg_rtx (SImode);
6127 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6128 op0 = copy_to_mode_reg (mode0, op0);
6129 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6130 op1 = copy_to_mode_reg (mode1, op1);
6132 scratch = gen_reg_rtx (CCmode);
6134 pat = GEN_FCN (icode) (scratch, op0, op1);
6139 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6140 _lower_. We use one compare, but look in different bits of the
6141 CR for each variant.
6143 There are 2 elements in each SPE simd type (upper/lower). The CR
6144 bits are set as follows:
6146 BIT0 | BIT 1 | BIT 2 | BIT 3
6147 U | L | (U | L) | (U & L)
6149 So, for an "all" relationship, BIT 3 would be set.
6150 For an "any" relationship, BIT 2 would be set. Etc.
6152 Following traditional nomenclature, these bits map to:
6154 BIT0 | BIT 1 | BIT 2 | BIT 3
6157 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6162 /* All variant. OV bit. */
6164 /* We need to get to the OV bit, which is the ORDERED bit. We
6165 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6166 that's ugly and will trigger a validate_condition_mode abort.
6167 So let's just use another pattern. */
6168 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6170 /* Any variant. EQ bit. */
6174 /* Upper variant. LT bit. */
6178 /* Lower variant. GT bit. */
6183 error ("argument 1 of __builtin_spe_predicate is out of range");
6187 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6188 emit_move_insn (target, tmp);
6193 /* The evsel builtins look like this:
6195 e = __builtin_spe_evsel_OP (a, b, c, d);
6199 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6200 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6204 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6207 tree arg0 = TREE_VALUE (arglist);
6208 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6209 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6210 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6211 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6212 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6213 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6214 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6215 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6216 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6221 if (arg0 == error_mark_node || arg1 == error_mark_node
6222 || arg2 == error_mark_node || arg3 == error_mark_node)
6226 || GET_MODE (target) != mode0
6227 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6228 target = gen_reg_rtx (mode0);
6230 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6231 op0 = copy_to_mode_reg (mode0, op0);
6232 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6233 op1 = copy_to_mode_reg (mode0, op1);
6234 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6235 op2 = copy_to_mode_reg (mode0, op2);
6236 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6237 op3 = copy_to_mode_reg (mode0, op3);
6239 /* Generate the compare. */
6240 scratch = gen_reg_rtx (CCmode);
6241 pat = GEN_FCN (icode) (scratch, op0, op1);
6246 if (mode0 == V2SImode)
6247 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6249 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6254 /* Expand an expression EXP that calls a built-in function,
6255 with result going to TARGET if that's convenient
6256 (and in mode MODE if that's convenient).
6257 SUBTARGET may be used as the target for computing one of EXP's operands.
6258 IGNORE is nonzero if the value is to be ignored. */
6261 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6262 enum machine_mode mode ATTRIBUTE_UNUSED,
6263 int ignore ATTRIBUTE_UNUSED)
6265 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6266 tree arglist = TREE_OPERAND (exp, 1);
6267 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6268 struct builtin_description *d;
6275 ret = altivec_expand_builtin (exp, target, &success);
6282 ret = spe_expand_builtin (exp, target, &success);
6288 if (TARGET_ALTIVEC || TARGET_SPE)
6290 /* Handle simple unary operations. */
6291 d = (struct builtin_description *) bdesc_1arg;
6292 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6293 if (d->code == fcode)
6294 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6296 /* Handle simple binary operations. */
6297 d = (struct builtin_description *) bdesc_2arg;
6298 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6299 if (d->code == fcode)
6300 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6302 /* Handle simple ternary operations. */
6303 d = (struct builtin_description *) bdesc_3arg;
6304 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6305 if (d->code == fcode)
6306 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6314 rs6000_init_builtins (void)
6316 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6317 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6318 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6321 spe_init_builtins ();
6323 altivec_init_builtins ();
6324 if (TARGET_ALTIVEC || TARGET_SPE)
6325 rs6000_common_init_builtins ();
6328 /* Search through a set of builtins and enable the mask bits.
6329 DESC is an array of builtins.
6330 SIZE is the total number of builtins.
6331 START is the builtin enum at which to start.
6332 END is the builtin enum at which to end. */
6334 enable_mask_for_builtins (struct builtin_description *desc, int size,
6335 enum rs6000_builtins start,
6336 enum rs6000_builtins end)
6340 for (i = 0; i < size; ++i)
6341 if (desc[i].code == start)
6347 for (; i < size; ++i)
6349 /* Flip all the bits on. */
6350 desc[i].mask = target_flags;
6351 if (desc[i].code == end)
6357 spe_init_builtins (void)
6359 tree endlink = void_list_node;
6360 tree puint_type_node = build_pointer_type (unsigned_type_node);
6361 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6362 struct builtin_description *d;
6365 tree v2si_ftype_4_v2si
6366 = build_function_type
6367 (opaque_V2SI_type_node,
6368 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6369 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6370 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6371 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6374 tree v2sf_ftype_4_v2sf
6375 = build_function_type
6376 (opaque_V2SF_type_node,
6377 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6378 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6379 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6380 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6383 tree int_ftype_int_v2si_v2si
6384 = build_function_type
6386 tree_cons (NULL_TREE, integer_type_node,
6387 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6388 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6391 tree int_ftype_int_v2sf_v2sf
6392 = build_function_type
6394 tree_cons (NULL_TREE, integer_type_node,
6395 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6396 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6399 tree void_ftype_v2si_puint_int
6400 = build_function_type (void_type_node,
6401 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6402 tree_cons (NULL_TREE, puint_type_node,
6403 tree_cons (NULL_TREE,
6407 tree void_ftype_v2si_puint_char
6408 = build_function_type (void_type_node,
6409 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6410 tree_cons (NULL_TREE, puint_type_node,
6411 tree_cons (NULL_TREE,
6415 tree void_ftype_v2si_pv2si_int
6416 = build_function_type (void_type_node,
6417 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6418 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6419 tree_cons (NULL_TREE,
6423 tree void_ftype_v2si_pv2si_char
6424 = build_function_type (void_type_node,
6425 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6426 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6427 tree_cons (NULL_TREE,
6432 = build_function_type (void_type_node,
6433 tree_cons (NULL_TREE, integer_type_node, endlink));
6436 = build_function_type (integer_type_node, endlink);
6438 tree v2si_ftype_pv2si_int
6439 = build_function_type (opaque_V2SI_type_node,
6440 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6441 tree_cons (NULL_TREE, integer_type_node,
6444 tree v2si_ftype_puint_int
6445 = build_function_type (opaque_V2SI_type_node,
6446 tree_cons (NULL_TREE, puint_type_node,
6447 tree_cons (NULL_TREE, integer_type_node,
6450 tree v2si_ftype_pushort_int
6451 = build_function_type (opaque_V2SI_type_node,
6452 tree_cons (NULL_TREE, pushort_type_node,
6453 tree_cons (NULL_TREE, integer_type_node,
6456 tree v2si_ftype_signed_char
6457 = build_function_type (opaque_V2SI_type_node,
6458 tree_cons (NULL_TREE, signed_char_type_node,
6461 /* The initialization of the simple binary and unary builtins is
6462 done in rs6000_common_init_builtins, but we have to enable the
6463 mask bits here manually because we have run out of `target_flags'
6464 bits. We really need to redesign this mask business. */
6466 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6467 ARRAY_SIZE (bdesc_2arg),
6470 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6471 ARRAY_SIZE (bdesc_1arg),
6473 SPE_BUILTIN_EVSUBFUSIAAW);
6474 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6475 ARRAY_SIZE (bdesc_spe_predicates),
6476 SPE_BUILTIN_EVCMPEQ,
6477 SPE_BUILTIN_EVFSTSTLT);
6478 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6479 ARRAY_SIZE (bdesc_spe_evsel),
6480 SPE_BUILTIN_EVSEL_CMPGTS,
6481 SPE_BUILTIN_EVSEL_FSTSTEQ);
6483 (*lang_hooks.decls.pushdecl)
6484 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6485 opaque_V2SI_type_node));
6487 /* Initialize irregular SPE builtins. */
6489 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6490 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6491 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6492 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6493 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6494 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6495 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6496 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6497 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6498 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6499 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6500 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6501 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6502 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6503 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6504 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6505 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6506 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6509 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6510 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6511 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6512 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6513 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6514 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6515 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6516 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6517 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6518 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6519 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6520 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6521 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6522 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6523 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6524 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6525 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6526 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6527 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6528 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6529 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6530 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6533 d = (struct builtin_description *) bdesc_spe_predicates;
6534 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6538 switch (insn_data[d->icode].operand[1].mode)
6541 type = int_ftype_int_v2si_v2si;
6544 type = int_ftype_int_v2sf_v2sf;
6550 def_builtin (d->mask, d->name, type, d->code);
6553 /* Evsel predicates. */
6554 d = (struct builtin_description *) bdesc_spe_evsel;
6555 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6559 switch (insn_data[d->icode].operand[1].mode)
6562 type = v2si_ftype_4_v2si;
6565 type = v2sf_ftype_4_v2sf;
6571 def_builtin (d->mask, d->name, type, d->code);
6576 altivec_init_builtins (void)
6578 struct builtin_description *d;
6579 struct builtin_description_predicates *dp;
6581 tree pfloat_type_node = build_pointer_type (float_type_node);
6582 tree pint_type_node = build_pointer_type (integer_type_node);
6583 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6584 tree pchar_type_node = build_pointer_type (char_type_node);
6586 tree pvoid_type_node = build_pointer_type (void_type_node);
6588 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6589 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6590 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6591 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6593 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6595 tree int_ftype_int_v4si_v4si
6596 = build_function_type_list (integer_type_node,
6597 integer_type_node, V4SI_type_node,
6598 V4SI_type_node, NULL_TREE);
6599 tree v4sf_ftype_pcfloat
6600 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6601 tree void_ftype_pfloat_v4sf
6602 = build_function_type_list (void_type_node,
6603 pfloat_type_node, V4SF_type_node, NULL_TREE);
6604 tree v4si_ftype_pcint
6605 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6606 tree void_ftype_pint_v4si
6607 = build_function_type_list (void_type_node,
6608 pint_type_node, V4SI_type_node, NULL_TREE);
6609 tree v8hi_ftype_pcshort
6610 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6611 tree void_ftype_pshort_v8hi
6612 = build_function_type_list (void_type_node,
6613 pshort_type_node, V8HI_type_node, NULL_TREE);
6614 tree v16qi_ftype_pcchar
6615 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6616 tree void_ftype_pchar_v16qi
6617 = build_function_type_list (void_type_node,
6618 pchar_type_node, V16QI_type_node, NULL_TREE);
6619 tree void_ftype_v4si
6620 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6621 tree v8hi_ftype_void
6622 = build_function_type (V8HI_type_node, void_list_node);
6623 tree void_ftype_void
6624 = build_function_type (void_type_node, void_list_node);
6626 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6628 tree v16qi_ftype_long_pcvoid
6629 = build_function_type_list (V16QI_type_node,
6630 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6631 tree v8hi_ftype_long_pcvoid
6632 = build_function_type_list (V8HI_type_node,
6633 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6634 tree v4si_ftype_long_pcvoid
6635 = build_function_type_list (V4SI_type_node,
6636 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6638 tree void_ftype_v4si_long_pvoid
6639 = build_function_type_list (void_type_node,
6640 V4SI_type_node, long_integer_type_node,
6641 pvoid_type_node, NULL_TREE);
6642 tree void_ftype_v16qi_long_pvoid
6643 = build_function_type_list (void_type_node,
6644 V16QI_type_node, long_integer_type_node,
6645 pvoid_type_node, NULL_TREE);
6646 tree void_ftype_v8hi_long_pvoid
6647 = build_function_type_list (void_type_node,
6648 V8HI_type_node, long_integer_type_node,
6649 pvoid_type_node, NULL_TREE);
6650 tree int_ftype_int_v8hi_v8hi
6651 = build_function_type_list (integer_type_node,
6652 integer_type_node, V8HI_type_node,
6653 V8HI_type_node, NULL_TREE);
6654 tree int_ftype_int_v16qi_v16qi
6655 = build_function_type_list (integer_type_node,
6656 integer_type_node, V16QI_type_node,
6657 V16QI_type_node, NULL_TREE);
6658 tree int_ftype_int_v4sf_v4sf
6659 = build_function_type_list (integer_type_node,
6660 integer_type_node, V4SF_type_node,
6661 V4SF_type_node, NULL_TREE);
6662 tree v4si_ftype_v4si
6663 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6664 tree v8hi_ftype_v8hi
6665 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6666 tree v16qi_ftype_v16qi
6667 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6668 tree v4sf_ftype_v4sf
6669 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6670 tree void_ftype_pcvoid_int_char
6671 = build_function_type_list (void_type_node,
6672 pcvoid_type_node, integer_type_node,
6673 char_type_node, NULL_TREE);
6675 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6676 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6677 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6678 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6679 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6680 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6681 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6682 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6683 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6684 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6685 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6686 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6687 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6688 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6689 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6690 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6691 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6692 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6693 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6694 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6695 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
6696 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
6697 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6698 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6699 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6700 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
6701 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
6702 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
6703 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
6704 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
6705 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
6706 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
6708 /* Add the DST variants. */
6709 d = (struct builtin_description *) bdesc_dst;
6710 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6711 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6713 /* Initialize the predicates. */
6714 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6715 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6717 enum machine_mode mode1;
6720 mode1 = insn_data[dp->icode].operand[1].mode;
6725 type = int_ftype_int_v4si_v4si;
6728 type = int_ftype_int_v8hi_v8hi;
6731 type = int_ftype_int_v16qi_v16qi;
6734 type = int_ftype_int_v4sf_v4sf;
6740 def_builtin (dp->mask, dp->name, type, dp->code);
6743 /* Initialize the abs* operators. */
6744 d = (struct builtin_description *) bdesc_abs;
6745 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6747 enum machine_mode mode0;
6750 mode0 = insn_data[d->icode].operand[0].mode;
6755 type = v4si_ftype_v4si;
6758 type = v8hi_ftype_v8hi;
6761 type = v16qi_ftype_v16qi;
6764 type = v4sf_ftype_v4sf;
6770 def_builtin (d->mask, d->name, type, d->code);
6775 rs6000_common_init_builtins (void)
6777 struct builtin_description *d;
6780 tree v4sf_ftype_v4sf_v4sf_v16qi
6781 = build_function_type_list (V4SF_type_node,
6782 V4SF_type_node, V4SF_type_node,
6783 V16QI_type_node, NULL_TREE);
6784 tree v4si_ftype_v4si_v4si_v16qi
6785 = build_function_type_list (V4SI_type_node,
6786 V4SI_type_node, V4SI_type_node,
6787 V16QI_type_node, NULL_TREE);
6788 tree v8hi_ftype_v8hi_v8hi_v16qi
6789 = build_function_type_list (V8HI_type_node,
6790 V8HI_type_node, V8HI_type_node,
6791 V16QI_type_node, NULL_TREE);
6792 tree v16qi_ftype_v16qi_v16qi_v16qi
6793 = build_function_type_list (V16QI_type_node,
6794 V16QI_type_node, V16QI_type_node,
6795 V16QI_type_node, NULL_TREE);
6796 tree v4si_ftype_char
6797 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6798 tree v8hi_ftype_char
6799 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6800 tree v16qi_ftype_char
6801 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6802 tree v8hi_ftype_v16qi
6803 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6804 tree v4sf_ftype_v4sf
6805 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6807 tree v2si_ftype_v2si_v2si
6808 = build_function_type_list (opaque_V2SI_type_node,
6809 opaque_V2SI_type_node,
6810 opaque_V2SI_type_node, NULL_TREE);
6812 tree v2sf_ftype_v2sf_v2sf
6813 = build_function_type_list (opaque_V2SF_type_node,
6814 opaque_V2SF_type_node,
6815 opaque_V2SF_type_node, NULL_TREE);
6817 tree v2si_ftype_int_int
6818 = build_function_type_list (opaque_V2SI_type_node,
6819 integer_type_node, integer_type_node,
6822 tree v2si_ftype_v2si
6823 = build_function_type_list (opaque_V2SI_type_node,
6824 opaque_V2SI_type_node, NULL_TREE);
6826 tree v2sf_ftype_v2sf
6827 = build_function_type_list (opaque_V2SF_type_node,
6828 opaque_V2SF_type_node, NULL_TREE);
6830 tree v2sf_ftype_v2si
6831 = build_function_type_list (opaque_V2SF_type_node,
6832 opaque_V2SI_type_node, NULL_TREE);
6834 tree v2si_ftype_v2sf
6835 = build_function_type_list (opaque_V2SI_type_node,
6836 opaque_V2SF_type_node, NULL_TREE);
6838 tree v2si_ftype_v2si_char
6839 = build_function_type_list (opaque_V2SI_type_node,
6840 opaque_V2SI_type_node,
6841 char_type_node, NULL_TREE);
6843 tree v2si_ftype_int_char
6844 = build_function_type_list (opaque_V2SI_type_node,
6845 integer_type_node, char_type_node, NULL_TREE);
6847 tree v2si_ftype_char
6848 = build_function_type_list (opaque_V2SI_type_node,
6849 char_type_node, NULL_TREE);
6851 tree int_ftype_int_int
6852 = build_function_type_list (integer_type_node,
6853 integer_type_node, integer_type_node,
6856 tree v4si_ftype_v4si_v4si
6857 = build_function_type_list (V4SI_type_node,
6858 V4SI_type_node, V4SI_type_node, NULL_TREE);
6859 tree v4sf_ftype_v4si_char
6860 = build_function_type_list (V4SF_type_node,
6861 V4SI_type_node, char_type_node, NULL_TREE);
6862 tree v4si_ftype_v4sf_char
6863 = build_function_type_list (V4SI_type_node,
6864 V4SF_type_node, char_type_node, NULL_TREE);
6865 tree v4si_ftype_v4si_char
6866 = build_function_type_list (V4SI_type_node,
6867 V4SI_type_node, char_type_node, NULL_TREE);
6868 tree v8hi_ftype_v8hi_char
6869 = build_function_type_list (V8HI_type_node,
6870 V8HI_type_node, char_type_node, NULL_TREE);
6871 tree v16qi_ftype_v16qi_char
6872 = build_function_type_list (V16QI_type_node,
6873 V16QI_type_node, char_type_node, NULL_TREE);
6874 tree v16qi_ftype_v16qi_v16qi_char
6875 = build_function_type_list (V16QI_type_node,
6876 V16QI_type_node, V16QI_type_node,
6877 char_type_node, NULL_TREE);
6878 tree v8hi_ftype_v8hi_v8hi_char
6879 = build_function_type_list (V8HI_type_node,
6880 V8HI_type_node, V8HI_type_node,
6881 char_type_node, NULL_TREE);
6882 tree v4si_ftype_v4si_v4si_char
6883 = build_function_type_list (V4SI_type_node,
6884 V4SI_type_node, V4SI_type_node,
6885 char_type_node, NULL_TREE);
6886 tree v4sf_ftype_v4sf_v4sf_char
6887 = build_function_type_list (V4SF_type_node,
6888 V4SF_type_node, V4SF_type_node,
6889 char_type_node, NULL_TREE);
6890 tree v4sf_ftype_v4sf_v4sf
6891 = build_function_type_list (V4SF_type_node,
6892 V4SF_type_node, V4SF_type_node, NULL_TREE);
6893 tree v4sf_ftype_v4sf_v4sf_v4si
6894 = build_function_type_list (V4SF_type_node,
6895 V4SF_type_node, V4SF_type_node,
6896 V4SI_type_node, NULL_TREE);
6897 tree v4sf_ftype_v4sf_v4sf_v4sf
6898 = build_function_type_list (V4SF_type_node,
6899 V4SF_type_node, V4SF_type_node,
6900 V4SF_type_node, NULL_TREE);
6901 tree v4si_ftype_v4si_v4si_v4si
6902 = build_function_type_list (V4SI_type_node,
6903 V4SI_type_node, V4SI_type_node,
6904 V4SI_type_node, NULL_TREE);
6905 tree v8hi_ftype_v8hi_v8hi
6906 = build_function_type_list (V8HI_type_node,
6907 V8HI_type_node, V8HI_type_node, NULL_TREE);
6908 tree v8hi_ftype_v8hi_v8hi_v8hi
6909 = build_function_type_list (V8HI_type_node,
6910 V8HI_type_node, V8HI_type_node,
6911 V8HI_type_node, NULL_TREE);
6912 tree v4si_ftype_v8hi_v8hi_v4si
6913 = build_function_type_list (V4SI_type_node,
6914 V8HI_type_node, V8HI_type_node,
6915 V4SI_type_node, NULL_TREE);
6916 tree v4si_ftype_v16qi_v16qi_v4si
6917 = build_function_type_list (V4SI_type_node,
6918 V16QI_type_node, V16QI_type_node,
6919 V4SI_type_node, NULL_TREE);
6920 tree v16qi_ftype_v16qi_v16qi
6921 = build_function_type_list (V16QI_type_node,
6922 V16QI_type_node, V16QI_type_node, NULL_TREE);
6923 tree v4si_ftype_v4sf_v4sf
6924 = build_function_type_list (V4SI_type_node,
6925 V4SF_type_node, V4SF_type_node, NULL_TREE);
6926 tree v8hi_ftype_v16qi_v16qi
6927 = build_function_type_list (V8HI_type_node,
6928 V16QI_type_node, V16QI_type_node, NULL_TREE);
6929 tree v4si_ftype_v8hi_v8hi
6930 = build_function_type_list (V4SI_type_node,
6931 V8HI_type_node, V8HI_type_node, NULL_TREE);
6932 tree v8hi_ftype_v4si_v4si
6933 = build_function_type_list (V8HI_type_node,
6934 V4SI_type_node, V4SI_type_node, NULL_TREE);
6935 tree v16qi_ftype_v8hi_v8hi
6936 = build_function_type_list (V16QI_type_node,
6937 V8HI_type_node, V8HI_type_node, NULL_TREE);
6938 tree v4si_ftype_v16qi_v4si
6939 = build_function_type_list (V4SI_type_node,
6940 V16QI_type_node, V4SI_type_node, NULL_TREE);
6941 tree v4si_ftype_v16qi_v16qi
6942 = build_function_type_list (V4SI_type_node,
6943 V16QI_type_node, V16QI_type_node, NULL_TREE);
6944 tree v4si_ftype_v8hi_v4si
6945 = build_function_type_list (V4SI_type_node,
6946 V8HI_type_node, V4SI_type_node, NULL_TREE);
6947 tree v4si_ftype_v8hi
6948 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6949 tree int_ftype_v4si_v4si
6950 = build_function_type_list (integer_type_node,
6951 V4SI_type_node, V4SI_type_node, NULL_TREE);
6952 tree int_ftype_v4sf_v4sf
6953 = build_function_type_list (integer_type_node,
6954 V4SF_type_node, V4SF_type_node, NULL_TREE);
6955 tree int_ftype_v16qi_v16qi
6956 = build_function_type_list (integer_type_node,
6957 V16QI_type_node, V16QI_type_node, NULL_TREE);
6958 tree int_ftype_v8hi_v8hi
6959 = build_function_type_list (integer_type_node,
6960 V8HI_type_node, V8HI_type_node, NULL_TREE);
6962 /* Add the simple ternary operators. */
6963 d = (struct builtin_description *) bdesc_3arg;
6964 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6967 enum machine_mode mode0, mode1, mode2, mode3;
6970 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6973 mode0 = insn_data[d->icode].operand[0].mode;
6974 mode1 = insn_data[d->icode].operand[1].mode;
6975 mode2 = insn_data[d->icode].operand[2].mode;
6976 mode3 = insn_data[d->icode].operand[3].mode;
6978 /* When all four are of the same mode. */
6979 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6984 type = v4si_ftype_v4si_v4si_v4si;
6987 type = v4sf_ftype_v4sf_v4sf_v4sf;
6990 type = v8hi_ftype_v8hi_v8hi_v8hi;
6993 type = v16qi_ftype_v16qi_v16qi_v16qi;
6999 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7004 type = v4si_ftype_v4si_v4si_v16qi;
7007 type = v4sf_ftype_v4sf_v4sf_v16qi;
7010 type = v8hi_ftype_v8hi_v8hi_v16qi;
7013 type = v16qi_ftype_v16qi_v16qi_v16qi;
7019 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7020 && mode3 == V4SImode)
7021 type = v4si_ftype_v16qi_v16qi_v4si;
7022 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7023 && mode3 == V4SImode)
7024 type = v4si_ftype_v8hi_v8hi_v4si;
7025 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7026 && mode3 == V4SImode)
7027 type = v4sf_ftype_v4sf_v4sf_v4si;
7029 /* vchar, vchar, vchar, 4 bit literal. */
7030 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7032 type = v16qi_ftype_v16qi_v16qi_char;
7034 /* vshort, vshort, vshort, 4 bit literal. */
7035 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7037 type = v8hi_ftype_v8hi_v8hi_char;
7039 /* vint, vint, vint, 4 bit literal. */
7040 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7042 type = v4si_ftype_v4si_v4si_char;
7044 /* vfloat, vfloat, vfloat, 4 bit literal. */
7045 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7047 type = v4sf_ftype_v4sf_v4sf_char;
7052 def_builtin (d->mask, d->name, type, d->code);
7055 /* Add the simple binary operators. */
7056 d = (struct builtin_description *) bdesc_2arg;
7057 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7059 enum machine_mode mode0, mode1, mode2;
7062 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7065 mode0 = insn_data[d->icode].operand[0].mode;
7066 mode1 = insn_data[d->icode].operand[1].mode;
7067 mode2 = insn_data[d->icode].operand[2].mode;
7069 /* When all three operands are of the same mode. */
7070 if (mode0 == mode1 && mode1 == mode2)
7075 type = v4sf_ftype_v4sf_v4sf;
7078 type = v4si_ftype_v4si_v4si;
7081 type = v16qi_ftype_v16qi_v16qi;
7084 type = v8hi_ftype_v8hi_v8hi;
7087 type = v2si_ftype_v2si_v2si;
7090 type = v2sf_ftype_v2sf_v2sf;
7093 type = int_ftype_int_int;
7100 /* A few other combos we really don't want to do manually. */
7102 /* vint, vfloat, vfloat. */
7103 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7104 type = v4si_ftype_v4sf_v4sf;
7106 /* vshort, vchar, vchar. */
7107 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7108 type = v8hi_ftype_v16qi_v16qi;
7110 /* vint, vshort, vshort. */
7111 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7112 type = v4si_ftype_v8hi_v8hi;
7114 /* vshort, vint, vint. */
7115 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7116 type = v8hi_ftype_v4si_v4si;
7118 /* vchar, vshort, vshort. */
7119 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7120 type = v16qi_ftype_v8hi_v8hi;
7122 /* vint, vchar, vint. */
7123 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7124 type = v4si_ftype_v16qi_v4si;
7126 /* vint, vchar, vchar. */
7127 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7128 type = v4si_ftype_v16qi_v16qi;
7130 /* vint, vshort, vint. */
7131 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7132 type = v4si_ftype_v8hi_v4si;
7134 /* vint, vint, 5 bit literal. */
7135 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7136 type = v4si_ftype_v4si_char;
7138 /* vshort, vshort, 5 bit literal. */
7139 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7140 type = v8hi_ftype_v8hi_char;
7142 /* vchar, vchar, 5 bit literal. */
7143 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7144 type = v16qi_ftype_v16qi_char;
7146 /* vfloat, vint, 5 bit literal. */
7147 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7148 type = v4sf_ftype_v4si_char;
7150 /* vint, vfloat, 5 bit literal. */
7151 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7152 type = v4si_ftype_v4sf_char;
7154 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7155 type = v2si_ftype_int_int;
7157 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7158 type = v2si_ftype_v2si_char;
7160 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7161 type = v2si_ftype_int_char;
7164 else if (mode0 == SImode)
7169 type = int_ftype_v4si_v4si;
7172 type = int_ftype_v4sf_v4sf;
7175 type = int_ftype_v16qi_v16qi;
7178 type = int_ftype_v8hi_v8hi;
7188 def_builtin (d->mask, d->name, type, d->code);
7191 /* Add the simple unary operators. */
7192 d = (struct builtin_description *) bdesc_1arg;
7193 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7195 enum machine_mode mode0, mode1;
7198 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7201 mode0 = insn_data[d->icode].operand[0].mode;
7202 mode1 = insn_data[d->icode].operand[1].mode;
7204 if (mode0 == V4SImode && mode1 == QImode)
7205 type = v4si_ftype_char;
7206 else if (mode0 == V8HImode && mode1 == QImode)
7207 type = v8hi_ftype_char;
7208 else if (mode0 == V16QImode && mode1 == QImode)
7209 type = v16qi_ftype_char;
7210 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7211 type = v4sf_ftype_v4sf;
7212 else if (mode0 == V8HImode && mode1 == V16QImode)
7213 type = v8hi_ftype_v16qi;
7214 else if (mode0 == V4SImode && mode1 == V8HImode)
7215 type = v4si_ftype_v8hi;
7216 else if (mode0 == V2SImode && mode1 == V2SImode)
7217 type = v2si_ftype_v2si;
7218 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7219 type = v2sf_ftype_v2sf;
7220 else if (mode0 == V2SFmode && mode1 == V2SImode)
7221 type = v2sf_ftype_v2si;
7222 else if (mode0 == V2SImode && mode1 == V2SFmode)
7223 type = v2si_ftype_v2sf;
7224 else if (mode0 == V2SImode && mode1 == QImode)
7225 type = v2si_ftype_char;
7229 def_builtin (d->mask, d->name, type, d->code);
7234 rs6000_init_libfuncs (void)
7236 if (!TARGET_HARD_FLOAT)
7239 if (DEFAULT_ABI != ABI_V4)
7241 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7243 /* AIX library routines for float->int conversion. */
7244 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7245 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7248 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7249 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7250 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7251 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7252 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7256 /* 32-bit SVR4 quad floating point routines. */
7258 set_optab_libfunc (add_optab, TFmode, "_q_add");
7259 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7260 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7261 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7262 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7263 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7264 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7266 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7267 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7268 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7269 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7270 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7271 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7273 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7274 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7275 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7276 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7277 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7278 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7279 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7283 /* Expand a block move operation, and return 1 if successful. Return 0
7284 if we should let the compiler generate normal code.
7286 operands[0] is the destination
7287 operands[1] is the source
7288 operands[2] is the length
7289 operands[3] is the alignment */
7291 #define MAX_MOVE_REG 4
7294 expand_block_move (rtx operands[])
7296 rtx orig_dest = operands[0];
7297 rtx orig_src = operands[1];
7298 rtx bytes_rtx = operands[2];
7299 rtx align_rtx = operands[3];
7300 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7305 rtx stores[MAX_MOVE_REG];
7308 /* If this is not a fixed size move, just call memcpy */
7312 /* If this is not a fixed size alignment, abort */
7313 if (GET_CODE (align_rtx) != CONST_INT)
7315 align = INTVAL (align_rtx);
7317 /* Anything to move? */
7318 bytes = INTVAL (bytes_rtx);
7322 /* store_one_arg depends on expand_block_move to handle at least the size of
7323 reg_parm_stack_space. */
7324 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7327 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7330 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7331 rtx (*mov) (rtx, rtx);
7333 enum machine_mode mode = BLKmode;
7337 && bytes > 24 /* move up to 32 bytes at a time */
7345 && ! fixed_regs[12])
7347 move_bytes = (bytes > 32) ? 32 : bytes;
7348 gen_func.movstrsi = gen_movstrsi_8reg;
7350 else if (TARGET_STRING
7351 && bytes > 16 /* move up to 24 bytes at a time */
7357 && ! fixed_regs[10])
7359 move_bytes = (bytes > 24) ? 24 : bytes;
7360 gen_func.movstrsi = gen_movstrsi_6reg;
7362 else if (TARGET_STRING
7363 && bytes > 8 /* move up to 16 bytes at a time */
7369 move_bytes = (bytes > 16) ? 16 : bytes;
7370 gen_func.movstrsi = gen_movstrsi_4reg;
7372 else if (bytes >= 8 && TARGET_POWERPC64
7373 /* 64-bit loads and stores require word-aligned
7375 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7379 gen_func.mov = gen_movdi;
7381 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7382 { /* move up to 8 bytes at a time */
7383 move_bytes = (bytes > 8) ? 8 : bytes;
7384 gen_func.movstrsi = gen_movstrsi_2reg;
7386 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7387 { /* move 4 bytes */
7390 gen_func.mov = gen_movsi;
7392 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7393 { /* move 2 bytes */
7396 gen_func.mov = gen_movhi;
7398 else if (TARGET_STRING && bytes > 1)
7399 { /* move up to 4 bytes at a time */
7400 move_bytes = (bytes > 4) ? 4 : bytes;
7401 gen_func.movstrsi = gen_movstrsi_1reg;
7403 else /* move 1 byte at a time */
7407 gen_func.mov = gen_movqi;
7410 src = adjust_address (orig_src, mode, offset);
7411 dest = adjust_address (orig_dest, mode, offset);
7413 if (mode != BLKmode)
7415 rtx tmp_reg = gen_reg_rtx (mode);
7417 emit_insn ((*gen_func.mov) (tmp_reg, src));
7418 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7421 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7424 for (i = 0; i < num_reg; i++)
7425 emit_insn (stores[i]);
7429 if (mode == BLKmode)
7431 /* Move the address into scratch registers. The movstrsi
7432 patterns require zero offset. */
7433 if (!REG_P (XEXP (src, 0)))
7435 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7436 src = replace_equiv_address (src, src_reg);
7438 set_mem_size (src, GEN_INT (move_bytes));
7440 if (!REG_P (XEXP (dest, 0)))
7442 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7443 dest = replace_equiv_address (dest, dest_reg);
7445 set_mem_size (dest, GEN_INT (move_bytes));
7447 emit_insn ((*gen_func.movstrsi) (dest, src,
7448 GEN_INT (move_bytes & 31),
7457 /* Return 1 if OP is a load multiple operation. It is known to be a
7458 PARALLEL and the first section will be tested. */
7461 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7463 int count = XVECLEN (op, 0);
7464 unsigned int dest_regno;
7468 /* Perform a quick check so we don't blow up below. */
7470 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7471 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7472 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7475 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7476 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7478 for (i = 1; i < count; i++)
7480 rtx elt = XVECEXP (op, 0, i);
7482 if (GET_CODE (elt) != SET
7483 || GET_CODE (SET_DEST (elt)) != REG
7484 || GET_MODE (SET_DEST (elt)) != SImode
7485 || REGNO (SET_DEST (elt)) != dest_regno + i
7486 || GET_CODE (SET_SRC (elt)) != MEM
7487 || GET_MODE (SET_SRC (elt)) != SImode
7488 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7489 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7490 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7491 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7498 /* Similar, but tests for store multiple. Here, the second vector element
7499 is a CLOBBER. It will be tested later. */
7502 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7504 int count = XVECLEN (op, 0) - 1;
7505 unsigned int src_regno;
7509 /* Perform a quick check so we don't blow up below. */
7511 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7512 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7513 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7516 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7517 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7519 for (i = 1; i < count; i++)
7521 rtx elt = XVECEXP (op, 0, i + 1);
7523 if (GET_CODE (elt) != SET
7524 || GET_CODE (SET_SRC (elt)) != REG
7525 || GET_MODE (SET_SRC (elt)) != SImode
7526 || REGNO (SET_SRC (elt)) != src_regno + i
7527 || GET_CODE (SET_DEST (elt)) != MEM
7528 || GET_MODE (SET_DEST (elt)) != SImode
7529 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7530 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7531 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7532 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7539 /* Return a string to perform a load_multiple operation.
7540 operands[0] is the vector.
7541 operands[1] is the source address.
7542 operands[2] is the first destination register. */
7545 rs6000_output_load_multiple (rtx operands[3])
7547 /* We have to handle the case where the pseudo used to contain the address
7548 is assigned to one of the output registers. */
7550 int words = XVECLEN (operands[0], 0);
7553 if (XVECLEN (operands[0], 0) == 1)
7554 return "{l|lwz} %2,0(%1)";
7556 for (i = 0; i < words; i++)
7557 if (refers_to_regno_p (REGNO (operands[2]) + i,
7558 REGNO (operands[2]) + i + 1, operands[1], 0))
7562 xop[0] = GEN_INT (4 * (words-1));
7563 xop[1] = operands[1];
7564 xop[2] = operands[2];
7565 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7570 xop[0] = GEN_INT (4 * (words-1));
7571 xop[1] = operands[1];
7572 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7573 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7578 for (j = 0; j < words; j++)
7581 xop[0] = GEN_INT (j * 4);
7582 xop[1] = operands[1];
7583 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7584 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7586 xop[0] = GEN_INT (i * 4);
7587 xop[1] = operands[1];
7588 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7593 return "{lsi|lswi} %2,%1,%N0";
7596 /* Return 1 for a parallel vrsave operation. */
7599 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7601 int count = XVECLEN (op, 0);
7602 unsigned int dest_regno, src_regno;
7606 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7607 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7608 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7611 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7612 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7614 if (dest_regno != VRSAVE_REGNO
7615 && src_regno != VRSAVE_REGNO)
7618 for (i = 1; i < count; i++)
7620 rtx elt = XVECEXP (op, 0, i);
7622 if (GET_CODE (elt) != CLOBBER
7623 && GET_CODE (elt) != SET)
7630 /* Return 1 for an PARALLEL suitable for mfcr. */
7633 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7635 int count = XVECLEN (op, 0);
7638 /* Perform a quick check so we don't blow up below. */
7640 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7641 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7642 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7645 for (i = 0; i < count; i++)
7647 rtx exp = XVECEXP (op, 0, i);
7652 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7654 if (GET_CODE (src_reg) != REG
7655 || GET_MODE (src_reg) != CCmode
7656 || ! CR_REGNO_P (REGNO (src_reg)))
7659 if (GET_CODE (exp) != SET
7660 || GET_CODE (SET_DEST (exp)) != REG
7661 || GET_MODE (SET_DEST (exp)) != SImode
7662 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7664 unspec = SET_SRC (exp);
7665 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7667 if (GET_CODE (unspec) != UNSPEC
7668 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7669 || XVECLEN (unspec, 0) != 2
7670 || XVECEXP (unspec, 0, 0) != src_reg
7671 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7672 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7678 /* Return 1 for an PARALLEL suitable for mtcrf. */
7681 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7683 int count = XVECLEN (op, 0);
7687 /* Perform a quick check so we don't blow up below. */
7689 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7690 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7691 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7693 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7695 if (GET_CODE (src_reg) != REG
7696 || GET_MODE (src_reg) != SImode
7697 || ! INT_REGNO_P (REGNO (src_reg)))
7700 for (i = 0; i < count; i++)
7702 rtx exp = XVECEXP (op, 0, i);
7706 if (GET_CODE (exp) != SET
7707 || GET_CODE (SET_DEST (exp)) != REG
7708 || GET_MODE (SET_DEST (exp)) != CCmode
7709 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7711 unspec = SET_SRC (exp);
7712 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7714 if (GET_CODE (unspec) != UNSPEC
7715 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7716 || XVECLEN (unspec, 0) != 2
7717 || XVECEXP (unspec, 0, 0) != src_reg
7718 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7719 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7725 /* Return 1 for an PARALLEL suitable for lmw. */
7728 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7730 int count = XVECLEN (op, 0);
7731 unsigned int dest_regno;
7733 unsigned int base_regno;
7734 HOST_WIDE_INT offset;
7737 /* Perform a quick check so we don't blow up below. */
7739 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7740 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7741 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7744 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7745 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7748 || count != 32 - (int) dest_regno)
7751 if (legitimate_indirect_address_p (src_addr, 0))
7754 base_regno = REGNO (src_addr);
7755 if (base_regno == 0)
7758 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7760 offset = INTVAL (XEXP (src_addr, 1));
7761 base_regno = REGNO (XEXP (src_addr, 0));
7766 for (i = 0; i < count; i++)
7768 rtx elt = XVECEXP (op, 0, i);
7771 HOST_WIDE_INT newoffset;
7773 if (GET_CODE (elt) != SET
7774 || GET_CODE (SET_DEST (elt)) != REG
7775 || GET_MODE (SET_DEST (elt)) != SImode
7776 || REGNO (SET_DEST (elt)) != dest_regno + i
7777 || GET_CODE (SET_SRC (elt)) != MEM
7778 || GET_MODE (SET_SRC (elt)) != SImode)
7780 newaddr = XEXP (SET_SRC (elt), 0);
7781 if (legitimate_indirect_address_p (newaddr, 0))
7786 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7788 addr_reg = XEXP (newaddr, 0);
7789 newoffset = INTVAL (XEXP (newaddr, 1));
7793 if (REGNO (addr_reg) != base_regno
7794 || newoffset != offset + 4 * i)
7801 /* Return 1 for an PARALLEL suitable for stmw. */
7804 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7806 int count = XVECLEN (op, 0);
7807 unsigned int src_regno;
7809 unsigned int base_regno;
7810 HOST_WIDE_INT offset;
7813 /* Perform a quick check so we don't blow up below. */
7815 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7816 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7817 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7820 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7821 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7824 || count != 32 - (int) src_regno)
7827 if (legitimate_indirect_address_p (dest_addr, 0))
7830 base_regno = REGNO (dest_addr);
7831 if (base_regno == 0)
7834 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7836 offset = INTVAL (XEXP (dest_addr, 1));
7837 base_regno = REGNO (XEXP (dest_addr, 0));
7842 for (i = 0; i < count; i++)
7844 rtx elt = XVECEXP (op, 0, i);
7847 HOST_WIDE_INT newoffset;
7849 if (GET_CODE (elt) != SET
7850 || GET_CODE (SET_SRC (elt)) != REG
7851 || GET_MODE (SET_SRC (elt)) != SImode
7852 || REGNO (SET_SRC (elt)) != src_regno + i
7853 || GET_CODE (SET_DEST (elt)) != MEM
7854 || GET_MODE (SET_DEST (elt)) != SImode)
7856 newaddr = XEXP (SET_DEST (elt), 0);
7857 if (legitimate_indirect_address_p (newaddr, 0))
7862 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7864 addr_reg = XEXP (newaddr, 0);
7865 newoffset = INTVAL (XEXP (newaddr, 1));
7869 if (REGNO (addr_reg) != base_regno
7870 || newoffset != offset + 4 * i)
7877 /* A validation routine: say whether CODE, a condition code, and MODE
7878 match. The other alternatives either don't make sense or should
7879 never be generated. */
7882 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
7884 if (GET_RTX_CLASS (code) != '<'
7885 || GET_MODE_CLASS (mode) != MODE_CC)
7888 /* These don't make sense. */
7889 if ((code == GT || code == LT || code == GE || code == LE)
7890 && mode == CCUNSmode)
7893 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7894 && mode != CCUNSmode)
7897 if (mode != CCFPmode
7898 && (code == ORDERED || code == UNORDERED
7899 || code == UNEQ || code == LTGT
7900 || code == UNGT || code == UNLT
7901 || code == UNGE || code == UNLE))
7904 /* These should never be generated except for
7905 flag_finite_math_only. */
7906 if (mode == CCFPmode
7907 && ! flag_finite_math_only
7908 && (code == LE || code == GE
7909 || code == UNEQ || code == LTGT
7910 || code == UNGT || code == UNLT))
7913 /* These are invalid; the information is not there. */
7914 if (mode == CCEQmode
7915 && code != EQ && code != NE)
7919 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7920 We only check the opcode against the mode of the CC value here. */
7923 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7925 enum rtx_code code = GET_CODE (op);
7926 enum machine_mode cc_mode;
7928 if (GET_RTX_CLASS (code) != '<')
7931 cc_mode = GET_MODE (XEXP (op, 0));
7932 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7935 validate_condition_mode (code, cc_mode);
7940 /* Return 1 if OP is a comparison operation that is valid for a branch
7941 insn and which is true if the corresponding bit in the CC register
7945 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
7949 if (! branch_comparison_operator (op, mode))
7952 code = GET_CODE (op);
7953 return (code == EQ || code == LT || code == GT
7954 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7955 || code == LTU || code == GTU
7956 || code == UNORDERED);
7959 /* Return 1 if OP is a comparison operation that is valid for an scc
7960 insn: it must be a positive comparison. */
7963 scc_comparison_operator (rtx op, enum machine_mode mode)
7965 return branch_positive_comparison_operator (op, mode);
7969 trap_comparison_operator (rtx op, enum machine_mode mode)
7971 if (mode != VOIDmode && mode != GET_MODE (op))
7973 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7977 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7979 enum rtx_code code = GET_CODE (op);
7980 return (code == AND || code == IOR || code == XOR);
7984 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7986 enum rtx_code code = GET_CODE (op);
7987 return (code == IOR || code == XOR);
7991 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7993 enum rtx_code code = GET_CODE (op);
7994 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7997 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7998 mask required to convert the result of a rotate insn into a shift
7999 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8002 includes_lshift_p (rtx shiftop, rtx andop)
8004 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8006 shift_mask <<= INTVAL (shiftop);
8008 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8011 /* Similar, but for right shift. */
8014 includes_rshift_p (rtx shiftop, rtx andop)
8016 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8018 shift_mask >>= INTVAL (shiftop);
8020 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8023 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8024 to perform a left shift. It must have exactly SHIFTOP least
8025 significant 0's, then one or more 1's, then zero or more 0's. */
8028 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8030 if (GET_CODE (andop) == CONST_INT)
8032 HOST_WIDE_INT c, lsb, shift_mask;
8035 if (c == 0 || c == ~0)
8039 shift_mask <<= INTVAL (shiftop);
8041 /* Find the least significant one bit. */
8044 /* It must coincide with the LSB of the shift mask. */
8045 if (-lsb != shift_mask)
8048 /* Invert to look for the next transition (if any). */
8051 /* Remove the low group of ones (originally low group of zeros). */
8054 /* Again find the lsb, and check we have all 1's above. */
8058 else if (GET_CODE (andop) == CONST_DOUBLE
8059 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8061 HOST_WIDE_INT low, high, lsb;
8062 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8064 low = CONST_DOUBLE_LOW (andop);
8065 if (HOST_BITS_PER_WIDE_INT < 64)
8066 high = CONST_DOUBLE_HIGH (andop);
8068 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8069 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8072 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8074 shift_mask_high = ~0;
8075 if (INTVAL (shiftop) > 32)
8076 shift_mask_high <<= INTVAL (shiftop) - 32;
8080 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8087 return high == -lsb;
8090 shift_mask_low = ~0;
8091 shift_mask_low <<= INTVAL (shiftop);
8095 if (-lsb != shift_mask_low)
8098 if (HOST_BITS_PER_WIDE_INT < 64)
8103 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8106 return high == -lsb;
8110 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8116 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8117 to perform a left shift. It must have SHIFTOP or more least
8118 significant 0's, with the remainder of the word 1's. */
8121 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8123 if (GET_CODE (andop) == CONST_INT)
8125 HOST_WIDE_INT c, lsb, shift_mask;
8128 shift_mask <<= INTVAL (shiftop);
8131 /* Find the least significant one bit. */
8134 /* It must be covered by the shift mask.
8135 This test also rejects c == 0. */
8136 if ((lsb & shift_mask) == 0)
8139 /* Check we have all 1's above the transition, and reject all 1's. */
8140 return c == -lsb && lsb != 1;
8142 else if (GET_CODE (andop) == CONST_DOUBLE
8143 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8145 HOST_WIDE_INT low, lsb, shift_mask_low;
8147 low = CONST_DOUBLE_LOW (andop);
8149 if (HOST_BITS_PER_WIDE_INT < 64)
8151 HOST_WIDE_INT high, shift_mask_high;
8153 high = CONST_DOUBLE_HIGH (andop);
8157 shift_mask_high = ~0;
8158 if (INTVAL (shiftop) > 32)
8159 shift_mask_high <<= INTVAL (shiftop) - 32;
8163 if ((lsb & shift_mask_high) == 0)
8166 return high == -lsb;
8172 shift_mask_low = ~0;
8173 shift_mask_low <<= INTVAL (shiftop);
8177 if ((lsb & shift_mask_low) == 0)
8180 return low == -lsb && lsb != 1;
8186 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8187 for lfq and stfq insns.
8189 Note reg1 and reg2 *must* be hard registers. To be sure we will
8190 abort if we are passed pseudo registers. */
8193 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8195 /* We might have been passed a SUBREG. */
8196 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8199 return (REGNO (reg1) == REGNO (reg2) - 1);
8202 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8203 addr1 and addr2 must be in consecutive memory locations
8204 (addr2 == addr1 + 8). */
8207 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8212 /* Extract an offset (if used) from the first addr. */
8213 if (GET_CODE (addr1) == PLUS)
8215 /* If not a REG, return zero. */
8216 if (GET_CODE (XEXP (addr1, 0)) != REG)
8220 reg1 = REGNO (XEXP (addr1, 0));
8221 /* The offset must be constant! */
8222 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8224 offset1 = INTVAL (XEXP (addr1, 1));
8227 else if (GET_CODE (addr1) != REG)
8231 reg1 = REGNO (addr1);
8232 /* This was a simple (mem (reg)) expression. Offset is 0. */
8236 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8237 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8238 register as addr1. */
8239 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8241 if (GET_CODE (addr2) != PLUS)
8244 if (GET_CODE (XEXP (addr2, 0)) != REG
8245 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8248 if (reg1 != REGNO (XEXP (addr2, 0)))
8251 /* The offset for the second addr must be 8 more than the first addr. */
8252 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8255 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8260 /* Return the register class of a scratch register needed to copy IN into
8261 or out of a register in CLASS in MODE. If it can be done directly,
8262 NO_REGS is returned. */
8265 secondary_reload_class (enum reg_class class,
8266 enum machine_mode mode ATTRIBUTE_UNUSED, rtx in)
8270 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8272 && MACHOPIC_INDIRECT
8276 /* We cannot copy a symbolic operand directly into anything
8277 other than BASE_REGS for TARGET_ELF. So indicate that a
8278 register from BASE_REGS is needed as an intermediate
8281 On Darwin, pic addresses require a load from memory, which
8282 needs a base register. */
8283 if (class != BASE_REGS
8284 && (GET_CODE (in) == SYMBOL_REF
8285 || GET_CODE (in) == HIGH
8286 || GET_CODE (in) == LABEL_REF
8287 || GET_CODE (in) == CONST))
8291 if (GET_CODE (in) == REG)
8294 if (regno >= FIRST_PSEUDO_REGISTER)
8296 regno = true_regnum (in);
8297 if (regno >= FIRST_PSEUDO_REGISTER)
8301 else if (GET_CODE (in) == SUBREG)
8303 regno = true_regnum (in);
8304 if (regno >= FIRST_PSEUDO_REGISTER)
8310 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8312 if (class == GENERAL_REGS || class == BASE_REGS
8313 || (regno >= 0 && INT_REGNO_P (regno)))
8316 /* Constants, memory, and FP registers can go into FP registers. */
8317 if ((regno == -1 || FP_REGNO_P (regno))
8318 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8321 /* Memory, and AltiVec registers can go into AltiVec registers. */
8322 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8323 && class == ALTIVEC_REGS)
8326 /* We can copy among the CR registers. */
8327 if ((class == CR_REGS || class == CR0_REGS)
8328 && regno >= 0 && CR_REGNO_P (regno))
8331 /* Otherwise, we need GENERAL_REGS. */
8332 return GENERAL_REGS;
8335 /* Given a comparison operation, return the bit number in CCR to test. We
8336 know this is a valid comparison.
8338 SCC_P is 1 if this is for an scc. That means that %D will have been
8339 used instead of %C, so the bits will be in different places.
8341 Return -1 if OP isn't a valid comparison for some reason. */
8344 ccr_bit (rtx op, int scc_p)
8346 enum rtx_code code = GET_CODE (op);
8347 enum machine_mode cc_mode;
8352 if (GET_RTX_CLASS (code) != '<')
8357 if (GET_CODE (reg) != REG
8358 || ! CR_REGNO_P (REGNO (reg)))
8361 cc_mode = GET_MODE (reg);
8362 cc_regnum = REGNO (reg);
8363 base_bit = 4 * (cc_regnum - CR0_REGNO);
8365 validate_condition_mode (code, cc_mode);
8367 /* When generating a sCOND operation, only positive conditions are
8369 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8370 && code != GTU && code != LTU)
8376 if (TARGET_E500 && !TARGET_FPRS
8377 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8378 return base_bit + 1;
8379 return scc_p ? base_bit + 3 : base_bit + 2;
8381 if (TARGET_E500 && !TARGET_FPRS
8382 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8383 return base_bit + 1;
8384 return base_bit + 2;
8385 case GT: case GTU: case UNLE:
8386 return base_bit + 1;
8387 case LT: case LTU: case UNGE:
8389 case ORDERED: case UNORDERED:
8390 return base_bit + 3;
8393 /* If scc, we will have done a cror to put the bit in the
8394 unordered position. So test that bit. For integer, this is ! LT
8395 unless this is an scc insn. */
8396 return scc_p ? base_bit + 3 : base_bit;
8399 return scc_p ? base_bit + 3 : base_bit + 1;
8406 /* Return the GOT register. */
8409 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8411 /* The second flow pass currently (June 1999) can't update
8412 regs_ever_live without disturbing other parts of the compiler, so
8413 update it here to make the prolog/epilogue code happy. */
8414 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8415 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8417 current_function_uses_pic_offset_table = 1;
8419 return pic_offset_table_rtx;
8422 /* Function to init struct machine_function.
8423 This will be called, via a pointer variable,
8424 from push_function_context. */
8426 static struct machine_function *
8427 rs6000_init_machine_status (void)
8429 return ggc_alloc_cleared (sizeof (machine_function));
8432 /* These macros test for integers and extract the low-order bits. */
8434 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8435 && GET_MODE (X) == VOIDmode)
8437 #define INT_LOWPART(X) \
8438 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8444 unsigned long val = INT_LOWPART (op);
8446 /* If the high bit is zero, the value is the first 1 bit we find
8448 if ((val & 0x80000000) == 0)
8450 if ((val & 0xffffffff) == 0)
8454 while (((val <<= 1) & 0x80000000) == 0)
8459 /* If the high bit is set and the low bit is not, or the mask is all
8460 1's, the value is zero. */
8461 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8464 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8467 while (((val >>= 1) & 1) != 0)
8477 unsigned long val = INT_LOWPART (op);
8479 /* If the low bit is zero, the value is the first 1 bit we find from
8483 if ((val & 0xffffffff) == 0)
8487 while (((val >>= 1) & 1) == 0)
8493 /* If the low bit is set and the high bit is not, or the mask is all
8494 1's, the value is 31. */
8495 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8498 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8501 while (((val <<= 1) & 0x80000000) != 0)
8507 /* Locate some local-dynamic symbol still in use by this function
8508 so that we can print its name in some tls_ld pattern. */
8511 rs6000_get_some_local_dynamic_name (void)
8515 if (cfun->machine->some_ld_name)
8516 return cfun->machine->some_ld_name;
8518 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8520 && for_each_rtx (&PATTERN (insn),
8521 rs6000_get_some_local_dynamic_name_1, 0))
8522 return cfun->machine->some_ld_name;
8527 /* Helper function for rs6000_get_some_local_dynamic_name. */
8530 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8534 if (GET_CODE (x) == SYMBOL_REF)
8536 const char *str = XSTR (x, 0);
8537 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8539 cfun->machine->some_ld_name = str;
8547 /* Print an operand. Recognize special options, documented below. */
8550 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8551 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8553 #define SMALL_DATA_RELOC "sda21"
8554 #define SMALL_DATA_REG 0
8558 print_operand (FILE *file, rtx x, int code)
8562 unsigned HOST_WIDE_INT uval;
8567 /* Write out an instruction after the call which may be replaced
8568 with glue code by the loader. This depends on the AIX version. */
8569 asm_fprintf (file, RS6000_CALL_GLUE);
8572 /* %a is output_address. */
8575 /* If X is a constant integer whose low-order 5 bits are zero,
8576 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8577 in the AIX assembler where "sri" with a zero shift count
8578 writes a trash instruction. */
8579 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8586 /* If constant, low-order 16 bits of constant, unsigned.
8587 Otherwise, write normally. */
8589 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8591 print_operand (file, x, 0);
8595 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8596 for 64-bit mask direction. */
8597 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8600 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8604 /* X is a CR register. Print the number of the EQ bit of the CR */
8605 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8606 output_operand_lossage ("invalid %%E value");
8608 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8612 /* X is a CR register. Print the shift count needed to move it
8613 to the high-order four bits. */
8614 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8615 output_operand_lossage ("invalid %%f value");
8617 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8621 /* Similar, but print the count for the rotate in the opposite
8623 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8624 output_operand_lossage ("invalid %%F value");
8626 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8630 /* X is a constant integer. If it is negative, print "m",
8631 otherwise print "z". This is to make an aze or ame insn. */
8632 if (GET_CODE (x) != CONST_INT)
8633 output_operand_lossage ("invalid %%G value");
8634 else if (INTVAL (x) >= 0)
8641 /* If constant, output low-order five bits. Otherwise, write
8644 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8646 print_operand (file, x, 0);
8650 /* If constant, output low-order six bits. Otherwise, write
8653 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8655 print_operand (file, x, 0);
8659 /* Print `i' if this is a constant, else nothing. */
8665 /* Write the bit number in CCR for jump. */
8668 output_operand_lossage ("invalid %%j code");
8670 fprintf (file, "%d", i);
8674 /* Similar, but add one for shift count in rlinm for scc and pass
8675 scc flag to `ccr_bit'. */
8678 output_operand_lossage ("invalid %%J code");
8680 /* If we want bit 31, write a shift count of zero, not 32. */
8681 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8685 /* X must be a constant. Write the 1's complement of the
8688 output_operand_lossage ("invalid %%k value");
8690 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8694 /* X must be a symbolic constant on ELF. Write an
8695 expression suitable for an 'addi' that adds in the low 16
8697 if (GET_CODE (x) != CONST)
8699 print_operand_address (file, x);
8704 if (GET_CODE (XEXP (x, 0)) != PLUS
8705 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8706 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8707 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8708 output_operand_lossage ("invalid %%K value");
8709 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8711 /* For GNU as, there must be a non-alphanumeric character
8712 between 'l' and the number. The '-' is added by
8713 print_operand() already. */
8714 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8716 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8720 /* %l is output_asm_label. */
8723 /* Write second word of DImode or DFmode reference. Works on register
8724 or non-indexed memory only. */
8725 if (GET_CODE (x) == REG)
8726 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8727 else if (GET_CODE (x) == MEM)
8729 /* Handle possible auto-increment. Since it is pre-increment and
8730 we have already done it, we can just use an offset of word. */
8731 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8732 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8733 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8736 output_address (XEXP (adjust_address_nv (x, SImode,
8740 if (small_data_operand (x, GET_MODE (x)))
8741 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8742 reg_names[SMALL_DATA_REG]);
8747 /* MB value for a mask operand. */
8748 if (! mask_operand (x, SImode))
8749 output_operand_lossage ("invalid %%m value");
8751 fprintf (file, "%d", extract_MB (x));
8755 /* ME value for a mask operand. */
8756 if (! mask_operand (x, SImode))
8757 output_operand_lossage ("invalid %%M value");
8759 fprintf (file, "%d", extract_ME (x));
8762 /* %n outputs the negative of its operand. */
8765 /* Write the number of elements in the vector times 4. */
8766 if (GET_CODE (x) != PARALLEL)
8767 output_operand_lossage ("invalid %%N value");
8769 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8773 /* Similar, but subtract 1 first. */
8774 if (GET_CODE (x) != PARALLEL)
8775 output_operand_lossage ("invalid %%O value");
8777 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8781 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8783 || INT_LOWPART (x) < 0
8784 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8785 output_operand_lossage ("invalid %%p value");
8787 fprintf (file, "%d", i);
8791 /* The operand must be an indirect memory reference. The result
8792 is the register number. */
8793 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8794 || REGNO (XEXP (x, 0)) >= 32)
8795 output_operand_lossage ("invalid %%P value");
8797 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8801 /* This outputs the logical code corresponding to a boolean
8802 expression. The expression may have one or both operands
8803 negated (if one, only the first one). For condition register
8804 logical operations, it will also treat the negated
8805 CR codes as NOTs, but not handle NOTs of them. */
8807 const char *const *t = 0;
8809 enum rtx_code code = GET_CODE (x);
8810 static const char * const tbl[3][3] = {
8811 { "and", "andc", "nor" },
8812 { "or", "orc", "nand" },
8813 { "xor", "eqv", "xor" } };
8817 else if (code == IOR)
8819 else if (code == XOR)
8822 output_operand_lossage ("invalid %%q value");
8824 if (GET_CODE (XEXP (x, 0)) != NOT)
8828 if (GET_CODE (XEXP (x, 1)) == NOT)
8846 /* X is a CR register. Print the mask for `mtcrf'. */
8847 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8848 output_operand_lossage ("invalid %%R value");
8850 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8854 /* Low 5 bits of 32 - value */
8856 output_operand_lossage ("invalid %%s value");
8858 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8862 /* PowerPC64 mask position. All 0's is excluded.
8863 CONST_INT 32-bit mask is considered sign-extended so any
8864 transition must occur within the CONST_INT, not on the boundary. */
8865 if (! mask64_operand (x, DImode))
8866 output_operand_lossage ("invalid %%S value");
8868 uval = INT_LOWPART (x);
8870 if (uval & 1) /* Clear Left */
8872 #if HOST_BITS_PER_WIDE_INT > 64
8873 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8877 else /* Clear Right */
8880 #if HOST_BITS_PER_WIDE_INT > 64
8881 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8889 fprintf (file, "%d", i);
8893 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8894 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8897 /* Bit 3 is OV bit. */
8898 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8900 /* If we want bit 31, write a shift count of zero, not 32. */
8901 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8905 /* Print the symbolic name of a branch target register. */
8906 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8907 && REGNO (x) != COUNT_REGISTER_REGNUM))
8908 output_operand_lossage ("invalid %%T value");
8909 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8910 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8912 fputs ("ctr", file);
8916 /* High-order 16 bits of constant for use in unsigned operand. */
8918 output_operand_lossage ("invalid %%u value");
8920 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8921 (INT_LOWPART (x) >> 16) & 0xffff);
8925 /* High-order 16 bits of constant for use in signed operand. */
8927 output_operand_lossage ("invalid %%v value");
8929 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8930 (INT_LOWPART (x) >> 16) & 0xffff);
8934 /* Print `u' if this has an auto-increment or auto-decrement. */
8935 if (GET_CODE (x) == MEM
8936 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8937 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8942 /* Print the trap code for this operand. */
8943 switch (GET_CODE (x))
8946 fputs ("eq", file); /* 4 */
8949 fputs ("ne", file); /* 24 */
8952 fputs ("lt", file); /* 16 */
8955 fputs ("le", file); /* 20 */
8958 fputs ("gt", file); /* 8 */
8961 fputs ("ge", file); /* 12 */
8964 fputs ("llt", file); /* 2 */
8967 fputs ("lle", file); /* 6 */
8970 fputs ("lgt", file); /* 1 */
8973 fputs ("lge", file); /* 5 */
8981 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8984 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8985 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8987 print_operand (file, x, 0);
8991 /* MB value for a PowerPC64 rldic operand. */
8992 val = (GET_CODE (x) == CONST_INT
8993 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8998 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8999 if ((val <<= 1) < 0)
9002 #if HOST_BITS_PER_WIDE_INT == 32
9003 if (GET_CODE (x) == CONST_INT && i >= 0)
9004 i += 32; /* zero-extend high-part was all 0's */
9005 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9007 val = CONST_DOUBLE_LOW (x);
9014 for ( ; i < 64; i++)
9015 if ((val <<= 1) < 0)
9020 fprintf (file, "%d", i + 1);
9024 if (GET_CODE (x) == MEM
9025 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9030 /* Like 'L', for third word of TImode */
9031 if (GET_CODE (x) == REG)
9032 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9033 else if (GET_CODE (x) == MEM)
9035 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9036 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9037 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9039 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9040 if (small_data_operand (x, GET_MODE (x)))
9041 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9042 reg_names[SMALL_DATA_REG]);
9047 /* X is a SYMBOL_REF. Write out the name preceded by a
9048 period and without any trailing data in brackets. Used for function
9049 names. If we are configured for System V (or the embedded ABI) on
9050 the PowerPC, do not emit the period, since those systems do not use
9051 TOCs and the like. */
9052 if (GET_CODE (x) != SYMBOL_REF)
9055 if (XSTR (x, 0)[0] != '.')
9057 switch (DEFAULT_ABI)
9072 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9074 assemble_name (file, XSTR (x, 0));
9078 /* Like 'L', for last word of TImode. */
9079 if (GET_CODE (x) == REG)
9080 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9081 else if (GET_CODE (x) == MEM)
9083 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9084 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9085 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9087 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9088 if (small_data_operand (x, GET_MODE (x)))
9089 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9090 reg_names[SMALL_DATA_REG]);
9094 /* Print AltiVec or SPE memory operand. */
9099 if (GET_CODE (x) != MEM)
9107 if (GET_CODE (tmp) == REG)
9109 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9112 /* Handle [reg+UIMM]. */
9113 else if (GET_CODE (tmp) == PLUS &&
9114 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9118 if (GET_CODE (XEXP (tmp, 0)) != REG)
9121 x = INTVAL (XEXP (tmp, 1));
9122 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9126 /* Fall through. Must be [reg+reg]. */
9128 if (GET_CODE (tmp) == REG)
9129 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9130 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9132 if (REGNO (XEXP (tmp, 0)) == 0)
9133 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9134 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9136 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9137 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9145 if (GET_CODE (x) == REG)
9146 fprintf (file, "%s", reg_names[REGNO (x)]);
9147 else if (GET_CODE (x) == MEM)
9149 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9150 know the width from the mode. */
9151 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9152 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9153 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9154 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9155 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9156 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9158 output_address (XEXP (x, 0));
9161 output_addr_const (file, x);
9165 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9169 output_operand_lossage ("invalid %%xn code");
9173 /* Print the address of an operand. */
9176 print_operand_address (FILE *file, rtx x)
9178 if (GET_CODE (x) == REG)
9179 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9180 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9181 || GET_CODE (x) == LABEL_REF)
9183 output_addr_const (file, x);
9184 if (small_data_operand (x, GET_MODE (x)))
9185 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9186 reg_names[SMALL_DATA_REG]);
9187 else if (TARGET_TOC)
9190 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9192 if (REGNO (XEXP (x, 0)) == 0)
9193 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9194 reg_names[ REGNO (XEXP (x, 0)) ]);
9196 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9197 reg_names[ REGNO (XEXP (x, 1)) ]);
9199 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9200 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9201 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9203 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9204 && CONSTANT_P (XEXP (x, 1)))
9206 output_addr_const (file, XEXP (x, 1));
9207 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9211 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9212 && CONSTANT_P (XEXP (x, 1)))
9214 fprintf (file, "lo16(");
9215 output_addr_const (file, XEXP (x, 1));
9216 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9219 else if (legitimate_constant_pool_address_p (x))
9221 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9223 rtx contains_minus = XEXP (x, 1);
9227 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9228 turn it into (sym) for output_addr_const. */
9229 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9230 contains_minus = XEXP (contains_minus, 0);
9232 minus = XEXP (contains_minus, 0);
9233 symref = XEXP (minus, 0);
9234 XEXP (contains_minus, 0) = symref;
9239 name = XSTR (symref, 0);
9240 newname = alloca (strlen (name) + sizeof ("@toc"));
9241 strcpy (newname, name);
9242 strcat (newname, "@toc");
9243 XSTR (symref, 0) = newname;
9245 output_addr_const (file, XEXP (x, 1));
9247 XSTR (symref, 0) = name;
9248 XEXP (contains_minus, 0) = minus;
9251 output_addr_const (file, XEXP (x, 1));
9253 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9259 /* Target hook for assembling integer objects. The PowerPC version has
9260 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9261 is defined. It also needs to handle DI-mode objects on 64-bit
9265 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9267 #ifdef RELOCATABLE_NEEDS_FIXUP
9268 /* Special handling for SI values. */
9269 if (size == 4 && aligned_p)
9271 extern int in_toc_section (void);
9272 static int recurse = 0;
9274 /* For -mrelocatable, we mark all addresses that need to be fixed up
9275 in the .fixup section. */
9276 if (TARGET_RELOCATABLE
9277 && !in_toc_section ()
9278 && !in_text_section ()
9280 && GET_CODE (x) != CONST_INT
9281 && GET_CODE (x) != CONST_DOUBLE
9287 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9289 ASM_OUTPUT_LABEL (asm_out_file, buf);
9290 fprintf (asm_out_file, "\t.long\t(");
9291 output_addr_const (asm_out_file, x);
9292 fprintf (asm_out_file, ")@fixup\n");
9293 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9294 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9295 fprintf (asm_out_file, "\t.long\t");
9296 assemble_name (asm_out_file, buf);
9297 fprintf (asm_out_file, "\n\t.previous\n");
9301 /* Remove initial .'s to turn a -mcall-aixdesc function
9302 address into the address of the descriptor, not the function
9304 else if (GET_CODE (x) == SYMBOL_REF
9305 && XSTR (x, 0)[0] == '.'
9306 && DEFAULT_ABI == ABI_AIX)
9308 const char *name = XSTR (x, 0);
9309 while (*name == '.')
9312 fprintf (asm_out_file, "\t.long\t%s\n", name);
9316 #endif /* RELOCATABLE_NEEDS_FIXUP */
9317 return default_assemble_integer (x, size, aligned_p);
9320 #ifdef HAVE_GAS_HIDDEN
9321 /* Emit an assembler directive to set symbol visibility for DECL to
9325 rs6000_assemble_visibility (tree decl, int vis)
9327 /* Functions need to have their entry point symbol visibility set as
9328 well as their descriptor symbol visibility. */
9329 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9331 static const char * const visibility_types[] = {
9332 NULL, "internal", "hidden", "protected"
9335 const char *name, *type;
9337 name = ((* targetm.strip_name_encoding)
9338 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9339 type = visibility_types[vis];
9341 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9342 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9345 default_assemble_visibility (decl, vis);
9350 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9352 /* Reversal of FP compares takes care -- an ordered compare
9353 becomes an unordered compare and vice versa. */
9354 if (mode == CCFPmode
9355 && (!flag_finite_math_only
9356 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9357 || code == UNEQ || code == LTGT))
9358 return reverse_condition_maybe_unordered (code);
9360 return reverse_condition (code);
9363 /* Generate a compare for CODE. Return a brand-new rtx that
9364 represents the result of the compare. */
9367 rs6000_generate_compare (enum rtx_code code)
9369 enum machine_mode comp_mode;
9372 if (rs6000_compare_fp_p)
9373 comp_mode = CCFPmode;
9374 else if (code == GTU || code == LTU
9375 || code == GEU || code == LEU)
9376 comp_mode = CCUNSmode;
9380 /* First, the compare. */
9381 compare_result = gen_reg_rtx (comp_mode);
9383 /* SPE FP compare instructions on the GPRs. Yuck! */
9384 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9385 && rs6000_compare_fp_p)
9387 rtx cmp, or1, or2, or_result, compare_result2;
9395 cmp = flag_finite_math_only
9396 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9398 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9399 rs6000_compare_op1);
9407 cmp = flag_finite_math_only
9408 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9410 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9411 rs6000_compare_op1);
9419 cmp = flag_finite_math_only
9420 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9422 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9423 rs6000_compare_op1);
9429 /* Synthesize LE and GE from LT/GT || EQ. */
9430 if (code == LE || code == GE || code == LEU || code == GEU)
9432 /* Synthesize GE/LE frome GT/LT || EQ. */
9438 case LE: code = LT; break;
9439 case GE: code = GT; break;
9440 case LEU: code = LT; break;
9441 case GEU: code = GT; break;
9445 or1 = gen_reg_rtx (SImode);
9446 or2 = gen_reg_rtx (SImode);
9447 or_result = gen_reg_rtx (CCEQmode);
9448 compare_result2 = gen_reg_rtx (CCFPmode);
9451 cmp = flag_finite_math_only
9452 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9454 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9455 rs6000_compare_op1);
9458 /* The MC8540 FP compare instructions set the CR bits
9459 differently than other PPC compare instructions. For
9460 that matter, there is no generic test instruction, but a
9461 testgt, testlt, and testeq. For a true condition, bit 2
9462 is set (x1xx) in the CR. Following the traditional CR
9468 ... bit 2 would be a GT CR alias, so later on we
9469 look in the GT bits for the branch instructions.
9470 However, we must be careful to emit correct RTL in
9471 the meantime, so optimizations don't get confused. */
9473 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9474 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9476 /* OR them together. */
9477 cmp = gen_rtx_SET (VOIDmode, or_result,
9478 gen_rtx_COMPARE (CCEQmode,
9479 gen_rtx_IOR (SImode, or1, or2),
9481 compare_result = or_result;
9486 /* We only care about 1 bit (x1xx), so map everything to NE to
9487 maintain rtl sanity. We'll get to the right bit (x1xx) at
9488 code output time. */
9489 if (code == NE || code == LTGT)
9490 /* Do the inverse here because we have no cmpne
9491 instruction. We use the cmpeq instruction and expect
9492 to get a 0 instead. */
9501 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9502 gen_rtx_COMPARE (comp_mode,
9504 rs6000_compare_op1)));
9506 /* Some kinds of FP comparisons need an OR operation;
9507 under flag_finite_math_only we don't bother. */
9508 if (rs6000_compare_fp_p
9509 && ! flag_finite_math_only
9510 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9511 && (code == LE || code == GE
9512 || code == UNEQ || code == LTGT
9513 || code == UNGT || code == UNLT))
9515 enum rtx_code or1, or2;
9516 rtx or1_rtx, or2_rtx, compare2_rtx;
9517 rtx or_result = gen_reg_rtx (CCEQmode);
9521 case LE: or1 = LT; or2 = EQ; break;
9522 case GE: or1 = GT; or2 = EQ; break;
9523 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9524 case LTGT: or1 = LT; or2 = GT; break;
9525 case UNGT: or1 = UNORDERED; or2 = GT; break;
9526 case UNLT: or1 = UNORDERED; or2 = LT; break;
9529 validate_condition_mode (or1, comp_mode);
9530 validate_condition_mode (or2, comp_mode);
9531 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9532 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9533 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9534 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9536 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9538 compare_result = or_result;
9542 validate_condition_mode (code, GET_MODE (compare_result));
9544 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9548 /* Emit the RTL for an sCOND pattern. */
9551 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9554 enum machine_mode op_mode;
9555 enum rtx_code cond_code;
9557 condition_rtx = rs6000_generate_compare (code);
9558 cond_code = GET_CODE (condition_rtx);
9561 || cond_code == GE || cond_code == LE
9562 || cond_code == GEU || cond_code == LEU
9563 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9565 rtx not_result = gen_reg_rtx (CCEQmode);
9566 rtx not_op, rev_cond_rtx;
9567 enum machine_mode cc_mode;
9569 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9571 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9572 SImode, XEXP (condition_rtx, 0), const0_rtx);
9573 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9574 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9575 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9578 op_mode = GET_MODE (rs6000_compare_op0);
9579 if (op_mode == VOIDmode)
9580 op_mode = GET_MODE (rs6000_compare_op1);
9582 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9584 PUT_MODE (condition_rtx, DImode);
9585 convert_move (result, condition_rtx, 0);
9589 PUT_MODE (condition_rtx, SImode);
9590 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9594 /* Emit a branch of kind CODE to location LOC. */
9597 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9599 rtx condition_rtx, loc_ref;
9601 condition_rtx = rs6000_generate_compare (code);
9602 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9603 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9604 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9608 /* Return the string to output a conditional branch to LABEL, which is
9609 the operand number of the label, or -1 if the branch is really a
9612 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9613 condition code register and its mode specifies what kind of
9616 REVERSED is nonzero if we should reverse the sense of the comparison.
9618 INSN is the insn. */
9621 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
9623 static char string[64];
9624 enum rtx_code code = GET_CODE (op);
9625 rtx cc_reg = XEXP (op, 0);
9626 enum machine_mode mode = GET_MODE (cc_reg);
9627 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9628 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9629 int really_reversed = reversed ^ need_longbranch;
9635 validate_condition_mode (code, mode);
9637 /* Work out which way this really branches. We could use
9638 reverse_condition_maybe_unordered here always but this
9639 makes the resulting assembler clearer. */
9640 if (really_reversed)
9642 /* Reversal of FP compares takes care -- an ordered compare
9643 becomes an unordered compare and vice versa. */
9644 if (mode == CCFPmode)
9645 code = reverse_condition_maybe_unordered (code);
9647 code = reverse_condition (code);
9650 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9652 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9655 /* Opposite of GT. */
9657 else if (code == NE)
9665 /* Not all of these are actually distinct opcodes, but
9666 we distinguish them for clarity of the resulting assembler. */
9668 ccode = "ne"; break;
9670 ccode = "eq"; break;
9672 ccode = "ge"; break;
9673 case GT: case GTU: case UNGT:
9674 ccode = "gt"; break;
9676 ccode = "le"; break;
9677 case LT: case LTU: case UNLT:
9678 ccode = "lt"; break;
9679 case UNORDERED: ccode = "un"; break;
9680 case ORDERED: ccode = "nu"; break;
9681 case UNGE: ccode = "nl"; break;
9682 case UNLE: ccode = "ng"; break;
9687 /* Maybe we have a guess as to how likely the branch is.
9688 The old mnemonics don't have a way to specify this information. */
9690 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9691 if (note != NULL_RTX)
9693 /* PROB is the difference from 50%. */
9694 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9695 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9697 /* Only hint for highly probable/improbable branches on newer
9698 cpus as static prediction overrides processor dynamic
9699 prediction. For older cpus we may as well always hint, but
9700 assume not taken for branches that are very close to 50% as a
9701 mispredicted taken branch is more expensive than a
9702 mispredicted not-taken branch. */
9704 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9706 if (abs (prob) > REG_BR_PROB_BASE / 20
9707 && ((prob > 0) ^ need_longbranch))
9715 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9717 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9719 /* We need to escape any '%' characters in the reg_names string.
9720 Assume they'd only be the first character... */
9721 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9723 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9727 /* If the branch distance was too far, we may have to use an
9728 unconditional branch to go the distance. */
9729 if (need_longbranch)
9730 s += sprintf (s, ",$+8\n\tb %s", label);
9732 s += sprintf (s, ",%s", label);
9738 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9739 operands of the last comparison is nonzero/true, FALSE_COND if it
9740 is zero/false. Return 0 if the hardware has no such operation. */
9743 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9745 enum rtx_code code = GET_CODE (op);
9746 rtx op0 = rs6000_compare_op0;
9747 rtx op1 = rs6000_compare_op1;
9749 enum machine_mode compare_mode = GET_MODE (op0);
9750 enum machine_mode result_mode = GET_MODE (dest);
9753 /* These modes should always match. */
9754 if (GET_MODE (op1) != compare_mode
9755 /* In the isel case however, we can use a compare immediate, so
9756 op1 may be a small constant. */
9757 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9759 if (GET_MODE (true_cond) != result_mode)
9761 if (GET_MODE (false_cond) != result_mode)
9764 /* First, work out if the hardware can do this at all, or
9765 if it's too slow... */
9766 if (! rs6000_compare_fp_p)
9769 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9773 /* Eliminate half of the comparisons by switching operands, this
9774 makes the remaining code simpler. */
9775 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9776 || code == LTGT || code == LT || code == UNLE)
9778 code = reverse_condition_maybe_unordered (code);
9780 true_cond = false_cond;
9784 /* UNEQ and LTGT take four instructions for a comparison with zero,
9785 it'll probably be faster to use a branch here too. */
9786 if (code == UNEQ && HONOR_NANS (compare_mode))
9789 if (GET_CODE (op1) == CONST_DOUBLE)
9790 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9792 /* We're going to try to implement comparisons by performing
9793 a subtract, then comparing against zero. Unfortunately,
9794 Inf - Inf is NaN which is not zero, and so if we don't
9795 know that the operand is finite and the comparison
9796 would treat EQ different to UNORDERED, we can't do it. */
9797 if (HONOR_INFINITIES (compare_mode)
9798 && code != GT && code != UNGE
9799 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9800 /* Constructs of the form (a OP b ? a : b) are safe. */
9801 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9802 || (! rtx_equal_p (op0, true_cond)
9803 && ! rtx_equal_p (op1, true_cond))))
9805 /* At this point we know we can use fsel. */
9807 /* Reduce the comparison to a comparison against zero. */
9808 temp = gen_reg_rtx (compare_mode);
9809 emit_insn (gen_rtx_SET (VOIDmode, temp,
9810 gen_rtx_MINUS (compare_mode, op0, op1)));
9812 op1 = CONST0_RTX (compare_mode);
9814 /* If we don't care about NaNs we can reduce some of the comparisons
9815 down to faster ones. */
9816 if (! HONOR_NANS (compare_mode))
9822 true_cond = false_cond;
9835 /* Now, reduce everything down to a GE. */
9842 temp = gen_reg_rtx (compare_mode);
9843 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9848 temp = gen_reg_rtx (compare_mode);
9849 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9854 temp = gen_reg_rtx (compare_mode);
9855 emit_insn (gen_rtx_SET (VOIDmode, temp,
9856 gen_rtx_NEG (compare_mode,
9857 gen_rtx_ABS (compare_mode, op0))));
9862 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9863 temp = gen_reg_rtx (result_mode);
9864 emit_insn (gen_rtx_SET (VOIDmode, temp,
9865 gen_rtx_IF_THEN_ELSE (result_mode,
9866 gen_rtx_GE (VOIDmode,
9868 true_cond, false_cond)));
9869 false_cond = true_cond;
9872 temp = gen_reg_rtx (compare_mode);
9873 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9878 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9879 temp = gen_reg_rtx (result_mode);
9880 emit_insn (gen_rtx_SET (VOIDmode, temp,
9881 gen_rtx_IF_THEN_ELSE (result_mode,
9882 gen_rtx_GE (VOIDmode,
9884 true_cond, false_cond)));
9885 true_cond = false_cond;
9888 temp = gen_reg_rtx (compare_mode);
9889 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9897 emit_insn (gen_rtx_SET (VOIDmode, dest,
9898 gen_rtx_IF_THEN_ELSE (result_mode,
9899 gen_rtx_GE (VOIDmode,
9901 true_cond, false_cond)));
9905 /* Same as above, but for ints (isel). */
9908 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9910 rtx condition_rtx, cr;
9912 /* All isel implementations thus far are 32-bits. */
9913 if (GET_MODE (rs6000_compare_op0) != SImode)
9916 /* We still have to do the compare, because isel doesn't do a
9917 compare, it just looks at the CRx bits set by a previous compare
9919 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9920 cr = XEXP (condition_rtx, 0);
9922 if (GET_MODE (cr) == CCmode)
9923 emit_insn (gen_isel_signed (dest, condition_rtx,
9924 true_cond, false_cond, cr));
9926 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9927 true_cond, false_cond, cr));
9933 output_isel (rtx *operands)
9937 code = GET_CODE (operands[1]);
9938 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9940 PUT_CODE (operands[1], reverse_condition (code));
9941 return "isel %0,%3,%2,%j1";
9944 return "isel %0,%2,%3,%j1";
9948 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
9950 enum machine_mode mode = GET_MODE (op0);
9954 if (code == SMAX || code == SMIN)
9959 if (code == SMAX || code == UMAX)
9960 target = emit_conditional_move (dest, c, op0, op1, mode,
9963 target = emit_conditional_move (dest, c, op0, op1, mode,
9965 if (target == NULL_RTX)
9968 emit_move_insn (dest, target);
9971 /* Called by splitter for multireg moves.
9973 operands[0] : Destination of move
9974 operands[1] : Source of move
9977 operands[2-n] : Destination slots
9978 operands[n-m] : Source slots
9979 where n = 2 + HARD_REGNO_NREGS (reg, GET_MODE (operands[0]))
9980 m = 2 + 2 * HARD_REGNO_NREGS (reg, GET_MODE (operands[0])) - 1
9982 Splits the move of operands[1] to operands[0].
9983 This is done, if GPRs are one of the operands. In this case
9984 a sequence of simple move insns has to be issued. The sequence of these
9985 move insns has to be done in correct order to avoid early clobber of the
9986 base register or destructive overlap of registers.
9990 rs6000_split_multireg_move (rtx *operands)
9992 int nregs, reg, i, j, used_update = 0;
9993 enum machine_mode mode;
9994 rtx dst = operands[0];
9995 rtx src = operands[1];
9998 /* Calculate number to move (2/4 for 32/64 bit mode). */
10000 reg = REG_P (operands[0]) ? REGNO (operands[0]) : REGNO (operands[1]);
10001 mode = GET_MODE (operands[0]);
10002 nregs = HARD_REGNO_NREGS (reg, mode);
10004 if (REG_P (operands[1])
10005 && REG_P (operands[0])
10006 && (REGNO (operands[1]) < REGNO (operands[0])))
10008 /* Move register range backwards, if we have destructive overlap. */
10011 for (i = 0; i < nregs; i++)
10014 operands[i+2] = operand_subword (operands[0], j, 0, mode);
10015 operands[i+2+nregs] =
10016 operand_subword (operands[1], j, 0, mode);
10023 if (GET_CODE (operands[1]) == MEM)
10027 if (GET_CODE (XEXP (operands[1], 0)) == PRE_INC
10028 || GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
10031 breg = XEXP (XEXP (operands[1], 0), 0);
10032 delta_rtx = GET_CODE (XEXP (operands[1], 0)) == PRE_INC
10033 ? GEN_INT (GET_MODE_SIZE (GET_MODE (operands[1])))
10034 : GEN_INT (-GET_MODE_SIZE (GET_MODE (operands[1])));
10035 insn = emit_insn (TARGET_32BIT
10036 ? gen_addsi3 (breg, breg, delta_rtx)
10037 : gen_adddi3 (breg, breg, delta_rtx));
10038 src = gen_rtx_MEM (mode, breg);
10041 /* We have now address involving an base register only.
10042 If we use one of the registers to address memory,
10043 we have change that register last. */
10045 breg = (GET_CODE (XEXP (src, 0)) == PLUS
10046 ? XEXP (XEXP (src, 0), 0)
10052 if (REGNO (breg) >= REGNO (dst)
10053 && REGNO (breg) < REGNO (dst) + nregs)
10054 j = REGNO (breg) - REGNO (dst);
10057 if (GET_CODE (operands[0]) == MEM)
10061 if (GET_CODE (XEXP (operands[0], 0)) == PRE_INC
10062 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
10065 breg = XEXP (XEXP (operands[0], 0), 0);
10066 delta_rtx = GET_CODE (XEXP (operands[0], 0)) == PRE_INC
10067 ? GEN_INT (GET_MODE_SIZE (GET_MODE (operands[0])))
10068 : GEN_INT (-GET_MODE_SIZE (GET_MODE (operands[0])));
10070 /* We have to update the breg before doing the store.
10071 Use store with update, if available. */
10075 insn = emit_insn (TARGET_32BIT
10076 ? gen_movsi_update (breg, breg, delta_rtx,
10077 operand_subword (src, 0, 0, mode))
10078 : gen_movdi_update (breg, breg, delta_rtx,
10079 operand_subword (src, 0, 0, mode)));
10083 insn = emit_insn (TARGET_32BIT
10084 ? gen_addsi3 (breg, breg, delta_rtx)
10085 : gen_adddi3 (breg, breg, delta_rtx));
10086 dst = gen_rtx_MEM (mode, breg);
10090 for (i = 0; i < nregs; i++)
10092 /* Calculate index to next subword. */
10097 operands[i+2] = operand_subword (dst, j, 0, mode);
10098 operands[i+2+nregs] = operand_subword (src, j, 0, mode);
10100 if (j == 0 && used_update)
10102 /* Already emited move of first word by
10103 store with update -> emit dead insn instead (r := r). */
10104 operands[i+2] = operands[i+2+nregs];
10111 /* This page contains routines that are used to determine what the
10112 function prologue and epilogue code will do and write them out. */
10114 /* Return the first fixed-point register that is required to be
10115 saved. 32 if none. */
10118 first_reg_to_save (void)
10122 /* Find lowest numbered live register. */
10123 for (first_reg = 13; first_reg <= 31; first_reg++)
10124 if (regs_ever_live[first_reg]
10125 && (! call_used_regs[first_reg]
10126 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10127 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10128 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10133 && current_function_uses_pic_offset_table
10134 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10135 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10141 /* Similar, for FP regs. */
10144 first_fp_reg_to_save (void)
10148 /* Find lowest numbered live register. */
10149 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10150 if (regs_ever_live[first_reg])
10156 /* Similar, for AltiVec regs. */
10159 first_altivec_reg_to_save (void)
10163 /* Stack frame remains as is unless we are in AltiVec ABI. */
10164 if (! TARGET_ALTIVEC_ABI)
10165 return LAST_ALTIVEC_REGNO + 1;
10167 /* Find lowest numbered live register. */
10168 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10169 if (regs_ever_live[i])
10175 /* Return a 32-bit mask of the AltiVec registers we need to set in
10176 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10177 the 32-bit word is 0. */
10179 static unsigned int
10180 compute_vrsave_mask (void)
10182 unsigned int i, mask = 0;
10184 /* First, find out if we use _any_ altivec registers. */
10185 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10186 if (regs_ever_live[i])
10187 mask |= ALTIVEC_REG_BIT (i);
10192 /* Next, remove the argument registers from the set. These must
10193 be in the VRSAVE mask set by the caller, so we don't need to add
10194 them in again. More importantly, the mask we compute here is
10195 used to generate CLOBBERs in the set_vrsave insn, and we do not
10196 wish the argument registers to die. */
10197 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10198 mask &= ~ALTIVEC_REG_BIT (i);
10200 /* Similarly, remove the return value from the set. */
10203 diddle_return_value (is_altivec_return_reg, &yes);
10205 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10212 is_altivec_return_reg (rtx reg, void *xyes)
10214 bool *yes = (bool *) xyes;
10215 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10220 /* Calculate the stack information for the current function. This is
10221 complicated by having two separate calling sequences, the AIX calling
10222 sequence and the V.4 calling sequence.
10224 AIX (and Darwin/Mac OS X) stack frames look like:
10226 SP----> +---------------------------------------+
10227 | back chain to caller | 0 0
10228 +---------------------------------------+
10229 | saved CR | 4 8 (8-11)
10230 +---------------------------------------+
10232 +---------------------------------------+
10233 | reserved for compilers | 12 24
10234 +---------------------------------------+
10235 | reserved for binders | 16 32
10236 +---------------------------------------+
10237 | saved TOC pointer | 20 40
10238 +---------------------------------------+
10239 | Parameter save area (P) | 24 48
10240 +---------------------------------------+
10241 | Alloca space (A) | 24+P etc.
10242 +---------------------------------------+
10243 | Local variable space (L) | 24+P+A
10244 +---------------------------------------+
10245 | Float/int conversion temporary (X) | 24+P+A+L
10246 +---------------------------------------+
10247 | Save area for AltiVec registers (W) | 24+P+A+L+X
10248 +---------------------------------------+
10249 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10250 +---------------------------------------+
10251 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10252 +---------------------------------------+
10253 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10254 +---------------------------------------+
10255 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10256 +---------------------------------------+
10257 old SP->| back chain to caller's caller |
10258 +---------------------------------------+
10260 The required alignment for AIX configurations is two words (i.e., 8
10264 V.4 stack frames look like:
10266 SP----> +---------------------------------------+
10267 | back chain to caller | 0
10268 +---------------------------------------+
10269 | caller's saved LR | 4
10270 +---------------------------------------+
10271 | Parameter save area (P) | 8
10272 +---------------------------------------+
10273 | Alloca space (A) | 8+P
10274 +---------------------------------------+
10275 | Varargs save area (V) | 8+P+A
10276 +---------------------------------------+
10277 | Local variable space (L) | 8+P+A+V
10278 +---------------------------------------+
10279 | Float/int conversion temporary (X) | 8+P+A+V+L
10280 +---------------------------------------+
10281 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10282 +---------------------------------------+
10283 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10284 +---------------------------------------+
10285 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10286 +---------------------------------------+
10287 | SPE: area for 64-bit GP registers |
10288 +---------------------------------------+
10289 | SPE alignment padding |
10290 +---------------------------------------+
10291 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10292 +---------------------------------------+
10293 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10294 +---------------------------------------+
10295 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10296 +---------------------------------------+
10297 old SP->| back chain to caller's caller |
10298 +---------------------------------------+
10300 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10301 given. (But note below and in sysv4.h that we require only 8 and
10302 may round up the size of our stack frame anyways. The historical
10303 reason is early versions of powerpc-linux which didn't properly
10304 align the stack at program startup. A happy side-effect is that
10305 -mno-eabi libraries can be used with -meabi programs.)
10307 The EABI configuration defaults to the V.4 layout. However,
10308 the stack alignment requirements may differ. If -mno-eabi is not
10309 given, the required stack alignment is 8 bytes; if -mno-eabi is
10310 given, the required alignment is 16 bytes. (But see V.4 comment
10313 #ifndef ABI_STACK_BOUNDARY
10314 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10317 static rs6000_stack_t *
10318 rs6000_stack_info (void)
10320 static rs6000_stack_t info, zero_info;
10321 rs6000_stack_t *info_ptr = &info;
10322 int reg_size = TARGET_POWERPC64 ? 8 : 4;
10324 HOST_WIDE_INT total_raw_size;
10326 /* Zero all fields portably. */
10331 /* Cache value so we don't rescan instruction chain over and over. */
10332 if (cfun->machine->insn_chain_scanned_p == 0)
10334 cfun->machine->insn_chain_scanned_p = 1;
10335 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10339 /* Select which calling sequence. */
10340 info_ptr->abi = DEFAULT_ABI;
10342 /* Calculate which registers need to be saved & save area size. */
10343 info_ptr->first_gp_reg_save = first_reg_to_save ();
10344 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10345 even if it currently looks like we won't. */
10346 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10347 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10348 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10349 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10350 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10352 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10354 /* For the SPE, we have an additional upper 32-bits on each GPR.
10355 Ideally we should save the entire 64-bits only when the upper
10356 half is used in SIMD instructions. Since we only record
10357 registers live (not the size they are used in), this proves
10358 difficult because we'd have to traverse the instruction chain at
10359 the right time, taking reload into account. This is a real pain,
10360 so we opt to save the GPRs in 64-bits always if but one register
10361 gets used in 64-bits. Otherwise, all the registers in the frame
10362 get saved in 32-bits.
10364 So... since when we save all GPRs (except the SP) in 64-bits, the
10365 traditional GP save area will be empty. */
10366 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10367 info_ptr->gp_size = 0;
10369 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10370 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10372 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10373 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10374 - info_ptr->first_altivec_reg_save);
10376 /* Does this function call anything? */
10377 info_ptr->calls_p = (! current_function_is_leaf
10378 || cfun->machine->ra_needs_full_frame);
10380 /* Determine if we need to save the link register. */
10381 if (rs6000_ra_ever_killed ()
10382 || (DEFAULT_ABI == ABI_AIX
10383 && current_function_profile
10384 && !TARGET_PROFILE_KERNEL)
10385 #ifdef TARGET_RELOCATABLE
10386 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10388 || (info_ptr->first_fp_reg_save != 64
10389 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10390 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10391 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10392 || (DEFAULT_ABI == ABI_DARWIN
10394 && current_function_uses_pic_offset_table)
10395 || info_ptr->calls_p)
10397 info_ptr->lr_save_p = 1;
10398 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10401 /* Determine if we need to save the condition code registers. */
10402 if (regs_ever_live[CR2_REGNO]
10403 || regs_ever_live[CR3_REGNO]
10404 || regs_ever_live[CR4_REGNO])
10406 info_ptr->cr_save_p = 1;
10407 if (DEFAULT_ABI == ABI_V4)
10408 info_ptr->cr_size = reg_size;
10411 /* If the current function calls __builtin_eh_return, then we need
10412 to allocate stack space for registers that will hold data for
10413 the exception handler. */
10414 if (current_function_calls_eh_return)
10417 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10420 /* SPE saves EH registers in 64-bits. */
10421 ehrd_size = i * (TARGET_SPE_ABI
10422 && info_ptr->spe_64bit_regs_used != 0
10423 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10428 /* Determine various sizes. */
10429 info_ptr->reg_size = reg_size;
10430 info_ptr->fixed_size = RS6000_SAVE_AREA;
10431 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10432 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10433 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10436 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10437 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10439 info_ptr->spe_gp_size = 0;
10441 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
10443 info_ptr->vrsave_mask = compute_vrsave_mask ();
10444 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
10448 info_ptr->vrsave_mask = 0;
10449 info_ptr->vrsave_size = 0;
10452 /* Calculate the offsets. */
10453 switch (DEFAULT_ABI)
10461 info_ptr->fp_save_offset = - info_ptr->fp_size;
10462 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10464 if (TARGET_ALTIVEC_ABI)
10466 info_ptr->vrsave_save_offset
10467 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10469 /* Align stack so vector save area is on a quadword boundary. */
10470 if (info_ptr->altivec_size != 0)
10471 info_ptr->altivec_padding_size
10472 = 16 - (-info_ptr->vrsave_save_offset % 16);
10474 info_ptr->altivec_padding_size = 0;
10476 info_ptr->altivec_save_offset
10477 = info_ptr->vrsave_save_offset
10478 - info_ptr->altivec_padding_size
10479 - info_ptr->altivec_size;
10481 /* Adjust for AltiVec case. */
10482 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10485 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10486 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10487 info_ptr->lr_save_offset = 2*reg_size;
10491 info_ptr->fp_save_offset = - info_ptr->fp_size;
10492 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10493 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10495 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10497 /* Align stack so SPE GPR save area is aligned on a
10498 double-word boundary. */
10499 if (info_ptr->spe_gp_size != 0)
10500 info_ptr->spe_padding_size
10501 = 8 - (-info_ptr->cr_save_offset % 8);
10503 info_ptr->spe_padding_size = 0;
10505 info_ptr->spe_gp_save_offset
10506 = info_ptr->cr_save_offset
10507 - info_ptr->spe_padding_size
10508 - info_ptr->spe_gp_size;
10510 /* Adjust for SPE case. */
10511 info_ptr->toc_save_offset
10512 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10514 else if (TARGET_ALTIVEC_ABI)
10516 info_ptr->vrsave_save_offset
10517 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10519 /* Align stack so vector save area is on a quadword boundary. */
10520 if (info_ptr->altivec_size != 0)
10521 info_ptr->altivec_padding_size
10522 = 16 - (-info_ptr->vrsave_save_offset % 16);
10524 info_ptr->altivec_padding_size = 0;
10526 info_ptr->altivec_save_offset
10527 = info_ptr->vrsave_save_offset
10528 - info_ptr->altivec_padding_size
10529 - info_ptr->altivec_size;
10531 /* Adjust for AltiVec case. */
10532 info_ptr->toc_save_offset
10533 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10536 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10537 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10538 info_ptr->lr_save_offset = reg_size;
10542 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10543 + info_ptr->gp_size
10544 + info_ptr->altivec_size
10545 + info_ptr->altivec_padding_size
10546 + info_ptr->spe_gp_size
10547 + info_ptr->spe_padding_size
10549 + info_ptr->cr_size
10550 + info_ptr->lr_size
10551 + info_ptr->vrsave_size
10552 + info_ptr->toc_size,
10553 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10556 total_raw_size = (info_ptr->vars_size
10557 + info_ptr->parm_size
10558 + info_ptr->save_size
10559 + info_ptr->varargs_size
10560 + info_ptr->fixed_size);
10562 info_ptr->total_size =
10563 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10565 /* Determine if we need to allocate any stack frame:
10567 For AIX we need to push the stack if a frame pointer is needed
10568 (because the stack might be dynamically adjusted), if we are
10569 debugging, if we make calls, or if the sum of fp_save, gp_save,
10570 and local variables are more than the space needed to save all
10571 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10572 + 18*8 = 288 (GPR13 reserved).
10574 For V.4 we don't have the stack cushion that AIX uses, but assume
10575 that the debugger can handle stackless frames. */
10577 if (info_ptr->calls_p)
10578 info_ptr->push_p = 1;
10580 else if (DEFAULT_ABI == ABI_V4)
10581 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10583 else if (frame_pointer_needed)
10584 info_ptr->push_p = 1;
10586 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10587 info_ptr->push_p = 1;
10591 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10593 /* Zero offsets if we're not saving those registers. */
10594 if (info_ptr->fp_size == 0)
10595 info_ptr->fp_save_offset = 0;
10597 if (info_ptr->gp_size == 0)
10598 info_ptr->gp_save_offset = 0;
10600 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10601 info_ptr->altivec_save_offset = 0;
10603 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10604 info_ptr->vrsave_save_offset = 0;
10606 if (! TARGET_SPE_ABI
10607 || info_ptr->spe_64bit_regs_used == 0
10608 || info_ptr->spe_gp_size == 0)
10609 info_ptr->spe_gp_save_offset = 0;
10611 if (! info_ptr->lr_save_p)
10612 info_ptr->lr_save_offset = 0;
10614 if (! info_ptr->cr_save_p)
10615 info_ptr->cr_save_offset = 0;
10617 if (! info_ptr->toc_save_p)
10618 info_ptr->toc_save_offset = 0;
10623 /* Return true if the current function uses any GPRs in 64-bit SIMD
10627 spe_func_has_64bit_regs_p (void)
10631 /* Functions that save and restore all the call-saved registers will
10632 need to save/restore the registers in 64-bits. */
10633 if (current_function_calls_eh_return
10634 || current_function_calls_setjmp
10635 || current_function_has_nonlocal_goto)
10638 insns = get_insns ();
10640 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10646 i = PATTERN (insn);
10647 if (GET_CODE (i) == SET
10648 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10657 debug_stack_info (rs6000_stack_t *info)
10659 const char *abi_string;
10662 info = rs6000_stack_info ();
10664 fprintf (stderr, "\nStack information for function %s:\n",
10665 ((current_function_decl && DECL_NAME (current_function_decl))
10666 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10671 default: abi_string = "Unknown"; break;
10672 case ABI_NONE: abi_string = "NONE"; break;
10673 case ABI_AIX: abi_string = "AIX"; break;
10674 case ABI_DARWIN: abi_string = "Darwin"; break;
10675 case ABI_V4: abi_string = "V.4"; break;
10678 fprintf (stderr, "\tABI = %5s\n", abi_string);
10680 if (TARGET_ALTIVEC_ABI)
10681 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10683 if (TARGET_SPE_ABI)
10684 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10686 if (info->first_gp_reg_save != 32)
10687 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10689 if (info->first_fp_reg_save != 64)
10690 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10692 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10693 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10694 info->first_altivec_reg_save);
10696 if (info->lr_save_p)
10697 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10699 if (info->cr_save_p)
10700 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10702 if (info->toc_save_p)
10703 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10705 if (info->vrsave_mask)
10706 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10709 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10712 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10714 if (info->gp_save_offset)
10715 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10717 if (info->fp_save_offset)
10718 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10720 if (info->altivec_save_offset)
10721 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10722 info->altivec_save_offset);
10724 if (info->spe_gp_save_offset)
10725 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10726 info->spe_gp_save_offset);
10728 if (info->vrsave_save_offset)
10729 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10730 info->vrsave_save_offset);
10732 if (info->lr_save_offset)
10733 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10735 if (info->cr_save_offset)
10736 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10738 if (info->toc_save_offset)
10739 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10741 if (info->varargs_save_offset)
10742 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10744 if (info->total_size)
10745 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
10748 if (info->varargs_size)
10749 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10751 if (info->vars_size)
10752 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
10755 if (info->parm_size)
10756 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10758 if (info->fixed_size)
10759 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10762 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10764 if (info->spe_gp_size)
10765 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10768 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10770 if (info->altivec_size)
10771 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10773 if (info->vrsave_size)
10774 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10776 if (info->altivec_padding_size)
10777 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10778 info->altivec_padding_size);
10780 if (info->spe_padding_size)
10781 fprintf (stderr, "\tspe_padding_size = %5d\n",
10782 info->spe_padding_size);
10785 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10788 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10790 if (info->toc_size)
10791 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10793 if (info->save_size)
10794 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10796 if (info->reg_size != 4)
10797 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10799 fprintf (stderr, "\n");
10803 rs6000_return_addr (int count, rtx frame)
10805 /* Currently we don't optimize very well between prolog and body
10806 code and for PIC code the code can be actually quite bad, so
10807 don't try to be too clever here. */
10808 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10810 cfun->machine->ra_needs_full_frame = 1;
10817 plus_constant (copy_to_reg
10818 (gen_rtx_MEM (Pmode,
10819 memory_address (Pmode, frame))),
10820 RETURN_ADDRESS_OFFSET)));
10823 cfun->machine->ra_need_lr = 1;
10824 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10827 /* Say whether a function is a candidate for sibcall handling or not.
10828 We do not allow indirect calls to be optimized into sibling calls.
10829 Also, we can't do it if there are any vector parameters; there's
10830 nowhere to put the VRsave code so it works; note that functions with
10831 vector parameters are required to have a prototype, so the argument
10832 type info must be available here. (The tail recursion case can work
10833 with vector parameters, but there's no way to distinguish here.) */
10835 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
10840 if (TARGET_ALTIVEC_VRSAVE)
10842 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10843 type; type = TREE_CHAIN (type))
10845 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10849 if (DEFAULT_ABI == ABI_DARWIN
10850 || (*targetm.binds_local_p) (decl))
10852 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10854 if (!lookup_attribute ("longcall", attr_list)
10855 || lookup_attribute ("shortcall", attr_list))
10863 rs6000_ra_ever_killed (void)
10869 /* Irritatingly, there are two kinds of thunks -- those created with
10870 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10871 through the regular part of the compiler. This is a very hacky
10872 way to tell them apart. */
10873 if (current_function_is_thunk && !no_new_pseudos)
10876 /* regs_ever_live has LR marked as used if any sibcalls are present,
10877 but this should not force saving and restoring in the
10878 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10879 clobbers LR, so that is inappropriate. */
10881 /* Also, the prologue can generate a store into LR that
10882 doesn't really count, like this:
10885 bcl to set PIC register
10889 When we're called from the epilogue, we need to avoid counting
10890 this as a store. */
10892 push_topmost_sequence ();
10893 top = get_insns ();
10894 pop_topmost_sequence ();
10895 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10897 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10901 if (FIND_REG_INC_NOTE (insn, reg))
10903 else if (GET_CODE (insn) == CALL_INSN
10904 && !SIBLING_CALL_P (insn))
10906 else if (set_of (reg, insn) != NULL_RTX
10907 && !prologue_epilogue_contains (insn))
10914 /* Add a REG_MAYBE_DEAD note to the insn. */
10916 rs6000_maybe_dead (rtx insn)
10918 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10923 /* Emit instructions needed to load the TOC register.
10924 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10925 a constant pool; or for SVR4 -fpic. */
10928 rs6000_emit_load_toc_table (int fromprolog)
10931 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10933 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10935 rtx temp = (fromprolog
10936 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10937 : gen_reg_rtx (Pmode));
10938 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10940 rs6000_maybe_dead (insn);
10941 insn = emit_move_insn (dest, temp);
10943 rs6000_maybe_dead (insn);
10945 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10948 rtx tempLR = (fromprolog
10949 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10950 : gen_reg_rtx (Pmode));
10951 rtx temp0 = (fromprolog
10952 ? gen_rtx_REG (Pmode, 0)
10953 : gen_reg_rtx (Pmode));
10956 /* possibly create the toc section */
10957 if (! toc_initialized)
10960 function_section (current_function_decl);
10967 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10968 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10970 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10971 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10973 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10975 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10976 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10983 static int reload_toc_labelno = 0;
10985 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10987 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10988 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10990 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10991 emit_move_insn (dest, tempLR);
10992 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10994 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10996 rs6000_maybe_dead (insn);
10998 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11000 /* This is for AIX code running in non-PIC ELF32. */
11003 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11004 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11006 insn = emit_insn (gen_elf_high (dest, realsym));
11008 rs6000_maybe_dead (insn);
11009 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11011 rs6000_maybe_dead (insn);
11013 else if (DEFAULT_ABI == ABI_AIX)
11016 insn = emit_insn (gen_load_toc_aix_si (dest));
11018 insn = emit_insn (gen_load_toc_aix_di (dest));
11020 rs6000_maybe_dead (insn);
11026 /* Emit instructions to restore the link register after determining where
11027 its value has been stored. */
11030 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11032 rs6000_stack_t *info = rs6000_stack_info ();
11035 operands[0] = source;
11036 operands[1] = scratch;
11038 if (info->lr_save_p)
11040 rtx frame_rtx = stack_pointer_rtx;
11041 HOST_WIDE_INT sp_offset = 0;
11044 if (frame_pointer_needed
11045 || current_function_calls_alloca
11046 || info->total_size > 32767)
11048 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11049 frame_rtx = operands[1];
11051 else if (info->push_p)
11052 sp_offset = info->total_size;
11054 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11055 tmp = gen_rtx_MEM (Pmode, tmp);
11056 emit_move_insn (tmp, operands[0]);
11059 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11063 get_TOC_alias_set (void)
11065 static int set = -1;
11067 set = new_alias_set ();
11071 /* This returns nonzero if the current function uses the TOC. This is
11072 determined by the presence of (unspec ... UNSPEC_TOC) or
11073 use (unspec ... UNSPEC_TOC), which are generated by the various
11074 load_toc_* patterns. */
11081 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11084 rtx pat = PATTERN (insn);
11087 if (GET_CODE (pat) == PARALLEL)
11088 for (i = 0; i < XVECLEN (pat, 0); i++)
11090 rtx sub = XVECEXP (pat, 0, i);
11091 if (GET_CODE (sub) == USE)
11093 sub = XEXP (sub, 0);
11094 if (GET_CODE (sub) == UNSPEC
11095 && XINT (sub, 1) == UNSPEC_TOC)
11104 create_TOC_reference (rtx symbol)
11106 return gen_rtx_PLUS (Pmode,
11107 gen_rtx_REG (Pmode, TOC_REGISTER),
11108 gen_rtx_CONST (Pmode,
11109 gen_rtx_MINUS (Pmode, symbol,
11110 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11113 /* If _Unwind_* has been called from within the same module,
11114 toc register is not guaranteed to be saved to 40(1) on function
11115 entry. Save it there in that case. */
11118 rs6000_aix_emit_builtin_unwind_init (void)
11121 rtx stack_top = gen_reg_rtx (Pmode);
11122 rtx opcode_addr = gen_reg_rtx (Pmode);
11123 rtx opcode = gen_reg_rtx (SImode);
11124 rtx tocompare = gen_reg_rtx (SImode);
11125 rtx no_toc_save_needed = gen_label_rtx ();
11127 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11128 emit_move_insn (stack_top, mem);
11130 mem = gen_rtx_MEM (Pmode,
11131 gen_rtx_PLUS (Pmode, stack_top,
11132 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11133 emit_move_insn (opcode_addr, mem);
11134 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11135 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11136 : 0xE8410028, SImode));
11138 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11139 SImode, NULL_RTX, NULL_RTX,
11140 no_toc_save_needed);
11142 mem = gen_rtx_MEM (Pmode,
11143 gen_rtx_PLUS (Pmode, stack_top,
11144 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11145 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11146 emit_label (no_toc_save_needed);
11149 /* This ties together stack memory (MEM with an alias set of
11150 rs6000_sr_alias_set) and the change to the stack pointer. */
11153 rs6000_emit_stack_tie (void)
11155 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11157 set_mem_alias_set (mem, rs6000_sr_alias_set);
11158 emit_insn (gen_stack_tie (mem));
11161 /* Emit the correct code for allocating stack space, as insns.
11162 If COPY_R12, make sure a copy of the old frame is left in r12.
11163 The generated code may use hard register 0 as a temporary. */
11166 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11169 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11170 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11171 rtx todec = GEN_INT (-size);
11173 if (current_function_limit_stack)
11175 if (REG_P (stack_limit_rtx)
11176 && REGNO (stack_limit_rtx) > 1
11177 && REGNO (stack_limit_rtx) <= 31)
11179 emit_insn (TARGET_32BIT
11180 ? gen_addsi3 (tmp_reg,
11183 : gen_adddi3 (tmp_reg,
11187 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11190 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11192 && DEFAULT_ABI == ABI_V4)
11194 rtx toload = gen_rtx_CONST (VOIDmode,
11195 gen_rtx_PLUS (Pmode,
11199 emit_insn (gen_elf_high (tmp_reg, toload));
11200 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11201 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11205 warning ("stack limit expression is not supported");
11208 if (copy_r12 || ! TARGET_UPDATE)
11209 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11215 /* Need a note here so that try_split doesn't get confused. */
11216 if (get_last_insn() == NULL_RTX)
11217 emit_note (NOTE_INSN_DELETED);
11218 insn = emit_move_insn (tmp_reg, todec);
11219 try_split (PATTERN (insn), insn, 0);
11223 insn = emit_insn (TARGET_32BIT
11224 ? gen_movsi_update (stack_reg, stack_reg,
11226 : gen_movdi_update (stack_reg, stack_reg,
11227 todec, stack_reg));
11231 insn = emit_insn (TARGET_32BIT
11232 ? gen_addsi3 (stack_reg, stack_reg, todec)
11233 : gen_adddi3 (stack_reg, stack_reg, todec));
11234 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11235 gen_rtx_REG (Pmode, 12));
11238 RTX_FRAME_RELATED_P (insn) = 1;
11240 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11241 gen_rtx_SET (VOIDmode, stack_reg,
11242 gen_rtx_PLUS (Pmode, stack_reg,
11247 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11248 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11249 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11250 deduce these equivalences by itself so it wasn't necessary to hold
11251 its hand so much. */
11254 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11255 rtx reg2, rtx rreg)
11259 /* copy_rtx will not make unique copies of registers, so we need to
11260 ensure we don't have unwanted sharing here. */
11262 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11265 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11267 real = copy_rtx (PATTERN (insn));
11269 if (reg2 != NULL_RTX)
11270 real = replace_rtx (real, reg2, rreg);
11272 real = replace_rtx (real, reg,
11273 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11274 STACK_POINTER_REGNUM),
11277 /* We expect that 'real' is either a SET or a PARALLEL containing
11278 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11279 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11281 if (GET_CODE (real) == SET)
11285 temp = simplify_rtx (SET_SRC (set));
11287 SET_SRC (set) = temp;
11288 temp = simplify_rtx (SET_DEST (set));
11290 SET_DEST (set) = temp;
11291 if (GET_CODE (SET_DEST (set)) == MEM)
11293 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11295 XEXP (SET_DEST (set), 0) = temp;
11298 else if (GET_CODE (real) == PARALLEL)
11301 for (i = 0; i < XVECLEN (real, 0); i++)
11302 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11304 rtx set = XVECEXP (real, 0, i);
11306 temp = simplify_rtx (SET_SRC (set));
11308 SET_SRC (set) = temp;
11309 temp = simplify_rtx (SET_DEST (set));
11311 SET_DEST (set) = temp;
11312 if (GET_CODE (SET_DEST (set)) == MEM)
11314 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11316 XEXP (SET_DEST (set), 0) = temp;
11318 RTX_FRAME_RELATED_P (set) = 1;
11325 real = spe_synthesize_frame_save (real);
11327 RTX_FRAME_RELATED_P (insn) = 1;
11328 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11333 /* Given an SPE frame note, return a PARALLEL of SETs with the
11334 original note, plus a synthetic register save. */
11337 spe_synthesize_frame_save (rtx real)
11339 rtx synth, offset, reg, real2;
11341 if (GET_CODE (real) != SET
11342 || GET_MODE (SET_SRC (real)) != V2SImode)
11345 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11346 frame related note. The parallel contains a set of the register
11347 being saved, and another set to a synthetic register (n+1200).
11348 This is so we can differentiate between 64-bit and 32-bit saves.
11349 Words cannot describe this nastiness. */
11351 if (GET_CODE (SET_DEST (real)) != MEM
11352 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11353 || GET_CODE (SET_SRC (real)) != REG)
11357 (set (mem (plus (reg x) (const y)))
11360 (set (mem (plus (reg x) (const y+4)))
11364 real2 = copy_rtx (real);
11365 PUT_MODE (SET_DEST (real2), SImode);
11366 reg = SET_SRC (real2);
11367 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11368 synth = copy_rtx (real2);
11370 if (BYTES_BIG_ENDIAN)
11372 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11373 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11376 reg = SET_SRC (synth);
11378 synth = replace_rtx (synth, reg,
11379 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11381 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11382 synth = replace_rtx (synth, offset,
11383 GEN_INT (INTVAL (offset)
11384 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11386 RTX_FRAME_RELATED_P (synth) = 1;
11387 RTX_FRAME_RELATED_P (real2) = 1;
11388 if (BYTES_BIG_ENDIAN)
11389 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11391 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11396 /* Returns an insn that has a vrsave set operation with the
11397 appropriate CLOBBERs. */
11400 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11403 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11404 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11407 = gen_rtx_SET (VOIDmode,
11409 gen_rtx_UNSPEC_VOLATILE (SImode,
11410 gen_rtvec (2, reg, vrsave),
11415 /* We need to clobber the registers in the mask so the scheduler
11416 does not move sets to VRSAVE before sets of AltiVec registers.
11418 However, if the function receives nonlocal gotos, reload will set
11419 all call saved registers live. We will end up with:
11421 (set (reg 999) (mem))
11422 (parallel [ (set (reg vrsave) (unspec blah))
11423 (clobber (reg 999))])
11425 The clobber will cause the store into reg 999 to be dead, and
11426 flow will attempt to delete an epilogue insn. In this case, we
11427 need an unspec use/set of the register. */
11429 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11430 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11432 if (!epiloguep || call_used_regs [i])
11433 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11434 gen_rtx_REG (V4SImode, i));
11437 rtx reg = gen_rtx_REG (V4SImode, i);
11440 = gen_rtx_SET (VOIDmode,
11442 gen_rtx_UNSPEC (V4SImode,
11443 gen_rtvec (1, reg), 27));
11447 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11449 for (i = 0; i < nclobs; ++i)
11450 XVECEXP (insn, 0, i) = clobs[i];
11455 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11456 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11459 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11460 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11462 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11463 rtx replacea, replaceb;
11465 int_rtx = GEN_INT (offset);
11467 /* Some cases that need register indexed addressing. */
11468 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11470 && SPE_VECTOR_MODE (mode)
11471 && !SPE_CONST_OFFSET_OK (offset)))
11473 /* Whomever calls us must make sure r11 is available in the
11474 flow path of instructions in the prologue. */
11475 offset_rtx = gen_rtx_REG (Pmode, 11);
11476 emit_move_insn (offset_rtx, int_rtx);
11478 replacea = offset_rtx;
11479 replaceb = int_rtx;
11483 offset_rtx = int_rtx;
11484 replacea = NULL_RTX;
11485 replaceb = NULL_RTX;
11488 reg = gen_rtx_REG (mode, regno);
11489 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11490 mem = gen_rtx_MEM (mode, addr);
11491 set_mem_alias_set (mem, rs6000_sr_alias_set);
11493 insn = emit_move_insn (mem, reg);
11495 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11498 /* Emit an offset memory reference suitable for a frame store, while
11499 converting to a valid addressing mode. */
11502 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11504 rtx int_rtx, offset_rtx;
11506 int_rtx = GEN_INT (offset);
11508 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11510 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11511 emit_move_insn (offset_rtx, int_rtx);
11514 offset_rtx = int_rtx;
11516 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11519 /* Emit function prologue as insns. */
11522 rs6000_emit_prologue (void)
11524 rs6000_stack_t *info = rs6000_stack_info ();
11525 enum machine_mode reg_mode = Pmode;
11526 int reg_size = UNITS_PER_WORD;
11527 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11528 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11529 rtx frame_reg_rtx = sp_reg_rtx;
11530 rtx cr_save_rtx = NULL_RTX;
11532 int saving_FPRs_inline;
11533 int using_store_multiple;
11534 HOST_WIDE_INT sp_offset = 0;
11536 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11538 reg_mode = V2SImode;
11542 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11543 && (!TARGET_SPE_ABI
11544 || info->spe_64bit_regs_used == 0)
11545 && info->first_gp_reg_save < 31);
11546 saving_FPRs_inline = (info->first_fp_reg_save == 64
11547 || FP_SAVE_INLINE (info->first_fp_reg_save)
11548 || current_function_calls_eh_return
11549 || cfun->machine->ra_need_lr);
11551 /* For V.4, update stack before we do any saving and set back pointer. */
11553 && (DEFAULT_ABI == ABI_V4
11554 || current_function_calls_eh_return))
11556 if (info->total_size < 32767)
11557 sp_offset = info->total_size;
11559 frame_reg_rtx = frame_ptr_rtx;
11560 rs6000_emit_allocate_stack (info->total_size,
11561 (frame_reg_rtx != sp_reg_rtx
11562 && (info->cr_save_p
11564 || info->first_fp_reg_save < 64
11565 || info->first_gp_reg_save < 32
11567 if (frame_reg_rtx != sp_reg_rtx)
11568 rs6000_emit_stack_tie ();
11571 /* Save AltiVec registers if needed. */
11572 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11576 /* There should be a non inline version of this, for when we
11577 are saving lots of vector registers. */
11578 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11579 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11581 rtx areg, savereg, mem;
11584 offset = info->altivec_save_offset + sp_offset
11585 + 16 * (i - info->first_altivec_reg_save);
11587 savereg = gen_rtx_REG (V4SImode, i);
11589 areg = gen_rtx_REG (Pmode, 0);
11590 emit_move_insn (areg, GEN_INT (offset));
11592 /* AltiVec addressing mode is [reg+reg]. */
11593 mem = gen_rtx_MEM (V4SImode,
11594 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11596 set_mem_alias_set (mem, rs6000_sr_alias_set);
11598 insn = emit_move_insn (mem, savereg);
11600 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11601 areg, GEN_INT (offset));
11605 /* VRSAVE is a bit vector representing which AltiVec registers
11606 are used. The OS uses this to determine which vector
11607 registers to save on a context switch. We need to save
11608 VRSAVE on the stack frame, add whatever AltiVec registers we
11609 used in this function, and do the corresponding magic in the
11612 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11614 rtx reg, mem, vrsave;
11617 /* Get VRSAVE onto a GPR. */
11618 reg = gen_rtx_REG (SImode, 12);
11619 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11621 emit_insn (gen_get_vrsave_internal (reg));
11623 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11626 offset = info->vrsave_save_offset + sp_offset;
11628 = gen_rtx_MEM (SImode,
11629 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11630 set_mem_alias_set (mem, rs6000_sr_alias_set);
11631 insn = emit_move_insn (mem, reg);
11633 /* Include the registers in the mask. */
11634 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11636 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11639 /* If we use the link register, get it into r0. */
11640 if (info->lr_save_p)
11641 emit_move_insn (gen_rtx_REG (Pmode, 0),
11642 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11644 /* If we need to save CR, put it into r12. */
11645 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11647 cr_save_rtx = gen_rtx_REG (SImode, 12);
11648 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11651 /* Do any required saving of fpr's. If only one or two to save, do
11652 it ourselves. Otherwise, call function. */
11653 if (saving_FPRs_inline)
11656 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11657 if ((regs_ever_live[info->first_fp_reg_save+i]
11658 && ! call_used_regs[info->first_fp_reg_save+i]))
11659 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11660 info->first_fp_reg_save + i,
11661 info->fp_save_offset + sp_offset + 8 * i,
11664 else if (info->first_fp_reg_save != 64)
11668 const char *alloc_rname;
11670 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11672 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11673 gen_rtx_REG (Pmode,
11674 LINK_REGISTER_REGNUM));
11675 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11676 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11677 alloc_rname = ggc_strdup (rname);
11678 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11679 gen_rtx_SYMBOL_REF (Pmode,
11681 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11683 rtx addr, reg, mem;
11684 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11685 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11686 GEN_INT (info->fp_save_offset
11687 + sp_offset + 8*i));
11688 mem = gen_rtx_MEM (DFmode, addr);
11689 set_mem_alias_set (mem, rs6000_sr_alias_set);
11691 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11693 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11694 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11695 NULL_RTX, NULL_RTX);
11698 /* Save GPRs. This is done as a PARALLEL if we are using
11699 the store-multiple instructions. */
11700 if (using_store_multiple)
11704 p = rtvec_alloc (32 - info->first_gp_reg_save);
11705 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11707 rtx addr, reg, mem;
11708 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11709 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11710 GEN_INT (info->gp_save_offset
11713 mem = gen_rtx_MEM (reg_mode, addr);
11714 set_mem_alias_set (mem, rs6000_sr_alias_set);
11716 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11718 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11719 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11720 NULL_RTX, NULL_RTX);
11725 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11726 if ((regs_ever_live[info->first_gp_reg_save+i]
11727 && ! call_used_regs[info->first_gp_reg_save+i])
11728 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11729 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11730 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11732 rtx addr, reg, mem;
11733 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11735 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11737 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11740 if (!SPE_CONST_OFFSET_OK (offset))
11742 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11743 emit_move_insn (b, GEN_INT (offset));
11746 b = GEN_INT (offset);
11748 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11749 mem = gen_rtx_MEM (V2SImode, addr);
11750 set_mem_alias_set (mem, rs6000_sr_alias_set);
11751 insn = emit_move_insn (mem, reg);
11753 if (GET_CODE (b) == CONST_INT)
11754 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11755 NULL_RTX, NULL_RTX);
11757 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11758 b, GEN_INT (offset));
11762 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11763 GEN_INT (info->gp_save_offset
11766 mem = gen_rtx_MEM (reg_mode, addr);
11767 set_mem_alias_set (mem, rs6000_sr_alias_set);
11769 insn = emit_move_insn (mem, reg);
11770 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11771 NULL_RTX, NULL_RTX);
11776 /* ??? There's no need to emit actual instructions here, but it's the
11777 easiest way to get the frame unwind information emitted. */
11778 if (current_function_calls_eh_return)
11780 unsigned int i, regno;
11782 /* In AIX ABI we need to pretend we save r2 here. */
11785 rtx addr, reg, mem;
11787 reg = gen_rtx_REG (reg_mode, 2);
11788 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11789 GEN_INT (sp_offset + 5 * reg_size));
11790 mem = gen_rtx_MEM (reg_mode, addr);
11791 set_mem_alias_set (mem, rs6000_sr_alias_set);
11793 insn = emit_move_insn (mem, reg);
11794 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11795 NULL_RTX, NULL_RTX);
11796 PATTERN (insn) = gen_blockage ();
11801 regno = EH_RETURN_DATA_REGNO (i);
11802 if (regno == INVALID_REGNUM)
11805 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11806 info->ehrd_offset + sp_offset
11807 + reg_size * (int) i,
11812 /* Save lr if we used it. */
11813 if (info->lr_save_p)
11815 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11816 GEN_INT (info->lr_save_offset + sp_offset));
11817 rtx reg = gen_rtx_REG (Pmode, 0);
11818 rtx mem = gen_rtx_MEM (Pmode, addr);
11819 /* This should not be of rs6000_sr_alias_set, because of
11820 __builtin_return_address. */
11822 insn = emit_move_insn (mem, reg);
11823 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11824 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11827 /* Save CR if we use any that must be preserved. */
11828 if (info->cr_save_p)
11830 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11831 GEN_INT (info->cr_save_offset + sp_offset));
11832 rtx mem = gen_rtx_MEM (SImode, addr);
11834 set_mem_alias_set (mem, rs6000_sr_alias_set);
11836 /* If r12 was used to hold the original sp, copy cr into r0 now
11838 if (REGNO (frame_reg_rtx) == 12)
11840 cr_save_rtx = gen_rtx_REG (SImode, 0);
11841 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11843 insn = emit_move_insn (mem, cr_save_rtx);
11845 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11846 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11847 But that's OK. All we have to do is specify that _one_ condition
11848 code register is saved in this stack slot. The thrower's epilogue
11849 will then restore all the call-saved registers.
11850 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11851 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11852 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11855 /* Update stack and set back pointer unless this is V.4,
11856 for which it was done previously. */
11858 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
11859 rs6000_emit_allocate_stack (info->total_size, FALSE);
11861 /* Set frame pointer, if needed. */
11862 if (frame_pointer_needed)
11864 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11866 RTX_FRAME_RELATED_P (insn) = 1;
11869 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11870 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11871 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11872 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11874 /* If emit_load_toc_table will use the link register, we need to save
11875 it. We use R12 for this purpose because emit_load_toc_table
11876 can use register 0. This allows us to use a plain 'blr' to return
11877 from the procedure more often. */
11878 int save_LR_around_toc_setup = (TARGET_ELF
11879 && DEFAULT_ABI != ABI_AIX
11881 && ! info->lr_save_p
11882 && EXIT_BLOCK_PTR->pred != NULL);
11883 if (save_LR_around_toc_setup)
11885 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11886 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11887 rs6000_emit_load_toc_table (TRUE);
11888 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11891 rs6000_emit_load_toc_table (TRUE);
11895 if (DEFAULT_ABI == ABI_DARWIN
11896 && flag_pic && current_function_uses_pic_offset_table)
11898 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11899 const char *picbase = machopic_function_base_name ();
11900 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11902 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11904 rs6000_maybe_dead (
11905 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11906 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11911 /* Write function prologue. */
11914 rs6000_output_function_prologue (FILE *file,
11915 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
11917 rs6000_stack_t *info = rs6000_stack_info ();
11919 if (TARGET_DEBUG_STACK)
11920 debug_stack_info (info);
11922 /* Write .extern for any function we will call to save and restore
11924 if (info->first_fp_reg_save < 64
11925 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11926 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11927 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11928 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11929 RESTORE_FP_SUFFIX);
11931 /* Write .extern for AIX common mode routines, if needed. */
11932 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11934 fputs ("\t.extern __mulh\n", file);
11935 fputs ("\t.extern __mull\n", file);
11936 fputs ("\t.extern __divss\n", file);
11937 fputs ("\t.extern __divus\n", file);
11938 fputs ("\t.extern __quoss\n", file);
11939 fputs ("\t.extern __quous\n", file);
11940 common_mode_defined = 1;
11943 if (! HAVE_prologue)
11947 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11948 the "toplevel" insn chain. */
11949 emit_note (NOTE_INSN_DELETED);
11950 rs6000_emit_prologue ();
11951 emit_note (NOTE_INSN_DELETED);
11953 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11957 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11959 INSN_ADDRESSES_NEW (insn, addr);
11964 if (TARGET_DEBUG_STACK)
11965 debug_rtx_list (get_insns (), 100);
11966 final (get_insns (), file, FALSE, FALSE);
11970 rs6000_pic_labelno++;
11973 /* Emit function epilogue as insns.
11975 At present, dwarf2out_frame_debug_expr doesn't understand
11976 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11977 anywhere in the epilogue. Most of the insns below would in any case
11978 need special notes to explain where r11 is in relation to the stack. */
11981 rs6000_emit_epilogue (int sibcall)
11983 rs6000_stack_t *info;
11984 int restoring_FPRs_inline;
11985 int using_load_multiple;
11986 int using_mfcr_multiple;
11987 int use_backchain_to_restore_sp;
11989 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11990 rtx frame_reg_rtx = sp_reg_rtx;
11991 enum machine_mode reg_mode = Pmode;
11992 int reg_size = UNITS_PER_WORD;
11995 info = rs6000_stack_info ();
11997 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11999 reg_mode = V2SImode;
12003 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12004 && (!TARGET_SPE_ABI
12005 || info->spe_64bit_regs_used == 0)
12006 && info->first_gp_reg_save < 31);
12007 restoring_FPRs_inline = (sibcall
12008 || current_function_calls_eh_return
12009 || info->first_fp_reg_save == 64
12010 || FP_SAVE_INLINE (info->first_fp_reg_save));
12011 use_backchain_to_restore_sp = (frame_pointer_needed
12012 || current_function_calls_alloca
12013 || info->total_size > 32767);
12014 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12015 || rs6000_cpu == PROCESSOR_PPC603
12016 || rs6000_cpu == PROCESSOR_PPC750
12019 /* If we have a frame pointer, a call to alloca, or a large stack
12020 frame, restore the old stack pointer using the backchain. Otherwise,
12021 we know what size to update it with. */
12022 if (use_backchain_to_restore_sp)
12024 /* Under V.4, don't reset the stack pointer until after we're done
12025 loading the saved registers. */
12026 if (DEFAULT_ABI == ABI_V4)
12027 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12029 emit_move_insn (frame_reg_rtx,
12030 gen_rtx_MEM (Pmode, sp_reg_rtx));
12033 else if (info->push_p)
12035 if (DEFAULT_ABI == ABI_V4
12036 || current_function_calls_eh_return)
12037 sp_offset = info->total_size;
12040 emit_insn (TARGET_32BIT
12041 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12042 GEN_INT (info->total_size))
12043 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12044 GEN_INT (info->total_size)));
12048 /* Restore AltiVec registers if needed. */
12049 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12053 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12054 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12056 rtx addr, areg, mem;
12058 areg = gen_rtx_REG (Pmode, 0);
12060 (areg, GEN_INT (info->altivec_save_offset
12062 + 16 * (i - info->first_altivec_reg_save)));
12064 /* AltiVec addressing mode is [reg+reg]. */
12065 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12066 mem = gen_rtx_MEM (V4SImode, addr);
12067 set_mem_alias_set (mem, rs6000_sr_alias_set);
12069 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12073 /* Restore VRSAVE if needed. */
12074 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
12076 rtx addr, mem, reg;
12078 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12079 GEN_INT (info->vrsave_save_offset + sp_offset));
12080 mem = gen_rtx_MEM (SImode, addr);
12081 set_mem_alias_set (mem, rs6000_sr_alias_set);
12082 reg = gen_rtx_REG (SImode, 12);
12083 emit_move_insn (reg, mem);
12085 emit_insn (generate_set_vrsave (reg, info, 1));
12088 /* Get the old lr if we saved it. */
12089 if (info->lr_save_p)
12091 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12092 info->lr_save_offset + sp_offset);
12094 set_mem_alias_set (mem, rs6000_sr_alias_set);
12096 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12099 /* Get the old cr if we saved it. */
12100 if (info->cr_save_p)
12102 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12103 GEN_INT (info->cr_save_offset + sp_offset));
12104 rtx mem = gen_rtx_MEM (SImode, addr);
12106 set_mem_alias_set (mem, rs6000_sr_alias_set);
12108 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12111 /* Set LR here to try to overlap restores below. */
12112 if (info->lr_save_p)
12113 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12114 gen_rtx_REG (Pmode, 0));
12116 /* Load exception handler data registers, if needed. */
12117 if (current_function_calls_eh_return)
12119 unsigned int i, regno;
12123 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12124 GEN_INT (sp_offset + 5 * reg_size));
12125 rtx mem = gen_rtx_MEM (reg_mode, addr);
12127 set_mem_alias_set (mem, rs6000_sr_alias_set);
12129 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12136 regno = EH_RETURN_DATA_REGNO (i);
12137 if (regno == INVALID_REGNUM)
12140 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12141 info->ehrd_offset + sp_offset
12142 + reg_size * (int) i);
12143 set_mem_alias_set (mem, rs6000_sr_alias_set);
12145 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12149 /* Restore GPRs. This is done as a PARALLEL if we are using
12150 the load-multiple instructions. */
12151 if (using_load_multiple)
12154 p = rtvec_alloc (32 - info->first_gp_reg_save);
12155 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12157 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12158 GEN_INT (info->gp_save_offset
12161 rtx mem = gen_rtx_MEM (reg_mode, addr);
12163 set_mem_alias_set (mem, rs6000_sr_alias_set);
12166 gen_rtx_SET (VOIDmode,
12167 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12170 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12173 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12174 if ((regs_ever_live[info->first_gp_reg_save+i]
12175 && ! call_used_regs[info->first_gp_reg_save+i])
12176 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12177 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12178 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12180 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12181 GEN_INT (info->gp_save_offset
12184 rtx mem = gen_rtx_MEM (reg_mode, addr);
12186 /* Restore 64-bit quantities for SPE. */
12187 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12189 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12192 if (!SPE_CONST_OFFSET_OK (offset))
12194 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12195 emit_move_insn (b, GEN_INT (offset));
12198 b = GEN_INT (offset);
12200 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12201 mem = gen_rtx_MEM (V2SImode, addr);
12204 set_mem_alias_set (mem, rs6000_sr_alias_set);
12206 emit_move_insn (gen_rtx_REG (reg_mode,
12207 info->first_gp_reg_save + i), mem);
12210 /* Restore fpr's if we need to do it without calling a function. */
12211 if (restoring_FPRs_inline)
12212 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12213 if ((regs_ever_live[info->first_fp_reg_save+i]
12214 && ! call_used_regs[info->first_fp_reg_save+i]))
12217 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12218 GEN_INT (info->fp_save_offset
12221 mem = gen_rtx_MEM (DFmode, addr);
12222 set_mem_alias_set (mem, rs6000_sr_alias_set);
12224 emit_move_insn (gen_rtx_REG (DFmode,
12225 info->first_fp_reg_save + i),
12229 /* If we saved cr, restore it here. Just those that were used. */
12230 if (info->cr_save_p)
12232 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12235 if (using_mfcr_multiple)
12237 for (i = 0; i < 8; i++)
12238 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12244 if (using_mfcr_multiple && count > 1)
12249 p = rtvec_alloc (count);
12252 for (i = 0; i < 8; i++)
12253 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12255 rtvec r = rtvec_alloc (2);
12256 RTVEC_ELT (r, 0) = r12_rtx;
12257 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12258 RTVEC_ELT (p, ndx) =
12259 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12260 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12263 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12268 for (i = 0; i < 8; i++)
12269 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12271 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12277 /* If this is V.4, unwind the stack pointer after all of the loads
12278 have been done. We need to emit a block here so that sched
12279 doesn't decide to move the sp change before the register restores
12280 (which may not have any obvious dependency on the stack). This
12281 doesn't hurt performance, because there is no scheduling that can
12282 be done after this point. */
12283 if (DEFAULT_ABI == ABI_V4
12284 || current_function_calls_eh_return)
12286 if (frame_reg_rtx != sp_reg_rtx)
12287 rs6000_emit_stack_tie ();
12289 if (use_backchain_to_restore_sp)
12291 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12293 else if (sp_offset != 0)
12295 emit_insn (TARGET_32BIT
12296 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12297 GEN_INT (sp_offset))
12298 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12299 GEN_INT (sp_offset)));
12303 if (current_function_calls_eh_return)
12305 rtx sa = EH_RETURN_STACKADJ_RTX;
12306 emit_insn (TARGET_32BIT
12307 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12308 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12314 if (! restoring_FPRs_inline)
12315 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12317 p = rtvec_alloc (2);
12319 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12320 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12321 gen_rtx_REG (Pmode,
12322 LINK_REGISTER_REGNUM));
12324 /* If we have to restore more than two FP registers, branch to the
12325 restore function. It will return to our caller. */
12326 if (! restoring_FPRs_inline)
12330 const char *alloc_rname;
12332 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12333 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12334 alloc_rname = ggc_strdup (rname);
12335 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12336 gen_rtx_SYMBOL_REF (Pmode,
12339 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12342 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12343 GEN_INT (info->fp_save_offset + 8*i));
12344 mem = gen_rtx_MEM (DFmode, addr);
12345 set_mem_alias_set (mem, rs6000_sr_alias_set);
12347 RTVEC_ELT (p, i+3) =
12348 gen_rtx_SET (VOIDmode,
12349 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12354 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12358 /* Write function epilogue. */
12361 rs6000_output_function_epilogue (FILE *file,
12362 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12364 rs6000_stack_t *info = rs6000_stack_info ();
12366 if (! HAVE_epilogue)
12368 rtx insn = get_last_insn ();
12369 /* If the last insn was a BARRIER, we don't have to write anything except
12370 the trace table. */
12371 if (GET_CODE (insn) == NOTE)
12372 insn = prev_nonnote_insn (insn);
12373 if (insn == 0 || GET_CODE (insn) != BARRIER)
12375 /* This is slightly ugly, but at least we don't have two
12376 copies of the epilogue-emitting code. */
12379 /* A NOTE_INSN_DELETED is supposed to be at the start
12380 and end of the "toplevel" insn chain. */
12381 emit_note (NOTE_INSN_DELETED);
12382 rs6000_emit_epilogue (FALSE);
12383 emit_note (NOTE_INSN_DELETED);
12385 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12389 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12391 INSN_ADDRESSES_NEW (insn, addr);
12396 if (TARGET_DEBUG_STACK)
12397 debug_rtx_list (get_insns (), 100);
12398 final (get_insns (), file, FALSE, FALSE);
12403 #if TARGET_OBJECT_FORMAT == OBJECT_MACHO
12404 /* Mach-O doesn't support labels at the end of objects, so if
12405 it looks like we might want one, insert a NOP. */
12407 rtx insn = get_last_insn ();
12410 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12411 insn = PREV_INSN (insn);
12415 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12416 fputs ("\tnop\n", file);
12420 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12423 We don't output a traceback table if -finhibit-size-directive was
12424 used. The documentation for -finhibit-size-directive reads
12425 ``don't output a @code{.size} assembler directive, or anything
12426 else that would cause trouble if the function is split in the
12427 middle, and the two halves are placed at locations far apart in
12428 memory.'' The traceback table has this property, since it
12429 includes the offset from the start of the function to the
12430 traceback table itself.
12432 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12433 different traceback table. */
12434 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12435 && rs6000_traceback != traceback_none)
12437 const char *fname = NULL;
12438 const char *language_string = lang_hooks.name;
12439 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12441 int optional_tbtab;
12443 if (rs6000_traceback == traceback_full)
12444 optional_tbtab = 1;
12445 else if (rs6000_traceback == traceback_part)
12446 optional_tbtab = 0;
12448 optional_tbtab = !optimize_size && !TARGET_ELF;
12450 if (optional_tbtab)
12452 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12453 while (*fname == '.') /* V.4 encodes . in the name */
12456 /* Need label immediately before tbtab, so we can compute
12457 its offset from the function start. */
12458 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12459 ASM_OUTPUT_LABEL (file, fname);
12462 /* The .tbtab pseudo-op can only be used for the first eight
12463 expressions, since it can't handle the possibly variable
12464 length fields that follow. However, if you omit the optional
12465 fields, the assembler outputs zeros for all optional fields
12466 anyways, giving each variable length field is minimum length
12467 (as defined in sys/debug.h). Thus we can not use the .tbtab
12468 pseudo-op at all. */
12470 /* An all-zero word flags the start of the tbtab, for debuggers
12471 that have to find it by searching forward from the entry
12472 point or from the current pc. */
12473 fputs ("\t.long 0\n", file);
12475 /* Tbtab format type. Use format type 0. */
12476 fputs ("\t.byte 0,", file);
12478 /* Language type. Unfortunately, there doesn't seem to be any
12479 official way to get this info, so we use language_string. C
12480 is 0. C++ is 9. No number defined for Obj-C, so use the
12481 value for C for now. There is no official value for Java,
12482 although IBM appears to be using 13. There is no official value
12483 for Chill, so we've chosen 44 pseudo-randomly. */
12484 if (! strcmp (language_string, "GNU C")
12485 || ! strcmp (language_string, "GNU Objective-C"))
12487 else if (! strcmp (language_string, "GNU F77"))
12489 else if (! strcmp (language_string, "GNU Ada"))
12491 else if (! strcmp (language_string, "GNU Pascal"))
12493 else if (! strcmp (language_string, "GNU C++"))
12495 else if (! strcmp (language_string, "GNU Java"))
12497 else if (! strcmp (language_string, "GNU CHILL"))
12501 fprintf (file, "%d,", i);
12503 /* 8 single bit fields: global linkage (not set for C extern linkage,
12504 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12505 from start of procedure stored in tbtab, internal function, function
12506 has controlled storage, function has no toc, function uses fp,
12507 function logs/aborts fp operations. */
12508 /* Assume that fp operations are used if any fp reg must be saved. */
12509 fprintf (file, "%d,",
12510 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12512 /* 6 bitfields: function is interrupt handler, name present in
12513 proc table, function calls alloca, on condition directives
12514 (controls stack walks, 3 bits), saves condition reg, saves
12516 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12517 set up as a frame pointer, even when there is no alloca call. */
12518 fprintf (file, "%d,",
12519 ((optional_tbtab << 6)
12520 | ((optional_tbtab & frame_pointer_needed) << 5)
12521 | (info->cr_save_p << 1)
12522 | (info->lr_save_p)));
12524 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12526 fprintf (file, "%d,",
12527 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12529 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12530 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12532 if (optional_tbtab)
12534 /* Compute the parameter info from the function decl argument
12537 int next_parm_info_bit = 31;
12539 for (decl = DECL_ARGUMENTS (current_function_decl);
12540 decl; decl = TREE_CHAIN (decl))
12542 rtx parameter = DECL_INCOMING_RTL (decl);
12543 enum machine_mode mode = GET_MODE (parameter);
12545 if (GET_CODE (parameter) == REG)
12547 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12553 if (mode == SFmode)
12555 else if (mode == DFmode || mode == TFmode)
12560 /* If only one bit will fit, don't or in this entry. */
12561 if (next_parm_info_bit > 0)
12562 parm_info |= (bits << (next_parm_info_bit - 1));
12563 next_parm_info_bit -= 2;
12567 fixed_parms += ((GET_MODE_SIZE (mode)
12568 + (UNITS_PER_WORD - 1))
12570 next_parm_info_bit -= 1;
12576 /* Number of fixed point parameters. */
12577 /* This is actually the number of words of fixed point parameters; thus
12578 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12579 fprintf (file, "%d,", fixed_parms);
12581 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12583 /* This is actually the number of fp registers that hold parameters;
12584 and thus the maximum value is 13. */
12585 /* Set parameters on stack bit if parameters are not in their original
12586 registers, regardless of whether they are on the stack? Xlc
12587 seems to set the bit when not optimizing. */
12588 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12590 if (! optional_tbtab)
12593 /* Optional fields follow. Some are variable length. */
12595 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12596 11 double float. */
12597 /* There is an entry for each parameter in a register, in the order that
12598 they occur in the parameter list. Any intervening arguments on the
12599 stack are ignored. If the list overflows a long (max possible length
12600 34 bits) then completely leave off all elements that don't fit. */
12601 /* Only emit this long if there was at least one parameter. */
12602 if (fixed_parms || float_parms)
12603 fprintf (file, "\t.long %d\n", parm_info);
12605 /* Offset from start of code to tb table. */
12606 fputs ("\t.long ", file);
12607 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12609 RS6000_OUTPUT_BASENAME (file, fname);
12611 assemble_name (file, fname);
12613 fputs ("-.", file);
12615 RS6000_OUTPUT_BASENAME (file, fname);
12617 assemble_name (file, fname);
12621 /* Interrupt handler mask. */
12622 /* Omit this long, since we never set the interrupt handler bit
12625 /* Number of CTL (controlled storage) anchors. */
12626 /* Omit this long, since the has_ctl bit is never set above. */
12628 /* Displacement into stack of each CTL anchor. */
12629 /* Omit this list of longs, because there are no CTL anchors. */
12631 /* Length of function name. */
12634 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12636 /* Function name. */
12637 assemble_string (fname, strlen (fname));
12639 /* Register for alloca automatic storage; this is always reg 31.
12640 Only emit this if the alloca bit was set above. */
12641 if (frame_pointer_needed)
12642 fputs ("\t.byte 31\n", file);
12644 fputs ("\t.align 2\n", file);
12648 /* A C compound statement that outputs the assembler code for a thunk
12649 function, used to implement C++ virtual function calls with
12650 multiple inheritance. The thunk acts as a wrapper around a virtual
12651 function, adjusting the implicit object parameter before handing
12652 control off to the real function.
12654 First, emit code to add the integer DELTA to the location that
12655 contains the incoming first argument. Assume that this argument
12656 contains a pointer, and is the one used to pass the `this' pointer
12657 in C++. This is the incoming argument *before* the function
12658 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12659 values of all other incoming arguments.
12661 After the addition, emit code to jump to FUNCTION, which is a
12662 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12663 not touch the return address. Hence returning from FUNCTION will
12664 return to whoever called the current `thunk'.
12666 The effect must be as if FUNCTION had been called directly with the
12667 adjusted first argument. This macro is responsible for emitting
12668 all of the code for a thunk function; output_function_prologue()
12669 and output_function_epilogue() are not invoked.
12671 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12672 been extracted from it.) It might possibly be useful on some
12673 targets, but probably not.
12675 If you do not define this macro, the target-independent code in the
12676 C++ frontend will generate a less efficient heavyweight thunk that
12677 calls FUNCTION instead of jumping to it. The generic approach does
12678 not support varargs. */
12681 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
12682 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
12685 rtx this, insn, funexp;
12687 reload_completed = 1;
12688 epilogue_completed = 1;
12689 no_new_pseudos = 1;
12691 /* Mark the end of the (empty) prologue. */
12692 emit_note (NOTE_INSN_PROLOGUE_END);
12694 /* Find the "this" pointer. If the function returns a structure,
12695 the structure return pointer is in r3. */
12696 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
12697 this = gen_rtx_REG (Pmode, 4);
12699 this = gen_rtx_REG (Pmode, 3);
12701 /* Apply the constant offset, if required. */
12704 rtx delta_rtx = GEN_INT (delta);
12705 emit_insn (TARGET_32BIT
12706 ? gen_addsi3 (this, this, delta_rtx)
12707 : gen_adddi3 (this, this, delta_rtx));
12710 /* Apply the offset from the vtable, if required. */
12713 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12714 rtx tmp = gen_rtx_REG (Pmode, 12);
12716 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12717 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
12719 emit_insn (TARGET_32BIT
12720 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12721 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12722 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12726 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
12728 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
12730 emit_insn (TARGET_32BIT
12731 ? gen_addsi3 (this, this, tmp)
12732 : gen_adddi3 (this, this, tmp));
12735 /* Generate a tail call to the target function. */
12736 if (!TREE_USED (function))
12738 assemble_external (function);
12739 TREE_USED (function) = 1;
12741 funexp = XEXP (DECL_RTL (function), 0);
12742 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12745 if (MACHOPIC_INDIRECT)
12746 funexp = machopic_indirect_call_target (funexp);
12749 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12750 generate sibcall RTL explicitly to avoid constraint abort. */
12751 insn = emit_call_insn (
12752 gen_rtx_PARALLEL (VOIDmode,
12754 gen_rtx_CALL (VOIDmode,
12755 funexp, const0_rtx),
12756 gen_rtx_USE (VOIDmode, const0_rtx),
12757 gen_rtx_USE (VOIDmode,
12758 gen_rtx_REG (SImode,
12759 LINK_REGISTER_REGNUM)),
12760 gen_rtx_RETURN (VOIDmode))));
12761 SIBLING_CALL_P (insn) = 1;
12764 /* Run just enough of rest_of_compilation to get the insns emitted.
12765 There's not really enough bulk here to make other passes such as
12766 instruction scheduling worth while. Note that use_thunk calls
12767 assemble_start_function and assemble_end_function. */
12768 insn = get_insns ();
12769 insn_locators_initialize ();
12770 shorten_branches (insn);
12771 final_start_function (insn, file, 1);
12772 final (insn, file, 1, 0);
12773 final_end_function ();
12775 reload_completed = 0;
12776 epilogue_completed = 0;
12777 no_new_pseudos = 0;
12780 /* A quick summary of the various types of 'constant-pool tables'
12783 Target Flags Name One table per
12784 AIX (none) AIX TOC object file
12785 AIX -mfull-toc AIX TOC object file
12786 AIX -mminimal-toc AIX minimal TOC translation unit
12787 SVR4/EABI (none) SVR4 SDATA object file
12788 SVR4/EABI -fpic SVR4 pic object file
12789 SVR4/EABI -fPIC SVR4 PIC translation unit
12790 SVR4/EABI -mrelocatable EABI TOC function
12791 SVR4/EABI -maix AIX TOC object file
12792 SVR4/EABI -maix -mminimal-toc
12793 AIX minimal TOC translation unit
12795 Name Reg. Set by entries contains:
12796 made by addrs? fp? sum?
12798 AIX TOC 2 crt0 as Y option option
12799 AIX minimal TOC 30 prolog gcc Y Y option
12800 SVR4 SDATA 13 crt0 gcc N Y N
12801 SVR4 pic 30 prolog ld Y not yet N
12802 SVR4 PIC 30 prolog gcc Y option option
12803 EABI TOC 30 prolog gcc Y option option
12807 /* Hash functions for the hash table. */
12810 rs6000_hash_constant (rtx k)
12812 enum rtx_code code = GET_CODE (k);
12813 enum machine_mode mode = GET_MODE (k);
12814 unsigned result = (code << 3) ^ mode;
12815 const char *format;
12818 format = GET_RTX_FORMAT (code);
12819 flen = strlen (format);
12825 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12828 if (mode != VOIDmode)
12829 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12841 for (; fidx < flen; fidx++)
12842 switch (format[fidx])
12847 const char *str = XSTR (k, fidx);
12848 len = strlen (str);
12849 result = result * 613 + len;
12850 for (i = 0; i < len; i++)
12851 result = result * 613 + (unsigned) str[i];
12856 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12860 result = result * 613 + (unsigned) XINT (k, fidx);
12863 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12864 result = result * 613 + (unsigned) XWINT (k, fidx);
12868 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12869 result = result * 613 + (unsigned) (XWINT (k, fidx)
12883 toc_hash_function (const void *hash_entry)
12885 const struct toc_hash_struct *thc =
12886 (const struct toc_hash_struct *) hash_entry;
12887 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12890 /* Compare H1 and H2 for equivalence. */
12893 toc_hash_eq (const void *h1, const void *h2)
12895 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12896 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12898 if (((const struct toc_hash_struct *) h1)->key_mode
12899 != ((const struct toc_hash_struct *) h2)->key_mode)
12902 return rtx_equal_p (r1, r2);
12905 /* These are the names given by the C++ front-end to vtables, and
12906 vtable-like objects. Ideally, this logic should not be here;
12907 instead, there should be some programmatic way of inquiring as
12908 to whether or not an object is a vtable. */
12910 #define VTABLE_NAME_P(NAME) \
12911 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12912 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12913 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12914 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12917 rs6000_output_symbol_ref (FILE *file, rtx x)
12919 /* Currently C++ toc references to vtables can be emitted before it
12920 is decided whether the vtable is public or private. If this is
12921 the case, then the linker will eventually complain that there is
12922 a reference to an unknown section. Thus, for vtables only,
12923 we emit the TOC reference to reference the symbol and not the
12925 const char *name = XSTR (x, 0);
12927 if (VTABLE_NAME_P (name))
12929 RS6000_OUTPUT_BASENAME (file, name);
12932 assemble_name (file, name);
12935 /* Output a TOC entry. We derive the entry name from what is being
12939 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
12942 const char *name = buf;
12943 const char *real_name;
12950 /* When the linker won't eliminate them, don't output duplicate
12951 TOC entries (this happens on AIX if there is any kind of TOC,
12952 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12954 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
12956 struct toc_hash_struct *h;
12959 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12960 time because GGC is not initialized at that point. */
12961 if (toc_hash_table == NULL)
12962 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
12963 toc_hash_eq, NULL);
12965 h = ggc_alloc (sizeof (*h));
12967 h->key_mode = mode;
12968 h->labelno = labelno;
12970 found = htab_find_slot (toc_hash_table, h, 1);
12971 if (*found == NULL)
12973 else /* This is indeed a duplicate.
12974 Set this label equal to that label. */
12976 fputs ("\t.set ", file);
12977 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12978 fprintf (file, "%d,", labelno);
12979 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12980 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12986 /* If we're going to put a double constant in the TOC, make sure it's
12987 aligned properly when strict alignment is on. */
12988 if (GET_CODE (x) == CONST_DOUBLE
12989 && STRICT_ALIGNMENT
12990 && GET_MODE_BITSIZE (mode) >= 64
12991 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12992 ASM_OUTPUT_ALIGN (file, 3);
12995 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12997 /* Handle FP constants specially. Note that if we have a minimal
12998 TOC, things we put here aren't actually in the TOC, so we can allow
13000 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13002 REAL_VALUE_TYPE rv;
13005 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13006 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13010 if (TARGET_MINIMAL_TOC)
13011 fputs (DOUBLE_INT_ASM_OP, file);
13013 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13014 k[0] & 0xffffffff, k[1] & 0xffffffff,
13015 k[2] & 0xffffffff, k[3] & 0xffffffff);
13016 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13017 k[0] & 0xffffffff, k[1] & 0xffffffff,
13018 k[2] & 0xffffffff, k[3] & 0xffffffff);
13023 if (TARGET_MINIMAL_TOC)
13024 fputs ("\t.long ", file);
13026 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13027 k[0] & 0xffffffff, k[1] & 0xffffffff,
13028 k[2] & 0xffffffff, k[3] & 0xffffffff);
13029 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13030 k[0] & 0xffffffff, k[1] & 0xffffffff,
13031 k[2] & 0xffffffff, k[3] & 0xffffffff);
13035 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13037 REAL_VALUE_TYPE rv;
13040 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13041 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13045 if (TARGET_MINIMAL_TOC)
13046 fputs (DOUBLE_INT_ASM_OP, file);
13048 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13049 k[0] & 0xffffffff, k[1] & 0xffffffff);
13050 fprintf (file, "0x%lx%08lx\n",
13051 k[0] & 0xffffffff, k[1] & 0xffffffff);
13056 if (TARGET_MINIMAL_TOC)
13057 fputs ("\t.long ", file);
13059 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13060 k[0] & 0xffffffff, k[1] & 0xffffffff);
13061 fprintf (file, "0x%lx,0x%lx\n",
13062 k[0] & 0xffffffff, k[1] & 0xffffffff);
13066 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13068 REAL_VALUE_TYPE rv;
13071 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13072 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13076 if (TARGET_MINIMAL_TOC)
13077 fputs (DOUBLE_INT_ASM_OP, file);
13079 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13080 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13085 if (TARGET_MINIMAL_TOC)
13086 fputs ("\t.long ", file);
13088 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13089 fprintf (file, "0x%lx\n", l & 0xffffffff);
13093 else if (GET_MODE (x) == VOIDmode
13094 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13096 unsigned HOST_WIDE_INT low;
13097 HOST_WIDE_INT high;
13099 if (GET_CODE (x) == CONST_DOUBLE)
13101 low = CONST_DOUBLE_LOW (x);
13102 high = CONST_DOUBLE_HIGH (x);
13105 #if HOST_BITS_PER_WIDE_INT == 32
13108 high = (low & 0x80000000) ? ~0 : 0;
13112 low = INTVAL (x) & 0xffffffff;
13113 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13117 /* TOC entries are always Pmode-sized, but since this
13118 is a bigendian machine then if we're putting smaller
13119 integer constants in the TOC we have to pad them.
13120 (This is still a win over putting the constants in
13121 a separate constant pool, because then we'd have
13122 to have both a TOC entry _and_ the actual constant.)
13124 For a 32-bit target, CONST_INT values are loaded and shifted
13125 entirely within `low' and can be stored in one TOC entry. */
13127 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13128 abort ();/* It would be easy to make this work, but it doesn't now. */
13130 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13132 #if HOST_BITS_PER_WIDE_INT == 32
13133 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13134 POINTER_SIZE, &low, &high, 0);
13137 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13138 high = (HOST_WIDE_INT) low >> 32;
13145 if (TARGET_MINIMAL_TOC)
13146 fputs (DOUBLE_INT_ASM_OP, file);
13148 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13149 (long) high & 0xffffffff, (long) low & 0xffffffff);
13150 fprintf (file, "0x%lx%08lx\n",
13151 (long) high & 0xffffffff, (long) low & 0xffffffff);
13156 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13158 if (TARGET_MINIMAL_TOC)
13159 fputs ("\t.long ", file);
13161 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13162 (long) high & 0xffffffff, (long) low & 0xffffffff);
13163 fprintf (file, "0x%lx,0x%lx\n",
13164 (long) high & 0xffffffff, (long) low & 0xffffffff);
13168 if (TARGET_MINIMAL_TOC)
13169 fputs ("\t.long ", file);
13171 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13172 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13178 if (GET_CODE (x) == CONST)
13180 if (GET_CODE (XEXP (x, 0)) != PLUS)
13183 base = XEXP (XEXP (x, 0), 0);
13184 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13187 if (GET_CODE (base) == SYMBOL_REF)
13188 name = XSTR (base, 0);
13189 else if (GET_CODE (base) == LABEL_REF)
13190 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13191 else if (GET_CODE (base) == CODE_LABEL)
13192 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13196 real_name = (*targetm.strip_name_encoding) (name);
13197 if (TARGET_MINIMAL_TOC)
13198 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13201 fprintf (file, "\t.tc %s", real_name);
13204 fprintf (file, ".N%d", - offset);
13206 fprintf (file, ".P%d", offset);
13208 fputs ("[TC],", file);
13211 /* Currently C++ toc references to vtables can be emitted before it
13212 is decided whether the vtable is public or private. If this is
13213 the case, then the linker will eventually complain that there is
13214 a TOC reference to an unknown section. Thus, for vtables only,
13215 we emit the TOC reference to reference the symbol and not the
13217 if (VTABLE_NAME_P (name))
13219 RS6000_OUTPUT_BASENAME (file, name);
13221 fprintf (file, "%d", offset);
13222 else if (offset > 0)
13223 fprintf (file, "+%d", offset);
13226 output_addr_const (file, x);
13230 /* Output an assembler pseudo-op to write an ASCII string of N characters
13231 starting at P to FILE.
13233 On the RS/6000, we have to do this using the .byte operation and
13234 write out special characters outside the quoted string.
13235 Also, the assembler is broken; very long strings are truncated,
13236 so we must artificially break them up early. */
13239 output_ascii (FILE *file, const char *p, int n)
13242 int i, count_string;
13243 const char *for_string = "\t.byte \"";
13244 const char *for_decimal = "\t.byte ";
13245 const char *to_close = NULL;
13248 for (i = 0; i < n; i++)
13251 if (c >= ' ' && c < 0177)
13254 fputs (for_string, file);
13257 /* Write two quotes to get one. */
13265 for_decimal = "\"\n\t.byte ";
13269 if (count_string >= 512)
13271 fputs (to_close, file);
13273 for_string = "\t.byte \"";
13274 for_decimal = "\t.byte ";
13282 fputs (for_decimal, file);
13283 fprintf (file, "%d", c);
13285 for_string = "\n\t.byte \"";
13286 for_decimal = ", ";
13292 /* Now close the string if we have written one. Then end the line. */
13294 fputs (to_close, file);
13297 /* Generate a unique section name for FILENAME for a section type
13298 represented by SECTION_DESC. Output goes into BUF.
13300 SECTION_DESC can be any string, as long as it is different for each
13301 possible section type.
13303 We name the section in the same manner as xlc. The name begins with an
13304 underscore followed by the filename (after stripping any leading directory
13305 names) with the last period replaced by the string SECTION_DESC. If
13306 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13310 rs6000_gen_section_name (char **buf, const char *filename,
13311 const char *section_desc)
13313 const char *q, *after_last_slash, *last_period = 0;
13317 after_last_slash = filename;
13318 for (q = filename; *q; q++)
13321 after_last_slash = q + 1;
13322 else if (*q == '.')
13326 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13327 *buf = (char *) xmalloc (len);
13332 for (q = after_last_slash; *q; q++)
13334 if (q == last_period)
13336 strcpy (p, section_desc);
13337 p += strlen (section_desc);
13341 else if (ISALNUM (*q))
13345 if (last_period == 0)
13346 strcpy (p, section_desc);
13351 /* Emit profile function. */
13354 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13356 if (TARGET_PROFILE_KERNEL)
13359 if (DEFAULT_ABI == ABI_AIX)
13361 #ifndef NO_PROFILE_COUNTERS
13362 # define NO_PROFILE_COUNTERS 0
13364 if (NO_PROFILE_COUNTERS)
13365 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13369 const char *label_name;
13372 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13373 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13374 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13376 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13380 else if (DEFAULT_ABI == ABI_DARWIN)
13382 const char *mcount_name = RS6000_MCOUNT;
13383 int caller_addr_regno = LINK_REGISTER_REGNUM;
13385 /* Be conservative and always set this, at least for now. */
13386 current_function_uses_pic_offset_table = 1;
13389 /* For PIC code, set up a stub and collect the caller's address
13390 from r0, which is where the prologue puts it. */
13391 if (MACHOPIC_INDIRECT)
13393 mcount_name = machopic_stub_name (mcount_name);
13394 if (current_function_uses_pic_offset_table)
13395 caller_addr_regno = 0;
13398 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13400 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13404 /* Write function profiler code. */
13407 output_function_profiler (FILE *file, int labelno)
13412 switch (DEFAULT_ABI)
13421 warning ("no profiling of 64-bit code for this ABI");
13424 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13425 fprintf (file, "\tmflr %s\n", reg_names[0]);
13428 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13429 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13430 reg_names[0], save_lr, reg_names[1]);
13431 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13432 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13433 assemble_name (file, buf);
13434 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13436 else if (flag_pic > 1)
13438 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13439 reg_names[0], save_lr, reg_names[1]);
13440 /* Now, we need to get the address of the label. */
13441 fputs ("\tbl 1f\n\t.long ", file);
13442 assemble_name (file, buf);
13443 fputs ("-.\n1:", file);
13444 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13445 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13446 reg_names[0], reg_names[11]);
13447 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13448 reg_names[0], reg_names[0], reg_names[11]);
13452 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13453 assemble_name (file, buf);
13454 fputs ("@ha\n", file);
13455 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13456 reg_names[0], save_lr, reg_names[1]);
13457 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13458 assemble_name (file, buf);
13459 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13462 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13463 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13468 if (!TARGET_PROFILE_KERNEL)
13470 /* Don't do anything, done in output_profile_hook (). */
13477 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13478 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13480 if (current_function_needs_context)
13482 asm_fprintf (file, "\tstd %s,24(%s)\n",
13483 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13484 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13485 asm_fprintf (file, "\tld %s,24(%s)\n",
13486 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13489 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13497 rs6000_use_dfa_pipeline_interface (void)
13502 /* Power4 load update and store update instructions are cracked into a
13503 load or store and an integer insn which are executed in the same cycle.
13504 Branches have their own dispatch slot which does not count against the
13505 GCC issue rate, but it changes the program flow so there are no other
13506 instructions to issue in this cycle. */
13509 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13510 int verbose ATTRIBUTE_UNUSED,
13511 rtx insn, int more)
13513 if (GET_CODE (PATTERN (insn)) == USE
13514 || GET_CODE (PATTERN (insn)) == CLOBBER)
13517 if (rs6000_cpu == PROCESSOR_POWER4)
13519 if (is_microcoded_insn (insn))
13521 else if (is_cracked_insn (insn))
13522 return more > 2 ? more - 2 : 0;
13528 /* Adjust the cost of a scheduling dependency. Return the new cost of
13529 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13532 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13535 if (! recog_memoized (insn))
13538 if (REG_NOTE_KIND (link) != 0)
13541 if (REG_NOTE_KIND (link) == 0)
13543 /* Data dependency; DEP_INSN writes a register that INSN reads
13544 some cycles later. */
13545 switch (get_attr_type (insn))
13548 /* Tell the first scheduling pass about the latency between
13549 a mtctr and bctr (and mtlr and br/blr). The first
13550 scheduling pass will not know about this latency since
13551 the mtctr instruction, which has the latency associated
13552 to it, will be generated by reload. */
13553 return TARGET_POWER ? 5 : 4;
13555 /* Leave some extra cycles between a compare and its
13556 dependent branch, to inhibit expensive mispredicts. */
13557 if ((rs6000_cpu_attr == CPU_PPC603
13558 || rs6000_cpu_attr == CPU_PPC604
13559 || rs6000_cpu_attr == CPU_PPC604E
13560 || rs6000_cpu_attr == CPU_PPC620
13561 || rs6000_cpu_attr == CPU_PPC630
13562 || rs6000_cpu_attr == CPU_PPC750
13563 || rs6000_cpu_attr == CPU_PPC7400
13564 || rs6000_cpu_attr == CPU_PPC7450
13565 || rs6000_cpu_attr == CPU_POWER4)
13566 && recog_memoized (dep_insn)
13567 && (INSN_CODE (dep_insn) >= 0)
13568 && (get_attr_type (dep_insn) == TYPE_CMP
13569 || get_attr_type (dep_insn) == TYPE_COMPARE
13570 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13571 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13572 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13573 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13574 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13575 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13580 /* Fall out to return default cost. */
13586 /* The function returns a true if INSN is microcoded.
13587 Return false ptherwise. */
13590 is_microcoded_insn (rtx insn)
13592 if (!insn || !INSN_P (insn)
13593 || GET_CODE (PATTERN (insn)) == USE
13594 || GET_CODE (PATTERN (insn)) == CLOBBER)
13597 if (rs6000_cpu == PROCESSOR_POWER4)
13599 enum attr_type type = get_attr_type (insn);
13600 if (type == TYPE_LOAD_EXT_U
13601 || type == TYPE_LOAD_EXT_UX
13602 || type == TYPE_LOAD_UX
13603 || type == TYPE_STORE_UX
13604 || type == TYPE_MFCR)
13611 /* The function returns a non-zero value if INSN can be scheduled only
13612 as the first insn in a dispatch group ("dispatch-slot restricted").
13613 In this case, the returned value indicates how many dispatch slots
13614 the insn occupies (at the beginning of the group).
13615 Return 0 otherwise. */
13618 is_dispatch_slot_restricted (rtx insn)
13620 enum attr_type type;
13622 if (rs6000_cpu != PROCESSOR_POWER4)
13626 || insn == NULL_RTX
13627 || GET_CODE (insn) == NOTE
13628 || GET_CODE (PATTERN (insn)) == USE
13629 || GET_CODE (PATTERN (insn)) == CLOBBER)
13632 type = get_attr_type (insn);
13638 case TYPE_DELAYED_CR:
13639 case TYPE_CR_LOGICAL:
13651 /* The function returns true if INSN is cracked into 2 instructions
13652 by the processor (and therefore occupies 2 issue slots). */
13655 is_cracked_insn (rtx insn)
13657 if (!insn || !INSN_P (insn)
13658 || GET_CODE (PATTERN (insn)) == USE
13659 || GET_CODE (PATTERN (insn)) == CLOBBER)
13662 if (rs6000_cpu == PROCESSOR_POWER4)
13664 enum attr_type type = get_attr_type (insn);
13665 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13666 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13667 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13668 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13669 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13670 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13671 || type == TYPE_IDIV || type == TYPE_LDIV
13672 || type == TYPE_INSERT_WORD)
13679 /* The function returns true if INSN can be issued only from
13680 the branch slot. */
13683 is_branch_slot_insn (rtx insn)
13685 if (!insn || !INSN_P (insn)
13686 || GET_CODE (PATTERN (insn)) == USE
13687 || GET_CODE (PATTERN (insn)) == CLOBBER)
13690 if (rs6000_cpu == PROCESSOR_POWER4)
13692 enum attr_type type = get_attr_type (insn);
13693 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
13701 /* A C statement (sans semicolon) to update the integer scheduling
13702 priority INSN_PRIORITY (INSN). Increase the priority to execute the
13703 INSN earlier, reduce the priority to execute INSN later. Do not
13704 define this macro if you do not need to adjust the scheduling
13705 priorities of insns. */
13708 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
13710 /* On machines (like the 750) which have asymmetric integer units,
13711 where one integer unit can do multiply and divides and the other
13712 can't, reduce the priority of multiply/divide so it is scheduled
13713 before other integer operations. */
13716 if (! INSN_P (insn))
13719 if (GET_CODE (PATTERN (insn)) == USE)
13722 switch (rs6000_cpu_attr) {
13724 switch (get_attr_type (insn))
13731 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13732 priority, priority);
13733 if (priority >= 0 && priority < 0x01000000)
13740 if (is_dispatch_slot_restricted (insn)
13741 && reload_completed
13742 && current_sched_info->sched_max_insns_priority
13743 && rs6000_sched_restricted_insns_priority)
13746 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
13747 if (rs6000_sched_restricted_insns_priority == 1)
13748 /* Attach highest priority to insn. This means that in
13749 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
13750 precede 'priority' (critical path) considerations. */
13751 return current_sched_info->sched_max_insns_priority;
13752 else if (rs6000_sched_restricted_insns_priority == 2)
13753 /* Increase priority of insn by a minimal amount. This means that in
13754 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
13755 precede dispatch-slot restriction considerations. */
13756 return (priority + 1);
13762 /* Return how many instructions the machine can issue per cycle. */
13765 rs6000_issue_rate (void)
13767 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13768 if (!reload_completed)
13771 switch (rs6000_cpu_attr) {
13772 case CPU_RIOS1: /* ? */
13774 case CPU_PPC601: /* ? */
13796 /* Return how many instructions to look ahead for better insn
13800 rs6000_use_sched_lookahead (void)
13802 if (rs6000_cpu_attr == CPU_PPC8540)
13807 /* Determine is PAT refers to memory. */
13810 is_mem_ref (rtx pat)
13816 if (GET_CODE (pat) == MEM)
13819 /* Recursively process the pattern. */
13820 fmt = GET_RTX_FORMAT (GET_CODE (pat));
13822 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
13825 ret |= is_mem_ref (XEXP (pat, i));
13826 else if (fmt[i] == 'E')
13827 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
13828 ret |= is_mem_ref (XVECEXP (pat, i, j));
13834 /* Determine if PAT is a PATTERN of a load insn. */
13837 is_load_insn1 (rtx pat)
13839 if (!pat || pat == NULL_RTX)
13842 if (GET_CODE (pat) == SET)
13843 return is_mem_ref (SET_SRC (pat));
13845 if (GET_CODE (pat) == PARALLEL)
13849 for (i = 0; i < XVECLEN (pat, 0); i++)
13850 if (is_load_insn1 (XVECEXP (pat, 0, i)))
13857 /* Determine if INSN loads from memory. */
13860 is_load_insn (rtx insn)
13862 if (!insn || !INSN_P (insn))
13865 if (GET_CODE (insn) == CALL_INSN)
13868 return is_load_insn1 (PATTERN (insn));
13871 /* Determine if PAT is a PATTERN of a store insn. */
13874 is_store_insn1 (rtx pat)
13876 if (!pat || pat == NULL_RTX)
13879 if (GET_CODE (pat) == SET)
13880 return is_mem_ref (SET_DEST (pat));
13882 if (GET_CODE (pat) == PARALLEL)
13886 for (i = 0; i < XVECLEN (pat, 0); i++)
13887 if (is_store_insn1 (XVECEXP (pat, 0, i)))
13894 /* Determine if INSN stores to memory. */
13897 is_store_insn (rtx insn)
13899 if (!insn || !INSN_P (insn))
13902 return is_store_insn1 (PATTERN (insn));
13905 /* Returns whether the dependence between INSN and NEXT is considered
13906 costly by the given target. */
13909 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
13911 /* If the flag is not enbled - no dependence is considered costly;
13912 allow all dependent insns in the same group.
13913 This is the most aggressive option. */
13914 if (rs6000_sched_costly_dep == no_dep_costly)
13917 /* If the flag is set to 1 - a dependence is always considered costly;
13918 do not allow dependent instructions in the same group.
13919 This is the most conservative option. */
13920 if (rs6000_sched_costly_dep == all_deps_costly)
13923 if (rs6000_sched_costly_dep == store_to_load_dep_costly
13924 && is_load_insn (next)
13925 && is_store_insn (insn))
13926 /* Prevent load after store in the same group. */
13929 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
13930 && is_load_insn (next)
13931 && is_store_insn (insn)
13932 && (!link || (int) REG_NOTE_KIND (link) == 0))
13933 /* Prevent load after store in the same group if it is a true dependence. */
13936 /* The flag is set to X; dependences with latency >= X are considered costly,
13937 and will not be scheduled in the same group. */
13938 if (rs6000_sched_costly_dep <= max_dep_latency
13939 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
13945 /* Return the next insn after INSN that is found before TAIL is reached,
13946 skipping any "non-active" insns - insns that will not actually occupy
13947 an issue slot. Return NULL_RTX if such an insn is not found. */
13950 get_next_active_insn (rtx insn, rtx tail)
13954 if (!insn || insn == tail)
13957 next_insn = NEXT_INSN (insn);
13960 && next_insn != tail
13961 && (GET_CODE(next_insn) == NOTE
13962 || GET_CODE (PATTERN (next_insn)) == USE
13963 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
13965 next_insn = NEXT_INSN (next_insn);
13968 if (!next_insn || next_insn == tail)
13974 /* Return whether the presence of INSN causes a dispatch group terminatation
13975 of group WHICH_GROUP.
13977 If WHICH_GROUP == current_group, this function will return true if INSN
13978 causes the termination of the current group (i.e, the dispatch group to
13979 which INSN belongs). This means that INSN will be the last insn in the
13980 group it belongs to.
13982 If WHICH_GROUP == previous_group, this function will return true if INSN
13983 causes the termination of the previous group (i.e, the dispatch group that
13984 precedes the group to which INSN belongs). This means that INSN will be
13985 the first insn in the group it belongs to). */
13988 insn_terminates_group_p (rtx insn, enum group_termination which_group)
13990 enum attr_type type;
13995 type = get_attr_type (insn);
13997 if (is_microcoded_insn (insn))
14000 if (which_group == current_group)
14002 if (is_branch_slot_insn (insn))
14006 else if (which_group == previous_group)
14008 if (is_dispatch_slot_restricted (insn))
14016 /* Return true if it is recommended to keep NEXT_INSN "far" (in a seperate
14017 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14020 is_costly_group (rtx *group_insns, rtx next_insn)
14025 int issue_rate = rs6000_issue_rate ();
14027 for (i = 0; i < issue_rate; i++)
14029 rtx insn = group_insns[i];
14032 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14034 rtx next = XEXP (link, 0);
14035 if (next == next_insn)
14037 cost = insn_cost (insn, link, next_insn);
14038 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14047 /* Utility of the function redefine_groups.
14048 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14049 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14050 to keep it "far" (in a separate group) from GROUP_INSNS, following
14051 one of the following schemes, depending on the value of the flag
14052 -minsert_sched_nops = X:
14053 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14054 in order to force NEXT_INSN into a seperate group.
14055 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14056 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14057 insertion (has a group just ended, how many vacant issue slots remain in the
14058 last group, and how many dispatch groups were encountered so far). */
14061 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
14062 bool *group_end, int can_issue_more, int *group_count)
14066 int issue_rate = rs6000_issue_rate ();
14067 bool end = *group_end;
14070 if (next_insn == NULL_RTX)
14071 return can_issue_more;
14073 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
14074 return can_issue_more;
14076 force = is_costly_group (group_insns, next_insn);
14078 return can_issue_more;
14080 if (sched_verbose > 6)
14081 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
14082 *group_count ,can_issue_more);
14084 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14087 can_issue_more = 0;
14089 /* Since only a branch can be issued in the last issue_slot, it is
14090 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14091 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14092 in this case the last nop will start a new group and the branch will be
14093 forced to the new group. */
14094 if (can_issue_more && !is_branch_slot_insn (next_insn))
14097 while (can_issue_more > 0)
14100 emit_insn_before (nop, next_insn);
14108 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14110 int n_nops = rs6000_sched_insert_nops;
14112 /* Nops can't be issued from the branch slot, so the effective
14113 issue_rate for nops is 'issue_rate - 1'. */
14114 if (can_issue_more == 0)
14115 can_issue_more = issue_rate;
14117 if (can_issue_more == 0)
14119 can_issue_more = issue_rate - 1;
14122 for (i = 0; i < issue_rate; i++)
14124 group_insns[i] = 0;
14131 emit_insn_before (nop, next_insn);
14132 if (can_issue_more == issue_rate - 1) /* new group begins */
14135 if (can_issue_more == 0)
14137 can_issue_more = issue_rate - 1;
14140 for (i = 0; i < issue_rate; i++)
14142 group_insns[i] = 0;
14148 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14151 *group_end = /* Is next_insn going to start a new group? */
14153 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14154 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14155 || (can_issue_more < issue_rate &&
14156 insn_terminates_group_p (next_insn, previous_group)));
14157 if (*group_end && end)
14160 if (sched_verbose > 6)
14161 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14162 *group_count, can_issue_more);
14163 return can_issue_more;
14166 return can_issue_more;
14169 /* This function tries to synch the dispatch groups that the compiler "sees"
14170 with the dispatch groups that the processor dispatcher is expected to
14171 form in practice. It tries to achieve this synchronization by forcing the
14172 estimated processor grouping on the compiler (as opposed to the function
14173 'pad_goups' which tries to force the scheduler's grouping on the processor).
14175 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14176 examines the (estimated) dispatch groups that will be formed by the processor
14177 dispatcher. It marks these group boundaries to reflect the estimated
14178 processor grouping, overriding the grouping that the scheduler had marked.
14179 Depending on the value of the flag '-minsert-sched-nops' this function can
14180 force certain insns into separate groups or force a certain distance between
14181 them by inserting nops, for example, if there exists a "costly dependence"
14184 The function estimates the group boundaries that the processor will form as
14185 folllows: It keeps track of how many vacant issue slots are available after
14186 each insn. A subsequent insn will start a new group if one of the following
14188 - no more vacant issue slots remain in the current dispatch group.
14189 - only the last issue slot, which is the branch slot, is vacant, but the next
14190 insn is not a branch.
14191 - only the last 2 or less issue slots, including the branch slot, are vacant,
14192 which means that a cracked insn (which occupies two issue slots) can't be
14193 issued in this group.
14194 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14195 start a new group. */
14198 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14200 rtx insn, next_insn;
14202 int can_issue_more;
14205 int group_count = 0;
14209 issue_rate = rs6000_issue_rate ();
14210 group_insns = alloca (issue_rate * sizeof (rtx));
14211 for (i = 0; i < issue_rate; i++)
14213 group_insns[i] = 0;
14215 can_issue_more = issue_rate;
14217 insn = get_next_active_insn (prev_head_insn, tail);
14220 while (insn != NULL_RTX)
14222 slot = (issue_rate - can_issue_more);
14223 group_insns[slot] = insn;
14225 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14226 if (insn_terminates_group_p (insn, current_group))
14227 can_issue_more = 0;
14229 next_insn = get_next_active_insn (insn, tail);
14230 if (next_insn == NULL_RTX)
14231 return group_count + 1;
14233 group_end = /* Is next_insn going to start a new group? */
14234 (can_issue_more == 0
14235 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14236 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14237 || (can_issue_more < issue_rate &&
14238 insn_terminates_group_p (next_insn, previous_group)));
14240 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14241 next_insn, &group_end, can_issue_more, &group_count);
14246 can_issue_more = 0;
14247 for (i = 0; i < issue_rate; i++)
14249 group_insns[i] = 0;
14253 if (GET_MODE (next_insn) == TImode && can_issue_more)
14254 PUT_MODE(next_insn, VOIDmode);
14255 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14256 PUT_MODE (next_insn, TImode);
14259 if (can_issue_more == 0)
14260 can_issue_more = issue_rate;
14263 return group_count;
14266 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14267 dispatch group boundaries that the scheduler had marked. Pad with nops
14268 any dispatch groups which have vacant issue slots, in order to force the
14269 scheduler's grouping on the processor dispatcher. The function
14270 returns the number of dispatch groups found. */
14273 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14275 rtx insn, next_insn;
14278 int can_issue_more;
14280 int group_count = 0;
14282 /* Initialize issue_rate. */
14283 issue_rate = rs6000_issue_rate ();
14284 can_issue_more = issue_rate;
14286 insn = get_next_active_insn (prev_head_insn, tail);
14287 next_insn = get_next_active_insn (insn, tail);
14289 while (insn != NULL_RTX)
14292 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14294 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14296 if (next_insn == NULL_RTX)
14301 /* If the scheduler had marked group termination at this location
14302 (between insn and next_indn), and neither insn nor next_insn will
14303 force group termination, pad the group with nops to force group
14306 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14307 && !insn_terminates_group_p (insn, current_group)
14308 && !insn_terminates_group_p (next_insn, previous_group))
14310 if (!is_branch_slot_insn(next_insn))
14313 while (can_issue_more)
14316 emit_insn_before (nop, next_insn);
14321 can_issue_more = issue_rate;
14326 next_insn = get_next_active_insn (insn, tail);
14329 return group_count;
14332 /* The following function is called at the end of scheduling BB.
14333 After reload, it inserts nops at insn group bundling. */
14336 rs6000_sched_finish (FILE *dump, int sched_verbose)
14341 fprintf (dump, "=== Finishing schedule.\n");
14343 if (reload_completed && rs6000_cpu == PROCESSOR_POWER4)
14345 if (rs6000_sched_insert_nops == sched_finish_none)
14348 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14349 n_groups = pad_groups (dump, sched_verbose,
14350 current_sched_info->prev_head,
14351 current_sched_info->next_tail);
14353 n_groups = redefine_groups (dump, sched_verbose,
14354 current_sched_info->prev_head,
14355 current_sched_info->next_tail);
14357 if (sched_verbose >= 6)
14359 fprintf (dump, "ngroups = %d\n", n_groups);
14360 print_rtl (dump, current_sched_info->prev_head);
14361 fprintf (dump, "Done finish_sched\n");
14366 /* Length in units of the trampoline for entering a nested function. */
14369 rs6000_trampoline_size (void)
14373 switch (DEFAULT_ABI)
14379 ret = (TARGET_32BIT) ? 12 : 24;
14384 ret = (TARGET_32BIT) ? 40 : 48;
14391 /* Emit RTL insns to initialize the variable parts of a trampoline.
14392 FNADDR is an RTX for the address of the function's pure code.
14393 CXT is an RTX for the static chain value for the function. */
14396 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
14398 enum machine_mode pmode = Pmode;
14399 int regsize = (TARGET_32BIT) ? 4 : 8;
14400 rtx ctx_reg = force_reg (pmode, cxt);
14402 switch (DEFAULT_ABI)
14407 /* Macros to shorten the code expansions below. */
14408 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
14409 #define MEM_PLUS(addr,offset) \
14410 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
14412 /* Under AIX, just build the 3 word function descriptor */
14415 rtx fn_reg = gen_reg_rtx (pmode);
14416 rtx toc_reg = gen_reg_rtx (pmode);
14417 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
14418 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
14419 emit_move_insn (MEM_DEREF (addr), fn_reg);
14420 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
14421 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
14425 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
14428 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
14429 FALSE, VOIDmode, 4,
14431 GEN_INT (rs6000_trampoline_size ()), SImode,
14441 /* Table of valid machine attributes. */
14443 const struct attribute_spec rs6000_attribute_table[] =
14445 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
14446 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14447 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14448 { NULL, 0, 0, false, false, false, NULL }
14451 /* Handle a "longcall" or "shortcall" attribute; arguments as in
14452 struct attribute_spec.handler. */
14455 rs6000_handle_longcall_attribute (tree *node, tree name,
14456 tree args ATTRIBUTE_UNUSED,
14457 int flags ATTRIBUTE_UNUSED,
14458 bool *no_add_attrs)
14460 if (TREE_CODE (*node) != FUNCTION_TYPE
14461 && TREE_CODE (*node) != FIELD_DECL
14462 && TREE_CODE (*node) != TYPE_DECL)
14464 warning ("`%s' attribute only applies to functions",
14465 IDENTIFIER_POINTER (name));
14466 *no_add_attrs = true;
14472 /* Set longcall attributes on all functions declared when
14473 rs6000_default_long_calls is true. */
14475 rs6000_set_default_type_attributes (tree type)
14477 if (rs6000_default_long_calls
14478 && (TREE_CODE (type) == FUNCTION_TYPE
14479 || TREE_CODE (type) == METHOD_TYPE))
14480 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
14482 TYPE_ATTRIBUTES (type));
14485 /* Return a reference suitable for calling a function with the
14486 longcall attribute. */
14489 rs6000_longcall_ref (rtx call_ref)
14491 const char *call_name;
14494 if (GET_CODE (call_ref) != SYMBOL_REF)
14497 /* System V adds '.' to the internal name, so skip them. */
14498 call_name = XSTR (call_ref, 0);
14499 if (*call_name == '.')
14501 while (*call_name == '.')
14504 node = get_identifier (call_name);
14505 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
14508 return force_reg (Pmode, call_ref);
14511 #ifdef USING_ELFOS_H
14513 /* A C statement or statements to switch to the appropriate section
14514 for output of RTX in mode MODE. You can assume that RTX is some
14515 kind of constant in RTL. The argument MODE is redundant except in
14516 the case of a `const_int' rtx. Select the section by calling
14517 `text_section' or one of the alternatives for other sections.
14519 Do not define this macro if you put all constants in the read-only
14523 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
14524 unsigned HOST_WIDE_INT align)
14526 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14529 default_elf_select_rtx_section (mode, x, align);
14532 /* A C statement or statements to switch to the appropriate
14533 section for output of DECL. DECL is either a `VAR_DECL' node
14534 or a constant of some sort. RELOC indicates whether forming
14535 the initial value of DECL requires link-time relocations. */
14538 rs6000_elf_select_section (tree decl, int reloc,
14539 unsigned HOST_WIDE_INT align)
14541 /* Pretend that we're always building for a shared library when
14542 ABI_AIX, because otherwise we end up with dynamic relocations
14543 in read-only sections. This happens for function pointers,
14544 references to vtables in typeinfo, and probably other cases. */
14545 default_elf_select_section_1 (decl, reloc, align,
14546 flag_pic || DEFAULT_ABI == ABI_AIX);
14549 /* A C statement to build up a unique section name, expressed as a
14550 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
14551 RELOC indicates whether the initial value of EXP requires
14552 link-time relocations. If you do not define this macro, GCC will use
14553 the symbol name prefixed by `.' as the section name. Note - this
14554 macro can now be called for uninitialized data items as well as
14555 initialized data and functions. */
14558 rs6000_elf_unique_section (tree decl, int reloc)
14560 /* As above, pretend that we're always building for a shared library
14561 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
14562 default_unique_section_1 (decl, reloc,
14563 flag_pic || DEFAULT_ABI == ABI_AIX);
14566 /* For a SYMBOL_REF, set generic flags and then perform some
14567 target-specific processing.
14569 When the AIX ABI is requested on a non-AIX system, replace the
14570 function name with the real name (with a leading .) rather than the
14571 function descriptor name. This saves a lot of overriding code to
14572 read the prefixes. */
14575 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
14577 default_encode_section_info (decl, rtl, first);
14580 && TREE_CODE (decl) == FUNCTION_DECL
14582 && DEFAULT_ABI == ABI_AIX)
14584 rtx sym_ref = XEXP (rtl, 0);
14585 size_t len = strlen (XSTR (sym_ref, 0));
14586 char *str = alloca (len + 2);
14588 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
14589 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
14594 rs6000_elf_in_small_data_p (tree decl)
14596 if (rs6000_sdata == SDATA_NONE)
14599 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
14601 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
14602 if (strcmp (section, ".sdata") == 0
14603 || strcmp (section, ".sdata2") == 0
14604 || strcmp (section, ".sbss") == 0
14605 || strcmp (section, ".sbss2") == 0
14606 || strcmp (section, ".PPC.EMB.sdata0") == 0
14607 || strcmp (section, ".PPC.EMB.sbss0") == 0)
14612 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
14615 && (unsigned HOST_WIDE_INT) size <= g_switch_value
14616 /* If it's not public, and we're not going to reference it there,
14617 there's no need to put it in the small data section. */
14618 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
14625 #endif /* USING_ELFOS_H */
14628 /* Return a REG that occurs in ADDR with coefficient 1.
14629 ADDR can be effectively incremented by incrementing REG.
14631 r0 is special and we must not select it as an address
14632 register by this routine since our caller will try to
14633 increment the returned register via an "la" instruction. */
14636 find_addr_reg (rtx addr)
14638 while (GET_CODE (addr) == PLUS)
14640 if (GET_CODE (XEXP (addr, 0)) == REG
14641 && REGNO (XEXP (addr, 0)) != 0)
14642 addr = XEXP (addr, 0);
14643 else if (GET_CODE (XEXP (addr, 1)) == REG
14644 && REGNO (XEXP (addr, 1)) != 0)
14645 addr = XEXP (addr, 1);
14646 else if (CONSTANT_P (XEXP (addr, 0)))
14647 addr = XEXP (addr, 1);
14648 else if (CONSTANT_P (XEXP (addr, 1)))
14649 addr = XEXP (addr, 0);
14653 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
14659 rs6000_fatal_bad_address (rtx op)
14661 fatal_insn ("bad address", op);
14667 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
14668 reference and a constant. */
14671 symbolic_operand (rtx op)
14673 switch (GET_CODE (op))
14680 return (GET_CODE (op) == SYMBOL_REF ||
14681 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
14682 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
14683 && GET_CODE (XEXP (op, 1)) == CONST_INT);
14690 #ifdef RS6000_LONG_BRANCH
14692 static tree stub_list = 0;
14694 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
14695 procedure calls to the linked list. */
14698 add_compiler_stub (tree label_name, tree function_name, int line_number)
14700 tree stub = build_tree_list (function_name, label_name);
14701 TREE_TYPE (stub) = build_int_2 (line_number, 0);
14702 TREE_CHAIN (stub) = stub_list;
14706 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
14707 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
14708 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
14710 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
14711 handling procedure calls from the linked list and initializes the
14715 output_compiler_stub (void)
14718 char label_buf[256];
14722 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
14724 fprintf (asm_out_file,
14725 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
14727 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14728 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14729 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
14730 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14732 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
14734 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
14737 label_buf[0] = '_';
14738 strcpy (label_buf+1,
14739 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
14742 strcpy (tmp_buf, "lis r12,hi16(");
14743 strcat (tmp_buf, label_buf);
14744 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
14745 strcat (tmp_buf, label_buf);
14746 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
14747 output_asm_insn (tmp_buf, 0);
14749 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
14750 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
14751 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
14752 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
14758 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
14759 already there or not. */
14762 no_previous_def (tree function_name)
14765 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
14766 if (function_name == STUB_FUNCTION_NAME (stub))
14771 /* GET_PREV_LABEL gets the label name from the previous definition of
14775 get_prev_label (tree function_name)
14778 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
14779 if (function_name == STUB_FUNCTION_NAME (stub))
14780 return STUB_LABEL_NAME (stub);
14784 /* INSN is either a function call or a millicode call. It may have an
14785 unconditional jump in its delay slot.
14787 CALL_DEST is the routine we are calling. */
14790 output_call (rtx insn, rtx call_dest, int operand_number)
14792 static char buf[256];
14793 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
14796 tree funname = get_identifier (XSTR (call_dest, 0));
14798 if (no_previous_def (funname))
14800 int line_number = 0;
14801 rtx label_rtx = gen_label_rtx ();
14802 char *label_buf, temp_buf[256];
14803 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
14804 CODE_LABEL_NUMBER (label_rtx));
14805 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
14806 labelname = get_identifier (label_buf);
14807 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
14809 line_number = NOTE_LINE_NUMBER (insn);
14810 add_compiler_stub (labelname, funname, line_number);
14813 labelname = get_prev_label (funname);
14815 sprintf (buf, "jbsr %%z%d,%.246s",
14816 operand_number, IDENTIFIER_POINTER (labelname));
14821 sprintf (buf, "bl %%z%d", operand_number);
14826 #endif /* RS6000_LONG_BRANCH */
14828 /* Generate PIC and indirect symbol stubs. */
14831 machopic_output_stub (FILE *file, const char *symb, const char *stub)
14833 unsigned int length;
14834 char *symbol_name, *lazy_ptr_name;
14835 char *local_label_0;
14836 static int label = 0;
14838 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
14839 symb = (*targetm.strip_name_encoding) (symb);
14842 length = strlen (symb);
14843 symbol_name = alloca (length + 32);
14844 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
14846 lazy_ptr_name = alloca (length + 32);
14847 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
14850 machopic_picsymbol_stub1_section ();
14852 machopic_symbol_stub1_section ();
14853 fprintf (file, "\t.align 2\n");
14855 fprintf (file, "%s:\n", stub);
14856 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
14861 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
14862 sprintf (local_label_0, "\"L%011d$spb\"", label);
14864 fprintf (file, "\tmflr r0\n");
14865 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
14866 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
14867 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
14868 lazy_ptr_name, local_label_0);
14869 fprintf (file, "\tmtlr r0\n");
14870 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
14871 lazy_ptr_name, local_label_0);
14872 fprintf (file, "\tmtctr r12\n");
14873 fprintf (file, "\tbctr\n");
14877 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
14878 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
14879 fprintf (file, "\tmtctr r12\n");
14880 fprintf (file, "\tbctr\n");
14883 machopic_lazy_symbol_ptr_section ();
14884 fprintf (file, "%s:\n", lazy_ptr_name);
14885 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
14886 fprintf (file, "\t.long dyld_stub_binding_helper\n");
14889 /* Legitimize PIC addresses. If the address is already
14890 position-independent, we return ORIG. Newly generated
14891 position-independent addresses go into a reg. This is REG if non
14892 zero, otherwise we allocate register(s) as necessary. */
14894 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
14897 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
14902 if (reg == NULL && ! reload_in_progress && ! reload_completed)
14903 reg = gen_reg_rtx (Pmode);
14905 if (GET_CODE (orig) == CONST)
14907 if (GET_CODE (XEXP (orig, 0)) == PLUS
14908 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
14911 if (GET_CODE (XEXP (orig, 0)) == PLUS)
14913 /* Use a different reg for the intermediate value, as
14914 it will be marked UNCHANGING. */
14915 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
14918 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
14921 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
14927 if (GET_CODE (offset) == CONST_INT)
14929 if (SMALL_INT (offset))
14930 return plus_constant (base, INTVAL (offset));
14931 else if (! reload_in_progress && ! reload_completed)
14932 offset = force_reg (Pmode, offset);
14935 rtx mem = force_const_mem (Pmode, orig);
14936 return machopic_legitimize_pic_address (mem, Pmode, reg);
14939 return gen_rtx (PLUS, Pmode, base, offset);
14942 /* Fall back on generic machopic code. */
14943 return machopic_legitimize_pic_address (orig, mode, reg);
14946 /* This is just a placeholder to make linking work without having to
14947 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
14948 ever needed for Darwin (not too likely!) this would have to get a
14949 real definition. */
14956 #endif /* TARGET_MACHO */
14959 static unsigned int
14960 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
14963 = default_section_type_flags_1 (decl, name, reloc,
14964 flag_pic || DEFAULT_ABI == ABI_AIX);
14966 if (TARGET_RELOCATABLE)
14967 flags |= SECTION_WRITE;
14972 /* Record an element in the table of global constructors. SYMBOL is
14973 a SYMBOL_REF of the function to be called; PRIORITY is a number
14974 between 0 and MAX_INIT_PRIORITY.
14976 This differs from default_named_section_asm_out_constructor in
14977 that we have special handling for -mrelocatable. */
14980 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
14982 const char *section = ".ctors";
14985 if (priority != DEFAULT_INIT_PRIORITY)
14987 sprintf (buf, ".ctors.%.5u",
14988 /* Invert the numbering so the linker puts us in the proper
14989 order; constructors are run from right to left, and the
14990 linker sorts in increasing order. */
14991 MAX_INIT_PRIORITY - priority);
14995 named_section_flags (section, SECTION_WRITE);
14996 assemble_align (POINTER_SIZE);
14998 if (TARGET_RELOCATABLE)
15000 fputs ("\t.long (", asm_out_file);
15001 output_addr_const (asm_out_file, symbol);
15002 fputs (")@fixup\n", asm_out_file);
15005 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15009 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15011 const char *section = ".dtors";
15014 if (priority != DEFAULT_INIT_PRIORITY)
15016 sprintf (buf, ".dtors.%.5u",
15017 /* Invert the numbering so the linker puts us in the proper
15018 order; constructors are run from right to left, and the
15019 linker sorts in increasing order. */
15020 MAX_INIT_PRIORITY - priority);
15024 named_section_flags (section, SECTION_WRITE);
15025 assemble_align (POINTER_SIZE);
15027 if (TARGET_RELOCATABLE)
15029 fputs ("\t.long (", asm_out_file);
15030 output_addr_const (asm_out_file, symbol);
15031 fputs (")@fixup\n", asm_out_file);
15034 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15038 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15042 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15043 ASM_OUTPUT_LABEL (file, name);
15044 fputs (DOUBLE_INT_ASM_OP, file);
15046 assemble_name (file, name);
15047 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15048 assemble_name (file, name);
15049 fputs (",24\n\t.type\t.", file);
15050 assemble_name (file, name);
15051 fputs (",@function\n", file);
15052 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15054 fputs ("\t.globl\t.", file);
15055 assemble_name (file, name);
15058 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15060 ASM_OUTPUT_LABEL (file, name);
15064 if (TARGET_RELOCATABLE
15065 && (get_pool_size () != 0 || current_function_profile)
15070 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15072 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15073 fprintf (file, "\t.long ");
15074 assemble_name (file, buf);
15076 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15077 assemble_name (file, buf);
15081 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15082 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15084 if (DEFAULT_ABI == ABI_AIX)
15086 const char *desc_name, *orig_name;
15088 orig_name = (*targetm.strip_name_encoding) (name);
15089 desc_name = orig_name;
15090 while (*desc_name == '.')
15093 if (TREE_PUBLIC (decl))
15094 fprintf (file, "\t.globl %s\n", desc_name);
15096 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15097 fprintf (file, "%s:\n", desc_name);
15098 fprintf (file, "\t.long %s\n", orig_name);
15099 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15100 if (DEFAULT_ABI == ABI_AIX)
15101 fputs ("\t.long 0\n", file);
15102 fprintf (file, "\t.previous\n");
15104 ASM_OUTPUT_LABEL (file, name);
15110 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15112 fputs (GLOBAL_ASM_OP, stream);
15113 RS6000_OUTPUT_BASENAME (stream, name);
15114 putc ('\n', stream);
15118 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15121 static const char * const suffix[3] = { "PR", "RO", "RW" };
15123 if (flags & SECTION_CODE)
15125 else if (flags & SECTION_WRITE)
15130 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15131 (flags & SECTION_CODE) ? "." : "",
15132 name, suffix[smclass], flags & SECTION_ENTSIZE);
15136 rs6000_xcoff_select_section (tree decl, int reloc,
15137 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15139 if (decl_readonly_section_1 (decl, reloc, 1))
15141 if (TREE_PUBLIC (decl))
15142 read_only_data_section ();
15144 read_only_private_data_section ();
15148 if (TREE_PUBLIC (decl))
15151 private_data_section ();
15156 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15160 /* Use select_section for private and uninitialized data. */
15161 if (!TREE_PUBLIC (decl)
15162 || DECL_COMMON (decl)
15163 || DECL_INITIAL (decl) == NULL_TREE
15164 || DECL_INITIAL (decl) == error_mark_node
15165 || (flag_zero_initialized_in_bss
15166 && initializer_zerop (DECL_INITIAL (decl))))
15169 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15170 name = (*targetm.strip_name_encoding) (name);
15171 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15174 /* Select section for constant in constant pool.
15176 On RS/6000, all constants are in the private read-only data area.
15177 However, if this is being placed in the TOC it must be output as a
15181 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15182 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15184 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15187 read_only_private_data_section ();
15190 /* Remove any trailing [DS] or the like from the symbol name. */
15192 static const char *
15193 rs6000_xcoff_strip_name_encoding (const char *name)
15198 len = strlen (name);
15199 if (name[len - 1] == ']')
15200 return ggc_alloc_string (name, len - 4);
15205 /* Section attributes. AIX is always PIC. */
15207 static unsigned int
15208 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15210 unsigned int align;
15211 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15213 /* Align to at least UNIT size. */
15214 if (flags & SECTION_CODE)
15215 align = MIN_UNITS_PER_WORD;
15217 /* Increase alignment of large objects if not already stricter. */
15218 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15219 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15220 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15222 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15225 /* Output at beginning of assembler file.
15227 Initialize the section names for the RS/6000 at this point.
15229 Specify filename, including full path, to assembler.
15231 We want to go into the TOC section so at least one .toc will be emitted.
15232 Also, in order to output proper .bs/.es pairs, we need at least one static
15233 [RW] section emitted.
15235 Finally, declare mcount when profiling to make the assembler happy. */
15238 rs6000_xcoff_file_start (void)
15240 rs6000_gen_section_name (&xcoff_bss_section_name,
15241 main_input_filename, ".bss_");
15242 rs6000_gen_section_name (&xcoff_private_data_section_name,
15243 main_input_filename, ".rw_");
15244 rs6000_gen_section_name (&xcoff_read_only_section_name,
15245 main_input_filename, ".ro_");
15247 fputs ("\t.file\t", asm_out_file);
15248 output_quoted_string (asm_out_file, main_input_filename);
15249 fputc ('\n', asm_out_file);
15251 if (write_symbols != NO_DEBUG)
15252 private_data_section ();
15255 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15256 rs6000_file_start ();
15259 /* Output at end of assembler file.
15260 On the RS/6000, referencing data should automatically pull in text. */
15263 rs6000_xcoff_file_end (void)
15266 fputs ("_section_.text:\n", asm_out_file);
15268 fputs (TARGET_32BIT
15269 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15272 #endif /* TARGET_XCOFF */
15275 /* Cross-module name binding. Darwin does not support overriding
15276 functions at dynamic-link time. */
15279 rs6000_binds_local_p (tree decl)
15281 return default_binds_local_p_1 (decl, 0);
15285 /* Compute a (partial) cost for rtx X. Return true if the complete
15286 cost has been computed, and false if subexpressions should be
15287 scanned. In either case, *TOTAL contains the cost result. */
15290 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15295 /* On the RS/6000, if it is valid in the insn, it is free.
15296 So this always returns 0. */
15307 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15308 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15309 + 0x8000) >= 0x10000)
15310 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15311 ? COSTS_N_INSNS (2)
15312 : COSTS_N_INSNS (1));
15318 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15319 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15320 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15321 ? COSTS_N_INSNS (2)
15322 : COSTS_N_INSNS (1));
15328 *total = COSTS_N_INSNS (2);
15331 switch (rs6000_cpu)
15333 case PROCESSOR_RIOS1:
15334 case PROCESSOR_PPC405:
15335 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15336 ? COSTS_N_INSNS (5)
15337 : (INTVAL (XEXP (x, 1)) >= -256
15338 && INTVAL (XEXP (x, 1)) <= 255)
15339 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15342 case PROCESSOR_PPC440:
15343 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15344 ? COSTS_N_INSNS (3)
15345 : COSTS_N_INSNS (2));
15348 case PROCESSOR_RS64A:
15349 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15350 ? GET_MODE (XEXP (x, 1)) != DImode
15351 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15352 : (INTVAL (XEXP (x, 1)) >= -256
15353 && INTVAL (XEXP (x, 1)) <= 255)
15354 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15357 case PROCESSOR_RIOS2:
15358 case PROCESSOR_MPCCORE:
15359 case PROCESSOR_PPC604e:
15360 *total = COSTS_N_INSNS (2);
15363 case PROCESSOR_PPC601:
15364 *total = COSTS_N_INSNS (5);
15367 case PROCESSOR_PPC603:
15368 case PROCESSOR_PPC7400:
15369 case PROCESSOR_PPC750:
15370 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15371 ? COSTS_N_INSNS (5)
15372 : (INTVAL (XEXP (x, 1)) >= -256
15373 && INTVAL (XEXP (x, 1)) <= 255)
15374 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
15377 case PROCESSOR_PPC7450:
15378 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15379 ? COSTS_N_INSNS (4)
15380 : COSTS_N_INSNS (3));
15383 case PROCESSOR_PPC403:
15384 case PROCESSOR_PPC604:
15385 case PROCESSOR_PPC8540:
15386 *total = COSTS_N_INSNS (4);
15389 case PROCESSOR_PPC620:
15390 case PROCESSOR_PPC630:
15391 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15392 ? GET_MODE (XEXP (x, 1)) != DImode
15393 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
15394 : (INTVAL (XEXP (x, 1)) >= -256
15395 && INTVAL (XEXP (x, 1)) <= 255)
15396 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15399 case PROCESSOR_POWER4:
15400 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15401 ? GET_MODE (XEXP (x, 1)) != DImode
15402 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
15403 : COSTS_N_INSNS (2));
15412 if (GET_CODE (XEXP (x, 1)) == CONST_INT
15413 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
15415 *total = COSTS_N_INSNS (2);
15422 switch (rs6000_cpu)
15424 case PROCESSOR_RIOS1:
15425 *total = COSTS_N_INSNS (19);
15428 case PROCESSOR_RIOS2:
15429 *total = COSTS_N_INSNS (13);
15432 case PROCESSOR_RS64A:
15433 *total = (GET_MODE (XEXP (x, 1)) != DImode
15434 ? COSTS_N_INSNS (65)
15435 : COSTS_N_INSNS (67));
15438 case PROCESSOR_MPCCORE:
15439 *total = COSTS_N_INSNS (6);
15442 case PROCESSOR_PPC403:
15443 *total = COSTS_N_INSNS (33);
15446 case PROCESSOR_PPC405:
15447 *total = COSTS_N_INSNS (35);
15450 case PROCESSOR_PPC440:
15451 *total = COSTS_N_INSNS (34);
15454 case PROCESSOR_PPC601:
15455 *total = COSTS_N_INSNS (36);
15458 case PROCESSOR_PPC603:
15459 *total = COSTS_N_INSNS (37);
15462 case PROCESSOR_PPC604:
15463 case PROCESSOR_PPC604e:
15464 *total = COSTS_N_INSNS (20);
15467 case PROCESSOR_PPC620:
15468 case PROCESSOR_PPC630:
15469 *total = (GET_MODE (XEXP (x, 1)) != DImode
15470 ? COSTS_N_INSNS (21)
15471 : COSTS_N_INSNS (37));
15474 case PROCESSOR_PPC750:
15475 case PROCESSOR_PPC8540:
15476 case PROCESSOR_PPC7400:
15477 *total = COSTS_N_INSNS (19);
15480 case PROCESSOR_PPC7450:
15481 *total = COSTS_N_INSNS (23);
15484 case PROCESSOR_POWER4:
15485 *total = (GET_MODE (XEXP (x, 1)) != DImode
15486 ? COSTS_N_INSNS (18)
15487 : COSTS_N_INSNS (34));
15495 *total = COSTS_N_INSNS (4);
15499 /* MEM should be slightly more expensive than (plus (reg) (const)) */
15508 /* A C expression returning the cost of moving data from a register of class
15509 CLASS1 to one of CLASS2. */
15512 rs6000_register_move_cost (enum machine_mode mode,
15513 enum reg_class from, enum reg_class to)
15515 /* Moves from/to GENERAL_REGS. */
15516 if (reg_classes_intersect_p (to, GENERAL_REGS)
15517 || reg_classes_intersect_p (from, GENERAL_REGS))
15519 if (! reg_classes_intersect_p (to, GENERAL_REGS))
15522 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
15523 return (rs6000_memory_move_cost (mode, from, 0)
15524 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
15526 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
15527 else if (from == CR_REGS)
15531 /* A move will cost one instruction per GPR moved. */
15532 return 2 * HARD_REGNO_NREGS (0, mode);
15535 /* Moving between two similar registers is just one instruction. */
15536 else if (reg_classes_intersect_p (to, from))
15537 return mode == TFmode ? 4 : 2;
15539 /* Everything else has to go through GENERAL_REGS. */
15541 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
15542 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
15545 /* A C expressions returning the cost of moving data of MODE from a register to
15549 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
15550 int in ATTRIBUTE_UNUSED)
15552 if (reg_classes_intersect_p (class, GENERAL_REGS))
15553 return 4 * HARD_REGNO_NREGS (0, mode);
15554 else if (reg_classes_intersect_p (class, FLOAT_REGS))
15555 return 4 * HARD_REGNO_NREGS (32, mode);
15556 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
15557 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
15559 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
15562 /* Return an RTX representing where to find the function value of a
15563 function returning MODE. */
15565 rs6000_complex_function_value (enum machine_mode mode)
15567 unsigned int regno;
15569 enum machine_mode inner = GET_MODE_INNER (mode);
15571 if (FLOAT_MODE_P (mode))
15572 regno = FP_ARG_RETURN;
15575 regno = GP_ARG_RETURN;
15577 /* 32-bit is OK since it'll go in r3/r4. */
15579 && GET_MODE_BITSIZE (inner) >= 32)
15580 return gen_rtx_REG (mode, regno);
15583 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
15585 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
15586 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
15587 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
15590 /* Define how to find the value returned by a function.
15591 VALTYPE is the data type of the value (as a tree).
15592 If the precise function being called is known, FUNC is its FUNCTION_DECL;
15593 otherwise, FUNC is 0.
15595 On the SPE, both FPs and vectors are returned in r3.
15597 On RS/6000 an integer value is in r3 and a floating-point value is in
15598 fp1, unless -msoft-float. */
15601 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
15603 enum machine_mode mode;
15604 unsigned int regno;
15606 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
15608 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
15609 return gen_rtx_PARALLEL (DImode,
15611 gen_rtx_EXPR_LIST (VOIDmode,
15612 gen_rtx_REG (SImode, GP_ARG_RETURN),
15614 gen_rtx_EXPR_LIST (VOIDmode,
15615 gen_rtx_REG (SImode,
15616 GP_ARG_RETURN + 1),
15620 if ((INTEGRAL_TYPE_P (valtype)
15621 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
15622 || POINTER_TYPE_P (valtype))
15623 mode = TARGET_32BIT ? SImode : DImode;
15625 mode = TYPE_MODE (valtype);
15627 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
15628 regno = FP_ARG_RETURN;
15629 else if (TREE_CODE (valtype) == COMPLEX_TYPE
15630 && TARGET_HARD_FLOAT
15631 && SPLIT_COMPLEX_ARGS)
15632 return rs6000_complex_function_value (mode);
15633 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
15634 regno = ALTIVEC_ARG_RETURN;
15636 regno = GP_ARG_RETURN;
15638 return gen_rtx_REG (mode, regno);
15641 /* Define how to find the value returned by a library function
15642 assuming the value has mode MODE. */
15644 rs6000_libcall_value (enum machine_mode mode)
15646 unsigned int regno;
15648 if (GET_MODE_CLASS (mode) == MODE_FLOAT
15649 && TARGET_HARD_FLOAT && TARGET_FPRS)
15650 regno = FP_ARG_RETURN;
15651 else if (ALTIVEC_VECTOR_MODE (mode))
15652 regno = ALTIVEC_ARG_RETURN;
15653 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
15654 return rs6000_complex_function_value (mode);
15656 regno = GP_ARG_RETURN;
15658 return gen_rtx_REG (mode, regno);
15661 /* Define the offset between two registers, FROM to be eliminated and its
15662 replacement TO, at the start of a routine. */
15664 rs6000_initial_elimination_offset (int from, int to)
15666 rs6000_stack_t *info = rs6000_stack_info ();
15667 HOST_WIDE_INT offset;
15669 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
15670 offset = info->push_p ? 0 : -info->total_size;
15671 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
15672 offset = info->total_size;
15673 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
15674 offset = info->push_p ? info->total_size : 0;
15675 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
15683 /* Return true if TYPE is of type __ev64_opaque__. */
15686 is_ev64_opaque_type (tree type)
15689 && (type == opaque_V2SI_type_node
15690 || type == opaque_V2SF_type_node
15691 || type == opaque_p_V2SI_type_node));
15695 rs6000_dwarf_register_span (rtx reg)
15699 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
15702 regno = REGNO (reg);
15704 /* The duality of the SPE register size wreaks all kinds of havoc.
15705 This is a way of distinguishing r0 in 32-bits from r0 in
15708 gen_rtx_PARALLEL (VOIDmode,
15711 gen_rtx_REG (SImode, regno + 1200),
15712 gen_rtx_REG (SImode, regno))
15714 gen_rtx_REG (SImode, regno),
15715 gen_rtx_REG (SImode, regno + 1200)));
15718 #include "gt-rs6000.h"