1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
64 && easy_vector_same (x, y))
66 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
68 && easy_vector_same (x, y))
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
73 /* Structure used to define the rs6000 stack */
74 typedef struct rs6000_stack {
75 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */
80 unsigned int vrsave_mask; /* mask of vec registers to save */
81 int toc_save_p; /* true if the TOC needs to be saved */
82 int push_p; /* true if we need to allocate stack space */
83 int calls_p; /* true if the function makes any calls */
84 enum rs6000_abi abi; /* which ABI to use */
85 int gp_save_offset; /* offset to save GP regs from initial SP */
86 int fp_save_offset; /* offset to save FP regs from initial SP */
87 int altivec_save_offset; /* offset to save AltiVec regs from initial SP */
88 int lr_save_offset; /* offset to save LR from initial SP */
89 int cr_save_offset; /* offset to save CR from initial SP */
90 int vrsave_save_offset; /* offset to save VRSAVE from initial SP */
91 int spe_gp_save_offset; /* offset to save spe 64-bit gprs */
92 int toc_save_offset; /* offset to save the TOC pointer */
93 int varargs_save_offset; /* offset to save the varargs registers */
94 int ehrd_offset; /* offset to EH return data */
95 int reg_size; /* register size (4 or 8) */
96 int varargs_size; /* size to hold V.4 args passed in regs */
97 HOST_WIDE_INT vars_size; /* variable save area size */
98 int parm_size; /* outgoing parameter size */
99 int save_size; /* save area size */
100 int fixed_size; /* fixed size of stack frame */
101 int gp_size; /* size of saved GP registers */
102 int fp_size; /* size of saved FP registers */
103 int altivec_size; /* size of saved AltiVec registers */
104 int cr_size; /* size to hold CR if not in save_size */
105 int lr_size; /* size to hold LR if not in save_size */
106 int vrsave_size; /* size to hold VRSAVE if not in save_size */
107 int altivec_padding_size; /* size of altivec alignment padding if
109 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
110 int spe_padding_size;
111 int toc_size; /* size to hold TOC if not in save_size */
112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
113 int spe_64bit_regs_used;
116 /* Target cpu type */
118 enum processor_type rs6000_cpu;
119 struct rs6000_cpu_select rs6000_select[3] =
121 /* switch name, tune arch */
122 { (const char *)0, "--with-cpu=", 1, 1 },
123 { (const char *)0, "-mcpu=", 1, 1 },
124 { (const char *)0, "-mtune=", 1, 0 },
127 /* Support adjust_priority scheduler hook
128 and -mprioritize-restricted-insns= option. */
129 const char *rs6000_sched_restricted_insns_priority_str;
130 int rs6000_sched_restricted_insns_priority;
132 /* Support for -msched-costly-dep option. */
133 const char *rs6000_sched_costly_dep_str;
134 enum rs6000_dependence_cost rs6000_sched_costly_dep;
136 /* Support for -minsert-sched-nops option. */
137 const char *rs6000_sched_insert_nops_str;
138 enum rs6000_nop_insertion rs6000_sched_insert_nops;
140 /* Size of long double */
141 const char *rs6000_long_double_size_string;
142 int rs6000_long_double_type_size;
144 /* Whether -mabi=altivec has appeared */
145 int rs6000_altivec_abi;
147 /* Whether VRSAVE instructions should be generated. */
148 int rs6000_altivec_vrsave;
150 /* String from -mvrsave= option. */
151 const char *rs6000_altivec_vrsave_string;
153 /* Nonzero if we want SPE ABI extensions. */
156 /* Whether isel instructions should be generated. */
159 /* Whether SPE simd instructions should be generated. */
162 /* Nonzero if floating point operations are done in the GPRs. */
163 int rs6000_float_gprs = 0;
165 /* String from -mfloat-gprs=. */
166 const char *rs6000_float_gprs_string;
168 /* String from -misel=. */
169 const char *rs6000_isel_string;
171 /* String from -mspe=. */
172 const char *rs6000_spe_string;
174 /* Set to nonzero once AIX common-mode calls have been defined. */
175 static GTY(()) int common_mode_defined;
177 /* Save information from a "cmpxx" operation until the branch or scc is
179 rtx rs6000_compare_op0, rs6000_compare_op1;
180 int rs6000_compare_fp_p;
182 /* Label number of label created for -mrelocatable, to call to so we can
183 get the address of the GOT section */
184 int rs6000_pic_labelno;
187 /* Which abi to adhere to */
188 const char *rs6000_abi_name;
190 /* Semantics of the small data area */
191 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
193 /* Which small data model to use */
194 const char *rs6000_sdata_name = (char *)0;
196 /* Counter for labels which are to be placed in .fixup. */
197 int fixuplabelno = 0;
200 /* Bit size of immediate TLS offsets and string from which it is decoded. */
201 int rs6000_tls_size = 32;
202 const char *rs6000_tls_size_string;
204 /* ABI enumeration available for subtarget to use. */
205 enum rs6000_abi rs6000_current_abi;
207 /* ABI string from -mabi= option. */
208 const char *rs6000_abi_string;
211 const char *rs6000_debug_name;
212 int rs6000_debug_stack; /* debug stack applications */
213 int rs6000_debug_arg; /* debug argument handling */
216 static GTY(()) tree opaque_V2SI_type_node;
217 static GTY(()) tree opaque_V2SF_type_node;
218 static GTY(()) tree opaque_p_V2SI_type_node;
219 static GTY(()) tree V16QI_type_node;
220 static GTY(()) tree V2SI_type_node;
221 static GTY(()) tree V2SF_type_node;
222 static GTY(()) tree V4HI_type_node;
223 static GTY(()) tree V4SI_type_node;
224 static GTY(()) tree V4SF_type_node;
225 static GTY(()) tree V8HI_type_node;
226 static GTY(()) tree unsigned_V16QI_type_node;
227 static GTY(()) tree unsigned_V8HI_type_node;
228 static GTY(()) tree unsigned_V4SI_type_node;
229 static GTY(()) tree bool_char_type_node; /* __bool char */
230 static GTY(()) tree bool_short_type_node; /* __bool short */
231 static GTY(()) tree bool_int_type_node; /* __bool int */
232 static GTY(()) tree pixel_type_node; /* __pixel */
233 static GTY(()) tree bool_V16QI_type_node; /* __vector __bool char */
234 static GTY(()) tree bool_V8HI_type_node; /* __vector __bool short */
235 static GTY(()) tree bool_V4SI_type_node; /* __vector __bool int */
236 static GTY(()) tree pixel_V8HI_type_node; /* __vector __pixel */
238 int rs6000_warn_altivec_long = 1; /* On by default. */
239 const char *rs6000_warn_altivec_long_switch;
241 const char *rs6000_traceback_name;
243 traceback_default = 0,
249 /* Flag to say the TOC is initialized */
251 char toc_label_name[10];
253 /* Alias set for saves and restores from the rs6000 stack. */
254 static GTY(()) int rs6000_sr_alias_set;
256 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
257 The only place that looks at this is rs6000_set_default_type_attributes;
258 everywhere else should rely on the presence or absence of a longcall
259 attribute on the function declaration. */
260 int rs6000_default_long_calls;
261 const char *rs6000_longcall_switch;
263 /* Control alignment for fields within structures. */
264 /* String from -malign-XXXXX. */
265 const char *rs6000_alignment_string;
266 int rs6000_alignment_flags;
268 struct builtin_description
270 /* mask is not const because we're going to alter it below. This
271 nonsense will go away when we rewrite the -march infrastructure
272 to give us more target flag bits. */
274 const enum insn_code icode;
275 const char *const name;
276 const enum rs6000_builtins code;
279 static bool rs6000_function_ok_for_sibcall (tree, tree);
280 static int num_insns_constant_wide (HOST_WIDE_INT);
281 static void validate_condition_mode (enum rtx_code, enum machine_mode);
282 static rtx rs6000_generate_compare (enum rtx_code);
283 static void rs6000_maybe_dead (rtx);
284 static void rs6000_emit_stack_tie (void);
285 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
286 static rtx spe_synthesize_frame_save (rtx);
287 static bool spe_func_has_64bit_regs_p (void);
288 static void emit_frame_save (rtx, rtx, enum machine_mode, unsigned int,
290 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
291 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
292 static unsigned rs6000_hash_constant (rtx);
293 static unsigned toc_hash_function (const void *);
294 static int toc_hash_eq (const void *, const void *);
295 static int constant_pool_expr_1 (rtx, int *, int *);
296 static bool constant_pool_expr_p (rtx);
297 static bool toc_relative_expr_p (rtx);
298 static bool legitimate_small_data_p (enum machine_mode, rtx);
299 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
300 static bool legitimate_indexed_address_p (rtx, int);
301 static bool legitimate_indirect_address_p (rtx, int);
302 static bool macho_lo_sum_memory_operand (rtx x, enum machine_mode mode);
303 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
304 static struct machine_function * rs6000_init_machine_status (void);
305 static bool rs6000_assemble_integer (rtx, unsigned int, int);
306 #ifdef HAVE_GAS_HIDDEN
307 static void rs6000_assemble_visibility (tree, int);
309 static int rs6000_ra_ever_killed (void);
310 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
311 static tree rs6000_handle_altivec_attribute (tree *, tree, tree, int, bool *);
312 static const char *rs6000_mangle_fundamental_type (tree);
313 extern const struct attribute_spec rs6000_attribute_table[];
314 static void rs6000_set_default_type_attributes (tree);
315 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
316 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
317 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
319 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
320 static bool rs6000_return_in_memory (tree, tree);
321 static void rs6000_file_start (void);
323 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
324 static void rs6000_elf_asm_out_constructor (rtx, int);
325 static void rs6000_elf_asm_out_destructor (rtx, int);
326 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
327 static void rs6000_elf_unique_section (tree, int);
328 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
329 unsigned HOST_WIDE_INT);
330 static void rs6000_elf_encode_section_info (tree, rtx, int)
332 static bool rs6000_elf_in_small_data_p (tree);
335 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
336 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
337 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
338 static void rs6000_xcoff_unique_section (tree, int);
339 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
340 unsigned HOST_WIDE_INT);
341 static const char * rs6000_xcoff_strip_name_encoding (const char *);
342 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
343 static void rs6000_xcoff_file_start (void);
344 static void rs6000_xcoff_file_end (void);
347 static bool rs6000_binds_local_p (tree);
349 static int rs6000_use_dfa_pipeline_interface (void);
350 static int rs6000_variable_issue (FILE *, int, rtx, int);
351 static bool rs6000_rtx_costs (rtx, int, int, int *);
352 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
353 static bool is_microcoded_insn (rtx);
354 static int is_dispatch_slot_restricted (rtx);
355 static bool is_cracked_insn (rtx);
356 static bool is_branch_slot_insn (rtx);
357 static int rs6000_adjust_priority (rtx, int);
358 static int rs6000_issue_rate (void);
359 static bool rs6000_is_costly_dependence (rtx, rtx, rtx, int, int);
360 static rtx get_next_active_insn (rtx, rtx);
361 static bool insn_terminates_group_p (rtx , enum group_termination);
362 static bool is_costly_group (rtx *, rtx);
363 static int force_new_group (int, FILE *, rtx *, rtx, bool *, int, int *);
364 static int redefine_groups (FILE *, int, rtx, rtx);
365 static int pad_groups (FILE *, int, rtx, rtx);
366 static void rs6000_sched_finish (FILE *, int);
367 static int rs6000_use_sched_lookahead (void);
369 static void rs6000_init_builtins (void);
370 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
371 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
372 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
373 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
374 static void altivec_init_builtins (void);
375 static void rs6000_common_init_builtins (void);
376 static void rs6000_init_libfuncs (void);
378 static void enable_mask_for_builtins (struct builtin_description *, int,
379 enum rs6000_builtins,
380 enum rs6000_builtins);
381 static void spe_init_builtins (void);
382 static rtx spe_expand_builtin (tree, rtx, bool *);
383 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
384 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
385 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
386 static rs6000_stack_t *rs6000_stack_info (void);
387 static void debug_stack_info (rs6000_stack_t *);
389 static rtx altivec_expand_builtin (tree, rtx, bool *);
390 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
391 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
392 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
393 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
394 static rtx altivec_expand_predicate_builtin (enum insn_code,
395 const char *, tree, rtx);
396 static rtx altivec_expand_lv_builtin (enum insn_code, tree, rtx);
397 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
398 static void rs6000_parse_abi_options (void);
399 static void rs6000_parse_alignment_option (void);
400 static void rs6000_parse_tls_size_option (void);
401 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
402 static int first_altivec_reg_to_save (void);
403 static unsigned int compute_vrsave_mask (void);
404 static void is_altivec_return_reg (rtx, void *);
405 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
406 int easy_vector_constant (rtx, enum machine_mode);
407 static int easy_vector_same (rtx, enum machine_mode);
408 static bool is_ev64_opaque_type (tree);
409 static rtx rs6000_dwarf_register_span (rtx);
410 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
411 static rtx rs6000_tls_get_addr (void);
412 static rtx rs6000_got_sym (void);
413 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
414 static const char *rs6000_get_some_local_dynamic_name (void);
415 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
416 static rtx rs6000_complex_function_value (enum machine_mode);
417 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
418 enum machine_mode, tree);
419 static rtx rs6000_mixed_function_arg (CUMULATIVE_ARGS *,
420 enum machine_mode, tree, int);
421 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
422 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
423 enum machine_mode, tree,
426 static void macho_branch_islands (void);
427 static void add_compiler_branch_island (tree, tree, int);
428 static int no_previous_def (tree function_name);
429 static tree get_prev_label (tree function_name);
432 static tree rs6000_build_builtin_va_list (void);
434 /* Hash table stuff for keeping track of TOC entries. */
436 struct toc_hash_struct GTY(())
438 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
439 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
441 enum machine_mode key_mode;
445 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
447 /* Default register names. */
448 char rs6000_reg_names[][8] =
450 "0", "1", "2", "3", "4", "5", "6", "7",
451 "8", "9", "10", "11", "12", "13", "14", "15",
452 "16", "17", "18", "19", "20", "21", "22", "23",
453 "24", "25", "26", "27", "28", "29", "30", "31",
454 "0", "1", "2", "3", "4", "5", "6", "7",
455 "8", "9", "10", "11", "12", "13", "14", "15",
456 "16", "17", "18", "19", "20", "21", "22", "23",
457 "24", "25", "26", "27", "28", "29", "30", "31",
458 "mq", "lr", "ctr","ap",
459 "0", "1", "2", "3", "4", "5", "6", "7",
461 /* AltiVec registers. */
462 "0", "1", "2", "3", "4", "5", "6", "7",
463 "8", "9", "10", "11", "12", "13", "14", "15",
464 "16", "17", "18", "19", "20", "21", "22", "23",
465 "24", "25", "26", "27", "28", "29", "30", "31",
471 #ifdef TARGET_REGNAMES
472 static const char alt_reg_names[][8] =
474 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
475 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
476 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
477 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
478 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
479 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
480 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
481 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
482 "mq", "lr", "ctr", "ap",
483 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
485 /* AltiVec registers. */
486 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
487 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
488 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
489 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
496 #ifndef MASK_STRICT_ALIGN
497 #define MASK_STRICT_ALIGN 0
499 #ifndef TARGET_PROFILE_KERNEL
500 #define TARGET_PROFILE_KERNEL 0
503 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
504 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
506 /* Return 1 for a symbol ref for a thread-local storage symbol. */
507 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
508 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
510 /* Initialize the GCC target structure. */
511 #undef TARGET_ATTRIBUTE_TABLE
512 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
513 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
514 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
516 #undef TARGET_ASM_ALIGNED_DI_OP
517 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
519 /* Default unaligned ops are only provided for ELF. Find the ops needed
520 for non-ELF systems. */
521 #ifndef OBJECT_FORMAT_ELF
523 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
525 #undef TARGET_ASM_UNALIGNED_HI_OP
526 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
527 #undef TARGET_ASM_UNALIGNED_SI_OP
528 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
529 #undef TARGET_ASM_UNALIGNED_DI_OP
530 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
533 #undef TARGET_ASM_UNALIGNED_HI_OP
534 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
535 #undef TARGET_ASM_UNALIGNED_SI_OP
536 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
540 /* This hook deals with fixups for relocatable code and DI-mode objects
542 #undef TARGET_ASM_INTEGER
543 #define TARGET_ASM_INTEGER rs6000_assemble_integer
545 #ifdef HAVE_GAS_HIDDEN
546 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
547 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
550 #undef TARGET_HAVE_TLS
551 #define TARGET_HAVE_TLS HAVE_AS_TLS
553 #undef TARGET_CANNOT_FORCE_CONST_MEM
554 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
556 #undef TARGET_ASM_FUNCTION_PROLOGUE
557 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
558 #undef TARGET_ASM_FUNCTION_EPILOGUE
559 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
561 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
562 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
563 #undef TARGET_SCHED_VARIABLE_ISSUE
564 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
566 #undef TARGET_SCHED_ISSUE_RATE
567 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
568 #undef TARGET_SCHED_ADJUST_COST
569 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
570 #undef TARGET_SCHED_ADJUST_PRIORITY
571 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
572 #undef TARGET_SCHED_IS_COSTLY_DEPENDENCE
573 #define TARGET_SCHED_IS_COSTLY_DEPENDENCE rs6000_is_costly_dependence
574 #undef TARGET_SCHED_FINISH
575 #define TARGET_SCHED_FINISH rs6000_sched_finish
577 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
578 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
580 #undef TARGET_INIT_BUILTINS
581 #define TARGET_INIT_BUILTINS rs6000_init_builtins
583 #undef TARGET_EXPAND_BUILTIN
584 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
586 #undef TARGET_MANGLE_FUNDAMENTAL_TYPE
587 #define TARGET_MANGLE_FUNDAMENTAL_TYPE rs6000_mangle_fundamental_type
589 #undef TARGET_INIT_LIBFUNCS
590 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
593 #undef TARGET_BINDS_LOCAL_P
594 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
597 #undef TARGET_ASM_OUTPUT_MI_THUNK
598 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
600 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
601 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
603 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
604 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
606 #undef TARGET_RTX_COSTS
607 #define TARGET_RTX_COSTS rs6000_rtx_costs
608 #undef TARGET_ADDRESS_COST
609 #define TARGET_ADDRESS_COST hook_int_rtx_0
611 #undef TARGET_VECTOR_OPAQUE_P
612 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
614 #undef TARGET_DWARF_REGISTER_SPAN
615 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
617 /* On rs6000, function arguments are promoted, as are function return
619 #undef TARGET_PROMOTE_FUNCTION_ARGS
620 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
621 #undef TARGET_PROMOTE_FUNCTION_RETURN
622 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
624 #undef TARGET_RETURN_IN_MEMORY
625 #define TARGET_RETURN_IN_MEMORY rs6000_return_in_memory
627 #undef TARGET_SETUP_INCOMING_VARARGS
628 #define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs
630 /* Always strict argument naming on rs6000. */
631 #undef TARGET_STRICT_ARGUMENT_NAMING
632 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
633 #undef TARGET_PRETEND_OUTGOING_VARARGS_NAMED
634 #define TARGET_PRETEND_OUTGOING_VARARGS_NAMED hook_bool_CUMULATIVE_ARGS_true
635 #undef TARGET_SPLIT_COMPLEX_ARG
636 #define TARGET_SPLIT_COMPLEX_ARG hook_bool_tree_true
638 #undef TARGET_BUILD_BUILTIN_VA_LIST
639 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
641 struct gcc_target targetm = TARGET_INITIALIZER;
643 /* Override command line options. Mostly we process the processor
644 type and sometimes adjust other TARGET_ options. */
647 rs6000_override_options (const char *default_cpu)
650 struct rs6000_cpu_select *ptr;
653 /* Simplifications for entries below. */
656 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
657 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
660 /* This table occasionally claims that a processor does not support
661 a particular feature even though it does, but the feature is slower
662 than the alternative. Thus, it shouldn't be relied on as a
663 complete description of the processor's support.
665 Please keep this list in order, and don't forget to update the
666 documentation in invoke.texi when adding a new processor or
670 const char *const name; /* Canonical processor name. */
671 const enum processor_type processor; /* Processor type enum value. */
672 const int target_enable; /* Target flags to enable. */
673 } const processor_target_table[]
674 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
675 {"403", PROCESSOR_PPC403,
676 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
677 {"405", PROCESSOR_PPC405, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
678 {"405fp", PROCESSOR_PPC405, POWERPC_BASE_MASK},
679 {"440", PROCESSOR_PPC440, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
680 {"440fp", PROCESSOR_PPC440, POWERPC_BASE_MASK},
681 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
682 {"601", PROCESSOR_PPC601,
683 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
684 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
685 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
686 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
687 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
688 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
689 {"620", PROCESSOR_PPC620,
690 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
691 {"630", PROCESSOR_PPC630,
692 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
693 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
694 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
695 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
696 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
697 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
698 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
699 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
700 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
701 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
702 {"970", PROCESSOR_POWER4,
703 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
704 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
705 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
706 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
707 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
708 {"G5", PROCESSOR_POWER4,
709 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
710 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
711 {"power2", PROCESSOR_POWER,
712 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
713 {"power3", PROCESSOR_PPC630,
714 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
715 {"power4", PROCESSOR_POWER4,
716 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
717 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
718 {"powerpc64", PROCESSOR_POWERPC64,
719 POWERPC_BASE_MASK | MASK_POWERPC64},
720 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
721 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
722 {"rios2", PROCESSOR_RIOS2,
723 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
724 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
725 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
726 {"rs64a", PROCESSOR_RS64A, POWERPC_BASE_MASK | MASK_POWERPC64},
729 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
731 /* Some OSs don't support saving the high part of 64-bit registers on
732 context switch. Other OSs don't support saving Altivec registers.
733 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
734 settings; if the user wants either, the user must explicitly specify
735 them and we won't interfere with the user's specification. */
738 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
739 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT
740 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
743 set_masks = POWER_MASKS | POWERPC_MASKS | MASK_SOFT_FLOAT;
744 #ifdef OS_MISSING_POWERPC64
745 if (OS_MISSING_POWERPC64)
746 set_masks &= ~MASK_POWERPC64;
748 #ifdef OS_MISSING_ALTIVEC
749 if (OS_MISSING_ALTIVEC)
750 set_masks &= ~MASK_ALTIVEC;
753 /* Don't override these by the processor default if given explicitly. */
754 set_masks &= ~(target_flags_explicit
755 & (MASK_MULTIPLE | MASK_STRING | MASK_SOFT_FLOAT));
757 /* Identify the processor type. */
758 rs6000_select[0].string = default_cpu;
759 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
761 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
763 ptr = &rs6000_select[i];
764 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
766 for (j = 0; j < ptt_size; j++)
767 if (! strcmp (ptr->string, processor_target_table[j].name))
770 rs6000_cpu = processor_target_table[j].processor;
774 target_flags &= ~set_masks;
775 target_flags |= (processor_target_table[j].target_enable
782 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
789 /* If we are optimizing big endian systems for space, use the load/store
790 multiple and string instructions. */
791 if (BYTES_BIG_ENDIAN && optimize_size)
792 target_flags |= ~target_flags_explicit & (MASK_MULTIPLE | MASK_STRING);
794 /* Don't allow -mmultiple or -mstring on little endian systems
795 unless the cpu is a 750, because the hardware doesn't support the
796 instructions used in little endian mode, and causes an alignment
797 trap. The 750 does not cause an alignment trap (except when the
798 target is unaligned). */
800 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
804 target_flags &= ~MASK_MULTIPLE;
805 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
806 warning ("-mmultiple is not supported on little endian systems");
811 target_flags &= ~MASK_STRING;
812 if ((target_flags_explicit & MASK_STRING) != 0)
813 warning ("-mstring is not supported on little endian systems");
817 /* Set debug flags */
818 if (rs6000_debug_name)
820 if (! strcmp (rs6000_debug_name, "all"))
821 rs6000_debug_stack = rs6000_debug_arg = 1;
822 else if (! strcmp (rs6000_debug_name, "stack"))
823 rs6000_debug_stack = 1;
824 else if (! strcmp (rs6000_debug_name, "arg"))
825 rs6000_debug_arg = 1;
827 error ("unknown -mdebug-%s switch", rs6000_debug_name);
830 if (rs6000_traceback_name)
832 if (! strncmp (rs6000_traceback_name, "full", 4))
833 rs6000_traceback = traceback_full;
834 else if (! strncmp (rs6000_traceback_name, "part", 4))
835 rs6000_traceback = traceback_part;
836 else if (! strncmp (rs6000_traceback_name, "no", 2))
837 rs6000_traceback = traceback_none;
839 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
840 rs6000_traceback_name);
843 /* Set size of long double */
844 rs6000_long_double_type_size = 64;
845 if (rs6000_long_double_size_string)
848 int size = strtol (rs6000_long_double_size_string, &tail, 10);
849 if (*tail != '\0' || (size != 64 && size != 128))
850 error ("Unknown switch -mlong-double-%s",
851 rs6000_long_double_size_string);
853 rs6000_long_double_type_size = size;
856 /* Set Altivec ABI as default for powerpc64 linux. */
857 if (TARGET_ELF && TARGET_64BIT)
859 rs6000_altivec_abi = 1;
860 rs6000_altivec_vrsave = 1;
863 /* Handle -mabi= options. */
864 rs6000_parse_abi_options ();
866 /* Handle -malign-XXXXX option. */
867 rs6000_parse_alignment_option ();
869 /* Handle generic -mFOO=YES/NO options. */
870 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
871 &rs6000_altivec_vrsave);
872 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
874 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
875 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
878 /* Handle -mtls-size option. */
879 rs6000_parse_tls_size_option ();
881 #ifdef SUBTARGET_OVERRIDE_OPTIONS
882 SUBTARGET_OVERRIDE_OPTIONS;
884 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
885 SUBSUBTARGET_OVERRIDE_OPTIONS;
890 /* The e500 does not have string instructions, and we set
891 MASK_STRING above when optimizing for size. */
892 if ((target_flags & MASK_STRING) != 0)
893 target_flags = target_flags & ~MASK_STRING;
895 /* No SPE means 64-bit long doubles, even if an E500. */
896 if (rs6000_spe_string != 0
897 && !strcmp (rs6000_spe_string, "no"))
898 rs6000_long_double_type_size = 64;
900 else if (rs6000_select[1].string != NULL)
902 /* For the powerpc-eabispe configuration, we set all these by
903 default, so let's unset them if we manually set another
904 CPU that is not the E500. */
905 if (rs6000_abi_string == 0)
907 if (rs6000_spe_string == 0)
909 if (rs6000_float_gprs_string == 0)
910 rs6000_float_gprs = 0;
911 if (rs6000_isel_string == 0)
913 if (rs6000_long_double_size_string == 0)
914 rs6000_long_double_type_size = 64;
917 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
918 using TARGET_OPTIONS to handle a toggle switch, but we're out of
919 bits in target_flags so TARGET_SWITCHES cannot be used.
920 Assumption here is that rs6000_longcall_switch points into the
921 text of the complete option, rather than being a copy, so we can
922 scan back for the presence or absence of the no- modifier. */
923 if (rs6000_longcall_switch)
925 const char *base = rs6000_longcall_switch;
926 while (base[-1] != 'm') base--;
928 if (*rs6000_longcall_switch != '\0')
929 error ("invalid option `%s'", base);
930 rs6000_default_long_calls = (base[0] != 'n');
933 /* Handle -m(no-)warn-altivec-long similarly. */
934 if (rs6000_warn_altivec_long_switch)
936 const char *base = rs6000_warn_altivec_long_switch;
937 while (base[-1] != 'm') base--;
939 if (*rs6000_warn_altivec_long_switch != '\0')
940 error ("invalid option `%s'", base);
941 rs6000_warn_altivec_long = (base[0] != 'n');
944 /* Handle -mprioritize-restricted-insns option. */
945 rs6000_sched_restricted_insns_priority = DEFAULT_RESTRICTED_INSNS_PRIORITY;
946 if (rs6000_sched_restricted_insns_priority_str)
947 rs6000_sched_restricted_insns_priority =
948 atoi (rs6000_sched_restricted_insns_priority_str);
950 /* Handle -msched-costly-dep option. */
951 rs6000_sched_costly_dep = DEFAULT_SCHED_COSTLY_DEP;
952 if (rs6000_sched_costly_dep_str)
954 if (! strcmp (rs6000_sched_costly_dep_str, "no"))
955 rs6000_sched_costly_dep = no_dep_costly;
956 else if (! strcmp (rs6000_sched_costly_dep_str, "all"))
957 rs6000_sched_costly_dep = all_deps_costly;
958 else if (! strcmp (rs6000_sched_costly_dep_str, "true_store_to_load"))
959 rs6000_sched_costly_dep = true_store_to_load_dep_costly;
960 else if (! strcmp (rs6000_sched_costly_dep_str, "store_to_load"))
961 rs6000_sched_costly_dep = store_to_load_dep_costly;
963 rs6000_sched_costly_dep = atoi (rs6000_sched_costly_dep_str);
966 /* Handle -minsert-sched-nops option. */
967 rs6000_sched_insert_nops = DEFAULT_SCHED_FINISH_NOP_INSERTION_SCHEME;
968 if (rs6000_sched_insert_nops_str)
970 if (! strcmp (rs6000_sched_insert_nops_str, "no"))
971 rs6000_sched_insert_nops = sched_finish_none;
972 else if (! strcmp (rs6000_sched_insert_nops_str, "pad"))
973 rs6000_sched_insert_nops = sched_finish_pad_groups;
974 else if (! strcmp (rs6000_sched_insert_nops_str, "regroup_exact"))
975 rs6000_sched_insert_nops = sched_finish_regroup_exact;
977 rs6000_sched_insert_nops = atoi (rs6000_sched_insert_nops_str);
980 #ifdef TARGET_REGNAMES
981 /* If the user desires alternate register names, copy in the
982 alternate names now. */
984 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
987 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
988 If -maix-struct-return or -msvr4-struct-return was explicitly
989 used, don't override with the ABI default. */
990 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
992 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
993 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
995 target_flags |= MASK_AIX_STRUCT_RET;
998 if (TARGET_LONG_DOUBLE_128
999 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
1000 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
1002 /* Allocate an alias set for register saves & restores from stack. */
1003 rs6000_sr_alias_set = new_alias_set ();
1006 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
1008 /* We can only guarantee the availability of DI pseudo-ops when
1009 assembling for 64-bit targets. */
1012 targetm.asm_out.aligned_op.di = NULL;
1013 targetm.asm_out.unaligned_op.di = NULL;
1016 /* Set maximum branch target alignment at two instructions, eight bytes. */
1017 align_jumps_max_skip = 8;
1018 align_loops_max_skip = 8;
1020 /* Arrange to save and restore machine status around nested functions. */
1021 init_machine_status = rs6000_init_machine_status;
1023 /* We should always be splitting complex arguments, but we can't break
1024 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
1025 if (DEFAULT_ABI != ABI_AIX)
1026 targetm.calls.split_complex_arg = NULL;
1029 /* Handle generic options of the form -mfoo=yes/no.
1030 NAME is the option name.
1031 VALUE is the option value.
1032 FLAG is the pointer to the flag where to store a 1 or 0, depending on
1033 whether the option value is 'yes' or 'no' respectively. */
1035 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
1039 else if (!strcmp (value, "yes"))
1041 else if (!strcmp (value, "no"))
1044 error ("unknown -m%s= option specified: '%s'", name, value);
1047 /* Handle -mabi= options. */
1049 rs6000_parse_abi_options (void)
1051 if (rs6000_abi_string == 0)
1053 else if (! strcmp (rs6000_abi_string, "altivec"))
1055 rs6000_altivec_abi = 1;
1058 else if (! strcmp (rs6000_abi_string, "no-altivec"))
1059 rs6000_altivec_abi = 0;
1060 else if (! strcmp (rs6000_abi_string, "spe"))
1063 rs6000_altivec_abi = 0;
1064 if (!TARGET_SPE_ABI)
1065 error ("not configured for ABI: '%s'", rs6000_abi_string);
1068 else if (! strcmp (rs6000_abi_string, "no-spe"))
1071 error ("unknown ABI specified: '%s'", rs6000_abi_string);
1074 /* Handle -malign-XXXXXX options. */
1076 rs6000_parse_alignment_option (void)
1078 if (rs6000_alignment_string == 0)
1080 else if (! strcmp (rs6000_alignment_string, "power"))
1081 rs6000_alignment_flags = MASK_ALIGN_POWER;
1082 else if (! strcmp (rs6000_alignment_string, "natural"))
1083 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
1085 error ("unknown -malign-XXXXX option specified: '%s'",
1086 rs6000_alignment_string);
1089 /* Validate and record the size specified with the -mtls-size option. */
1092 rs6000_parse_tls_size_option (void)
1094 if (rs6000_tls_size_string == 0)
1096 else if (strcmp (rs6000_tls_size_string, "16") == 0)
1097 rs6000_tls_size = 16;
1098 else if (strcmp (rs6000_tls_size_string, "32") == 0)
1099 rs6000_tls_size = 32;
1100 else if (strcmp (rs6000_tls_size_string, "64") == 0)
1101 rs6000_tls_size = 64;
1103 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
1107 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
1111 /* Do anything needed at the start of the asm file. */
1114 rs6000_file_start (void)
1118 const char *start = buffer;
1119 struct rs6000_cpu_select *ptr;
1120 const char *default_cpu = TARGET_CPU_DEFAULT;
1121 FILE *file = asm_out_file;
1123 default_file_start ();
1125 #ifdef TARGET_BI_ARCH
1126 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
1130 if (flag_verbose_asm)
1132 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
1133 rs6000_select[0].string = default_cpu;
1135 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
1137 ptr = &rs6000_select[i];
1138 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
1140 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
1145 #ifdef USING_ELFOS_H
1146 switch (rs6000_sdata)
1148 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
1149 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1150 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1151 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1154 if (rs6000_sdata && g_switch_value)
1156 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1167 /* Return nonzero if this function is known to have a null epilogue. */
1170 direct_return (void)
1172 if (reload_completed)
1174 rs6000_stack_t *info = rs6000_stack_info ();
1176 if (info->first_gp_reg_save == 32
1177 && info->first_fp_reg_save == 64
1178 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1179 && ! info->lr_save_p
1180 && ! info->cr_save_p
1181 && info->vrsave_mask == 0
1189 /* Returns 1 always. */
1192 any_operand (rtx op ATTRIBUTE_UNUSED,
1193 enum machine_mode mode ATTRIBUTE_UNUSED)
1198 /* Returns 1 if op is the count register. */
1200 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1202 if (GET_CODE (op) != REG)
1205 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1208 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1214 /* Returns 1 if op is an altivec register. */
1216 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1219 return (register_operand (op, mode)
1220 && (GET_CODE (op) != REG
1221 || REGNO (op) > FIRST_PSEUDO_REGISTER
1222 || ALTIVEC_REGNO_P (REGNO (op))));
1226 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1228 if (GET_CODE (op) != REG)
1231 if (XER_REGNO_P (REGNO (op)))
1237 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1238 by such constants completes more quickly. */
1241 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1243 return ( GET_CODE (op) == CONST_INT
1244 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1247 /* Return 1 if OP is a constant that can fit in a D field. */
1250 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1252 return (GET_CODE (op) == CONST_INT
1253 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1256 /* Similar for an unsigned D field. */
1259 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1261 return (GET_CODE (op) == CONST_INT
1262 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1265 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1268 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1270 return (GET_CODE (op) == CONST_INT
1271 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1274 /* Returns 1 if OP is a CONST_INT that is a positive value
1275 and an exact power of 2. */
1278 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1280 return (GET_CODE (op) == CONST_INT
1282 && exact_log2 (INTVAL (op)) >= 0);
1285 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1289 gpc_reg_operand (rtx op, enum machine_mode mode)
1291 return (register_operand (op, mode)
1292 && (GET_CODE (op) != REG
1293 || (REGNO (op) >= ARG_POINTER_REGNUM
1294 && !XER_REGNO_P (REGNO (op)))
1295 || REGNO (op) < MQ_REGNO));
1298 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1302 cc_reg_operand (rtx op, enum machine_mode mode)
1304 return (register_operand (op, mode)
1305 && (GET_CODE (op) != REG
1306 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1307 || CR_REGNO_P (REGNO (op))));
1310 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1311 CR field that isn't CR0. */
1314 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1316 return (register_operand (op, mode)
1317 && (GET_CODE (op) != REG
1318 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1319 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1322 /* Returns 1 if OP is either a constant integer valid for a D-field or
1323 a non-special register. If a register, it must be in the proper
1324 mode unless MODE is VOIDmode. */
1327 reg_or_short_operand (rtx op, enum machine_mode mode)
1329 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1332 /* Similar, except check if the negation of the constant would be
1333 valid for a D-field. */
1336 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1338 if (GET_CODE (op) == CONST_INT)
1339 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1341 return gpc_reg_operand (op, mode);
1344 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1345 a non-special register. If a register, it must be in the proper
1346 mode unless MODE is VOIDmode. */
1349 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1351 if (gpc_reg_operand (op, mode))
1353 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1360 /* Return 1 if the operand is either a register or an integer whose
1361 high-order 16 bits are zero. */
1364 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1366 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1369 /* Return 1 is the operand is either a non-special register or ANY
1370 constant integer. */
1373 reg_or_cint_operand (rtx op, enum machine_mode mode)
1375 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1378 /* Return 1 is the operand is either a non-special register or ANY
1379 32-bit signed constant integer. */
1382 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1384 return (gpc_reg_operand (op, mode)
1385 || (GET_CODE (op) == CONST_INT
1386 #if HOST_BITS_PER_WIDE_INT != 32
1387 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1388 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1393 /* Return 1 is the operand is either a non-special register or a 32-bit
1394 signed constant integer valid for 64-bit addition. */
1397 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1399 return (gpc_reg_operand (op, mode)
1400 || (GET_CODE (op) == CONST_INT
1401 #if HOST_BITS_PER_WIDE_INT == 32
1402 && INTVAL (op) < 0x7fff8000
1404 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1410 /* Return 1 is the operand is either a non-special register or a 32-bit
1411 signed constant integer valid for 64-bit subtraction. */
1414 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1416 return (gpc_reg_operand (op, mode)
1417 || (GET_CODE (op) == CONST_INT
1418 #if HOST_BITS_PER_WIDE_INT == 32
1419 && (- INTVAL (op)) < 0x7fff8000
1421 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1427 /* Return 1 is the operand is either a non-special register or ANY
1428 32-bit unsigned constant integer. */
1431 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1433 if (GET_CODE (op) == CONST_INT)
1435 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1437 if (GET_MODE_BITSIZE (mode) <= 32)
1440 if (INTVAL (op) < 0)
1444 return ((INTVAL (op) & GET_MODE_MASK (mode)
1445 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1447 else if (GET_CODE (op) == CONST_DOUBLE)
1449 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1453 return CONST_DOUBLE_HIGH (op) == 0;
1456 return gpc_reg_operand (op, mode);
1459 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1462 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1464 return (GET_CODE (op) == SYMBOL_REF
1465 || GET_CODE (op) == CONST
1466 || GET_CODE (op) == LABEL_REF);
1469 /* Return 1 if the operand is a simple references that can be loaded via
1470 the GOT (labels involving addition aren't allowed). */
1473 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1475 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1478 /* Return the number of instructions it takes to form a constant in an
1479 integer register. */
1482 num_insns_constant_wide (HOST_WIDE_INT value)
1484 /* signed constant loadable with {cal|addi} */
1485 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1488 /* constant loadable with {cau|addis} */
1489 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1492 #if HOST_BITS_PER_WIDE_INT == 64
1493 else if (TARGET_POWERPC64)
1495 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1496 HOST_WIDE_INT high = value >> 31;
1498 if (high == 0 || high == -1)
1504 return num_insns_constant_wide (high) + 1;
1506 return (num_insns_constant_wide (high)
1507 + num_insns_constant_wide (low) + 1);
1516 num_insns_constant (rtx op, enum machine_mode mode)
1518 if (GET_CODE (op) == CONST_INT)
1520 #if HOST_BITS_PER_WIDE_INT == 64
1521 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1522 && mask64_operand (op, mode))
1526 return num_insns_constant_wide (INTVAL (op));
1529 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1534 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1535 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1536 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1539 else if (GET_CODE (op) == CONST_DOUBLE)
1545 int endian = (WORDS_BIG_ENDIAN == 0);
1547 if (mode == VOIDmode || mode == DImode)
1549 high = CONST_DOUBLE_HIGH (op);
1550 low = CONST_DOUBLE_LOW (op);
1554 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1555 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1557 low = l[1 - endian];
1561 return (num_insns_constant_wide (low)
1562 + num_insns_constant_wide (high));
1566 if (high == 0 && low >= 0)
1567 return num_insns_constant_wide (low);
1569 else if (high == -1 && low < 0)
1570 return num_insns_constant_wide (low);
1572 else if (mask64_operand (op, mode))
1576 return num_insns_constant_wide (high) + 1;
1579 return (num_insns_constant_wide (high)
1580 + num_insns_constant_wide (low) + 1);
1588 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1589 register with one instruction per word. We only do this if we can
1590 safely read CONST_DOUBLE_{LOW,HIGH}. */
1593 easy_fp_constant (rtx op, enum machine_mode mode)
1595 if (GET_CODE (op) != CONST_DOUBLE
1596 || GET_MODE (op) != mode
1597 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1600 /* Consider all constants with -msoft-float to be easy. */
1601 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1605 /* If we are using V.4 style PIC, consider all constants to be hard. */
1606 if (flag_pic && DEFAULT_ABI == ABI_V4)
1609 #ifdef TARGET_RELOCATABLE
1610 /* Similarly if we are using -mrelocatable, consider all constants
1612 if (TARGET_RELOCATABLE)
1621 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1622 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1624 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1625 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1626 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1627 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1630 else if (mode == DFmode)
1635 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1636 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1638 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1639 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1642 else if (mode == SFmode)
1647 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1648 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1650 return num_insns_constant_wide (l) == 1;
1653 else if (mode == DImode)
1654 return ((TARGET_POWERPC64
1655 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1656 || (num_insns_constant (op, DImode) <= 2));
1658 else if (mode == SImode)
1664 /* Return nonzero if all elements of a vector have the same value. */
1667 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1671 units = CONST_VECTOR_NUNITS (op);
1673 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1674 for (i = 1; i < units; ++i)
1675 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1682 /* Return 1 if the operand is a CONST_INT and can be put into a
1683 register without using memory. */
1686 easy_vector_constant (rtx op, enum machine_mode mode)
1690 if (GET_CODE (op) != CONST_VECTOR
1695 if (zero_constant (op, mode)
1696 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1697 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1700 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1703 if (TARGET_SPE && mode == V1DImode)
1706 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1707 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1709 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1711 evmergelo r0, r0, r0
1714 I don't know how efficient it would be to allow bigger constants,
1715 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1716 instructions is better than a 64-bit memory load, but I don't
1717 have the e500 timing specs. */
1718 if (TARGET_SPE && mode == V2SImode
1719 && cst >= -0x7fff && cst <= 0x7fff
1720 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1727 if (EASY_VECTOR_15 (cst, op, mode))
1729 if ((cst & 0xffff) != ((cst >> 16) & 0xffff))
1733 if (EASY_VECTOR_15 (cst, op, mode))
1735 if ((cst & 0xff) != ((cst >> 8) & 0xff))
1739 if (EASY_VECTOR_15 (cst, op, mode))
1745 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1751 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1754 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1758 if (!easy_vector_constant (op, mode))
1761 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1763 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1767 output_vec_const_move (rtx *operands)
1770 enum machine_mode mode;
1776 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1777 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1778 mode = GET_MODE (dest);
1782 if (zero_constant (vec, mode))
1783 return "vxor %0,%0,%0";
1784 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1786 else if (easy_vector_constant (vec, mode))
1788 operands[1] = GEN_INT (cst);
1792 if (EASY_VECTOR_15 (cst, vec, mode))
1794 operands[1] = GEN_INT (cst);
1795 return "vspltisw %0,%1";
1799 if (EASY_VECTOR_15 (cst, vec, mode))
1801 operands[1] = GEN_INT (cst);
1802 return "vspltish %0,%1";
1806 if (EASY_VECTOR_15 (cst, vec, mode))
1808 operands[1] = GEN_INT (cst);
1809 return "vspltisb %0,%1";
1821 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1822 pattern of V1DI, V4HI, and V2SF.
1824 FIXME: We should probably return # and add post reload
1825 splitters for these, but this way is so easy ;-).
1827 operands[1] = GEN_INT (cst);
1828 operands[2] = GEN_INT (cst2);
1830 return "li %0,%1\n\tevmergelo %0,%0,%0";
1832 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1838 /* Return 1 if the operand is the constant 0. This works for scalars
1839 as well as vectors. */
1841 zero_constant (rtx op, enum machine_mode mode)
1843 return op == CONST0_RTX (mode);
1846 /* Return 1 if the operand is 0.0. */
1848 zero_fp_constant (rtx op, enum machine_mode mode)
1850 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1853 /* Return 1 if the operand is in volatile memory. Note that during
1854 the RTL generation phase, memory_operand does not return TRUE for
1855 volatile memory references. So this function allows us to
1856 recognize volatile references where its safe. */
1859 volatile_mem_operand (rtx op, enum machine_mode mode)
1861 if (GET_CODE (op) != MEM)
1864 if (!MEM_VOLATILE_P (op))
1867 if (mode != GET_MODE (op))
1870 if (reload_completed)
1871 return memory_operand (op, mode);
1873 if (reload_in_progress)
1874 return strict_memory_address_p (mode, XEXP (op, 0));
1876 return memory_address_p (mode, XEXP (op, 0));
1879 /* Return 1 if the operand is an offsettable memory operand. */
1882 offsettable_mem_operand (rtx op, enum machine_mode mode)
1884 return ((GET_CODE (op) == MEM)
1885 && offsettable_address_p (reload_completed || reload_in_progress,
1886 mode, XEXP (op, 0)));
1889 /* Return 1 if the operand is either an easy FP constant (see above) or
1893 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1895 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1898 /* Return 1 if the operand is either a non-special register or an item
1899 that can be used as the operand of a `mode' add insn. */
1902 add_operand (rtx op, enum machine_mode mode)
1904 if (GET_CODE (op) == CONST_INT)
1905 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1906 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1908 return gpc_reg_operand (op, mode);
1911 /* Return 1 if OP is a constant but not a valid add_operand. */
1914 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1916 return (GET_CODE (op) == CONST_INT
1917 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1918 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1921 /* Return 1 if the operand is a non-special register or a constant that
1922 can be used as the operand of an OR or XOR insn on the RS/6000. */
1925 logical_operand (rtx op, enum machine_mode mode)
1927 HOST_WIDE_INT opl, oph;
1929 if (gpc_reg_operand (op, mode))
1932 if (GET_CODE (op) == CONST_INT)
1934 opl = INTVAL (op) & GET_MODE_MASK (mode);
1936 #if HOST_BITS_PER_WIDE_INT <= 32
1937 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1941 else if (GET_CODE (op) == CONST_DOUBLE)
1943 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1946 opl = CONST_DOUBLE_LOW (op);
1947 oph = CONST_DOUBLE_HIGH (op);
1954 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1955 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1958 /* Return 1 if C is a constant that is not a logical operand (as
1959 above), but could be split into one. */
1962 non_logical_cint_operand (rtx op, enum machine_mode mode)
1964 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1965 && ! logical_operand (op, mode)
1966 && reg_or_logical_cint_operand (op, mode));
1969 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1970 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1971 Reject all ones and all zeros, since these should have been optimized
1972 away and confuse the making of MB and ME. */
1975 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1977 HOST_WIDE_INT c, lsb;
1979 if (GET_CODE (op) != CONST_INT)
1984 /* Fail in 64-bit mode if the mask wraps around because the upper
1985 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1986 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1989 /* We don't change the number of transitions by inverting,
1990 so make sure we start with the LS bit zero. */
1994 /* Reject all zeros or all ones. */
1998 /* Find the first transition. */
2001 /* Invert to look for a second transition. */
2004 /* Erase first transition. */
2007 /* Find the second transition (if any). */
2010 /* Match if all the bits above are 1's (or c is zero). */
2014 /* Return 1 for the PowerPC64 rlwinm corner case. */
2017 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2019 HOST_WIDE_INT c, lsb;
2021 if (GET_CODE (op) != CONST_INT)
2026 if ((c & 0x80000001) != 0x80000001)
2040 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
2041 It is if there are no more than one 1->0 or 0->1 transitions.
2042 Reject all zeros, since zero should have been optimized away and
2043 confuses the making of MB and ME. */
2046 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2048 if (GET_CODE (op) == CONST_INT)
2050 HOST_WIDE_INT c, lsb;
2054 /* Reject all zeros. */
2058 /* We don't change the number of transitions by inverting,
2059 so make sure we start with the LS bit zero. */
2063 /* Find the transition, and check that all bits above are 1's. */
2066 /* Match if all the bits above are 1's (or c is zero). */
2072 /* Like mask64_operand, but allow up to three transitions. This
2073 predicate is used by insn patterns that generate two rldicl or
2074 rldicr machine insns. */
2077 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2079 if (GET_CODE (op) == CONST_INT)
2081 HOST_WIDE_INT c, lsb;
2085 /* Disallow all zeros. */
2089 /* We don't change the number of transitions by inverting,
2090 so make sure we start with the LS bit zero. */
2094 /* Find the first transition. */
2097 /* Invert to look for a second transition. */
2100 /* Erase first transition. */
2103 /* Find the second transition. */
2106 /* Invert to look for a third transition. */
2109 /* Erase second transition. */
2112 /* Find the third transition (if any). */
2115 /* Match if all the bits above are 1's (or c is zero). */
2121 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
2122 implement ANDing by the mask IN. */
2124 build_mask64_2_operands (rtx in, rtx *out)
2126 #if HOST_BITS_PER_WIDE_INT >= 64
2127 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2130 if (GET_CODE (in) != CONST_INT)
2136 /* Assume c initially something like 0x00fff000000fffff. The idea
2137 is to rotate the word so that the middle ^^^^^^ group of zeros
2138 is at the MS end and can be cleared with an rldicl mask. We then
2139 rotate back and clear off the MS ^^ group of zeros with a
2141 c = ~c; /* c == 0xff000ffffff00000 */
2142 lsb = c & -c; /* lsb == 0x0000000000100000 */
2143 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2144 c = ~c; /* c == 0x00fff000000fffff */
2145 c &= -lsb; /* c == 0x00fff00000000000 */
2146 lsb = c & -c; /* lsb == 0x0000100000000000 */
2147 c = ~c; /* c == 0xff000fffffffffff */
2148 c &= -lsb; /* c == 0xff00000000000000 */
2150 while ((lsb >>= 1) != 0)
2151 shift++; /* shift == 44 on exit from loop */
2152 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2153 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2154 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2158 /* Assume c initially something like 0xff000f0000000000. The idea
2159 is to rotate the word so that the ^^^ middle group of zeros
2160 is at the LS end and can be cleared with an rldicr mask. We then
2161 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2163 lsb = c & -c; /* lsb == 0x0000010000000000 */
2164 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2165 c = ~c; /* c == 0x00fff0ffffffffff */
2166 c &= -lsb; /* c == 0x00fff00000000000 */
2167 lsb = c & -c; /* lsb == 0x0000100000000000 */
2168 c = ~c; /* c == 0xff000fffffffffff */
2169 c &= -lsb; /* c == 0xff00000000000000 */
2171 while ((lsb >>= 1) != 0)
2172 shift++; /* shift == 44 on exit from loop */
2173 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2174 m1 >>= shift; /* m1 == 0x0000000000000fff */
2175 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2178 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2179 masks will be all 1's. We are guaranteed more than one transition. */
2180 out[0] = GEN_INT (64 - shift);
2181 out[1] = GEN_INT (m1);
2182 out[2] = GEN_INT (shift);
2183 out[3] = GEN_INT (m2);
2191 /* Return 1 if the operand is either a non-special register or a constant
2192 that can be used as the operand of a PowerPC64 logical AND insn. */
2195 and64_operand (rtx op, enum machine_mode mode)
2197 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2198 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2200 return (logical_operand (op, mode) || mask64_operand (op, mode));
2203 /* Like the above, but also match constants that can be implemented
2204 with two rldicl or rldicr insns. */
2207 and64_2_operand (rtx op, enum machine_mode mode)
2209 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2210 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2212 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2215 /* Return 1 if the operand is either a non-special register or a
2216 constant that can be used as the operand of an RS/6000 logical AND insn. */
2219 and_operand (rtx op, enum machine_mode mode)
2221 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2222 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2224 return (logical_operand (op, mode) || mask_operand (op, mode));
2227 /* Return 1 if the operand is a general register or memory operand. */
2230 reg_or_mem_operand (rtx op, enum machine_mode mode)
2232 return (gpc_reg_operand (op, mode)
2233 || memory_operand (op, mode)
2234 || macho_lo_sum_memory_operand (op, mode)
2235 || volatile_mem_operand (op, mode));
2238 /* Return 1 if the operand is a general register or memory operand without
2239 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2243 lwa_operand (rtx op, enum machine_mode mode)
2247 if (reload_completed && GET_CODE (inner) == SUBREG)
2248 inner = SUBREG_REG (inner);
2250 return gpc_reg_operand (inner, mode)
2251 || (memory_operand (inner, mode)
2252 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2253 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2254 && (GET_CODE (XEXP (inner, 0)) != PLUS
2255 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2256 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2259 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2262 symbol_ref_operand (rtx op, enum machine_mode mode)
2264 if (mode != VOIDmode && GET_MODE (op) != mode)
2268 if (GET_CODE (op) == SYMBOL_REF && TARGET_MACHO && MACHOPIC_INDIRECT)
2270 /* Macho says it has to go through a stub or be local
2271 when indirect mode. Stubs are considered local. */
2272 const char *t = XSTR (op, 0);
2273 /* "&" means that it is it a local defined symbol
2274 so it is okay to call to. */
2278 /* "!T" means that the function is local defined. */
2279 return (t[0] == '!' && t[1] == 'T');
2284 return (GET_CODE (op) == SYMBOL_REF
2285 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2288 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2289 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2292 call_operand (rtx op, enum machine_mode mode)
2294 if (mode != VOIDmode && GET_MODE (op) != mode)
2297 return (GET_CODE (op) == SYMBOL_REF
2298 || (GET_CODE (op) == REG
2299 && (REGNO (op) == LINK_REGISTER_REGNUM
2300 || REGNO (op) == COUNT_REGISTER_REGNUM
2301 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2304 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2308 current_file_function_operand (rtx op,
2309 enum machine_mode mode ATTRIBUTE_UNUSED)
2311 return (GET_CODE (op) == SYMBOL_REF
2312 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2313 && (SYMBOL_REF_LOCAL_P (op)
2314 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2317 /* Return 1 if this operand is a valid input for a move insn. */
2320 input_operand (rtx op, enum machine_mode mode)
2322 /* Memory is always valid. */
2323 if (memory_operand (op, mode))
2326 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2327 if (GET_CODE (op) == CONSTANT_P_RTX)
2330 /* For floating-point, easy constants are valid. */
2331 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2333 && easy_fp_constant (op, mode))
2336 /* Allow any integer constant. */
2337 if (GET_MODE_CLASS (mode) == MODE_INT
2338 && (GET_CODE (op) == CONST_INT
2339 || GET_CODE (op) == CONST_DOUBLE))
2342 /* Allow easy vector constants. */
2343 if (GET_CODE (op) == CONST_VECTOR
2344 && easy_vector_constant (op, mode))
2347 /* For floating-point or multi-word mode, the only remaining valid type
2349 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2350 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2351 return register_operand (op, mode);
2353 /* The only cases left are integral modes one word or smaller (we
2354 do not get called for MODE_CC values). These can be in any
2356 if (register_operand (op, mode))
2359 /* A SYMBOL_REF referring to the TOC is valid. */
2360 if (legitimate_constant_pool_address_p (op))
2363 /* A constant pool expression (relative to the TOC) is valid */
2364 if (toc_relative_expr_p (op))
2367 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2369 if (DEFAULT_ABI == ABI_V4
2370 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2371 && small_data_operand (op, Pmode))
2378 /* Darwin, AIX increases natural record alignment to doubleword if the first
2379 field is an FP double while the FP fields remain word aligned. */
2382 rs6000_special_round_type_align (tree type, int computed, int specified)
2384 tree field = TYPE_FIELDS (type);
2386 /* Skip all the static variables only if ABI is greater than
2388 while (field != NULL && TREE_CODE (field) == VAR_DECL)
2389 field = TREE_CHAIN (field);
2391 if (field == NULL || field == type || DECL_MODE (field) != DFmode)
2392 return MAX (computed, specified);
2394 return MAX (MAX (computed, specified), 64);
2397 /* Return 1 for an operand in small memory on V.4/eabi. */
2400 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2401 enum machine_mode mode ATTRIBUTE_UNUSED)
2406 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2409 if (DEFAULT_ABI != ABI_V4)
2412 if (GET_CODE (op) == SYMBOL_REF)
2415 else if (GET_CODE (op) != CONST
2416 || GET_CODE (XEXP (op, 0)) != PLUS
2417 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2418 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2423 rtx sum = XEXP (op, 0);
2424 HOST_WIDE_INT summand;
2426 /* We have to be careful here, because it is the referenced address
2427 that must be 32k from _SDA_BASE_, not just the symbol. */
2428 summand = INTVAL (XEXP (sum, 1));
2429 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2432 sym_ref = XEXP (sum, 0);
2435 return SYMBOL_REF_SMALL_P (sym_ref);
2441 /* Return true, if operand is a memory operand and has a
2442 displacement divisible by 4. */
2445 word_offset_memref_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
2450 if (!memory_operand (op, mode))
2453 addr = XEXP (op, 0);
2454 if (GET_CODE (addr) == PLUS
2455 && GET_CODE (XEXP (addr, 0)) == REG
2456 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
2457 off = INTVAL (XEXP (addr, 1));
2459 return (off % 4) == 0;
2462 /* Return true if either operand is a general purpose register. */
2465 gpr_or_gpr_p (rtx op0, rtx op1)
2467 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2468 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2472 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2475 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2477 switch (GET_CODE(op))
2480 if (RS6000_SYMBOL_REF_TLS_P (op))
2482 else if (CONSTANT_POOL_ADDRESS_P (op))
2484 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2492 else if (! strcmp (XSTR (op, 0), toc_label_name))
2501 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2502 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2504 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2513 constant_pool_expr_p (rtx op)
2517 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2521 toc_relative_expr_p (rtx op)
2525 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2528 /* SPE offset addressing is limited to 5-bits worth of double words. */
2529 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2532 legitimate_constant_pool_address_p (rtx x)
2535 && GET_CODE (x) == PLUS
2536 && GET_CODE (XEXP (x, 0)) == REG
2537 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2538 && constant_pool_expr_p (XEXP (x, 1)));
2542 legitimate_small_data_p (enum machine_mode mode, rtx x)
2544 return (DEFAULT_ABI == ABI_V4
2545 && !flag_pic && !TARGET_TOC
2546 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2547 && small_data_operand (x, mode));
2551 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2553 unsigned HOST_WIDE_INT offset, extra;
2555 if (GET_CODE (x) != PLUS)
2557 if (GET_CODE (XEXP (x, 0)) != REG)
2559 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2561 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2564 offset = INTVAL (XEXP (x, 1));
2572 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2573 which leaves the only valid constant offset of zero, which by
2574 canonicalization rules is also invalid. */
2581 /* SPE vector modes. */
2582 return SPE_CONST_OFFSET_OK (offset);
2586 if (mode == DFmode || !TARGET_POWERPC64)
2588 else if (offset & 3)
2594 if (mode == TFmode || !TARGET_POWERPC64)
2596 else if (offset & 3)
2607 return (offset < 0x10000) && (offset + extra < 0x10000);
2611 legitimate_indexed_address_p (rtx x, int strict)
2615 if (GET_CODE (x) != PLUS)
2620 if (!REG_P (op0) || !REG_P (op1))
2623 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2624 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2625 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2626 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2630 legitimate_indirect_address_p (rtx x, int strict)
2632 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2636 macho_lo_sum_memory_operand (rtx x, enum machine_mode mode)
2638 if (!TARGET_MACHO || !flag_pic
2639 || mode != SImode || GET_CODE(x) != MEM)
2643 if (GET_CODE (x) != LO_SUM)
2645 if (GET_CODE (XEXP (x, 0)) != REG)
2647 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), 0))
2651 return CONSTANT_P (x);
2655 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2657 if (GET_CODE (x) != LO_SUM)
2659 if (GET_CODE (XEXP (x, 0)) != REG)
2661 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2665 if (TARGET_ELF || TARGET_MACHO)
2667 if (DEFAULT_ABI != ABI_AIX && DEFAULT_ABI != ABI_DARWIN && flag_pic)
2671 if (GET_MODE_NUNITS (mode) != 1)
2673 if (GET_MODE_BITSIZE (mode) > 32
2674 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2677 return CONSTANT_P (x);
2684 /* Try machine-dependent ways of modifying an illegitimate address
2685 to be legitimate. If we find one, return the new, valid address.
2686 This is used from only one place: `memory_address' in explow.c.
2688 OLDX is the address as it was before break_out_memory_refs was
2689 called. In some cases it is useful to look at this to decide what
2692 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2694 It is always safe for this function to do nothing. It exists to
2695 recognize opportunities to optimize the output.
2697 On RS/6000, first check for the sum of a register with a constant
2698 integer that is out of range. If so, generate code to add the
2699 constant with the low-order 16 bits masked to the register and force
2700 this result into another register (this can be done with `cau').
2701 Then generate an address of REG+(CONST&0xffff), allowing for the
2702 possibility of bit 16 being a one.
2704 Then check for the sum of a register and something not constant, try to
2705 load the other things into a register and return the sum. */
2708 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2709 enum machine_mode mode)
2711 if (GET_CODE (x) == SYMBOL_REF)
2713 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2715 return rs6000_legitimize_tls_address (x, model);
2718 if (GET_CODE (x) == PLUS
2719 && GET_CODE (XEXP (x, 0)) == REG
2720 && GET_CODE (XEXP (x, 1)) == CONST_INT
2721 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2723 HOST_WIDE_INT high_int, low_int;
2725 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2726 high_int = INTVAL (XEXP (x, 1)) - low_int;
2727 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2728 GEN_INT (high_int)), 0);
2729 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2731 else if (GET_CODE (x) == PLUS
2732 && GET_CODE (XEXP (x, 0)) == REG
2733 && GET_CODE (XEXP (x, 1)) != CONST_INT
2734 && GET_MODE_NUNITS (mode) == 1
2735 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2737 || (mode != DFmode && mode != TFmode))
2738 && (TARGET_POWERPC64 || mode != DImode)
2741 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2742 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2744 else if (ALTIVEC_VECTOR_MODE (mode))
2748 /* Make sure both operands are registers. */
2749 if (GET_CODE (x) == PLUS)
2750 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2751 force_reg (Pmode, XEXP (x, 1)));
2753 reg = force_reg (Pmode, x);
2756 else if (SPE_VECTOR_MODE (mode))
2758 /* We accept [reg + reg] and [reg + OFFSET]. */
2760 if (GET_CODE (x) == PLUS)
2762 rtx op1 = XEXP (x, 0);
2763 rtx op2 = XEXP (x, 1);
2765 op1 = force_reg (Pmode, op1);
2767 if (GET_CODE (op2) != REG
2768 && (GET_CODE (op2) != CONST_INT
2769 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2770 op2 = force_reg (Pmode, op2);
2772 return gen_rtx_PLUS (Pmode, op1, op2);
2775 return force_reg (Pmode, x);
2781 && GET_CODE (x) != CONST_INT
2782 && GET_CODE (x) != CONST_DOUBLE
2784 && GET_MODE_NUNITS (mode) == 1
2785 && (GET_MODE_BITSIZE (mode) <= 32
2786 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2788 rtx reg = gen_reg_rtx (Pmode);
2789 emit_insn (gen_elf_high (reg, x));
2790 return gen_rtx_LO_SUM (Pmode, reg, x);
2792 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2795 && ! MACHO_DYNAMIC_NO_PIC_P
2797 && GET_CODE (x) != CONST_INT
2798 && GET_CODE (x) != CONST_DOUBLE
2800 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2804 rtx reg = gen_reg_rtx (Pmode);
2805 emit_insn (gen_macho_high (reg, x));
2806 return gen_rtx_LO_SUM (Pmode, reg, x);
2809 && constant_pool_expr_p (x)
2810 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2812 return create_TOC_reference (x);
2818 /* This is called from dwarf2out.c via ASM_OUTPUT_DWARF_DTPREL.
2819 We need to emit DTP-relative relocations. */
2822 rs6000_output_dwarf_dtprel (FILE *file, int size, rtx x)
2827 fputs ("\t.long\t", file);
2830 fputs (DOUBLE_INT_ASM_OP, file);
2835 output_addr_const (file, x);
2836 fputs ("@dtprel+0x8000", file);
2839 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2841 static GTY(()) rtx rs6000_tls_symbol;
2843 rs6000_tls_get_addr (void)
2845 if (!rs6000_tls_symbol)
2846 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2848 return rs6000_tls_symbol;
2851 /* Construct the SYMBOL_REF for TLS GOT references. */
2853 static GTY(()) rtx rs6000_got_symbol;
2855 rs6000_got_sym (void)
2857 if (!rs6000_got_symbol)
2859 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2860 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2861 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2864 return rs6000_got_symbol;
2867 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2868 this (thread-local) address. */
2871 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2875 dest = gen_reg_rtx (Pmode);
2876 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2882 tlsreg = gen_rtx_REG (Pmode, 13);
2883 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2887 tlsreg = gen_rtx_REG (Pmode, 2);
2888 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2892 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2896 tmp = gen_reg_rtx (Pmode);
2899 tlsreg = gen_rtx_REG (Pmode, 13);
2900 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2904 tlsreg = gen_rtx_REG (Pmode, 2);
2905 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2909 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2911 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2916 rtx r3, got, tga, tmp1, tmp2, eqv;
2919 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2923 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2926 rtx gsym = rs6000_got_sym ();
2927 got = gen_reg_rtx (Pmode);
2929 rs6000_emit_move (got, gsym, Pmode);
2933 static int tls_got_labelno = 0;
2934 rtx tempLR, lab, tmp3, mem;
2937 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2938 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2939 tempLR = gen_reg_rtx (Pmode);
2940 tmp1 = gen_reg_rtx (Pmode);
2941 tmp2 = gen_reg_rtx (Pmode);
2942 tmp3 = gen_reg_rtx (Pmode);
2943 mem = gen_rtx_MEM (Pmode, tmp1);
2944 RTX_UNCHANGING_P (mem) = 1;
2946 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2948 emit_move_insn (tmp1, tempLR);
2949 emit_move_insn (tmp2, mem);
2950 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2951 last = emit_move_insn (got, tmp3);
2952 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2954 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2956 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2962 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2964 r3 = gen_rtx_REG (Pmode, 3);
2966 insn = gen_tls_gd_64 (r3, got, addr);
2968 insn = gen_tls_gd_32 (r3, got, addr);
2971 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2972 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2973 insn = emit_call_insn (insn);
2974 CONST_OR_PURE_CALL_P (insn) = 1;
2975 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2976 insn = get_insns ();
2978 emit_libcall_block (insn, dest, r3, addr);
2980 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2982 r3 = gen_rtx_REG (Pmode, 3);
2984 insn = gen_tls_ld_64 (r3, got);
2986 insn = gen_tls_ld_32 (r3, got);
2989 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2990 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2991 insn = emit_call_insn (insn);
2992 CONST_OR_PURE_CALL_P (insn) = 1;
2993 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2994 insn = get_insns ();
2996 tmp1 = gen_reg_rtx (Pmode);
2997 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2999 emit_libcall_block (insn, tmp1, r3, eqv);
3000 if (rs6000_tls_size == 16)
3003 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
3005 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
3007 else if (rs6000_tls_size == 32)
3009 tmp2 = gen_reg_rtx (Pmode);
3011 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
3013 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
3016 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
3018 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
3022 tmp2 = gen_reg_rtx (Pmode);
3024 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
3026 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
3028 insn = gen_rtx_SET (Pmode, dest,
3029 gen_rtx_PLUS (Pmode, tmp2, tmp1));
3035 /* IE, or 64 bit offset LE. */
3036 tmp2 = gen_reg_rtx (Pmode);
3038 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
3040 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
3043 insn = gen_tls_tls_64 (dest, tmp2, addr);
3045 insn = gen_tls_tls_32 (dest, tmp2, addr);
3053 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
3054 instruction definitions. */
3057 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
3059 return RS6000_SYMBOL_REF_TLS_P (x);
3062 /* Return 1 if X contains a thread-local symbol. */
3065 rs6000_tls_referenced_p (rtx x)
3067 if (! TARGET_HAVE_TLS)
3070 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
3073 /* Return 1 if *X is a thread-local symbol. This is the same as
3074 rs6000_tls_symbol_ref except for the type of the unused argument. */
3077 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
3079 return RS6000_SYMBOL_REF_TLS_P (*x);
3082 /* The convention appears to be to define this wherever it is used.
3083 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
3084 is now used here. */
3085 #ifndef REG_MODE_OK_FOR_BASE_P
3086 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
3089 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
3090 replace the input X, or the original X if no replacement is called for.
3091 The output parameter *WIN is 1 if the calling macro should goto WIN,
3094 For RS/6000, we wish to handle large displacements off a base
3095 register by splitting the addend across an addiu/addis and the mem insn.
3096 This cuts number of extra insns needed from 3 to 1.
3098 On Darwin, we use this to generate code for floating point constants.
3099 A movsf_low is generated so we wind up with 2 instructions rather than 3.
3100 The Darwin code is inside #if TARGET_MACHO because only then is
3101 machopic_function_base_name() defined. */
3103 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
3104 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
3106 /* We must recognize output that we have already generated ourselves. */
3107 if (GET_CODE (x) == PLUS
3108 && GET_CODE (XEXP (x, 0)) == PLUS
3109 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
3110 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3111 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3113 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3114 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3115 opnum, (enum reload_type)type);
3121 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
3122 && GET_CODE (x) == LO_SUM
3123 && GET_CODE (XEXP (x, 0)) == PLUS
3124 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
3125 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
3126 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
3127 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
3128 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
3129 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
3130 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
3132 /* Result of previous invocation of this function on Darwin
3133 floating point constant. */
3134 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3135 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3136 opnum, (enum reload_type)type);
3141 if (GET_CODE (x) == PLUS
3142 && GET_CODE (XEXP (x, 0)) == REG
3143 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
3144 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
3145 && GET_CODE (XEXP (x, 1)) == CONST_INT
3146 && !SPE_VECTOR_MODE (mode)
3147 && !ALTIVEC_VECTOR_MODE (mode))
3149 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
3150 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
3152 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
3154 /* Check for 32-bit overflow. */
3155 if (high + low != val)
3161 /* Reload the high part into a base reg; leave the low part
3162 in the mem directly. */
3164 x = gen_rtx_PLUS (GET_MODE (x),
3165 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
3169 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3170 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
3171 opnum, (enum reload_type)type);
3176 if (GET_CODE (x) == SYMBOL_REF
3177 && DEFAULT_ABI == ABI_DARWIN
3178 && !ALTIVEC_VECTOR_MODE (mode)
3179 && (flag_pic || MACHO_DYNAMIC_NO_PIC_P)
3180 /* Don't do this for TFmode, since the result isn't offsettable. */
3185 rtx offset = gen_rtx_CONST (Pmode,
3186 gen_rtx_MINUS (Pmode, x,
3187 gen_rtx_SYMBOL_REF (Pmode,
3188 machopic_function_base_name ())));
3189 x = gen_rtx_LO_SUM (GET_MODE (x),
3190 gen_rtx_PLUS (Pmode, pic_offset_table_rtx,
3191 gen_rtx_HIGH (Pmode, offset)), offset);
3194 x = gen_rtx_LO_SUM (GET_MODE (x),
3195 gen_rtx_HIGH (Pmode, x), x);
3197 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3198 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3199 opnum, (enum reload_type)type);
3205 && constant_pool_expr_p (x)
3206 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3208 (x) = create_TOC_reference (x);
3216 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3217 that is a valid memory address for an instruction.
3218 The MODE argument is the machine mode for the MEM expression
3219 that wants to use this address.
3221 On the RS/6000, there are four valid address: a SYMBOL_REF that
3222 refers to a constant pool entry of an address (or the sum of it
3223 plus a constant), a short (16-bit signed) constant plus a register,
3224 the sum of two registers, or a register indirect, possibly with an
3225 auto-increment. For DFmode and DImode with a constant plus register,
3226 we must ensure that both words are addressable or PowerPC64 with offset
3229 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3230 32-bit DImode, TImode), indexed addressing cannot be used because
3231 adjacent memory cells are accessed by adding word-sized offsets
3232 during assembly output. */
3234 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
3236 if (RS6000_SYMBOL_REF_TLS_P (x))
3238 if (legitimate_indirect_address_p (x, reg_ok_strict))
3240 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3241 && !ALTIVEC_VECTOR_MODE (mode)
3242 && !SPE_VECTOR_MODE (mode)
3244 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3246 if (legitimate_small_data_p (mode, x))
3248 if (legitimate_constant_pool_address_p (x))
3250 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3252 && GET_CODE (x) == PLUS
3253 && GET_CODE (XEXP (x, 0)) == REG
3254 && XEXP (x, 0) == virtual_stack_vars_rtx
3255 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3257 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3260 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3262 || (mode != DFmode && mode != TFmode))
3263 && (TARGET_POWERPC64 || mode != DImode)
3264 && legitimate_indexed_address_p (x, reg_ok_strict))
3266 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3271 /* Go to LABEL if ADDR (a legitimate address expression)
3272 has an effect that depends on the machine mode it is used for.
3274 On the RS/6000 this is true of all integral offsets (since AltiVec
3275 modes don't allow them) or is a pre-increment or decrement.
3277 ??? Except that due to conceptual problems in offsettable_address_p
3278 we can't really report the problems of integral offsets. So leave
3279 this assuming that the adjustable offset must be valid for the
3280 sub-words of a TFmode operand, which is what we had before. */
3283 rs6000_mode_dependent_address (rtx addr)
3285 switch (GET_CODE (addr))
3288 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3290 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3291 return val + 12 + 0x8000 >= 0x10000;
3300 return TARGET_UPDATE;
3309 /* Try to output insns to set TARGET equal to the constant C if it can
3310 be done in less than N insns. Do all computations in MODE.
3311 Returns the place where the output has been placed if it can be
3312 done and the insns have been emitted. If it would take more than N
3313 insns, zero is returned and no insns and emitted. */
3316 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3317 rtx source, int n ATTRIBUTE_UNUSED)
3319 rtx result, insn, set;
3320 HOST_WIDE_INT c0, c1;
3322 if (mode == QImode || mode == HImode)
3325 dest = gen_reg_rtx (mode);
3326 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3329 else if (mode == SImode)
3331 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3333 emit_insn (gen_rtx_SET (VOIDmode, result,
3334 GEN_INT (INTVAL (source)
3335 & (~ (HOST_WIDE_INT) 0xffff))));
3336 emit_insn (gen_rtx_SET (VOIDmode, dest,
3337 gen_rtx_IOR (SImode, result,
3338 GEN_INT (INTVAL (source) & 0xffff))));
3341 else if (mode == DImode)
3343 if (GET_CODE (source) == CONST_INT)
3345 c0 = INTVAL (source);
3348 else if (GET_CODE (source) == CONST_DOUBLE)
3350 #if HOST_BITS_PER_WIDE_INT >= 64
3351 c0 = CONST_DOUBLE_LOW (source);
3354 c0 = CONST_DOUBLE_LOW (source);
3355 c1 = CONST_DOUBLE_HIGH (source);
3361 result = rs6000_emit_set_long_const (dest, c0, c1);
3366 insn = get_last_insn ();
3367 set = single_set (insn);
3368 if (! CONSTANT_P (SET_SRC (set)))
3369 set_unique_reg_note (insn, REG_EQUAL, source);
3374 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3375 fall back to a straight forward decomposition. We do this to avoid
3376 exponential run times encountered when looking for longer sequences
3377 with rs6000_emit_set_const. */
3379 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3381 if (!TARGET_POWERPC64)
3383 rtx operand1, operand2;
3385 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3387 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3389 emit_move_insn (operand1, GEN_INT (c1));
3390 emit_move_insn (operand2, GEN_INT (c2));
3394 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3397 ud2 = (c1 & 0xffff0000) >> 16;
3398 #if HOST_BITS_PER_WIDE_INT >= 64
3402 ud4 = (c2 & 0xffff0000) >> 16;
3404 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3405 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3408 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3410 emit_move_insn (dest, GEN_INT (ud1));
3413 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3414 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3417 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3420 emit_move_insn (dest, GEN_INT (ud2 << 16));
3422 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3424 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3425 || (ud4 == 0 && ! (ud3 & 0x8000)))
3428 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3431 emit_move_insn (dest, GEN_INT (ud3 << 16));
3434 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3435 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3437 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3442 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3445 emit_move_insn (dest, GEN_INT (ud4 << 16));
3448 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3450 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3452 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3453 GEN_INT (ud2 << 16)));
3455 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3461 /* Emit a move from SOURCE to DEST in mode MODE. */
3463 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3467 operands[1] = source;
3469 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3470 if (GET_CODE (operands[1]) == CONST_DOUBLE
3471 && ! FLOAT_MODE_P (mode)
3472 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3474 /* FIXME. This should never happen. */
3475 /* Since it seems that it does, do the safe thing and convert
3477 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3479 if (GET_CODE (operands[1]) == CONST_DOUBLE
3480 && ! FLOAT_MODE_P (mode)
3481 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3482 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3483 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3484 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3487 /* Check if GCC is setting up a block move that will end up using FP
3488 registers as temporaries. We must make sure this is acceptable. */
3489 if (GET_CODE (operands[0]) == MEM
3490 && GET_CODE (operands[1]) == MEM
3492 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3493 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3494 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3495 ? 32 : MEM_ALIGN (operands[0])))
3496 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3498 : MEM_ALIGN (operands[1]))))
3499 && ! MEM_VOLATILE_P (operands [0])
3500 && ! MEM_VOLATILE_P (operands [1]))
3502 emit_move_insn (adjust_address (operands[0], SImode, 0),
3503 adjust_address (operands[1], SImode, 0));
3504 emit_move_insn (adjust_address (operands[0], SImode, 4),
3505 adjust_address (operands[1], SImode, 4));
3509 if (!no_new_pseudos)
3511 if (GET_CODE (operands[1]) == MEM && optimize > 0
3512 && (mode == QImode || mode == HImode || mode == SImode)
3513 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3515 rtx reg = gen_reg_rtx (word_mode);
3517 emit_insn (gen_rtx_SET (word_mode, reg,
3518 gen_rtx_ZERO_EXTEND (word_mode,
3520 operands[1] = gen_lowpart (mode, reg);
3522 if (GET_CODE (operands[0]) != REG)
3523 operands[1] = force_reg (mode, operands[1]);
3526 if (mode == SFmode && ! TARGET_POWERPC
3527 && TARGET_HARD_FLOAT && TARGET_FPRS
3528 && GET_CODE (operands[0]) == MEM)
3532 if (reload_in_progress || reload_completed)
3533 regnum = true_regnum (operands[1]);
3534 else if (GET_CODE (operands[1]) == REG)
3535 regnum = REGNO (operands[1]);
3539 /* If operands[1] is a register, on POWER it may have
3540 double-precision data in it, so truncate it to single
3542 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3545 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3546 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3547 operands[1] = newreg;
3551 /* Recognize the case where operand[1] is a reference to thread-local
3552 data and load its address to a register. */
3553 if (GET_CODE (operands[1]) == SYMBOL_REF)
3555 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3557 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3560 /* Handle the case where reload calls us with an invalid address. */
3561 if (reload_in_progress && mode == Pmode
3562 && (! general_operand (operands[1], mode)
3563 || ! nonimmediate_operand (operands[0], mode)))
3566 /* Handle the case of CONSTANT_P_RTX. */
3567 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3570 /* 128-bit constant floating-point values on Darwin should really be
3571 loaded as two parts. */
3572 if ((DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN)
3573 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128
3574 && mode == TFmode && GET_CODE (operands[1]) == CONST_DOUBLE)
3576 /* DImode is used, not DFmode, because simplify_gen_subreg doesn't
3577 know how to get a DFmode SUBREG of a TFmode. */
3578 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode, 0),
3579 simplify_gen_subreg (DImode, operands[1], mode, 0),
3581 rs6000_emit_move (simplify_gen_subreg (DImode, operands[0], mode,
3582 GET_MODE_SIZE (DImode)),
3583 simplify_gen_subreg (DImode, operands[1], mode,
3584 GET_MODE_SIZE (DImode)),
3589 /* FIXME: In the long term, this switch statement should go away
3590 and be replaced by a sequence of tests based on things like
3596 if (CONSTANT_P (operands[1])
3597 && GET_CODE (operands[1]) != CONST_INT)
3598 operands[1] = force_const_mem (mode, operands[1]);
3604 if (CONSTANT_P (operands[1])
3605 && ! easy_fp_constant (operands[1], mode))
3606 operands[1] = force_const_mem (mode, operands[1]);
3617 if (CONSTANT_P (operands[1])
3618 && !easy_vector_constant (operands[1], mode))
3619 operands[1] = force_const_mem (mode, operands[1]);
3624 /* Use default pattern for address of ELF small data */
3627 && DEFAULT_ABI == ABI_V4
3628 && (GET_CODE (operands[1]) == SYMBOL_REF
3629 || GET_CODE (operands[1]) == CONST)
3630 && small_data_operand (operands[1], mode))
3632 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3636 if (DEFAULT_ABI == ABI_V4
3637 && mode == Pmode && mode == SImode
3638 && flag_pic == 1 && got_operand (operands[1], mode))
3640 emit_insn (gen_movsi_got (operands[0], operands[1]));
3644 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3648 && CONSTANT_P (operands[1])
3649 && GET_CODE (operands[1]) != HIGH
3650 && GET_CODE (operands[1]) != CONST_INT)
3652 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3654 /* If this is a function address on -mcall-aixdesc,
3655 convert it to the address of the descriptor. */
3656 if (DEFAULT_ABI == ABI_AIX
3657 && GET_CODE (operands[1]) == SYMBOL_REF
3658 && XSTR (operands[1], 0)[0] == '.')
3660 const char *name = XSTR (operands[1], 0);
3662 while (*name == '.')
3664 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3665 CONSTANT_POOL_ADDRESS_P (new_ref)
3666 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3667 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3668 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3669 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3670 operands[1] = new_ref;
3673 if (DEFAULT_ABI == ABI_DARWIN)
3676 if (MACHO_DYNAMIC_NO_PIC_P)
3678 /* Take care of any required data indirection. */
3679 operands[1] = rs6000_machopic_legitimize_pic_address (
3680 operands[1], mode, operands[0]);
3681 if (operands[0] != operands[1])
3682 emit_insn (gen_rtx_SET (VOIDmode,
3683 operands[0], operands[1]));
3687 emit_insn (gen_macho_high (target, operands[1]));
3688 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3692 emit_insn (gen_elf_high (target, operands[1]));
3693 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3697 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3698 and we have put it in the TOC, we just need to make a TOC-relative
3701 && GET_CODE (operands[1]) == SYMBOL_REF
3702 && constant_pool_expr_p (operands[1])
3703 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3704 get_pool_mode (operands[1])))
3706 operands[1] = create_TOC_reference (operands[1]);
3708 else if (mode == Pmode
3709 && CONSTANT_P (operands[1])
3710 && ((GET_CODE (operands[1]) != CONST_INT
3711 && ! easy_fp_constant (operands[1], mode))
3712 || (GET_CODE (operands[1]) == CONST_INT
3713 && num_insns_constant (operands[1], mode) > 2)
3714 || (GET_CODE (operands[0]) == REG
3715 && FP_REGNO_P (REGNO (operands[0]))))
3716 && GET_CODE (operands[1]) != HIGH
3717 && ! legitimate_constant_pool_address_p (operands[1])
3718 && ! toc_relative_expr_p (operands[1]))
3720 /* Emit a USE operation so that the constant isn't deleted if
3721 expensive optimizations are turned on because nobody
3722 references it. This should only be done for operands that
3723 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3724 This should not be done for operands that contain LABEL_REFs.
3725 For now, we just handle the obvious case. */
3726 if (GET_CODE (operands[1]) != LABEL_REF)
3727 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3730 /* Darwin uses a special PIC legitimizer. */
3731 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3734 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3736 if (operands[0] != operands[1])
3737 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3742 /* If we are to limit the number of things we put in the TOC and
3743 this is a symbol plus a constant we can add in one insn,
3744 just put the symbol in the TOC and add the constant. Don't do
3745 this if reload is in progress. */
3746 if (GET_CODE (operands[1]) == CONST
3747 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3748 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3749 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3750 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3751 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3752 && ! side_effects_p (operands[0]))
3755 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3756 rtx other = XEXP (XEXP (operands[1], 0), 1);
3758 sym = force_reg (mode, sym);
3760 emit_insn (gen_addsi3 (operands[0], sym, other));
3762 emit_insn (gen_adddi3 (operands[0], sym, other));
3766 operands[1] = force_const_mem (mode, operands[1]);
3769 && constant_pool_expr_p (XEXP (operands[1], 0))
3770 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3771 get_pool_constant (XEXP (operands[1], 0)),
3772 get_pool_mode (XEXP (operands[1], 0))))
3775 = gen_rtx_MEM (mode,
3776 create_TOC_reference (XEXP (operands[1], 0)));
3777 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3778 RTX_UNCHANGING_P (operands[1]) = 1;
3784 if (GET_CODE (operands[0]) == MEM
3785 && GET_CODE (XEXP (operands[0], 0)) != REG
3786 && ! reload_in_progress)
3788 = replace_equiv_address (operands[0],
3789 copy_addr_to_reg (XEXP (operands[0], 0)));
3791 if (GET_CODE (operands[1]) == MEM
3792 && GET_CODE (XEXP (operands[1], 0)) != REG
3793 && ! reload_in_progress)
3795 = replace_equiv_address (operands[1],
3796 copy_addr_to_reg (XEXP (operands[1], 0)));
3799 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3801 gen_rtx_SET (VOIDmode,
3802 operands[0], operands[1]),
3803 gen_rtx_CLOBBER (VOIDmode,
3804 gen_rtx_SCRATCH (SImode)))));
3813 /* Above, we may have called force_const_mem which may have returned
3814 an invalid address. If we can, fix this up; otherwise, reload will
3815 have to deal with it. */
3816 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3817 operands[1] = validize_mem (operands[1]);
3820 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3823 /* Nonzero if we can use a floating-point register to pass this arg. */
3824 #define USE_FP_FOR_ARG_P(CUM,MODE,TYPE) \
3825 (GET_MODE_CLASS (MODE) == MODE_FLOAT \
3826 && (CUM)->fregno <= FP_ARG_MAX_REG \
3827 && TARGET_HARD_FLOAT && TARGET_FPRS)
3829 /* Nonzero if we can use an AltiVec register to pass this arg. */
3830 #define USE_ALTIVEC_FOR_ARG_P(CUM,MODE,TYPE,NAMED) \
3831 (ALTIVEC_VECTOR_MODE (MODE) \
3832 && (CUM)->vregno <= ALTIVEC_ARG_MAX_REG \
3833 && TARGET_ALTIVEC_ABI \
3836 /* Return a nonzero value to say to return the function value in
3837 memory, just as large structures are always returned. TYPE will be
3838 the data type of the value, and FNTYPE will be the type of the
3839 function doing the returning, or @code{NULL} for libcalls.
3841 The AIX ABI for the RS/6000 specifies that all structures are
3842 returned in memory. The Darwin ABI does the same. The SVR4 ABI
3843 specifies that structures <= 8 bytes are returned in r3/r4, but a
3844 draft put them in memory, and GCC used to implement the draft
3845 instead of the final standard. Therefore, TARGET_AIX_STRUCT_RET
3846 controls this instead of DEFAULT_ABI; V.4 targets needing backward
3847 compatibility can change DRAFT_V4_STRUCT_RET to override the
3848 default, and -m switches get the final word. See
3849 rs6000_override_options for more details.
3851 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
3852 long double support is enabled. These values are returned in memory.
3854 int_size_in_bytes returns -1 for variable size objects, which go in
3855 memory always. The cast to unsigned makes -1 > 8. */
3858 rs6000_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
3860 if (AGGREGATE_TYPE_P (type)
3861 && (TARGET_AIX_STRUCT_RET
3862 || (unsigned HOST_WIDE_INT) int_size_in_bytes (type) > 8))
3864 if (DEFAULT_ABI == ABI_V4 && TYPE_MODE (type) == TFmode)
3869 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3870 for a call to a function whose data type is FNTYPE.
3871 For a library call, FNTYPE is 0.
3873 For incoming args we set the number of arguments in the prototype large
3874 so we never return a PARALLEL. */
3877 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3878 rtx libname ATTRIBUTE_UNUSED, int incoming,
3879 int libcall, int n_named_args)
3881 static CUMULATIVE_ARGS zero_cumulative;
3883 *cum = zero_cumulative;
3885 cum->fregno = FP_ARG_MIN_REG;
3886 cum->vregno = ALTIVEC_ARG_MIN_REG;
3887 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3888 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3889 ? CALL_LIBCALL : CALL_NORMAL);
3890 cum->sysv_gregno = GP_ARG_MIN_REG;
3891 cum->stdarg = fntype
3892 && (TYPE_ARG_TYPES (fntype) != 0
3893 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3894 != void_type_node));
3896 cum->nargs_prototype = 0;
3897 if (incoming || cum->prototype)
3898 cum->nargs_prototype = n_named_args;
3900 /* Check for a longcall attribute. */
3902 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3903 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3904 cum->call_cookie = CALL_LONG;
3906 if (TARGET_DEBUG_ARG)
3908 fprintf (stderr, "\ninit_cumulative_args:");
3911 tree ret_type = TREE_TYPE (fntype);
3912 fprintf (stderr, " ret code = %s,",
3913 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3916 if (cum->call_cookie & CALL_LONG)
3917 fprintf (stderr, " longcall,");
3919 fprintf (stderr, " proto = %d, nargs = %d\n",
3920 cum->prototype, cum->nargs_prototype);
3925 && TARGET_ALTIVEC_ABI
3926 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
3928 error ("Cannot return value in vector register because"
3929 " altivec instructions are disabled, use -maltivec"
3930 " to enable them.");
3934 /* If defined, a C expression which determines whether, and in which
3935 direction, to pad out an argument with extra space. The value
3936 should be of type `enum direction': either `upward' to pad above
3937 the argument, `downward' to pad below, or `none' to inhibit
3940 For the AIX ABI structs are always stored left shifted in their
3944 function_arg_padding (enum machine_mode mode, tree type)
3946 #ifndef AGGREGATE_PADDING_FIXED
3947 #define AGGREGATE_PADDING_FIXED 0
3949 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3950 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3953 if (!AGGREGATE_PADDING_FIXED)
3955 /* GCC used to pass structures of the same size as integer types as
3956 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3957 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3958 passed padded downward, except that -mstrict-align further
3959 muddied the water in that multi-component structures of 2 and 4
3960 bytes in size were passed padded upward.
3962 The following arranges for best compatibility with previous
3963 versions of gcc, but removes the -mstrict-align dependency. */
3964 if (BYTES_BIG_ENDIAN)
3966 HOST_WIDE_INT size = 0;
3968 if (mode == BLKmode)
3970 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3971 size = int_size_in_bytes (type);
3974 size = GET_MODE_SIZE (mode);
3976 if (size == 1 || size == 2 || size == 4)
3982 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3984 if (type != 0 && AGGREGATE_TYPE_P (type))
3988 /* Fall back to the default. */
3989 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
3992 /* If defined, a C expression that gives the alignment boundary, in bits,
3993 of an argument with the specified mode and type. If it is not defined,
3994 PARM_BOUNDARY is used for all arguments.
3996 V.4 wants long longs to be double word aligned. */
3999 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
4001 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
4003 else if (SPE_VECTOR_MODE (mode))
4005 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4008 return PARM_BOUNDARY;
4011 /* Compute the size (in words) of a function argument. */
4013 static unsigned long
4014 rs6000_arg_size (enum machine_mode mode, tree type)
4018 if (mode != BLKmode)
4019 size = GET_MODE_SIZE (mode);
4021 size = int_size_in_bytes (type);
4024 return (size + 3) >> 2;
4026 return (size + 7) >> 3;
4029 /* Update the data in CUM to advance over an argument
4030 of mode MODE and data type TYPE.
4031 (TYPE is null for libcalls where that information may not be available.) */
4034 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4035 tree type, int named)
4037 cum->nargs_prototype--;
4039 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4041 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4044 if (!TARGET_ALTIVEC)
4045 error ("Cannot pass argument in vector register because"
4046 " altivec instructions are disabled, use -maltivec"
4047 " to enable them.");
4049 /* PowerPC64 Linux and AIX allocates GPRs for a vector argument
4050 even if it is going to be passed in a vector register.
4051 Darwin does the same for variable-argument functions. */
4052 if ((DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
4053 || (cum->stdarg && DEFAULT_ABI != ABI_V4))
4057 /* Vector parameters must be 16-byte aligned. This places
4058 them at 2 mod 4 in terms of words in 32-bit mode, since
4059 the parameter save area starts at offset 24 from the
4060 stack. In 64-bit mode, they just have to start on an
4061 even word, since the parameter save area is 16-byte
4062 aligned. Space for GPRs is reserved even if the argument
4063 will be passed in memory. */
4065 align = ((6 - (cum->words & 3)) & 3);
4067 align = cum->words & 1;
4068 cum->words += align + rs6000_arg_size (mode, type);
4070 if (TARGET_DEBUG_ARG)
4072 fprintf (stderr, "function_adv: words = %2d, align=%d, ",
4074 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s\n",
4075 cum->nargs_prototype, cum->prototype,
4076 GET_MODE_NAME (mode));
4080 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
4082 && cum->sysv_gregno <= GP_ARG_MAX_REG)
4084 else if (DEFAULT_ABI == ABI_V4)
4086 if (TARGET_HARD_FLOAT && TARGET_FPRS
4087 && (mode == SFmode || mode == DFmode))
4089 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4094 cum->words += cum->words & 1;
4095 cum->words += rs6000_arg_size (mode, type);
4101 int gregno = cum->sysv_gregno;
4103 /* Aggregates and IEEE quad get passed by reference. */
4104 if ((type && AGGREGATE_TYPE_P (type))
4108 n_words = rs6000_arg_size (mode, type);
4110 /* Long long and SPE vectors are put in odd registers. */
4111 if (n_words == 2 && (gregno & 1) == 0)
4114 /* Long long and SPE vectors are not split between registers
4116 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
4118 /* Long long is aligned on the stack. */
4120 cum->words += cum->words & 1;
4121 cum->words += n_words;
4124 /* Note: continuing to accumulate gregno past when we've started
4125 spilling to the stack indicates the fact that we've started
4126 spilling to the stack to expand_builtin_saveregs. */
4127 cum->sysv_gregno = gregno + n_words;
4130 if (TARGET_DEBUG_ARG)
4132 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4133 cum->words, cum->fregno);
4134 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
4135 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
4136 fprintf (stderr, "mode = %4s, named = %d\n",
4137 GET_MODE_NAME (mode), named);
4142 int align = (TARGET_32BIT && (cum->words & 1) != 0
4143 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4145 cum->words += align + rs6000_arg_size (mode, type);
4147 if (GET_MODE_CLASS (mode) == MODE_FLOAT
4148 && TARGET_HARD_FLOAT && TARGET_FPRS)
4149 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3;
4151 if (TARGET_DEBUG_ARG)
4153 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
4154 cum->words, cum->fregno);
4155 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
4156 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
4157 fprintf (stderr, "named = %d, align = %d\n", named, align);
4162 /* Determine where to put a SIMD argument on the SPE. */
4165 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4170 int gregno = cum->sysv_gregno;
4171 int n_words = rs6000_arg_size (mode, type);
4173 /* SPE vectors are put in odd registers. */
4174 if (n_words == 2 && (gregno & 1) == 0)
4177 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4180 enum machine_mode m = SImode;
4182 r1 = gen_rtx_REG (m, gregno);
4183 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
4184 r2 = gen_rtx_REG (m, gregno + 1);
4185 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
4186 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
4193 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
4194 return gen_rtx_REG (mode, cum->sysv_gregno);
4200 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
4203 rs6000_mixed_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4204 tree type, int align_words)
4208 /* -mpowerpc64 with 32bit ABI splits up a DFmode argument
4209 in vararg list into zero, one or two GPRs */
4210 if (align_words >= GP_ARG_NUM_REG)
4211 return gen_rtx_PARALLEL (DFmode,
4213 gen_rtx_EXPR_LIST (VOIDmode,
4214 NULL_RTX, const0_rtx),
4215 gen_rtx_EXPR_LIST (VOIDmode,
4219 else if (align_words + rs6000_arg_size (mode, type)
4221 /* If this is partially on the stack, then we only
4222 include the portion actually in registers here. */
4223 return gen_rtx_PARALLEL (DFmode,
4225 gen_rtx_EXPR_LIST (VOIDmode,
4226 gen_rtx_REG (SImode,
4230 gen_rtx_EXPR_LIST (VOIDmode,
4235 /* split a DFmode arg into two GPRs */
4236 return gen_rtx_PARALLEL (DFmode,
4238 gen_rtx_EXPR_LIST (VOIDmode,
4239 gen_rtx_REG (SImode,
4243 gen_rtx_EXPR_LIST (VOIDmode,
4244 gen_rtx_REG (SImode,
4248 gen_rtx_EXPR_LIST (VOIDmode,
4249 gen_rtx_REG (mode, cum->fregno),
4252 /* -mpowerpc64 with 32bit ABI splits up a DImode argument into one
4254 else if (mode == DImode)
4256 if (align_words < GP_ARG_NUM_REG - 1)
4257 return gen_rtx_PARALLEL (DImode,
4259 gen_rtx_EXPR_LIST (VOIDmode,
4260 gen_rtx_REG (SImode,
4264 gen_rtx_EXPR_LIST (VOIDmode,
4265 gen_rtx_REG (SImode,
4269 else if (align_words == GP_ARG_NUM_REG - 1)
4270 return gen_rtx_PARALLEL (DImode,
4272 gen_rtx_EXPR_LIST (VOIDmode,
4273 NULL_RTX, const0_rtx),
4274 gen_rtx_EXPR_LIST (VOIDmode,
4275 gen_rtx_REG (SImode,
4280 else if (mode == BLKmode && align_words <= (GP_ARG_NUM_REG - 1))
4283 int size = int_size_in_bytes (type);
4284 int no_units = ((size - 1) / 4) + 1;
4285 int max_no_words = GP_ARG_NUM_REG - align_words;
4286 int rtlvec_len = no_units < max_no_words ? no_units : max_no_words;
4287 rtx *rtlvec = (rtx *) alloca (rtlvec_len * sizeof (rtx));
4289 memset ((char *) rtlvec, 0, rtlvec_len * sizeof (rtx));
4291 for (k=0; k < rtlvec_len; k++)
4292 rtlvec[k] = gen_rtx_EXPR_LIST (VOIDmode,
4293 gen_rtx_REG (SImode,
4296 k == 0 ? const0_rtx : GEN_INT (k*4));
4298 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (k, rtlvec));
4304 /* Determine where to put an argument to a function.
4305 Value is zero to push the argument on the stack,
4306 or a hard register in which to store the argument.
4308 MODE is the argument's machine mode.
4309 TYPE is the data type of the argument (as a tree).
4310 This is null for libcalls where that information may
4312 CUM is a variable of type CUMULATIVE_ARGS which gives info about
4313 the preceding args and about the function being called.
4314 NAMED is nonzero if this argument is a named parameter
4315 (otherwise it is an extra parameter matching an ellipsis).
4317 On RS/6000 the first eight words of non-FP are normally in registers
4318 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
4319 Under V.4, the first 8 FP args are in registers.
4321 If this is floating-point and no prototype is specified, we use
4322 both an FP and integer register (or possibly FP reg and stack). Library
4323 functions (when CALL_LIBCALL is set) always have the proper types for args,
4324 so we can pass the FP value just in one register. emit_library_function
4325 doesn't support PARALLEL anyway. */
4328 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4329 tree type, int named)
4331 enum rs6000_abi abi = DEFAULT_ABI;
4333 /* Return a marker to indicate whether CR1 needs to set or clear the
4334 bit that V.4 uses to say fp args were passed in registers.
4335 Assume that we don't need the marker for software floating point,
4336 or compiler generated library calls. */
4337 if (mode == VOIDmode)
4340 && cum->nargs_prototype < 0
4341 && (cum->call_cookie & CALL_LIBCALL) == 0
4342 && (cum->prototype || TARGET_NO_PROTOTYPE))
4344 /* For the SPE, we need to crxor CR6 always. */
4346 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
4347 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
4348 return GEN_INT (cum->call_cookie
4349 | ((cum->fregno == FP_ARG_MIN_REG)
4350 ? CALL_V4_SET_FP_ARGS
4351 : CALL_V4_CLEAR_FP_ARGS));
4354 return GEN_INT (cum->call_cookie);
4357 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named))
4358 if (TARGET_64BIT && ! cum->prototype)
4360 /* Vector parameters get passed in vector register
4361 and also in GPRs or memory, in absence of prototype. */
4364 align_words = (cum->words + 1) & ~1;
4366 if (align_words >= GP_ARG_NUM_REG)
4372 slot = gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4374 return gen_rtx_PARALLEL (mode,
4376 gen_rtx_EXPR_LIST (VOIDmode,
4378 gen_rtx_EXPR_LIST (VOIDmode,
4379 gen_rtx_REG (mode, cum->vregno),
4383 return gen_rtx_REG (mode, cum->vregno);
4384 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
4386 if (named || abi == ABI_V4)
4390 /* Vector parameters to varargs functions under AIX or Darwin
4391 get passed in memory and possibly also in GPRs. */
4392 int align, align_words;
4393 enum machine_mode part_mode = mode;
4395 /* Vector parameters must be 16-byte aligned. This places them at
4396 2 mod 4 in terms of words in 32-bit mode, since the parameter
4397 save area starts at offset 24 from the stack. In 64-bit mode,
4398 they just have to start on an even word, since the parameter
4399 save area is 16-byte aligned. */
4401 align = ((6 - (cum->words & 3)) & 3);
4403 align = cum->words & 1;
4404 align_words = cum->words + align;
4406 /* Out of registers? Memory, then. */
4407 if (align_words >= GP_ARG_NUM_REG)
4410 /* The vector value goes in GPRs. Only the part of the
4411 value in GPRs is reported here. */
4412 if (align_words + CLASS_MAX_NREGS (mode, GENERAL_REGS)
4414 /* Fortunately, there are only two possibilities, the value
4415 is either wholly in GPRs or half in GPRs and half not. */
4418 return gen_rtx_REG (part_mode, GP_ARG_MIN_REG + align_words);
4421 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
4422 return rs6000_spe_function_arg (cum, mode, type);
4423 else if (abi == ABI_V4)
4425 if (TARGET_HARD_FLOAT && TARGET_FPRS
4426 && (mode == SFmode || mode == DFmode))
4428 if (cum->fregno <= FP_ARG_V4_MAX_REG)
4429 return gen_rtx_REG (mode, cum->fregno);
4436 int gregno = cum->sysv_gregno;
4438 /* Aggregates and IEEE quad get passed by reference. */
4439 if ((type && AGGREGATE_TYPE_P (type))
4443 n_words = rs6000_arg_size (mode, type);
4445 /* Long long and SPE vectors are put in odd registers. */
4446 if (n_words == 2 && (gregno & 1) == 0)
4449 /* Long long does not split between registers and stack. */
4450 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
4451 return gen_rtx_REG (mode, gregno);
4458 int align = (TARGET_32BIT && (cum->words & 1) != 0
4459 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
4460 int align_words = cum->words + align;
4462 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4465 if (TARGET_32BIT && TARGET_POWERPC64
4466 && (mode == DImode || mode == BLKmode))
4467 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4469 if (USE_FP_FOR_ARG_P (cum, mode, type))
4474 enum machine_mode fmode = mode;
4476 unsigned long n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
4478 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
4480 /* Long double split over regs and memory. */
4481 if (fmode == TFmode)
4484 /* Currently, we only ever need one reg here because complex
4485 doubles are split. */
4486 if (cum->fregno != FP_ARG_MAX_REG - 1)
4489 fpr[1] = gen_rtx_REG (fmode, cum->fregno);
4491 /* Do we also need to pass this arg in the parameter save
4494 && (cum->nargs_prototype <= 0
4495 || (DEFAULT_ABI == ABI_AIX
4497 && align_words >= GP_ARG_NUM_REG)));
4499 if (!needs_psave && mode == fmode)
4502 if (TARGET_32BIT && TARGET_POWERPC64
4503 && mode == DFmode && cum->stdarg)
4504 return rs6000_mixed_function_arg (cum, mode, type, align_words);
4506 /* Describe where this piece goes. */
4508 *r = gen_rtx_EXPR_LIST (VOIDmode, *r, const0_rtx);
4513 /* Now describe the part that goes in gprs or the stack.
4514 This piece must come first, before the fprs. */
4516 if (align_words < GP_ARG_NUM_REG)
4518 unsigned long n_words = rs6000_arg_size (mode, type);
4519 enum machine_mode rmode = mode;
4521 if (align_words + n_words > GP_ARG_NUM_REG)
4522 /* If this is partially on the stack, then we only
4523 include the portion actually in registers here.
4524 We know this can only be one register because
4525 complex doubles are splt. */
4527 reg = gen_rtx_REG (rmode, GP_ARG_MIN_REG + align_words);
4529 *--r = gen_rtx_EXPR_LIST (VOIDmode, reg, const0_rtx);
4533 return gen_rtx_PARALLEL (mode, gen_rtvec_v (n, r));
4535 else if (align_words < GP_ARG_NUM_REG)
4536 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4542 /* For an arg passed partly in registers and partly in memory,
4543 this is the number of registers used.
4544 For args passed entirely in registers or entirely in memory, zero. */
4547 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4548 tree type, int named)
4552 if (DEFAULT_ABI == ABI_V4)
4555 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
4556 && cum->nargs_prototype >= 0)
4559 if (USE_FP_FOR_ARG_P (cum, mode, type))
4561 if (cum->fregno + ((GET_MODE_SIZE (mode) + 7) >> 3) > FP_ARG_MAX_REG + 1)
4562 ret = FP_ARG_MAX_REG - cum->fregno;
4563 else if (cum->nargs_prototype >= 0)
4567 if (cum->words < GP_ARG_NUM_REG
4568 && GP_ARG_NUM_REG < cum->words + rs6000_arg_size (mode, type))
4569 ret = GP_ARG_NUM_REG - cum->words;
4571 if (ret != 0 && TARGET_DEBUG_ARG)
4572 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4577 /* A C expression that indicates when an argument must be passed by
4578 reference. If nonzero for an argument, a copy of that argument is
4579 made in memory and a pointer to the argument is passed instead of
4580 the argument itself. The pointer is passed in whatever way is
4581 appropriate for passing a pointer to that type.
4583 Under V.4, structures and unions are passed by reference.
4585 As an extension to all ABIs, variable sized types are passed by
4589 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
4590 enum machine_mode mode ATTRIBUTE_UNUSED,
4591 tree type, int named ATTRIBUTE_UNUSED)
4593 if (DEFAULT_ABI == ABI_V4
4594 && ((type && AGGREGATE_TYPE_P (type))
4597 if (TARGET_DEBUG_ARG)
4598 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4602 return type && int_size_in_bytes (type) < 0;
4606 rs6000_move_block_from_reg (int regno, rtx x, int nregs)
4609 enum machine_mode reg_mode = TARGET_32BIT ? SImode : DImode;
4614 for (i = 0; i < nregs; i++)
4616 rtx tem = adjust_address_nv (x, reg_mode, i*GET_MODE_SIZE(reg_mode));
4617 if (reload_completed)
4619 if (! strict_memory_address_p (reg_mode, XEXP (tem, 0)))
4622 tem = simplify_gen_subreg (reg_mode, x, BLKmode,
4623 i * GET_MODE_SIZE(reg_mode));
4626 tem = replace_equiv_address (tem, XEXP (tem, 0));
4628 if (tem == NULL_RTX)
4631 emit_move_insn (tem, gen_rtx_REG (reg_mode, regno + i));
4636 /* Perform any needed actions needed for a function that is receiving a
4637 variable number of arguments.
4641 MODE and TYPE are the mode and type of the current parameter.
4643 PRETEND_SIZE is a variable that should be set to the amount of stack
4644 that must be pushed by the prolog to pretend that our caller pushed
4647 Normally, this macro will push all remaining incoming registers on the
4648 stack and set PRETEND_SIZE to the length of the registers pushed. */
4651 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4652 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4654 CUMULATIVE_ARGS next_cum;
4655 int reg_size = TARGET_32BIT ? 4 : 8;
4656 rtx save_area = NULL_RTX, mem;
4657 int first_reg_offset, set;
4659 /* Skip the last named argument. */
4661 function_arg_advance (&next_cum, mode, type, 1);
4663 if (DEFAULT_ABI == ABI_V4)
4665 /* Indicate to allocate space on the stack for varargs save area. */
4666 cfun->machine->sysv_varargs_p = 1;
4668 save_area = plus_constant (virtual_stack_vars_rtx,
4669 - RS6000_VARARGS_SIZE);
4671 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4675 first_reg_offset = next_cum.words;
4676 save_area = virtual_incoming_args_rtx;
4677 cfun->machine->sysv_varargs_p = 0;
4679 if (MUST_PASS_IN_STACK (mode, type))
4680 first_reg_offset += rs6000_arg_size (TYPE_MODE (type), type);
4683 set = get_varargs_alias_set ();
4684 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4686 mem = gen_rtx_MEM (BLKmode,
4687 plus_constant (save_area,
4688 first_reg_offset * reg_size)),
4689 set_mem_alias_set (mem, set);
4690 set_mem_align (mem, BITS_PER_WORD);
4692 rs6000_move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4693 GP_ARG_NUM_REG - first_reg_offset);
4696 /* Save FP registers if needed. */
4697 if (DEFAULT_ABI == ABI_V4
4698 && TARGET_HARD_FLOAT && TARGET_FPRS
4700 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4702 int fregno = next_cum.fregno;
4703 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4704 rtx lab = gen_label_rtx ();
4705 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4707 emit_jump_insn (gen_rtx_SET (VOIDmode,
4709 gen_rtx_IF_THEN_ELSE (VOIDmode,
4710 gen_rtx_NE (VOIDmode, cr1,
4712 gen_rtx_LABEL_REF (VOIDmode, lab),
4715 while (fregno <= FP_ARG_V4_MAX_REG)
4717 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4718 set_mem_alias_set (mem, set);
4719 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4728 /* Create the va_list data type. */
4731 rs6000_build_builtin_va_list (void)
4733 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4735 /* For AIX, prefer 'char *' because that's what the system
4736 header files like. */
4737 if (DEFAULT_ABI != ABI_V4)
4738 return build_pointer_type (char_type_node);
4740 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4741 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4743 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4744 unsigned_char_type_node);
4745 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4746 unsigned_char_type_node);
4747 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4749 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4750 short_unsigned_type_node);
4751 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4753 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4756 DECL_FIELD_CONTEXT (f_gpr) = record;
4757 DECL_FIELD_CONTEXT (f_fpr) = record;
4758 DECL_FIELD_CONTEXT (f_res) = record;
4759 DECL_FIELD_CONTEXT (f_ovf) = record;
4760 DECL_FIELD_CONTEXT (f_sav) = record;
4762 TREE_CHAIN (record) = type_decl;
4763 TYPE_NAME (record) = type_decl;
4764 TYPE_FIELDS (record) = f_gpr;
4765 TREE_CHAIN (f_gpr) = f_fpr;
4766 TREE_CHAIN (f_fpr) = f_res;
4767 TREE_CHAIN (f_res) = f_ovf;
4768 TREE_CHAIN (f_ovf) = f_sav;
4770 layout_type (record);
4772 /* The correct type is an array type of one element. */
4773 return build_array_type (record, build_index_type (size_zero_node));
4776 /* Implement va_start. */
4779 rs6000_va_start (tree valist, rtx nextarg)
4781 HOST_WIDE_INT words, n_gpr, n_fpr;
4782 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4783 tree gpr, fpr, ovf, sav, t;
4785 /* Only SVR4 needs something special. */
4786 if (DEFAULT_ABI != ABI_V4)
4788 std_expand_builtin_va_start (valist, nextarg);
4792 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4793 f_fpr = TREE_CHAIN (f_gpr);
4794 f_res = TREE_CHAIN (f_fpr);
4795 f_ovf = TREE_CHAIN (f_res);
4796 f_sav = TREE_CHAIN (f_ovf);
4798 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4799 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4800 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4801 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4802 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4804 /* Count number of gp and fp argument registers used. */
4805 words = current_function_args_info.words;
4806 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4807 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4809 if (TARGET_DEBUG_ARG)
4810 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4811 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4812 words, n_gpr, n_fpr);
4814 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4815 TREE_SIDE_EFFECTS (t) = 1;
4816 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4818 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4819 TREE_SIDE_EFFECTS (t) = 1;
4820 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4822 /* Find the overflow area. */
4823 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4825 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4826 build_int_2 (words * UNITS_PER_WORD, 0));
4827 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4828 TREE_SIDE_EFFECTS (t) = 1;
4829 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4831 /* Find the register save area. */
4832 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4833 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4834 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4835 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4836 TREE_SIDE_EFFECTS (t) = 1;
4837 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4840 /* Implement va_arg. */
4843 rs6000_va_arg (tree valist, tree type)
4845 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4846 tree gpr, fpr, ovf, sav, reg, t, u;
4847 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4848 rtx lab_false, lab_over, addr_rtx, r;
4850 if (DEFAULT_ABI != ABI_V4)
4852 /* Variable sized types are passed by reference. */
4853 if (int_size_in_bytes (type) < 0)
4855 u = build_pointer_type (type);
4857 /* Args grow upward. */
4858 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4859 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4860 TREE_SIDE_EFFECTS (t) = 1;
4862 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4863 TREE_SIDE_EFFECTS (t) = 1;
4865 t = build1 (INDIRECT_REF, u, t);
4866 TREE_SIDE_EFFECTS (t) = 1;
4868 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4870 if (targetm.calls.split_complex_arg
4871 && TREE_CODE (type) == COMPLEX_TYPE)
4873 tree elem_type = TREE_TYPE (type);
4874 enum machine_mode elem_mode = TYPE_MODE (elem_type);
4875 int elem_size = GET_MODE_SIZE (elem_mode);
4877 if (elem_size < UNITS_PER_WORD)
4879 rtx real_part, imag_part, dest_real, rr;
4881 real_part = rs6000_va_arg (valist, elem_type);
4882 imag_part = rs6000_va_arg (valist, elem_type);
4884 /* We're not returning the value here, but the address.
4885 real_part and imag_part are not contiguous, and we know
4886 there is space available to pack real_part next to
4887 imag_part. float _Complex is not promoted to
4888 double _Complex by the default promotion rules that
4889 promote float to double. */
4890 if (2 * elem_size > UNITS_PER_WORD)
4893 real_part = gen_rtx_MEM (elem_mode, real_part);
4894 imag_part = gen_rtx_MEM (elem_mode, imag_part);
4896 dest_real = adjust_address (imag_part, elem_mode, -elem_size);
4897 rr = gen_reg_rtx (elem_mode);
4898 emit_move_insn (rr, real_part);
4899 emit_move_insn (dest_real, rr);
4901 return XEXP (dest_real, 0);
4905 return std_expand_builtin_va_arg (valist, type);
4908 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4909 f_fpr = TREE_CHAIN (f_gpr);
4910 f_res = TREE_CHAIN (f_fpr);
4911 f_ovf = TREE_CHAIN (f_res);
4912 f_sav = TREE_CHAIN (f_ovf);
4914 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4915 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4916 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4917 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4918 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4920 size = int_size_in_bytes (type);
4921 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4923 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4925 /* Aggregates and long doubles are passed by reference. */
4931 size = UNITS_PER_WORD;
4934 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4936 /* FP args go in FP registers, if present. */
4945 /* Otherwise into GP registers. */
4953 /* Pull the value out of the saved registers.... */
4955 lab_false = gen_label_rtx ();
4956 lab_over = gen_label_rtx ();
4957 addr_rtx = gen_reg_rtx (Pmode);
4959 /* AltiVec vectors never go in registers. */
4960 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4962 TREE_THIS_VOLATILE (reg) = 1;
4963 emit_cmp_and_jump_insns
4964 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4965 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4968 /* Long long is aligned in the registers. */
4971 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4972 build_int_2 (n_reg - 1, 0));
4973 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4974 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4975 TREE_SIDE_EFFECTS (u) = 1;
4976 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4980 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4984 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4985 build_int_2 (n_reg, 0));
4986 TREE_SIDE_EFFECTS (u) = 1;
4988 u = build1 (CONVERT_EXPR, integer_type_node, u);
4989 TREE_SIDE_EFFECTS (u) = 1;
4991 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4992 TREE_SIDE_EFFECTS (u) = 1;
4994 t = build (PLUS_EXPR, ptr_type_node, t, u);
4995 TREE_SIDE_EFFECTS (t) = 1;
4997 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4999 emit_move_insn (addr_rtx, r);
5001 emit_jump_insn (gen_jump (lab_over));
5005 emit_label (lab_false);
5007 /* ... otherwise out of the overflow area. */
5009 /* Make sure we don't find reg 7 for the next int arg.
5011 All AltiVec vectors go in the overflow area. So in the AltiVec
5012 case we need to get the vectors from the overflow area, but
5013 remember where the GPRs and FPRs are. */
5014 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
5015 || !TARGET_ALTIVEC))
5017 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
5018 TREE_SIDE_EFFECTS (t) = 1;
5019 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5022 /* Care for on-stack alignment if needed. */
5029 /* AltiVec vectors are 16 byte aligned. */
5030 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
5035 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
5036 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
5040 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
5042 emit_move_insn (addr_rtx, r);
5044 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
5045 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
5046 TREE_SIDE_EFFECTS (t) = 1;
5047 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5049 emit_label (lab_over);
5053 r = gen_rtx_MEM (Pmode, addr_rtx);
5054 set_mem_alias_set (r, get_varargs_alias_set ());
5055 emit_move_insn (addr_rtx, r);
5063 #define def_builtin(MASK, NAME, TYPE, CODE) \
5065 if ((MASK) & target_flags) \
5066 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
5070 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
5072 static const struct builtin_description bdesc_3arg[] =
5074 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
5075 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
5076 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
5077 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
5078 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
5079 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
5080 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
5081 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
5082 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
5083 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
5084 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
5085 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
5086 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
5087 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
5088 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
5089 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
5090 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
5091 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
5092 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
5093 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
5094 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
5095 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
5096 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
5099 /* DST operations: void foo (void *, const int, const char). */
5101 static const struct builtin_description bdesc_dst[] =
5103 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
5104 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
5105 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
5106 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
5109 /* Simple binary operations: VECc = foo (VECa, VECb). */
5111 static struct builtin_description bdesc_2arg[] =
5113 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
5114 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
5115 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
5116 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
5117 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
5118 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
5119 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
5120 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
5121 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
5122 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
5123 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
5124 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
5125 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
5126 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
5127 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
5128 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
5129 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
5130 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
5131 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
5132 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
5133 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
5134 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
5135 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
5136 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
5137 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
5138 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
5139 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
5140 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
5141 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
5142 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
5143 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
5144 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
5145 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
5146 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
5147 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
5148 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
5149 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
5150 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
5151 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
5152 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
5153 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
5154 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
5155 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
5156 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
5157 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
5158 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
5159 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
5160 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
5161 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
5162 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
5163 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
5164 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
5165 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
5166 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
5167 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
5168 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
5169 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
5170 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
5171 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
5172 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
5173 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
5174 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
5175 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
5176 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
5177 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
5178 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
5179 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
5180 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
5181 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
5182 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
5183 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
5184 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
5185 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
5186 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
5187 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
5188 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
5189 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
5190 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
5191 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
5192 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
5193 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
5194 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
5195 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
5196 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
5197 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
5198 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
5199 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
5200 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
5201 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
5202 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
5203 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
5204 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
5205 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
5206 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
5207 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
5208 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
5209 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
5210 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
5211 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
5212 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
5213 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
5214 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
5215 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
5216 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
5217 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
5218 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
5219 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
5220 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
5221 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
5222 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
5223 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
5224 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
5225 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
5227 /* Place holder, leave as first spe builtin. */
5228 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
5229 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
5230 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
5231 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
5232 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
5233 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
5234 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
5235 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
5236 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
5237 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
5238 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
5239 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
5240 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
5241 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
5242 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
5243 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
5244 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
5245 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
5246 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
5247 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
5248 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
5249 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
5250 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
5251 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
5252 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
5253 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
5254 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
5255 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
5256 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
5257 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
5258 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
5259 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
5260 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
5261 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
5262 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
5263 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
5264 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
5265 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
5266 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
5267 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
5268 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
5269 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
5270 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
5271 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
5272 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
5273 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
5274 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
5275 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
5276 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
5277 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
5278 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
5279 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
5280 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
5281 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
5282 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
5283 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
5284 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
5285 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
5286 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
5287 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
5288 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
5289 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
5290 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
5291 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
5292 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
5293 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
5294 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
5295 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
5296 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
5297 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
5298 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
5299 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
5300 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
5301 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
5302 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
5303 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
5304 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
5305 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
5306 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
5307 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
5308 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
5309 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
5310 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
5311 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
5312 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
5313 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
5314 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
5315 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
5316 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
5317 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
5318 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
5319 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
5320 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
5321 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
5322 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
5323 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
5324 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
5325 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
5326 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
5327 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
5328 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
5329 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
5330 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
5331 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
5332 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
5333 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
5334 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
5335 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
5336 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
5338 /* SPE binary operations expecting a 5-bit unsigned literal. */
5339 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
5341 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
5342 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
5343 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
5344 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
5345 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
5346 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
5347 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
5348 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
5349 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
5350 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
5351 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
5352 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
5353 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
5354 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
5355 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
5356 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
5357 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
5358 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
5359 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
5360 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
5361 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
5362 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
5363 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
5364 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
5365 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
5366 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
5368 /* Place-holder. Leave as last binary SPE builtin. */
5369 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
5372 /* AltiVec predicates. */
5374 struct builtin_description_predicates
5376 const unsigned int mask;
5377 const enum insn_code icode;
5379 const char *const name;
5380 const enum rs6000_builtins code;
5383 static const struct builtin_description_predicates bdesc_altivec_preds[] =
5385 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
5386 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
5387 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
5388 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
5389 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
5390 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
5391 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
5392 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
5393 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
5394 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
5395 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
5396 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
5397 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
5400 /* SPE predicates. */
5401 static struct builtin_description bdesc_spe_predicates[] =
5403 /* Place-holder. Leave as first. */
5404 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
5405 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
5406 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
5407 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
5408 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
5409 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
5410 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
5411 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
5412 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
5413 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
5414 /* Place-holder. Leave as last. */
5415 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
5418 /* SPE evsel predicates. */
5419 static struct builtin_description bdesc_spe_evsel[] =
5421 /* Place-holder. Leave as first. */
5422 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
5423 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
5424 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
5425 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
5426 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
5427 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
5428 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
5429 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
5430 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
5431 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
5432 /* Place-holder. Leave as last. */
5433 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
5436 /* ABS* operations. */
5438 static const struct builtin_description bdesc_abs[] =
5440 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
5441 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
5442 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
5443 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
5444 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
5445 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
5446 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
5449 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
5452 static struct builtin_description bdesc_1arg[] =
5454 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
5455 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
5456 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
5457 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
5458 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
5459 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
5460 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
5461 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
5462 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
5463 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
5464 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
5465 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
5466 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
5467 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
5468 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
5469 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
5470 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
5472 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
5473 end with SPE_BUILTIN_EVSUBFUSIAAW. */
5474 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
5475 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
5476 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
5477 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
5478 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
5479 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
5480 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
5481 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
5482 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
5483 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
5484 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
5485 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
5486 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
5487 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
5488 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
5489 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
5490 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
5491 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
5492 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
5493 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
5494 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
5495 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
5496 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
5497 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
5498 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
5499 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
5500 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
5501 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
5503 /* Place-holder. Leave as last unary SPE builtin. */
5504 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
5508 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
5511 tree arg0 = TREE_VALUE (arglist);
5512 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5513 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5514 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5516 if (icode == CODE_FOR_nothing)
5517 /* Builtin not supported on this processor. */
5520 /* If we got invalid arguments bail out before generating bad rtl. */
5521 if (arg0 == error_mark_node)
5524 if (icode == CODE_FOR_altivec_vspltisb
5525 || icode == CODE_FOR_altivec_vspltish
5526 || icode == CODE_FOR_altivec_vspltisw
5527 || icode == CODE_FOR_spe_evsplatfi
5528 || icode == CODE_FOR_spe_evsplati)
5530 /* Only allow 5-bit *signed* literals. */
5531 if (GET_CODE (op0) != CONST_INT
5532 || INTVAL (op0) > 0x1f
5533 || INTVAL (op0) < -0x1f)
5535 error ("argument 1 must be a 5-bit signed literal");
5541 || GET_MODE (target) != tmode
5542 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5543 target = gen_reg_rtx (tmode);
5545 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5546 op0 = copy_to_mode_reg (mode0, op0);
5548 pat = GEN_FCN (icode) (target, op0);
5557 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
5559 rtx pat, scratch1, scratch2;
5560 tree arg0 = TREE_VALUE (arglist);
5561 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5562 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5563 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5565 /* If we have invalid arguments, bail out before generating bad rtl. */
5566 if (arg0 == error_mark_node)
5570 || GET_MODE (target) != tmode
5571 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5572 target = gen_reg_rtx (tmode);
5574 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5575 op0 = copy_to_mode_reg (mode0, op0);
5577 scratch1 = gen_reg_rtx (mode0);
5578 scratch2 = gen_reg_rtx (mode0);
5580 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5589 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
5592 tree arg0 = TREE_VALUE (arglist);
5593 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5594 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5595 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5596 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5597 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5598 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5600 if (icode == CODE_FOR_nothing)
5601 /* Builtin not supported on this processor. */
5604 /* If we got invalid arguments bail out before generating bad rtl. */
5605 if (arg0 == error_mark_node || arg1 == error_mark_node)
5608 if (icode == CODE_FOR_altivec_vcfux
5609 || icode == CODE_FOR_altivec_vcfsx
5610 || icode == CODE_FOR_altivec_vctsxs
5611 || icode == CODE_FOR_altivec_vctuxs
5612 || icode == CODE_FOR_altivec_vspltb
5613 || icode == CODE_FOR_altivec_vsplth
5614 || icode == CODE_FOR_altivec_vspltw
5615 || icode == CODE_FOR_spe_evaddiw
5616 || icode == CODE_FOR_spe_evldd
5617 || icode == CODE_FOR_spe_evldh
5618 || icode == CODE_FOR_spe_evldw
5619 || icode == CODE_FOR_spe_evlhhesplat
5620 || icode == CODE_FOR_spe_evlhhossplat
5621 || icode == CODE_FOR_spe_evlhhousplat
5622 || icode == CODE_FOR_spe_evlwhe
5623 || icode == CODE_FOR_spe_evlwhos
5624 || icode == CODE_FOR_spe_evlwhou
5625 || icode == CODE_FOR_spe_evlwhsplat
5626 || icode == CODE_FOR_spe_evlwwsplat
5627 || icode == CODE_FOR_spe_evrlwi
5628 || icode == CODE_FOR_spe_evslwi
5629 || icode == CODE_FOR_spe_evsrwis
5630 || icode == CODE_FOR_spe_evsubifw
5631 || icode == CODE_FOR_spe_evsrwiu)
5633 /* Only allow 5-bit unsigned literals. */
5635 if (TREE_CODE (arg1) != INTEGER_CST
5636 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5638 error ("argument 2 must be a 5-bit unsigned literal");
5644 || GET_MODE (target) != tmode
5645 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5646 target = gen_reg_rtx (tmode);
5648 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5649 op0 = copy_to_mode_reg (mode0, op0);
5650 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5651 op1 = copy_to_mode_reg (mode1, op1);
5653 pat = GEN_FCN (icode) (target, op0, op1);
5662 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5663 tree arglist, rtx target)
5666 tree cr6_form = TREE_VALUE (arglist);
5667 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5668 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5669 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5670 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5671 enum machine_mode tmode = SImode;
5672 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5673 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5676 if (TREE_CODE (cr6_form) != INTEGER_CST)
5678 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5682 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5687 /* If we have invalid arguments, bail out before generating bad rtl. */
5688 if (arg0 == error_mark_node || arg1 == error_mark_node)
5692 || GET_MODE (target) != tmode
5693 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5694 target = gen_reg_rtx (tmode);
5696 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5697 op0 = copy_to_mode_reg (mode0, op0);
5698 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5699 op1 = copy_to_mode_reg (mode1, op1);
5701 scratch = gen_reg_rtx (mode0);
5703 pat = GEN_FCN (icode) (scratch, op0, op1,
5704 gen_rtx_SYMBOL_REF (Pmode, opcode));
5709 /* The vec_any* and vec_all* predicates use the same opcodes for two
5710 different operations, but the bits in CR6 will be different
5711 depending on what information we want. So we have to play tricks
5712 with CR6 to get the right bits out.
5714 If you think this is disgusting, look at the specs for the
5715 AltiVec predicates. */
5717 switch (cr6_form_int)
5720 emit_insn (gen_cr6_test_for_zero (target));
5723 emit_insn (gen_cr6_test_for_zero_reverse (target));
5726 emit_insn (gen_cr6_test_for_lt (target));
5729 emit_insn (gen_cr6_test_for_lt_reverse (target));
5732 error ("argument 1 of __builtin_altivec_predicate is out of range");
5740 altivec_expand_lv_builtin (enum insn_code icode, tree arglist, rtx target)
5743 tree arg0 = TREE_VALUE (arglist);
5744 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5745 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5746 enum machine_mode mode0 = Pmode;
5747 enum machine_mode mode1 = Pmode;
5748 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5749 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5751 if (icode == CODE_FOR_nothing)
5752 /* Builtin not supported on this processor. */
5755 /* If we got invalid arguments bail out before generating bad rtl. */
5756 if (arg0 == error_mark_node || arg1 == error_mark_node)
5760 || GET_MODE (target) != tmode
5761 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5762 target = gen_reg_rtx (tmode);
5764 op1 = copy_to_mode_reg (mode1, op1);
5766 if (op0 == const0_rtx)
5768 addr = gen_rtx_MEM (tmode, op1);
5772 op0 = copy_to_mode_reg (mode0, op0);
5773 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op0, op1));
5776 pat = GEN_FCN (icode) (target, addr);
5786 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5788 tree arg0 = TREE_VALUE (arglist);
5789 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5790 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5791 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5792 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5793 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5795 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5796 enum machine_mode mode1 = Pmode;
5797 enum machine_mode mode2 = Pmode;
5799 /* Invalid arguments. Bail before doing anything stoopid! */
5800 if (arg0 == error_mark_node
5801 || arg1 == error_mark_node
5802 || arg2 == error_mark_node)
5805 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode))
5806 op0 = copy_to_mode_reg (tmode, op0);
5808 op2 = copy_to_mode_reg (mode2, op2);
5810 if (op1 == const0_rtx)
5812 addr = gen_rtx_MEM (tmode, op2);
5816 op1 = copy_to_mode_reg (mode1, op1);
5817 addr = gen_rtx_MEM (tmode, gen_rtx_PLUS (Pmode, op1, op2));
5820 pat = GEN_FCN (icode) (addr, op0);
5827 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5830 tree arg0 = TREE_VALUE (arglist);
5831 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5832 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5833 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5834 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5835 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5836 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5837 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5838 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5839 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5841 if (icode == CODE_FOR_nothing)
5842 /* Builtin not supported on this processor. */
5845 /* If we got invalid arguments bail out before generating bad rtl. */
5846 if (arg0 == error_mark_node
5847 || arg1 == error_mark_node
5848 || arg2 == error_mark_node)
5851 if (icode == CODE_FOR_altivec_vsldoi_4sf
5852 || icode == CODE_FOR_altivec_vsldoi_4si
5853 || icode == CODE_FOR_altivec_vsldoi_8hi
5854 || icode == CODE_FOR_altivec_vsldoi_16qi)
5856 /* Only allow 4-bit unsigned literals. */
5858 if (TREE_CODE (arg2) != INTEGER_CST
5859 || TREE_INT_CST_LOW (arg2) & ~0xf)
5861 error ("argument 3 must be a 4-bit unsigned literal");
5867 || GET_MODE (target) != tmode
5868 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5869 target = gen_reg_rtx (tmode);
5871 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5872 op0 = copy_to_mode_reg (mode0, op0);
5873 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5874 op1 = copy_to_mode_reg (mode1, op1);
5875 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5876 op2 = copy_to_mode_reg (mode2, op2);
5878 pat = GEN_FCN (icode) (target, op0, op1, op2);
5886 /* Expand the lvx builtins. */
5888 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5890 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5891 tree arglist = TREE_OPERAND (exp, 1);
5892 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5894 enum machine_mode tmode, mode0;
5896 enum insn_code icode;
5900 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5901 icode = CODE_FOR_altivec_lvx_16qi;
5903 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5904 icode = CODE_FOR_altivec_lvx_8hi;
5906 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5907 icode = CODE_FOR_altivec_lvx_4si;
5909 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5910 icode = CODE_FOR_altivec_lvx_4sf;
5919 arg0 = TREE_VALUE (arglist);
5920 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5921 tmode = insn_data[icode].operand[0].mode;
5922 mode0 = insn_data[icode].operand[1].mode;
5925 || GET_MODE (target) != tmode
5926 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5927 target = gen_reg_rtx (tmode);
5929 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5930 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5932 pat = GEN_FCN (icode) (target, op0);
5939 /* Expand the stvx builtins. */
5941 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5944 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5945 tree arglist = TREE_OPERAND (exp, 1);
5946 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5948 enum machine_mode mode0, mode1;
5950 enum insn_code icode;
5954 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5955 icode = CODE_FOR_altivec_stvx_16qi;
5957 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5958 icode = CODE_FOR_altivec_stvx_8hi;
5960 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5961 icode = CODE_FOR_altivec_stvx_4si;
5963 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5964 icode = CODE_FOR_altivec_stvx_4sf;
5971 arg0 = TREE_VALUE (arglist);
5972 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5973 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5974 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5975 mode0 = insn_data[icode].operand[0].mode;
5976 mode1 = insn_data[icode].operand[1].mode;
5978 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5979 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5980 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5981 op1 = copy_to_mode_reg (mode1, op1);
5983 pat = GEN_FCN (icode) (op0, op1);
5991 /* Expand the dst builtins. */
5993 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5996 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5997 tree arglist = TREE_OPERAND (exp, 1);
5998 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5999 tree arg0, arg1, arg2;
6000 enum machine_mode mode0, mode1, mode2;
6001 rtx pat, op0, op1, op2;
6002 struct builtin_description *d;
6007 /* Handle DST variants. */
6008 d = (struct builtin_description *) bdesc_dst;
6009 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6010 if (d->code == fcode)
6012 arg0 = TREE_VALUE (arglist);
6013 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6014 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6015 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6016 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6017 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6018 mode0 = insn_data[d->icode].operand[0].mode;
6019 mode1 = insn_data[d->icode].operand[1].mode;
6020 mode2 = insn_data[d->icode].operand[2].mode;
6022 /* Invalid arguments, bail out before generating bad rtl. */
6023 if (arg0 == error_mark_node
6024 || arg1 == error_mark_node
6025 || arg2 == error_mark_node)
6030 if (TREE_CODE (arg2) != INTEGER_CST
6031 || TREE_INT_CST_LOW (arg2) & ~0x3)
6033 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
6037 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
6038 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
6039 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
6040 op1 = copy_to_mode_reg (mode1, op1);
6042 pat = GEN_FCN (d->icode) (op0, op1, op2);
6052 /* Expand the builtin in EXP and store the result in TARGET. Store
6053 true in *EXPANDEDP if we found a builtin to expand. */
6055 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
6057 struct builtin_description *d;
6058 struct builtin_description_predicates *dp;
6060 enum insn_code icode;
6061 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6062 tree arglist = TREE_OPERAND (exp, 1);
6065 enum machine_mode tmode, mode0;
6066 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6068 target = altivec_expand_ld_builtin (exp, target, expandedp);
6072 target = altivec_expand_st_builtin (exp, target, expandedp);
6076 target = altivec_expand_dst_builtin (exp, target, expandedp);
6084 case ALTIVEC_BUILTIN_STVX:
6085 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
6086 case ALTIVEC_BUILTIN_STVEBX:
6087 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
6088 case ALTIVEC_BUILTIN_STVEHX:
6089 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
6090 case ALTIVEC_BUILTIN_STVEWX:
6091 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
6092 case ALTIVEC_BUILTIN_STVXL:
6093 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
6095 case ALTIVEC_BUILTIN_MFVSCR:
6096 icode = CODE_FOR_altivec_mfvscr;
6097 tmode = insn_data[icode].operand[0].mode;
6100 || GET_MODE (target) != tmode
6101 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6102 target = gen_reg_rtx (tmode);
6104 pat = GEN_FCN (icode) (target);
6110 case ALTIVEC_BUILTIN_MTVSCR:
6111 icode = CODE_FOR_altivec_mtvscr;
6112 arg0 = TREE_VALUE (arglist);
6113 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6114 mode0 = insn_data[icode].operand[0].mode;
6116 /* If we got invalid arguments bail out before generating bad rtl. */
6117 if (arg0 == error_mark_node)
6120 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6121 op0 = copy_to_mode_reg (mode0, op0);
6123 pat = GEN_FCN (icode) (op0);
6128 case ALTIVEC_BUILTIN_DSSALL:
6129 emit_insn (gen_altivec_dssall ());
6132 case ALTIVEC_BUILTIN_DSS:
6133 icode = CODE_FOR_altivec_dss;
6134 arg0 = TREE_VALUE (arglist);
6136 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6137 mode0 = insn_data[icode].operand[0].mode;
6139 /* If we got invalid arguments bail out before generating bad rtl. */
6140 if (arg0 == error_mark_node)
6143 if (TREE_CODE (arg0) != INTEGER_CST
6144 || TREE_INT_CST_LOW (arg0) & ~0x3)
6146 error ("argument to dss must be a 2-bit unsigned literal");
6150 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6151 op0 = copy_to_mode_reg (mode0, op0);
6153 emit_insn (gen_altivec_dss (op0));
6156 case ALTIVEC_BUILTIN_COMPILETIME_ERROR:
6157 arg0 = TREE_VALUE (arglist);
6158 while (TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == ADDR_EXPR)
6159 arg0 = TREE_OPERAND (arg0, 0);
6160 error ("invalid parameter combination for `%s' AltiVec intrinsic",
6161 TREE_STRING_POINTER (arg0));
6166 /* Expand abs* operations. */
6167 d = (struct builtin_description *) bdesc_abs;
6168 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6169 if (d->code == fcode)
6170 return altivec_expand_abs_builtin (d->icode, arglist, target);
6172 /* Expand the AltiVec predicates. */
6173 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6174 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6175 if (dp->code == fcode)
6176 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
6178 /* LV* are funky. We initialized them differently. */
6181 case ALTIVEC_BUILTIN_LVSL:
6182 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsl,
6184 case ALTIVEC_BUILTIN_LVSR:
6185 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvsr,
6187 case ALTIVEC_BUILTIN_LVEBX:
6188 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvebx,
6190 case ALTIVEC_BUILTIN_LVEHX:
6191 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvehx,
6193 case ALTIVEC_BUILTIN_LVEWX:
6194 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvewx,
6196 case ALTIVEC_BUILTIN_LVXL:
6197 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
6199 case ALTIVEC_BUILTIN_LVX:
6200 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx,
6211 /* Binops that need to be initialized manually, but can be expanded
6212 automagically by rs6000_expand_binop_builtin. */
6213 static struct builtin_description bdesc_2arg_spe[] =
6215 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
6216 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
6217 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
6218 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
6219 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
6220 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
6221 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
6222 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
6223 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
6224 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
6225 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
6226 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
6227 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
6228 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
6229 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
6230 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
6231 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
6232 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
6233 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
6234 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
6235 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
6236 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
6239 /* Expand the builtin in EXP and store the result in TARGET. Store
6240 true in *EXPANDEDP if we found a builtin to expand.
6242 This expands the SPE builtins that are not simple unary and binary
6245 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
6247 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6248 tree arglist = TREE_OPERAND (exp, 1);
6250 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6251 enum insn_code icode;
6252 enum machine_mode tmode, mode0;
6254 struct builtin_description *d;
6259 /* Syntax check for a 5-bit unsigned immediate. */
6262 case SPE_BUILTIN_EVSTDD:
6263 case SPE_BUILTIN_EVSTDH:
6264 case SPE_BUILTIN_EVSTDW:
6265 case SPE_BUILTIN_EVSTWHE:
6266 case SPE_BUILTIN_EVSTWHO:
6267 case SPE_BUILTIN_EVSTWWE:
6268 case SPE_BUILTIN_EVSTWWO:
6269 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6270 if (TREE_CODE (arg1) != INTEGER_CST
6271 || TREE_INT_CST_LOW (arg1) & ~0x1f)
6273 error ("argument 2 must be a 5-bit unsigned literal");
6281 /* The evsplat*i instructions are not quite generic. */
6284 case SPE_BUILTIN_EVSPLATFI:
6285 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
6287 case SPE_BUILTIN_EVSPLATI:
6288 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
6294 d = (struct builtin_description *) bdesc_2arg_spe;
6295 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
6296 if (d->code == fcode)
6297 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6299 d = (struct builtin_description *) bdesc_spe_predicates;
6300 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
6301 if (d->code == fcode)
6302 return spe_expand_predicate_builtin (d->icode, arglist, target);
6304 d = (struct builtin_description *) bdesc_spe_evsel;
6305 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
6306 if (d->code == fcode)
6307 return spe_expand_evsel_builtin (d->icode, arglist, target);
6311 case SPE_BUILTIN_EVSTDDX:
6312 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
6313 case SPE_BUILTIN_EVSTDHX:
6314 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
6315 case SPE_BUILTIN_EVSTDWX:
6316 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
6317 case SPE_BUILTIN_EVSTWHEX:
6318 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
6319 case SPE_BUILTIN_EVSTWHOX:
6320 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
6321 case SPE_BUILTIN_EVSTWWEX:
6322 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
6323 case SPE_BUILTIN_EVSTWWOX:
6324 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
6325 case SPE_BUILTIN_EVSTDD:
6326 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
6327 case SPE_BUILTIN_EVSTDH:
6328 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
6329 case SPE_BUILTIN_EVSTDW:
6330 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
6331 case SPE_BUILTIN_EVSTWHE:
6332 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
6333 case SPE_BUILTIN_EVSTWHO:
6334 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
6335 case SPE_BUILTIN_EVSTWWE:
6336 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
6337 case SPE_BUILTIN_EVSTWWO:
6338 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
6339 case SPE_BUILTIN_MFSPEFSCR:
6340 icode = CODE_FOR_spe_mfspefscr;
6341 tmode = insn_data[icode].operand[0].mode;
6344 || GET_MODE (target) != tmode
6345 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6346 target = gen_reg_rtx (tmode);
6348 pat = GEN_FCN (icode) (target);
6353 case SPE_BUILTIN_MTSPEFSCR:
6354 icode = CODE_FOR_spe_mtspefscr;
6355 arg0 = TREE_VALUE (arglist);
6356 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6357 mode0 = insn_data[icode].operand[0].mode;
6359 if (arg0 == error_mark_node)
6362 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
6363 op0 = copy_to_mode_reg (mode0, op0);
6365 pat = GEN_FCN (icode) (op0);
6378 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
6380 rtx pat, scratch, tmp;
6381 tree form = TREE_VALUE (arglist);
6382 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
6383 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6384 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6385 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6386 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6387 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6391 if (TREE_CODE (form) != INTEGER_CST)
6393 error ("argument 1 of __builtin_spe_predicate must be a constant");
6397 form_int = TREE_INT_CST_LOW (form);
6402 if (arg0 == error_mark_node || arg1 == error_mark_node)
6406 || GET_MODE (target) != SImode
6407 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
6408 target = gen_reg_rtx (SImode);
6410 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6411 op0 = copy_to_mode_reg (mode0, op0);
6412 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
6413 op1 = copy_to_mode_reg (mode1, op1);
6415 scratch = gen_reg_rtx (CCmode);
6417 pat = GEN_FCN (icode) (scratch, op0, op1);
6422 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
6423 _lower_. We use one compare, but look in different bits of the
6424 CR for each variant.
6426 There are 2 elements in each SPE simd type (upper/lower). The CR
6427 bits are set as follows:
6429 BIT0 | BIT 1 | BIT 2 | BIT 3
6430 U | L | (U | L) | (U & L)
6432 So, for an "all" relationship, BIT 3 would be set.
6433 For an "any" relationship, BIT 2 would be set. Etc.
6435 Following traditional nomenclature, these bits map to:
6437 BIT0 | BIT 1 | BIT 2 | BIT 3
6440 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
6445 /* All variant. OV bit. */
6447 /* We need to get to the OV bit, which is the ORDERED bit. We
6448 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
6449 that's ugly and will trigger a validate_condition_mode abort.
6450 So let's just use another pattern. */
6451 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
6453 /* Any variant. EQ bit. */
6457 /* Upper variant. LT bit. */
6461 /* Lower variant. GT bit. */
6466 error ("argument 1 of __builtin_spe_predicate is out of range");
6470 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
6471 emit_move_insn (target, tmp);
6476 /* The evsel builtins look like this:
6478 e = __builtin_spe_evsel_OP (a, b, c, d);
6482 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
6483 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
6487 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
6490 tree arg0 = TREE_VALUE (arglist);
6491 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
6492 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6493 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
6494 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
6495 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
6496 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
6497 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
6498 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6499 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
6504 if (arg0 == error_mark_node || arg1 == error_mark_node
6505 || arg2 == error_mark_node || arg3 == error_mark_node)
6509 || GET_MODE (target) != mode0
6510 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
6511 target = gen_reg_rtx (mode0);
6513 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6514 op0 = copy_to_mode_reg (mode0, op0);
6515 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
6516 op1 = copy_to_mode_reg (mode0, op1);
6517 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
6518 op2 = copy_to_mode_reg (mode0, op2);
6519 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
6520 op3 = copy_to_mode_reg (mode0, op3);
6522 /* Generate the compare. */
6523 scratch = gen_reg_rtx (CCmode);
6524 pat = GEN_FCN (icode) (scratch, op0, op1);
6529 if (mode0 == V2SImode)
6530 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
6532 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
6537 /* Expand an expression EXP that calls a built-in function,
6538 with result going to TARGET if that's convenient
6539 (and in mode MODE if that's convenient).
6540 SUBTARGET may be used as the target for computing one of EXP's operands.
6541 IGNORE is nonzero if the value is to be ignored. */
6544 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
6545 enum machine_mode mode ATTRIBUTE_UNUSED,
6546 int ignore ATTRIBUTE_UNUSED)
6548 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6549 tree arglist = TREE_OPERAND (exp, 1);
6550 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
6551 struct builtin_description *d;
6558 ret = altivec_expand_builtin (exp, target, &success);
6565 ret = spe_expand_builtin (exp, target, &success);
6571 if (TARGET_ALTIVEC || TARGET_SPE)
6573 /* Handle simple unary operations. */
6574 d = (struct builtin_description *) bdesc_1arg;
6575 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6576 if (d->code == fcode)
6577 return rs6000_expand_unop_builtin (d->icode, arglist, target);
6579 /* Handle simple binary operations. */
6580 d = (struct builtin_description *) bdesc_2arg;
6581 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6582 if (d->code == fcode)
6583 return rs6000_expand_binop_builtin (d->icode, arglist, target);
6585 /* Handle simple ternary operations. */
6586 d = (struct builtin_description *) bdesc_3arg;
6587 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6588 if (d->code == fcode)
6589 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6597 rs6000_init_builtins (void)
6599 V2SI_type_node = build_vector_type (intSI_type_node, 2);
6600 V2SF_type_node = build_vector_type (float_type_node, 2);
6601 V4HI_type_node = build_vector_type (intHI_type_node, 4);
6602 V4SI_type_node = build_vector_type (intSI_type_node, 4);
6603 V4SF_type_node = build_vector_type (float_type_node, 4);
6604 V8HI_type_node = build_vector_type (intHI_type_node, 8);
6605 V16QI_type_node = build_vector_type (intQI_type_node, 16);
6607 unsigned_V16QI_type_node = build_vector_type (unsigned_intQI_type_node, 16);
6608 unsigned_V8HI_type_node = build_vector_type (unsigned_intHI_type_node, 8);
6609 unsigned_V4SI_type_node = build_vector_type (unsigned_intSI_type_node, 4);
6611 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6612 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6613 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6615 /* The 'vector bool ...' types must be kept distinct from 'vector unsigned ...'
6616 types, especially in C++ land. Similarly, 'vector pixel' is distinct from
6617 'vector unsigned short'. */
6619 bool_char_type_node = copy_node (unsigned_intQI_type_node);
6620 TYPE_MAIN_VARIANT (bool_char_type_node) = bool_char_type_node;
6621 bool_short_type_node = copy_node (unsigned_intHI_type_node);
6622 TYPE_MAIN_VARIANT (bool_short_type_node) = bool_short_type_node;
6623 bool_int_type_node = copy_node (unsigned_intSI_type_node);
6624 TYPE_MAIN_VARIANT (bool_int_type_node) = bool_int_type_node;
6625 pixel_type_node = copy_node (unsigned_intHI_type_node);
6626 TYPE_MAIN_VARIANT (pixel_type_node) = pixel_type_node;
6628 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6629 get_identifier ("__bool char"),
6630 bool_char_type_node));
6631 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6632 get_identifier ("__bool short"),
6633 bool_short_type_node));
6634 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6635 get_identifier ("__bool int"),
6636 bool_int_type_node));
6637 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6638 get_identifier ("__pixel"),
6641 bool_V16QI_type_node = build_vector_type (bool_char_type_node, 16);
6642 bool_V8HI_type_node = build_vector_type (bool_short_type_node, 8);
6643 bool_V4SI_type_node = build_vector_type (bool_int_type_node, 4);
6644 pixel_V8HI_type_node = build_vector_type (pixel_type_node, 8);
6646 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6647 get_identifier ("__vector unsigned char"),
6648 unsigned_V16QI_type_node));
6649 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6650 get_identifier ("__vector signed char"),
6652 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6653 get_identifier ("__vector __bool char"),
6654 bool_V16QI_type_node));
6656 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6657 get_identifier ("__vector unsigned short"),
6658 unsigned_V8HI_type_node));
6659 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6660 get_identifier ("__vector signed short"),
6662 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6663 get_identifier ("__vector __bool short"),
6664 bool_V8HI_type_node));
6666 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6667 get_identifier ("__vector unsigned int"),
6668 unsigned_V4SI_type_node));
6669 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6670 get_identifier ("__vector signed int"),
6672 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6673 get_identifier ("__vector __bool int"),
6674 bool_V4SI_type_node));
6676 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6677 get_identifier ("__vector float"),
6679 (*lang_hooks.decls.pushdecl) (build_decl (TYPE_DECL,
6680 get_identifier ("__vector __pixel"),
6681 pixel_V8HI_type_node));
6684 spe_init_builtins ();
6686 altivec_init_builtins ();
6687 if (TARGET_ALTIVEC || TARGET_SPE)
6688 rs6000_common_init_builtins ();
6691 /* Search through a set of builtins and enable the mask bits.
6692 DESC is an array of builtins.
6693 SIZE is the total number of builtins.
6694 START is the builtin enum at which to start.
6695 END is the builtin enum at which to end. */
6697 enable_mask_for_builtins (struct builtin_description *desc, int size,
6698 enum rs6000_builtins start,
6699 enum rs6000_builtins end)
6703 for (i = 0; i < size; ++i)
6704 if (desc[i].code == start)
6710 for (; i < size; ++i)
6712 /* Flip all the bits on. */
6713 desc[i].mask = target_flags;
6714 if (desc[i].code == end)
6720 spe_init_builtins (void)
6722 tree endlink = void_list_node;
6723 tree puint_type_node = build_pointer_type (unsigned_type_node);
6724 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6725 struct builtin_description *d;
6728 tree v2si_ftype_4_v2si
6729 = build_function_type
6730 (opaque_V2SI_type_node,
6731 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6732 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6733 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6734 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6737 tree v2sf_ftype_4_v2sf
6738 = build_function_type
6739 (opaque_V2SF_type_node,
6740 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6741 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6742 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6743 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6746 tree int_ftype_int_v2si_v2si
6747 = build_function_type
6749 tree_cons (NULL_TREE, integer_type_node,
6750 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6751 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6754 tree int_ftype_int_v2sf_v2sf
6755 = build_function_type
6757 tree_cons (NULL_TREE, integer_type_node,
6758 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6759 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6762 tree void_ftype_v2si_puint_int
6763 = build_function_type (void_type_node,
6764 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6765 tree_cons (NULL_TREE, puint_type_node,
6766 tree_cons (NULL_TREE,
6770 tree void_ftype_v2si_puint_char
6771 = build_function_type (void_type_node,
6772 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6773 tree_cons (NULL_TREE, puint_type_node,
6774 tree_cons (NULL_TREE,
6778 tree void_ftype_v2si_pv2si_int
6779 = build_function_type (void_type_node,
6780 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6781 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6782 tree_cons (NULL_TREE,
6786 tree void_ftype_v2si_pv2si_char
6787 = build_function_type (void_type_node,
6788 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6789 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6790 tree_cons (NULL_TREE,
6795 = build_function_type (void_type_node,
6796 tree_cons (NULL_TREE, integer_type_node, endlink));
6799 = build_function_type (integer_type_node, endlink);
6801 tree v2si_ftype_pv2si_int
6802 = build_function_type (opaque_V2SI_type_node,
6803 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6804 tree_cons (NULL_TREE, integer_type_node,
6807 tree v2si_ftype_puint_int
6808 = build_function_type (opaque_V2SI_type_node,
6809 tree_cons (NULL_TREE, puint_type_node,
6810 tree_cons (NULL_TREE, integer_type_node,
6813 tree v2si_ftype_pushort_int
6814 = build_function_type (opaque_V2SI_type_node,
6815 tree_cons (NULL_TREE, pushort_type_node,
6816 tree_cons (NULL_TREE, integer_type_node,
6819 tree v2si_ftype_signed_char
6820 = build_function_type (opaque_V2SI_type_node,
6821 tree_cons (NULL_TREE, signed_char_type_node,
6824 /* The initialization of the simple binary and unary builtins is
6825 done in rs6000_common_init_builtins, but we have to enable the
6826 mask bits here manually because we have run out of `target_flags'
6827 bits. We really need to redesign this mask business. */
6829 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6830 ARRAY_SIZE (bdesc_2arg),
6833 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6834 ARRAY_SIZE (bdesc_1arg),
6836 SPE_BUILTIN_EVSUBFUSIAAW);
6837 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6838 ARRAY_SIZE (bdesc_spe_predicates),
6839 SPE_BUILTIN_EVCMPEQ,
6840 SPE_BUILTIN_EVFSTSTLT);
6841 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6842 ARRAY_SIZE (bdesc_spe_evsel),
6843 SPE_BUILTIN_EVSEL_CMPGTS,
6844 SPE_BUILTIN_EVSEL_FSTSTEQ);
6846 (*lang_hooks.decls.pushdecl)
6847 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6848 opaque_V2SI_type_node));
6850 /* Initialize irregular SPE builtins. */
6852 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6853 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6854 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6855 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6856 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6857 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6858 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6859 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6860 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6861 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6862 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6863 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6864 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6865 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6866 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6867 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6868 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6869 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6872 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6873 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6874 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6875 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6876 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6877 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6878 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6879 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6880 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6881 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6882 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6883 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6884 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6885 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6886 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6887 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6888 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6889 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6890 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6891 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6892 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6893 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6896 d = (struct builtin_description *) bdesc_spe_predicates;
6897 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6901 switch (insn_data[d->icode].operand[1].mode)
6904 type = int_ftype_int_v2si_v2si;
6907 type = int_ftype_int_v2sf_v2sf;
6913 def_builtin (d->mask, d->name, type, d->code);
6916 /* Evsel predicates. */
6917 d = (struct builtin_description *) bdesc_spe_evsel;
6918 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6922 switch (insn_data[d->icode].operand[1].mode)
6925 type = v2si_ftype_4_v2si;
6928 type = v2sf_ftype_4_v2sf;
6934 def_builtin (d->mask, d->name, type, d->code);
6939 altivec_init_builtins (void)
6941 struct builtin_description *d;
6942 struct builtin_description_predicates *dp;
6944 tree pfloat_type_node = build_pointer_type (float_type_node);
6945 tree pint_type_node = build_pointer_type (integer_type_node);
6946 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6947 tree pchar_type_node = build_pointer_type (char_type_node);
6949 tree pvoid_type_node = build_pointer_type (void_type_node);
6951 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6952 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6953 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6954 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6956 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6958 tree int_ftype_int_v4si_v4si
6959 = build_function_type_list (integer_type_node,
6960 integer_type_node, V4SI_type_node,
6961 V4SI_type_node, NULL_TREE);
6962 tree v4sf_ftype_pcfloat
6963 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6964 tree void_ftype_pfloat_v4sf
6965 = build_function_type_list (void_type_node,
6966 pfloat_type_node, V4SF_type_node, NULL_TREE);
6967 tree v4si_ftype_pcint
6968 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6969 tree void_ftype_pint_v4si
6970 = build_function_type_list (void_type_node,
6971 pint_type_node, V4SI_type_node, NULL_TREE);
6972 tree v8hi_ftype_pcshort
6973 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6974 tree void_ftype_pshort_v8hi
6975 = build_function_type_list (void_type_node,
6976 pshort_type_node, V8HI_type_node, NULL_TREE);
6977 tree v16qi_ftype_pcchar
6978 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6979 tree void_ftype_pchar_v16qi
6980 = build_function_type_list (void_type_node,
6981 pchar_type_node, V16QI_type_node, NULL_TREE);
6982 tree void_ftype_v4si
6983 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6984 tree v8hi_ftype_void
6985 = build_function_type (V8HI_type_node, void_list_node);
6986 tree void_ftype_void
6987 = build_function_type (void_type_node, void_list_node);
6989 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6991 tree v16qi_ftype_long_pcvoid
6992 = build_function_type_list (V16QI_type_node,
6993 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6994 tree v8hi_ftype_long_pcvoid
6995 = build_function_type_list (V8HI_type_node,
6996 long_integer_type_node, pcvoid_type_node, NULL_TREE);
6997 tree v4si_ftype_long_pcvoid
6998 = build_function_type_list (V4SI_type_node,
6999 long_integer_type_node, pcvoid_type_node, NULL_TREE);
7001 tree void_ftype_v4si_long_pvoid
7002 = build_function_type_list (void_type_node,
7003 V4SI_type_node, long_integer_type_node,
7004 pvoid_type_node, NULL_TREE);
7005 tree void_ftype_v16qi_long_pvoid
7006 = build_function_type_list (void_type_node,
7007 V16QI_type_node, long_integer_type_node,
7008 pvoid_type_node, NULL_TREE);
7009 tree void_ftype_v8hi_long_pvoid
7010 = build_function_type_list (void_type_node,
7011 V8HI_type_node, long_integer_type_node,
7012 pvoid_type_node, NULL_TREE);
7013 tree int_ftype_int_v8hi_v8hi
7014 = build_function_type_list (integer_type_node,
7015 integer_type_node, V8HI_type_node,
7016 V8HI_type_node, NULL_TREE);
7017 tree int_ftype_int_v16qi_v16qi
7018 = build_function_type_list (integer_type_node,
7019 integer_type_node, V16QI_type_node,
7020 V16QI_type_node, NULL_TREE);
7021 tree int_ftype_int_v4sf_v4sf
7022 = build_function_type_list (integer_type_node,
7023 integer_type_node, V4SF_type_node,
7024 V4SF_type_node, NULL_TREE);
7025 tree v4si_ftype_v4si
7026 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
7027 tree v8hi_ftype_v8hi
7028 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
7029 tree v16qi_ftype_v16qi
7030 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
7031 tree v4sf_ftype_v4sf
7032 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7033 tree void_ftype_pcvoid_int_int
7034 = build_function_type_list (void_type_node,
7035 pcvoid_type_node, integer_type_node,
7036 integer_type_node, NULL_TREE);
7037 tree int_ftype_pcchar
7038 = build_function_type_list (integer_type_node,
7039 pcchar_type_node, NULL_TREE);
7041 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
7042 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
7043 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
7044 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
7045 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
7046 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
7047 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
7048 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
7049 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
7050 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
7051 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
7052 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
7053 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
7054 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
7055 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
7056 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
7057 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
7058 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
7059 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
7060 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
7061 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
7062 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSR);
7063 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEBX);
7064 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEHX);
7065 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVEWX);
7066 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVXL);
7067 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVX);
7068 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVX);
7069 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVEWX);
7070 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_long_pvoid, ALTIVEC_BUILTIN_STVXL);
7071 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_long_pvoid, ALTIVEC_BUILTIN_STVEBX);
7072 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_long_pvoid, ALTIVEC_BUILTIN_STVEHX);
7074 /* See altivec.h for usage of "__builtin_altivec_compiletime_error". */
7075 def_builtin (MASK_ALTIVEC, "__builtin_altivec_compiletime_error", int_ftype_pcchar,
7076 ALTIVEC_BUILTIN_COMPILETIME_ERROR);
7078 /* Add the DST variants. */
7079 d = (struct builtin_description *) bdesc_dst;
7080 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
7081 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_int, d->code);
7083 /* Initialize the predicates. */
7084 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
7085 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
7087 enum machine_mode mode1;
7090 mode1 = insn_data[dp->icode].operand[1].mode;
7095 type = int_ftype_int_v4si_v4si;
7098 type = int_ftype_int_v8hi_v8hi;
7101 type = int_ftype_int_v16qi_v16qi;
7104 type = int_ftype_int_v4sf_v4sf;
7110 def_builtin (dp->mask, dp->name, type, dp->code);
7113 /* Initialize the abs* operators. */
7114 d = (struct builtin_description *) bdesc_abs;
7115 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
7117 enum machine_mode mode0;
7120 mode0 = insn_data[d->icode].operand[0].mode;
7125 type = v4si_ftype_v4si;
7128 type = v8hi_ftype_v8hi;
7131 type = v16qi_ftype_v16qi;
7134 type = v4sf_ftype_v4sf;
7140 def_builtin (d->mask, d->name, type, d->code);
7145 rs6000_common_init_builtins (void)
7147 struct builtin_description *d;
7150 tree v4sf_ftype_v4sf_v4sf_v16qi
7151 = build_function_type_list (V4SF_type_node,
7152 V4SF_type_node, V4SF_type_node,
7153 V16QI_type_node, NULL_TREE);
7154 tree v4si_ftype_v4si_v4si_v16qi
7155 = build_function_type_list (V4SI_type_node,
7156 V4SI_type_node, V4SI_type_node,
7157 V16QI_type_node, NULL_TREE);
7158 tree v8hi_ftype_v8hi_v8hi_v16qi
7159 = build_function_type_list (V8HI_type_node,
7160 V8HI_type_node, V8HI_type_node,
7161 V16QI_type_node, NULL_TREE);
7162 tree v16qi_ftype_v16qi_v16qi_v16qi
7163 = build_function_type_list (V16QI_type_node,
7164 V16QI_type_node, V16QI_type_node,
7165 V16QI_type_node, NULL_TREE);
7167 = build_function_type_list (V4SI_type_node, integer_type_node, NULL_TREE);
7169 = build_function_type_list (V8HI_type_node, integer_type_node, NULL_TREE);
7170 tree v16qi_ftype_int
7171 = build_function_type_list (V16QI_type_node, integer_type_node, NULL_TREE);
7172 tree v8hi_ftype_v16qi
7173 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
7174 tree v4sf_ftype_v4sf
7175 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
7177 tree v2si_ftype_v2si_v2si
7178 = build_function_type_list (opaque_V2SI_type_node,
7179 opaque_V2SI_type_node,
7180 opaque_V2SI_type_node, NULL_TREE);
7182 tree v2sf_ftype_v2sf_v2sf
7183 = build_function_type_list (opaque_V2SF_type_node,
7184 opaque_V2SF_type_node,
7185 opaque_V2SF_type_node, NULL_TREE);
7187 tree v2si_ftype_int_int
7188 = build_function_type_list (opaque_V2SI_type_node,
7189 integer_type_node, integer_type_node,
7192 tree v2si_ftype_v2si
7193 = build_function_type_list (opaque_V2SI_type_node,
7194 opaque_V2SI_type_node, NULL_TREE);
7196 tree v2sf_ftype_v2sf
7197 = build_function_type_list (opaque_V2SF_type_node,
7198 opaque_V2SF_type_node, NULL_TREE);
7200 tree v2sf_ftype_v2si
7201 = build_function_type_list (opaque_V2SF_type_node,
7202 opaque_V2SI_type_node, NULL_TREE);
7204 tree v2si_ftype_v2sf
7205 = build_function_type_list (opaque_V2SI_type_node,
7206 opaque_V2SF_type_node, NULL_TREE);
7208 tree v2si_ftype_v2si_char
7209 = build_function_type_list (opaque_V2SI_type_node,
7210 opaque_V2SI_type_node,
7211 char_type_node, NULL_TREE);
7213 tree v2si_ftype_int_char
7214 = build_function_type_list (opaque_V2SI_type_node,
7215 integer_type_node, char_type_node, NULL_TREE);
7217 tree v2si_ftype_char
7218 = build_function_type_list (opaque_V2SI_type_node,
7219 char_type_node, NULL_TREE);
7221 tree int_ftype_int_int
7222 = build_function_type_list (integer_type_node,
7223 integer_type_node, integer_type_node,
7226 tree v4si_ftype_v4si_v4si
7227 = build_function_type_list (V4SI_type_node,
7228 V4SI_type_node, V4SI_type_node, NULL_TREE);
7229 tree v4sf_ftype_v4si_int
7230 = build_function_type_list (V4SF_type_node,
7231 V4SI_type_node, integer_type_node, NULL_TREE);
7232 tree v4si_ftype_v4sf_int
7233 = build_function_type_list (V4SI_type_node,
7234 V4SF_type_node, integer_type_node, NULL_TREE);
7235 tree v4si_ftype_v4si_int
7236 = build_function_type_list (V4SI_type_node,
7237 V4SI_type_node, integer_type_node, NULL_TREE);
7238 tree v8hi_ftype_v8hi_int
7239 = build_function_type_list (V8HI_type_node,
7240 V8HI_type_node, integer_type_node, NULL_TREE);
7241 tree v16qi_ftype_v16qi_int
7242 = build_function_type_list (V16QI_type_node,
7243 V16QI_type_node, integer_type_node, NULL_TREE);
7244 tree v16qi_ftype_v16qi_v16qi_int
7245 = build_function_type_list (V16QI_type_node,
7246 V16QI_type_node, V16QI_type_node,
7247 integer_type_node, NULL_TREE);
7248 tree v8hi_ftype_v8hi_v8hi_int
7249 = build_function_type_list (V8HI_type_node,
7250 V8HI_type_node, V8HI_type_node,
7251 integer_type_node, NULL_TREE);
7252 tree v4si_ftype_v4si_v4si_int
7253 = build_function_type_list (V4SI_type_node,
7254 V4SI_type_node, V4SI_type_node,
7255 integer_type_node, NULL_TREE);
7256 tree v4sf_ftype_v4sf_v4sf_int
7257 = build_function_type_list (V4SF_type_node,
7258 V4SF_type_node, V4SF_type_node,
7259 integer_type_node, NULL_TREE);
7260 tree v4sf_ftype_v4sf_v4sf
7261 = build_function_type_list (V4SF_type_node,
7262 V4SF_type_node, V4SF_type_node, NULL_TREE);
7263 tree v4sf_ftype_v4sf_v4sf_v4si
7264 = build_function_type_list (V4SF_type_node,
7265 V4SF_type_node, V4SF_type_node,
7266 V4SI_type_node, NULL_TREE);
7267 tree v4sf_ftype_v4sf_v4sf_v4sf
7268 = build_function_type_list (V4SF_type_node,
7269 V4SF_type_node, V4SF_type_node,
7270 V4SF_type_node, NULL_TREE);
7271 tree v4si_ftype_v4si_v4si_v4si
7272 = build_function_type_list (V4SI_type_node,
7273 V4SI_type_node, V4SI_type_node,
7274 V4SI_type_node, NULL_TREE);
7275 tree v8hi_ftype_v8hi_v8hi
7276 = build_function_type_list (V8HI_type_node,
7277 V8HI_type_node, V8HI_type_node, NULL_TREE);
7278 tree v8hi_ftype_v8hi_v8hi_v8hi
7279 = build_function_type_list (V8HI_type_node,
7280 V8HI_type_node, V8HI_type_node,
7281 V8HI_type_node, NULL_TREE);
7282 tree v4si_ftype_v8hi_v8hi_v4si
7283 = build_function_type_list (V4SI_type_node,
7284 V8HI_type_node, V8HI_type_node,
7285 V4SI_type_node, NULL_TREE);
7286 tree v4si_ftype_v16qi_v16qi_v4si
7287 = build_function_type_list (V4SI_type_node,
7288 V16QI_type_node, V16QI_type_node,
7289 V4SI_type_node, NULL_TREE);
7290 tree v16qi_ftype_v16qi_v16qi
7291 = build_function_type_list (V16QI_type_node,
7292 V16QI_type_node, V16QI_type_node, NULL_TREE);
7293 tree v4si_ftype_v4sf_v4sf
7294 = build_function_type_list (V4SI_type_node,
7295 V4SF_type_node, V4SF_type_node, NULL_TREE);
7296 tree v8hi_ftype_v16qi_v16qi
7297 = build_function_type_list (V8HI_type_node,
7298 V16QI_type_node, V16QI_type_node, NULL_TREE);
7299 tree v4si_ftype_v8hi_v8hi
7300 = build_function_type_list (V4SI_type_node,
7301 V8HI_type_node, V8HI_type_node, NULL_TREE);
7302 tree v8hi_ftype_v4si_v4si
7303 = build_function_type_list (V8HI_type_node,
7304 V4SI_type_node, V4SI_type_node, NULL_TREE);
7305 tree v16qi_ftype_v8hi_v8hi
7306 = build_function_type_list (V16QI_type_node,
7307 V8HI_type_node, V8HI_type_node, NULL_TREE);
7308 tree v4si_ftype_v16qi_v4si
7309 = build_function_type_list (V4SI_type_node,
7310 V16QI_type_node, V4SI_type_node, NULL_TREE);
7311 tree v4si_ftype_v16qi_v16qi
7312 = build_function_type_list (V4SI_type_node,
7313 V16QI_type_node, V16QI_type_node, NULL_TREE);
7314 tree v4si_ftype_v8hi_v4si
7315 = build_function_type_list (V4SI_type_node,
7316 V8HI_type_node, V4SI_type_node, NULL_TREE);
7317 tree v4si_ftype_v8hi
7318 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
7319 tree int_ftype_v4si_v4si
7320 = build_function_type_list (integer_type_node,
7321 V4SI_type_node, V4SI_type_node, NULL_TREE);
7322 tree int_ftype_v4sf_v4sf
7323 = build_function_type_list (integer_type_node,
7324 V4SF_type_node, V4SF_type_node, NULL_TREE);
7325 tree int_ftype_v16qi_v16qi
7326 = build_function_type_list (integer_type_node,
7327 V16QI_type_node, V16QI_type_node, NULL_TREE);
7328 tree int_ftype_v8hi_v8hi
7329 = build_function_type_list (integer_type_node,
7330 V8HI_type_node, V8HI_type_node, NULL_TREE);
7332 /* Add the simple ternary operators. */
7333 d = (struct builtin_description *) bdesc_3arg;
7334 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
7337 enum machine_mode mode0, mode1, mode2, mode3;
7340 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7343 mode0 = insn_data[d->icode].operand[0].mode;
7344 mode1 = insn_data[d->icode].operand[1].mode;
7345 mode2 = insn_data[d->icode].operand[2].mode;
7346 mode3 = insn_data[d->icode].operand[3].mode;
7348 /* When all four are of the same mode. */
7349 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
7354 type = v4si_ftype_v4si_v4si_v4si;
7357 type = v4sf_ftype_v4sf_v4sf_v4sf;
7360 type = v8hi_ftype_v8hi_v8hi_v8hi;
7363 type = v16qi_ftype_v16qi_v16qi_v16qi;
7369 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
7374 type = v4si_ftype_v4si_v4si_v16qi;
7377 type = v4sf_ftype_v4sf_v4sf_v16qi;
7380 type = v8hi_ftype_v8hi_v8hi_v16qi;
7383 type = v16qi_ftype_v16qi_v16qi_v16qi;
7389 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
7390 && mode3 == V4SImode)
7391 type = v4si_ftype_v16qi_v16qi_v4si;
7392 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
7393 && mode3 == V4SImode)
7394 type = v4si_ftype_v8hi_v8hi_v4si;
7395 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
7396 && mode3 == V4SImode)
7397 type = v4sf_ftype_v4sf_v4sf_v4si;
7399 /* vchar, vchar, vchar, 4 bit literal. */
7400 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
7402 type = v16qi_ftype_v16qi_v16qi_int;
7404 /* vshort, vshort, vshort, 4 bit literal. */
7405 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
7407 type = v8hi_ftype_v8hi_v8hi_int;
7409 /* vint, vint, vint, 4 bit literal. */
7410 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
7412 type = v4si_ftype_v4si_v4si_int;
7414 /* vfloat, vfloat, vfloat, 4 bit literal. */
7415 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
7417 type = v4sf_ftype_v4sf_v4sf_int;
7422 def_builtin (d->mask, d->name, type, d->code);
7425 /* Add the simple binary operators. */
7426 d = (struct builtin_description *) bdesc_2arg;
7427 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
7429 enum machine_mode mode0, mode1, mode2;
7432 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7435 mode0 = insn_data[d->icode].operand[0].mode;
7436 mode1 = insn_data[d->icode].operand[1].mode;
7437 mode2 = insn_data[d->icode].operand[2].mode;
7439 /* When all three operands are of the same mode. */
7440 if (mode0 == mode1 && mode1 == mode2)
7445 type = v4sf_ftype_v4sf_v4sf;
7448 type = v4si_ftype_v4si_v4si;
7451 type = v16qi_ftype_v16qi_v16qi;
7454 type = v8hi_ftype_v8hi_v8hi;
7457 type = v2si_ftype_v2si_v2si;
7460 type = v2sf_ftype_v2sf_v2sf;
7463 type = int_ftype_int_int;
7470 /* A few other combos we really don't want to do manually. */
7472 /* vint, vfloat, vfloat. */
7473 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
7474 type = v4si_ftype_v4sf_v4sf;
7476 /* vshort, vchar, vchar. */
7477 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
7478 type = v8hi_ftype_v16qi_v16qi;
7480 /* vint, vshort, vshort. */
7481 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
7482 type = v4si_ftype_v8hi_v8hi;
7484 /* vshort, vint, vint. */
7485 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
7486 type = v8hi_ftype_v4si_v4si;
7488 /* vchar, vshort, vshort. */
7489 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
7490 type = v16qi_ftype_v8hi_v8hi;
7492 /* vint, vchar, vint. */
7493 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
7494 type = v4si_ftype_v16qi_v4si;
7496 /* vint, vchar, vchar. */
7497 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
7498 type = v4si_ftype_v16qi_v16qi;
7500 /* vint, vshort, vint. */
7501 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
7502 type = v4si_ftype_v8hi_v4si;
7504 /* vint, vint, 5 bit literal. */
7505 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
7506 type = v4si_ftype_v4si_int;
7508 /* vshort, vshort, 5 bit literal. */
7509 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
7510 type = v8hi_ftype_v8hi_int;
7512 /* vchar, vchar, 5 bit literal. */
7513 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
7514 type = v16qi_ftype_v16qi_int;
7516 /* vfloat, vint, 5 bit literal. */
7517 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
7518 type = v4sf_ftype_v4si_int;
7520 /* vint, vfloat, 5 bit literal. */
7521 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
7522 type = v4si_ftype_v4sf_int;
7524 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
7525 type = v2si_ftype_int_int;
7527 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
7528 type = v2si_ftype_v2si_char;
7530 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
7531 type = v2si_ftype_int_char;
7534 else if (mode0 == SImode)
7539 type = int_ftype_v4si_v4si;
7542 type = int_ftype_v4sf_v4sf;
7545 type = int_ftype_v16qi_v16qi;
7548 type = int_ftype_v8hi_v8hi;
7558 def_builtin (d->mask, d->name, type, d->code);
7561 /* Add the simple unary operators. */
7562 d = (struct builtin_description *) bdesc_1arg;
7563 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
7565 enum machine_mode mode0, mode1;
7568 if (d->name == 0 || d->icode == CODE_FOR_nothing)
7571 mode0 = insn_data[d->icode].operand[0].mode;
7572 mode1 = insn_data[d->icode].operand[1].mode;
7574 if (mode0 == V4SImode && mode1 == QImode)
7575 type = v4si_ftype_int;
7576 else if (mode0 == V8HImode && mode1 == QImode)
7577 type = v8hi_ftype_int;
7578 else if (mode0 == V16QImode && mode1 == QImode)
7579 type = v16qi_ftype_int;
7580 else if (mode0 == V4SFmode && mode1 == V4SFmode)
7581 type = v4sf_ftype_v4sf;
7582 else if (mode0 == V8HImode && mode1 == V16QImode)
7583 type = v8hi_ftype_v16qi;
7584 else if (mode0 == V4SImode && mode1 == V8HImode)
7585 type = v4si_ftype_v8hi;
7586 else if (mode0 == V2SImode && mode1 == V2SImode)
7587 type = v2si_ftype_v2si;
7588 else if (mode0 == V2SFmode && mode1 == V2SFmode)
7589 type = v2sf_ftype_v2sf;
7590 else if (mode0 == V2SFmode && mode1 == V2SImode)
7591 type = v2sf_ftype_v2si;
7592 else if (mode0 == V2SImode && mode1 == V2SFmode)
7593 type = v2si_ftype_v2sf;
7594 else if (mode0 == V2SImode && mode1 == QImode)
7595 type = v2si_ftype_char;
7599 def_builtin (d->mask, d->name, type, d->code);
7604 rs6000_init_libfuncs (void)
7606 if (!TARGET_HARD_FLOAT)
7609 if (DEFAULT_ABI != ABI_V4)
7611 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
7613 /* AIX library routines for float->int conversion. */
7614 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
7615 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
7616 set_conv_libfunc (sfix_optab, SImode, TFmode, "_qitrunc");
7617 set_conv_libfunc (ufix_optab, SImode, TFmode, "_quitrunc");
7620 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
7621 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
7622 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
7623 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
7624 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
7628 /* 32-bit SVR4 quad floating point routines. */
7630 set_optab_libfunc (add_optab, TFmode, "_q_add");
7631 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
7632 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
7633 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
7634 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
7635 if (TARGET_PPC_GPOPT || TARGET_POWER2)
7636 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
7638 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
7639 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
7640 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
7641 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
7642 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
7643 set_optab_libfunc (le_optab, TFmode, "_q_fle");
7645 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
7646 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
7647 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
7648 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
7649 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
7650 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
7651 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
7655 /* Expand a block move operation, and return 1 if successful. Return 0
7656 if we should let the compiler generate normal code.
7658 operands[0] is the destination
7659 operands[1] is the source
7660 operands[2] is the length
7661 operands[3] is the alignment */
7663 #define MAX_MOVE_REG 4
7666 expand_block_move (rtx operands[])
7668 rtx orig_dest = operands[0];
7669 rtx orig_src = operands[1];
7670 rtx bytes_rtx = operands[2];
7671 rtx align_rtx = operands[3];
7672 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
7677 rtx stores[MAX_MOVE_REG];
7680 /* If this is not a fixed size move, just call memcpy */
7684 /* If this is not a fixed size alignment, abort */
7685 if (GET_CODE (align_rtx) != CONST_INT)
7687 align = INTVAL (align_rtx);
7689 /* Anything to move? */
7690 bytes = INTVAL (bytes_rtx);
7694 /* store_one_arg depends on expand_block_move to handle at least the size of
7695 reg_parm_stack_space. */
7696 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
7699 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
7702 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
7703 rtx (*mov) (rtx, rtx);
7705 enum machine_mode mode = BLKmode;
7709 && bytes > 24 /* move up to 32 bytes at a time */
7717 && ! fixed_regs[12])
7719 move_bytes = (bytes > 32) ? 32 : bytes;
7720 gen_func.movstrsi = gen_movstrsi_8reg;
7722 else if (TARGET_STRING
7723 && bytes > 16 /* move up to 24 bytes at a time */
7729 && ! fixed_regs[10])
7731 move_bytes = (bytes > 24) ? 24 : bytes;
7732 gen_func.movstrsi = gen_movstrsi_6reg;
7734 else if (TARGET_STRING
7735 && bytes > 8 /* move up to 16 bytes at a time */
7741 move_bytes = (bytes > 16) ? 16 : bytes;
7742 gen_func.movstrsi = gen_movstrsi_4reg;
7744 else if (bytes >= 8 && TARGET_POWERPC64
7745 /* 64-bit loads and stores require word-aligned
7747 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7751 gen_func.mov = gen_movdi;
7753 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7754 { /* move up to 8 bytes at a time */
7755 move_bytes = (bytes > 8) ? 8 : bytes;
7756 gen_func.movstrsi = gen_movstrsi_2reg;
7758 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7759 { /* move 4 bytes */
7762 gen_func.mov = gen_movsi;
7764 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7765 { /* move 2 bytes */
7768 gen_func.mov = gen_movhi;
7770 else if (TARGET_STRING && bytes > 1)
7771 { /* move up to 4 bytes at a time */
7772 move_bytes = (bytes > 4) ? 4 : bytes;
7773 gen_func.movstrsi = gen_movstrsi_1reg;
7775 else /* move 1 byte at a time */
7779 gen_func.mov = gen_movqi;
7782 src = adjust_address (orig_src, mode, offset);
7783 dest = adjust_address (orig_dest, mode, offset);
7785 if (mode != BLKmode)
7787 rtx tmp_reg = gen_reg_rtx (mode);
7789 emit_insn ((*gen_func.mov) (tmp_reg, src));
7790 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7793 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7796 for (i = 0; i < num_reg; i++)
7797 emit_insn (stores[i]);
7801 if (mode == BLKmode)
7803 /* Move the address into scratch registers. The movstrsi
7804 patterns require zero offset. */
7805 if (!REG_P (XEXP (src, 0)))
7807 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7808 src = replace_equiv_address (src, src_reg);
7810 set_mem_size (src, GEN_INT (move_bytes));
7812 if (!REG_P (XEXP (dest, 0)))
7814 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7815 dest = replace_equiv_address (dest, dest_reg);
7817 set_mem_size (dest, GEN_INT (move_bytes));
7819 emit_insn ((*gen_func.movstrsi) (dest, src,
7820 GEN_INT (move_bytes & 31),
7829 /* Return 1 if OP is a load multiple operation. It is known to be a
7830 PARALLEL and the first section will be tested. */
7833 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7835 int count = XVECLEN (op, 0);
7836 unsigned int dest_regno;
7840 /* Perform a quick check so we don't blow up below. */
7842 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7843 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7844 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7847 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7848 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7850 for (i = 1; i < count; i++)
7852 rtx elt = XVECEXP (op, 0, i);
7854 if (GET_CODE (elt) != SET
7855 || GET_CODE (SET_DEST (elt)) != REG
7856 || GET_MODE (SET_DEST (elt)) != SImode
7857 || REGNO (SET_DEST (elt)) != dest_regno + i
7858 || GET_CODE (SET_SRC (elt)) != MEM
7859 || GET_MODE (SET_SRC (elt)) != SImode
7860 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7861 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7862 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7863 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7870 /* Similar, but tests for store multiple. Here, the second vector element
7871 is a CLOBBER. It will be tested later. */
7874 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7876 int count = XVECLEN (op, 0) - 1;
7877 unsigned int src_regno;
7881 /* Perform a quick check so we don't blow up below. */
7883 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7884 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7885 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7888 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7889 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7891 for (i = 1; i < count; i++)
7893 rtx elt = XVECEXP (op, 0, i + 1);
7895 if (GET_CODE (elt) != SET
7896 || GET_CODE (SET_SRC (elt)) != REG
7897 || GET_MODE (SET_SRC (elt)) != SImode
7898 || REGNO (SET_SRC (elt)) != src_regno + i
7899 || GET_CODE (SET_DEST (elt)) != MEM
7900 || GET_MODE (SET_DEST (elt)) != SImode
7901 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7902 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7903 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7904 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7911 /* Return a string to perform a load_multiple operation.
7912 operands[0] is the vector.
7913 operands[1] is the source address.
7914 operands[2] is the first destination register. */
7917 rs6000_output_load_multiple (rtx operands[3])
7919 /* We have to handle the case where the pseudo used to contain the address
7920 is assigned to one of the output registers. */
7922 int words = XVECLEN (operands[0], 0);
7925 if (XVECLEN (operands[0], 0) == 1)
7926 return "{l|lwz} %2,0(%1)";
7928 for (i = 0; i < words; i++)
7929 if (refers_to_regno_p (REGNO (operands[2]) + i,
7930 REGNO (operands[2]) + i + 1, operands[1], 0))
7934 xop[0] = GEN_INT (4 * (words-1));
7935 xop[1] = operands[1];
7936 xop[2] = operands[2];
7937 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7942 xop[0] = GEN_INT (4 * (words-1));
7943 xop[1] = operands[1];
7944 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7945 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7950 for (j = 0; j < words; j++)
7953 xop[0] = GEN_INT (j * 4);
7954 xop[1] = operands[1];
7955 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7956 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7958 xop[0] = GEN_INT (i * 4);
7959 xop[1] = operands[1];
7960 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7965 return "{lsi|lswi} %2,%1,%N0";
7968 /* Return 1 for a parallel vrsave operation. */
7971 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7973 int count = XVECLEN (op, 0);
7974 unsigned int dest_regno, src_regno;
7978 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7979 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7980 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7983 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7984 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7986 if (dest_regno != VRSAVE_REGNO
7987 && src_regno != VRSAVE_REGNO)
7990 for (i = 1; i < count; i++)
7992 rtx elt = XVECEXP (op, 0, i);
7994 if (GET_CODE (elt) != CLOBBER
7995 && GET_CODE (elt) != SET)
8002 /* Return 1 for an PARALLEL suitable for mfcr. */
8005 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8007 int count = XVECLEN (op, 0);
8010 /* Perform a quick check so we don't blow up below. */
8012 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8013 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8014 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8017 for (i = 0; i < count; i++)
8019 rtx exp = XVECEXP (op, 0, i);
8024 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
8026 if (GET_CODE (src_reg) != REG
8027 || GET_MODE (src_reg) != CCmode
8028 || ! CR_REGNO_P (REGNO (src_reg)))
8031 if (GET_CODE (exp) != SET
8032 || GET_CODE (SET_DEST (exp)) != REG
8033 || GET_MODE (SET_DEST (exp)) != SImode
8034 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
8036 unspec = SET_SRC (exp);
8037 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
8039 if (GET_CODE (unspec) != UNSPEC
8040 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
8041 || XVECLEN (unspec, 0) != 2
8042 || XVECEXP (unspec, 0, 0) != src_reg
8043 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8044 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8050 /* Return 1 for an PARALLEL suitable for mtcrf. */
8053 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8055 int count = XVECLEN (op, 0);
8059 /* Perform a quick check so we don't blow up below. */
8061 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8062 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
8063 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
8065 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
8067 if (GET_CODE (src_reg) != REG
8068 || GET_MODE (src_reg) != SImode
8069 || ! INT_REGNO_P (REGNO (src_reg)))
8072 for (i = 0; i < count; i++)
8074 rtx exp = XVECEXP (op, 0, i);
8078 if (GET_CODE (exp) != SET
8079 || GET_CODE (SET_DEST (exp)) != REG
8080 || GET_MODE (SET_DEST (exp)) != CCmode
8081 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
8083 unspec = SET_SRC (exp);
8084 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
8086 if (GET_CODE (unspec) != UNSPEC
8087 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
8088 || XVECLEN (unspec, 0) != 2
8089 || XVECEXP (unspec, 0, 0) != src_reg
8090 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
8091 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
8097 /* Return 1 for an PARALLEL suitable for lmw. */
8100 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8102 int count = XVECLEN (op, 0);
8103 unsigned int dest_regno;
8105 unsigned int base_regno;
8106 HOST_WIDE_INT offset;
8109 /* Perform a quick check so we don't blow up below. */
8111 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8112 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
8113 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
8116 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
8117 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
8120 || count != 32 - (int) dest_regno)
8123 if (legitimate_indirect_address_p (src_addr, 0))
8126 base_regno = REGNO (src_addr);
8127 if (base_regno == 0)
8130 else if (legitimate_offset_address_p (SImode, src_addr, 0))
8132 offset = INTVAL (XEXP (src_addr, 1));
8133 base_regno = REGNO (XEXP (src_addr, 0));
8138 for (i = 0; i < count; i++)
8140 rtx elt = XVECEXP (op, 0, i);
8143 HOST_WIDE_INT newoffset;
8145 if (GET_CODE (elt) != SET
8146 || GET_CODE (SET_DEST (elt)) != REG
8147 || GET_MODE (SET_DEST (elt)) != SImode
8148 || REGNO (SET_DEST (elt)) != dest_regno + i
8149 || GET_CODE (SET_SRC (elt)) != MEM
8150 || GET_MODE (SET_SRC (elt)) != SImode)
8152 newaddr = XEXP (SET_SRC (elt), 0);
8153 if (legitimate_indirect_address_p (newaddr, 0))
8158 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8160 addr_reg = XEXP (newaddr, 0);
8161 newoffset = INTVAL (XEXP (newaddr, 1));
8165 if (REGNO (addr_reg) != base_regno
8166 || newoffset != offset + 4 * i)
8173 /* Return 1 for an PARALLEL suitable for stmw. */
8176 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8178 int count = XVECLEN (op, 0);
8179 unsigned int src_regno;
8181 unsigned int base_regno;
8182 HOST_WIDE_INT offset;
8185 /* Perform a quick check so we don't blow up below. */
8187 || GET_CODE (XVECEXP (op, 0, 0)) != SET
8188 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
8189 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
8192 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
8193 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
8196 || count != 32 - (int) src_regno)
8199 if (legitimate_indirect_address_p (dest_addr, 0))
8202 base_regno = REGNO (dest_addr);
8203 if (base_regno == 0)
8206 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
8208 offset = INTVAL (XEXP (dest_addr, 1));
8209 base_regno = REGNO (XEXP (dest_addr, 0));
8214 for (i = 0; i < count; i++)
8216 rtx elt = XVECEXP (op, 0, i);
8219 HOST_WIDE_INT newoffset;
8221 if (GET_CODE (elt) != SET
8222 || GET_CODE (SET_SRC (elt)) != REG
8223 || GET_MODE (SET_SRC (elt)) != SImode
8224 || REGNO (SET_SRC (elt)) != src_regno + i
8225 || GET_CODE (SET_DEST (elt)) != MEM
8226 || GET_MODE (SET_DEST (elt)) != SImode)
8228 newaddr = XEXP (SET_DEST (elt), 0);
8229 if (legitimate_indirect_address_p (newaddr, 0))
8234 else if (legitimate_offset_address_p (SImode, newaddr, 0))
8236 addr_reg = XEXP (newaddr, 0);
8237 newoffset = INTVAL (XEXP (newaddr, 1));
8241 if (REGNO (addr_reg) != base_regno
8242 || newoffset != offset + 4 * i)
8249 /* A validation routine: say whether CODE, a condition code, and MODE
8250 match. The other alternatives either don't make sense or should
8251 never be generated. */
8254 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
8256 if ((GET_RTX_CLASS (code) != RTX_COMPARE
8257 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
8258 || GET_MODE_CLASS (mode) != MODE_CC)
8261 /* These don't make sense. */
8262 if ((code == GT || code == LT || code == GE || code == LE)
8263 && mode == CCUNSmode)
8266 if ((code == GTU || code == LTU || code == GEU || code == LEU)
8267 && mode != CCUNSmode)
8270 if (mode != CCFPmode
8271 && (code == ORDERED || code == UNORDERED
8272 || code == UNEQ || code == LTGT
8273 || code == UNGT || code == UNLT
8274 || code == UNGE || code == UNLE))
8277 /* These should never be generated except for
8278 flag_finite_math_only. */
8279 if (mode == CCFPmode
8280 && ! flag_finite_math_only
8281 && (code == LE || code == GE
8282 || code == UNEQ || code == LTGT
8283 || code == UNGT || code == UNLT))
8286 /* These are invalid; the information is not there. */
8287 if (mode == CCEQmode
8288 && code != EQ && code != NE)
8292 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
8293 We only check the opcode against the mode of the CC value here. */
8296 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8298 enum rtx_code code = GET_CODE (op);
8299 enum machine_mode cc_mode;
8301 if (!COMPARISON_P (op))
8304 cc_mode = GET_MODE (XEXP (op, 0));
8305 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
8308 validate_condition_mode (code, cc_mode);
8313 /* Return 1 if OP is a comparison operation that is valid for a branch
8314 insn and which is true if the corresponding bit in the CC register
8318 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
8322 if (! branch_comparison_operator (op, mode))
8325 code = GET_CODE (op);
8326 return (code == EQ || code == LT || code == GT
8327 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
8328 || code == LTU || code == GTU
8329 || code == UNORDERED);
8332 /* Return 1 if OP is a comparison operation that is valid for an scc
8333 insn: it must be a positive comparison. */
8336 scc_comparison_operator (rtx op, enum machine_mode mode)
8338 return branch_positive_comparison_operator (op, mode);
8342 trap_comparison_operator (rtx op, enum machine_mode mode)
8344 if (mode != VOIDmode && mode != GET_MODE (op))
8346 return COMPARISON_P (op);
8350 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8352 enum rtx_code code = GET_CODE (op);
8353 return (code == AND || code == IOR || code == XOR);
8357 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8359 enum rtx_code code = GET_CODE (op);
8360 return (code == IOR || code == XOR);
8364 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
8366 enum rtx_code code = GET_CODE (op);
8367 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
8370 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
8371 mask required to convert the result of a rotate insn into a shift
8372 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
8375 includes_lshift_p (rtx shiftop, rtx andop)
8377 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8379 shift_mask <<= INTVAL (shiftop);
8381 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8384 /* Similar, but for right shift. */
8387 includes_rshift_p (rtx shiftop, rtx andop)
8389 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
8391 shift_mask >>= INTVAL (shiftop);
8393 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
8396 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
8397 to perform a left shift. It must have exactly SHIFTOP least
8398 significant 0's, then one or more 1's, then zero or more 0's. */
8401 includes_rldic_lshift_p (rtx shiftop, rtx andop)
8403 if (GET_CODE (andop) == CONST_INT)
8405 HOST_WIDE_INT c, lsb, shift_mask;
8408 if (c == 0 || c == ~0)
8412 shift_mask <<= INTVAL (shiftop);
8414 /* Find the least significant one bit. */
8417 /* It must coincide with the LSB of the shift mask. */
8418 if (-lsb != shift_mask)
8421 /* Invert to look for the next transition (if any). */
8424 /* Remove the low group of ones (originally low group of zeros). */
8427 /* Again find the lsb, and check we have all 1's above. */
8431 else if (GET_CODE (andop) == CONST_DOUBLE
8432 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8434 HOST_WIDE_INT low, high, lsb;
8435 HOST_WIDE_INT shift_mask_low, shift_mask_high;
8437 low = CONST_DOUBLE_LOW (andop);
8438 if (HOST_BITS_PER_WIDE_INT < 64)
8439 high = CONST_DOUBLE_HIGH (andop);
8441 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
8442 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
8445 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8447 shift_mask_high = ~0;
8448 if (INTVAL (shiftop) > 32)
8449 shift_mask_high <<= INTVAL (shiftop) - 32;
8453 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
8460 return high == -lsb;
8463 shift_mask_low = ~0;
8464 shift_mask_low <<= INTVAL (shiftop);
8468 if (-lsb != shift_mask_low)
8471 if (HOST_BITS_PER_WIDE_INT < 64)
8476 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
8479 return high == -lsb;
8483 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
8489 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
8490 to perform a left shift. It must have SHIFTOP or more least
8491 significant 0's, with the remainder of the word 1's. */
8494 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
8496 if (GET_CODE (andop) == CONST_INT)
8498 HOST_WIDE_INT c, lsb, shift_mask;
8501 shift_mask <<= INTVAL (shiftop);
8504 /* Find the least significant one bit. */
8507 /* It must be covered by the shift mask.
8508 This test also rejects c == 0. */
8509 if ((lsb & shift_mask) == 0)
8512 /* Check we have all 1's above the transition, and reject all 1's. */
8513 return c == -lsb && lsb != 1;
8515 else if (GET_CODE (andop) == CONST_DOUBLE
8516 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
8518 HOST_WIDE_INT low, lsb, shift_mask_low;
8520 low = CONST_DOUBLE_LOW (andop);
8522 if (HOST_BITS_PER_WIDE_INT < 64)
8524 HOST_WIDE_INT high, shift_mask_high;
8526 high = CONST_DOUBLE_HIGH (andop);
8530 shift_mask_high = ~0;
8531 if (INTVAL (shiftop) > 32)
8532 shift_mask_high <<= INTVAL (shiftop) - 32;
8536 if ((lsb & shift_mask_high) == 0)
8539 return high == -lsb;
8545 shift_mask_low = ~0;
8546 shift_mask_low <<= INTVAL (shiftop);
8550 if ((lsb & shift_mask_low) == 0)
8553 return low == -lsb && lsb != 1;
8559 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
8560 for lfq and stfq insns.
8562 Note reg1 and reg2 *must* be hard registers. To be sure we will
8563 abort if we are passed pseudo registers. */
8566 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
8568 /* We might have been passed a SUBREG. */
8569 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
8572 return (REGNO (reg1) == REGNO (reg2) - 1);
8575 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
8576 addr1 and addr2 must be in consecutive memory locations
8577 (addr2 == addr1 + 8). */
8580 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
8585 /* Extract an offset (if used) from the first addr. */
8586 if (GET_CODE (addr1) == PLUS)
8588 /* If not a REG, return zero. */
8589 if (GET_CODE (XEXP (addr1, 0)) != REG)
8593 reg1 = REGNO (XEXP (addr1, 0));
8594 /* The offset must be constant! */
8595 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
8597 offset1 = INTVAL (XEXP (addr1, 1));
8600 else if (GET_CODE (addr1) != REG)
8604 reg1 = REGNO (addr1);
8605 /* This was a simple (mem (reg)) expression. Offset is 0. */
8609 /* Make sure the second address is a (mem (plus (reg) (const_int)))
8610 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
8611 register as addr1. */
8612 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
8614 if (GET_CODE (addr2) != PLUS)
8617 if (GET_CODE (XEXP (addr2, 0)) != REG
8618 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
8621 if (reg1 != REGNO (XEXP (addr2, 0)))
8624 /* The offset for the second addr must be 8 more than the first addr. */
8625 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
8628 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
8633 /* Return the register class of a scratch register needed to copy IN into
8634 or out of a register in CLASS in MODE. If it can be done directly,
8635 NO_REGS is returned. */
8638 secondary_reload_class (enum reg_class class,
8639 enum machine_mode mode ATTRIBUTE_UNUSED,
8644 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
8646 && MACHOPIC_INDIRECT
8650 /* We cannot copy a symbolic operand directly into anything
8651 other than BASE_REGS for TARGET_ELF. So indicate that a
8652 register from BASE_REGS is needed as an intermediate
8655 On Darwin, pic addresses require a load from memory, which
8656 needs a base register. */
8657 if (class != BASE_REGS
8658 && (GET_CODE (in) == SYMBOL_REF
8659 || GET_CODE (in) == HIGH
8660 || GET_CODE (in) == LABEL_REF
8661 || GET_CODE (in) == CONST))
8665 if (GET_CODE (in) == REG)
8668 if (regno >= FIRST_PSEUDO_REGISTER)
8670 regno = true_regnum (in);
8671 if (regno >= FIRST_PSEUDO_REGISTER)
8675 else if (GET_CODE (in) == SUBREG)
8677 regno = true_regnum (in);
8678 if (regno >= FIRST_PSEUDO_REGISTER)
8684 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
8686 if (class == GENERAL_REGS || class == BASE_REGS
8687 || (regno >= 0 && INT_REGNO_P (regno)))
8690 /* Constants, memory, and FP registers can go into FP registers. */
8691 if ((regno == -1 || FP_REGNO_P (regno))
8692 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
8695 /* Memory, and AltiVec registers can go into AltiVec registers. */
8696 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
8697 && class == ALTIVEC_REGS)
8700 /* We can copy among the CR registers. */
8701 if ((class == CR_REGS || class == CR0_REGS)
8702 && regno >= 0 && CR_REGNO_P (regno))
8705 /* Otherwise, we need GENERAL_REGS. */
8706 return GENERAL_REGS;
8709 /* Given a comparison operation, return the bit number in CCR to test. We
8710 know this is a valid comparison.
8712 SCC_P is 1 if this is for an scc. That means that %D will have been
8713 used instead of %C, so the bits will be in different places.
8715 Return -1 if OP isn't a valid comparison for some reason. */
8718 ccr_bit (rtx op, int scc_p)
8720 enum rtx_code code = GET_CODE (op);
8721 enum machine_mode cc_mode;
8726 if (!COMPARISON_P (op))
8731 if (GET_CODE (reg) != REG
8732 || ! CR_REGNO_P (REGNO (reg)))
8735 cc_mode = GET_MODE (reg);
8736 cc_regnum = REGNO (reg);
8737 base_bit = 4 * (cc_regnum - CR0_REGNO);
8739 validate_condition_mode (code, cc_mode);
8741 /* When generating a sCOND operation, only positive conditions are
8743 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
8744 && code != GTU && code != LTU)
8750 if (TARGET_E500 && !TARGET_FPRS
8751 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8752 return base_bit + 1;
8753 return scc_p ? base_bit + 3 : base_bit + 2;
8755 if (TARGET_E500 && !TARGET_FPRS
8756 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8757 return base_bit + 1;
8758 return base_bit + 2;
8759 case GT: case GTU: case UNLE:
8760 return base_bit + 1;
8761 case LT: case LTU: case UNGE:
8763 case ORDERED: case UNORDERED:
8764 return base_bit + 3;
8767 /* If scc, we will have done a cror to put the bit in the
8768 unordered position. So test that bit. For integer, this is ! LT
8769 unless this is an scc insn. */
8770 return scc_p ? base_bit + 3 : base_bit;
8773 return scc_p ? base_bit + 3 : base_bit + 1;
8780 /* Return the GOT register. */
8783 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
8785 /* The second flow pass currently (June 1999) can't update
8786 regs_ever_live without disturbing other parts of the compiler, so
8787 update it here to make the prolog/epilogue code happy. */
8788 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8789 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8791 current_function_uses_pic_offset_table = 1;
8793 return pic_offset_table_rtx;
8796 /* Function to init struct machine_function.
8797 This will be called, via a pointer variable,
8798 from push_function_context. */
8800 static struct machine_function *
8801 rs6000_init_machine_status (void)
8803 return ggc_alloc_cleared (sizeof (machine_function));
8806 /* These macros test for integers and extract the low-order bits. */
8808 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8809 && GET_MODE (X) == VOIDmode)
8811 #define INT_LOWPART(X) \
8812 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8818 unsigned long val = INT_LOWPART (op);
8820 /* If the high bit is zero, the value is the first 1 bit we find
8822 if ((val & 0x80000000) == 0)
8824 if ((val & 0xffffffff) == 0)
8828 while (((val <<= 1) & 0x80000000) == 0)
8833 /* If the high bit is set and the low bit is not, or the mask is all
8834 1's, the value is zero. */
8835 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8838 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8841 while (((val >>= 1) & 1) != 0)
8851 unsigned long val = INT_LOWPART (op);
8853 /* If the low bit is zero, the value is the first 1 bit we find from
8857 if ((val & 0xffffffff) == 0)
8861 while (((val >>= 1) & 1) == 0)
8867 /* If the low bit is set and the high bit is not, or the mask is all
8868 1's, the value is 31. */
8869 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8872 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8875 while (((val <<= 1) & 0x80000000) != 0)
8881 /* Locate some local-dynamic symbol still in use by this function
8882 so that we can print its name in some tls_ld pattern. */
8885 rs6000_get_some_local_dynamic_name (void)
8889 if (cfun->machine->some_ld_name)
8890 return cfun->machine->some_ld_name;
8892 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8894 && for_each_rtx (&PATTERN (insn),
8895 rs6000_get_some_local_dynamic_name_1, 0))
8896 return cfun->machine->some_ld_name;
8901 /* Helper function for rs6000_get_some_local_dynamic_name. */
8904 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8908 if (GET_CODE (x) == SYMBOL_REF)
8910 const char *str = XSTR (x, 0);
8911 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8913 cfun->machine->some_ld_name = str;
8921 /* Print an operand. Recognize special options, documented below. */
8924 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8925 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8927 #define SMALL_DATA_RELOC "sda21"
8928 #define SMALL_DATA_REG 0
8932 print_operand (FILE *file, rtx x, int code)
8936 unsigned HOST_WIDE_INT uval;
8941 /* Write out an instruction after the call which may be replaced
8942 with glue code by the loader. This depends on the AIX version. */
8943 asm_fprintf (file, RS6000_CALL_GLUE);
8946 /* %a is output_address. */
8949 /* If X is a constant integer whose low-order 5 bits are zero,
8950 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8951 in the AIX assembler where "sri" with a zero shift count
8952 writes a trash instruction. */
8953 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8960 /* If constant, low-order 16 bits of constant, unsigned.
8961 Otherwise, write normally. */
8963 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8965 print_operand (file, x, 0);
8969 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8970 for 64-bit mask direction. */
8971 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8974 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8978 /* X is a CR register. Print the number of the EQ bit of the CR */
8979 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8980 output_operand_lossage ("invalid %%E value");
8982 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8986 /* X is a CR register. Print the shift count needed to move it
8987 to the high-order four bits. */
8988 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8989 output_operand_lossage ("invalid %%f value");
8991 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8995 /* Similar, but print the count for the rotate in the opposite
8997 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8998 output_operand_lossage ("invalid %%F value");
9000 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
9004 /* X is a constant integer. If it is negative, print "m",
9005 otherwise print "z". This is to make an aze or ame insn. */
9006 if (GET_CODE (x) != CONST_INT)
9007 output_operand_lossage ("invalid %%G value");
9008 else if (INTVAL (x) >= 0)
9015 /* If constant, output low-order five bits. Otherwise, write
9018 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
9020 print_operand (file, x, 0);
9024 /* If constant, output low-order six bits. Otherwise, write
9027 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
9029 print_operand (file, x, 0);
9033 /* Print `i' if this is a constant, else nothing. */
9039 /* Write the bit number in CCR for jump. */
9042 output_operand_lossage ("invalid %%j code");
9044 fprintf (file, "%d", i);
9048 /* Similar, but add one for shift count in rlinm for scc and pass
9049 scc flag to `ccr_bit'. */
9052 output_operand_lossage ("invalid %%J code");
9054 /* If we want bit 31, write a shift count of zero, not 32. */
9055 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9059 /* X must be a constant. Write the 1's complement of the
9062 output_operand_lossage ("invalid %%k value");
9064 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
9068 /* X must be a symbolic constant on ELF. Write an
9069 expression suitable for an 'addi' that adds in the low 16
9071 if (GET_CODE (x) != CONST)
9073 print_operand_address (file, x);
9078 if (GET_CODE (XEXP (x, 0)) != PLUS
9079 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
9080 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
9081 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
9082 output_operand_lossage ("invalid %%K value");
9083 print_operand_address (file, XEXP (XEXP (x, 0), 0));
9085 /* For GNU as, there must be a non-alphanumeric character
9086 between 'l' and the number. The '-' is added by
9087 print_operand() already. */
9088 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
9090 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
9094 /* %l is output_asm_label. */
9097 /* Write second word of DImode or DFmode reference. Works on register
9098 or non-indexed memory only. */
9099 if (GET_CODE (x) == REG)
9100 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
9101 else if (GET_CODE (x) == MEM)
9103 /* Handle possible auto-increment. Since it is pre-increment and
9104 we have already done it, we can just use an offset of word. */
9105 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9106 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9107 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
9110 output_address (XEXP (adjust_address_nv (x, SImode,
9114 if (small_data_operand (x, GET_MODE (x)))
9115 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9116 reg_names[SMALL_DATA_REG]);
9121 /* MB value for a mask operand. */
9122 if (! mask_operand (x, SImode))
9123 output_operand_lossage ("invalid %%m value");
9125 fprintf (file, "%d", extract_MB (x));
9129 /* ME value for a mask operand. */
9130 if (! mask_operand (x, SImode))
9131 output_operand_lossage ("invalid %%M value");
9133 fprintf (file, "%d", extract_ME (x));
9136 /* %n outputs the negative of its operand. */
9139 /* Write the number of elements in the vector times 4. */
9140 if (GET_CODE (x) != PARALLEL)
9141 output_operand_lossage ("invalid %%N value");
9143 fprintf (file, "%d", XVECLEN (x, 0) * 4);
9147 /* Similar, but subtract 1 first. */
9148 if (GET_CODE (x) != PARALLEL)
9149 output_operand_lossage ("invalid %%O value");
9151 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
9155 /* X is a CONST_INT that is a power of two. Output the logarithm. */
9157 || INT_LOWPART (x) < 0
9158 || (i = exact_log2 (INT_LOWPART (x))) < 0)
9159 output_operand_lossage ("invalid %%p value");
9161 fprintf (file, "%d", i);
9165 /* The operand must be an indirect memory reference. The result
9166 is the register name. */
9167 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
9168 || REGNO (XEXP (x, 0)) >= 32)
9169 output_operand_lossage ("invalid %%P value");
9171 fprintf (file, "%s", reg_names[REGNO (XEXP (x, 0))]);
9175 /* This outputs the logical code corresponding to a boolean
9176 expression. The expression may have one or both operands
9177 negated (if one, only the first one). For condition register
9178 logical operations, it will also treat the negated
9179 CR codes as NOTs, but not handle NOTs of them. */
9181 const char *const *t = 0;
9183 enum rtx_code code = GET_CODE (x);
9184 static const char * const tbl[3][3] = {
9185 { "and", "andc", "nor" },
9186 { "or", "orc", "nand" },
9187 { "xor", "eqv", "xor" } };
9191 else if (code == IOR)
9193 else if (code == XOR)
9196 output_operand_lossage ("invalid %%q value");
9198 if (GET_CODE (XEXP (x, 0)) != NOT)
9202 if (GET_CODE (XEXP (x, 1)) == NOT)
9220 /* X is a CR register. Print the mask for `mtcrf'. */
9221 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
9222 output_operand_lossage ("invalid %%R value");
9224 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
9228 /* Low 5 bits of 32 - value */
9230 output_operand_lossage ("invalid %%s value");
9232 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
9236 /* PowerPC64 mask position. All 0's is excluded.
9237 CONST_INT 32-bit mask is considered sign-extended so any
9238 transition must occur within the CONST_INT, not on the boundary. */
9239 if (! mask64_operand (x, DImode))
9240 output_operand_lossage ("invalid %%S value");
9242 uval = INT_LOWPART (x);
9244 if (uval & 1) /* Clear Left */
9246 #if HOST_BITS_PER_WIDE_INT > 64
9247 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9251 else /* Clear Right */
9254 #if HOST_BITS_PER_WIDE_INT > 64
9255 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
9263 fprintf (file, "%d", i);
9267 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
9268 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
9271 /* Bit 3 is OV bit. */
9272 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
9274 /* If we want bit 31, write a shift count of zero, not 32. */
9275 fprintf (file, "%d", i == 31 ? 0 : i + 1);
9279 /* Print the symbolic name of a branch target register. */
9280 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
9281 && REGNO (x) != COUNT_REGISTER_REGNUM))
9282 output_operand_lossage ("invalid %%T value");
9283 else if (REGNO (x) == LINK_REGISTER_REGNUM)
9284 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
9286 fputs ("ctr", file);
9290 /* High-order 16 bits of constant for use in unsigned operand. */
9292 output_operand_lossage ("invalid %%u value");
9294 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9295 (INT_LOWPART (x) >> 16) & 0xffff);
9299 /* High-order 16 bits of constant for use in signed operand. */
9301 output_operand_lossage ("invalid %%v value");
9303 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
9304 (INT_LOWPART (x) >> 16) & 0xffff);
9308 /* Print `u' if this has an auto-increment or auto-decrement. */
9309 if (GET_CODE (x) == MEM
9310 && (GET_CODE (XEXP (x, 0)) == PRE_INC
9311 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
9316 /* Print the trap code for this operand. */
9317 switch (GET_CODE (x))
9320 fputs ("eq", file); /* 4 */
9323 fputs ("ne", file); /* 24 */
9326 fputs ("lt", file); /* 16 */
9329 fputs ("le", file); /* 20 */
9332 fputs ("gt", file); /* 8 */
9335 fputs ("ge", file); /* 12 */
9338 fputs ("llt", file); /* 2 */
9341 fputs ("lle", file); /* 6 */
9344 fputs ("lgt", file); /* 1 */
9347 fputs ("lge", file); /* 5 */
9355 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
9358 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
9359 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
9361 print_operand (file, x, 0);
9365 /* MB value for a PowerPC64 rldic operand. */
9366 val = (GET_CODE (x) == CONST_INT
9367 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
9372 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
9373 if ((val <<= 1) < 0)
9376 #if HOST_BITS_PER_WIDE_INT == 32
9377 if (GET_CODE (x) == CONST_INT && i >= 0)
9378 i += 32; /* zero-extend high-part was all 0's */
9379 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
9381 val = CONST_DOUBLE_LOW (x);
9388 for ( ; i < 64; i++)
9389 if ((val <<= 1) < 0)
9394 fprintf (file, "%d", i + 1);
9398 if (GET_CODE (x) == MEM
9399 && legitimate_indexed_address_p (XEXP (x, 0), 0))
9404 /* Like 'L', for third word of TImode */
9405 if (GET_CODE (x) == REG)
9406 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
9407 else if (GET_CODE (x) == MEM)
9409 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9410 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9411 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
9413 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
9414 if (small_data_operand (x, GET_MODE (x)))
9415 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9416 reg_names[SMALL_DATA_REG]);
9421 /* X is a SYMBOL_REF. Write out the name preceded by a
9422 period and without any trailing data in brackets. Used for function
9423 names. If we are configured for System V (or the embedded ABI) on
9424 the PowerPC, do not emit the period, since those systems do not use
9425 TOCs and the like. */
9426 if (GET_CODE (x) != SYMBOL_REF)
9429 if (XSTR (x, 0)[0] != '.')
9431 switch (DEFAULT_ABI)
9446 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
9448 assemble_name (file, XSTR (x, 0));
9452 /* Like 'L', for last word of TImode. */
9453 if (GET_CODE (x) == REG)
9454 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
9455 else if (GET_CODE (x) == MEM)
9457 if (GET_CODE (XEXP (x, 0)) == PRE_INC
9458 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
9459 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
9461 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
9462 if (small_data_operand (x, GET_MODE (x)))
9463 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9464 reg_names[SMALL_DATA_REG]);
9468 /* Print AltiVec or SPE memory operand. */
9473 if (GET_CODE (x) != MEM)
9481 if (GET_CODE (tmp) == REG)
9483 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
9486 /* Handle [reg+UIMM]. */
9487 else if (GET_CODE (tmp) == PLUS &&
9488 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
9492 if (GET_CODE (XEXP (tmp, 0)) != REG)
9495 x = INTVAL (XEXP (tmp, 1));
9496 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
9500 /* Fall through. Must be [reg+reg]. */
9502 if (GET_CODE (tmp) == REG)
9503 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
9504 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
9506 if (REGNO (XEXP (tmp, 0)) == 0)
9507 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
9508 reg_names[ REGNO (XEXP (tmp, 0)) ]);
9510 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
9511 reg_names[ REGNO (XEXP (tmp, 1)) ]);
9519 if (GET_CODE (x) == REG)
9520 fprintf (file, "%s", reg_names[REGNO (x)]);
9521 else if (GET_CODE (x) == MEM)
9523 /* We need to handle PRE_INC and PRE_DEC here, since we need to
9524 know the width from the mode. */
9525 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
9526 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
9527 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9528 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
9529 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
9530 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
9532 output_address (XEXP (x, 0));
9535 output_addr_const (file, x);
9539 assemble_name (file, rs6000_get_some_local_dynamic_name ());
9543 output_operand_lossage ("invalid %%xn code");
9547 /* Print the address of an operand. */
9550 print_operand_address (FILE *file, rtx x)
9552 if (GET_CODE (x) == REG)
9553 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
9554 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
9555 || GET_CODE (x) == LABEL_REF)
9557 output_addr_const (file, x);
9558 if (small_data_operand (x, GET_MODE (x)))
9559 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
9560 reg_names[SMALL_DATA_REG]);
9561 else if (TARGET_TOC)
9564 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
9566 if (REGNO (XEXP (x, 0)) == 0)
9567 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
9568 reg_names[ REGNO (XEXP (x, 0)) ]);
9570 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
9571 reg_names[ REGNO (XEXP (x, 1)) ]);
9573 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
9574 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
9575 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
9577 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9578 && CONSTANT_P (XEXP (x, 1)))
9580 output_addr_const (file, XEXP (x, 1));
9581 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9585 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
9586 && CONSTANT_P (XEXP (x, 1)))
9588 fprintf (file, "lo16(");
9589 output_addr_const (file, XEXP (x, 1));
9590 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
9593 else if (legitimate_constant_pool_address_p (x))
9595 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
9597 rtx contains_minus = XEXP (x, 1);
9601 /* Find the (minus (sym) (toc)) buried in X, and temporarily
9602 turn it into (sym) for output_addr_const. */
9603 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
9604 contains_minus = XEXP (contains_minus, 0);
9606 minus = XEXP (contains_minus, 0);
9607 symref = XEXP (minus, 0);
9608 XEXP (contains_minus, 0) = symref;
9613 name = XSTR (symref, 0);
9614 newname = alloca (strlen (name) + sizeof ("@toc"));
9615 strcpy (newname, name);
9616 strcat (newname, "@toc");
9617 XSTR (symref, 0) = newname;
9619 output_addr_const (file, XEXP (x, 1));
9621 XSTR (symref, 0) = name;
9622 XEXP (contains_minus, 0) = minus;
9625 output_addr_const (file, XEXP (x, 1));
9627 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
9633 /* Target hook for assembling integer objects. The PowerPC version has
9634 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
9635 is defined. It also needs to handle DI-mode objects on 64-bit
9639 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
9641 #ifdef RELOCATABLE_NEEDS_FIXUP
9642 /* Special handling for SI values. */
9643 if (size == 4 && aligned_p)
9645 extern int in_toc_section (void);
9646 static int recurse = 0;
9648 /* For -mrelocatable, we mark all addresses that need to be fixed up
9649 in the .fixup section. */
9650 if (TARGET_RELOCATABLE
9651 && !in_toc_section ()
9652 && !in_text_section ()
9654 && GET_CODE (x) != CONST_INT
9655 && GET_CODE (x) != CONST_DOUBLE
9661 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
9663 ASM_OUTPUT_LABEL (asm_out_file, buf);
9664 fprintf (asm_out_file, "\t.long\t(");
9665 output_addr_const (asm_out_file, x);
9666 fprintf (asm_out_file, ")@fixup\n");
9667 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
9668 ASM_OUTPUT_ALIGN (asm_out_file, 2);
9669 fprintf (asm_out_file, "\t.long\t");
9670 assemble_name (asm_out_file, buf);
9671 fprintf (asm_out_file, "\n\t.previous\n");
9675 /* Remove initial .'s to turn a -mcall-aixdesc function
9676 address into the address of the descriptor, not the function
9678 else if (GET_CODE (x) == SYMBOL_REF
9679 && XSTR (x, 0)[0] == '.'
9680 && DEFAULT_ABI == ABI_AIX)
9682 const char *name = XSTR (x, 0);
9683 while (*name == '.')
9686 fprintf (asm_out_file, "\t.long\t%s\n", name);
9690 #endif /* RELOCATABLE_NEEDS_FIXUP */
9691 return default_assemble_integer (x, size, aligned_p);
9694 #ifdef HAVE_GAS_HIDDEN
9695 /* Emit an assembler directive to set symbol visibility for DECL to
9699 rs6000_assemble_visibility (tree decl, int vis)
9701 /* Functions need to have their entry point symbol visibility set as
9702 well as their descriptor symbol visibility. */
9703 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
9705 static const char * const visibility_types[] = {
9706 NULL, "internal", "hidden", "protected"
9709 const char *name, *type;
9711 name = ((* targetm.strip_name_encoding)
9712 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
9713 type = visibility_types[vis];
9715 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
9716 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
9719 default_assemble_visibility (decl, vis);
9724 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
9726 /* Reversal of FP compares takes care -- an ordered compare
9727 becomes an unordered compare and vice versa. */
9728 if (mode == CCFPmode
9729 && (!flag_finite_math_only
9730 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
9731 || code == UNEQ || code == LTGT))
9732 return reverse_condition_maybe_unordered (code);
9734 return reverse_condition (code);
9737 /* Generate a compare for CODE. Return a brand-new rtx that
9738 represents the result of the compare. */
9741 rs6000_generate_compare (enum rtx_code code)
9743 enum machine_mode comp_mode;
9746 if (rs6000_compare_fp_p)
9747 comp_mode = CCFPmode;
9748 else if (code == GTU || code == LTU
9749 || code == GEU || code == LEU)
9750 comp_mode = CCUNSmode;
9754 /* First, the compare. */
9755 compare_result = gen_reg_rtx (comp_mode);
9757 /* SPE FP compare instructions on the GPRs. Yuck! */
9758 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9759 && rs6000_compare_fp_p)
9761 rtx cmp, or1, or2, or_result, compare_result2;
9769 cmp = flag_finite_math_only
9770 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9772 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9773 rs6000_compare_op1);
9781 cmp = flag_finite_math_only
9782 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9784 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9785 rs6000_compare_op1);
9793 cmp = flag_finite_math_only
9794 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9796 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9797 rs6000_compare_op1);
9803 /* Synthesize LE and GE from LT/GT || EQ. */
9804 if (code == LE || code == GE || code == LEU || code == GEU)
9806 /* Synthesize GE/LE frome GT/LT || EQ. */
9812 case LE: code = LT; break;
9813 case GE: code = GT; break;
9814 case LEU: code = LT; break;
9815 case GEU: code = GT; break;
9819 or1 = gen_reg_rtx (SImode);
9820 or2 = gen_reg_rtx (SImode);
9821 or_result = gen_reg_rtx (CCEQmode);
9822 compare_result2 = gen_reg_rtx (CCFPmode);
9825 cmp = flag_finite_math_only
9826 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9828 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9829 rs6000_compare_op1);
9832 /* The MC8540 FP compare instructions set the CR bits
9833 differently than other PPC compare instructions. For
9834 that matter, there is no generic test instruction, but a
9835 testgt, testlt, and testeq. For a true condition, bit 2
9836 is set (x1xx) in the CR. Following the traditional CR
9842 ... bit 2 would be a GT CR alias, so later on we
9843 look in the GT bits for the branch instructions.
9844 However, we must be careful to emit correct RTL in
9845 the meantime, so optimizations don't get confused. */
9847 or1 = gen_rtx_NE (SImode, compare_result, const0_rtx);
9848 or2 = gen_rtx_NE (SImode, compare_result2, const0_rtx);
9850 /* OR them together. */
9851 cmp = gen_rtx_SET (VOIDmode, or_result,
9852 gen_rtx_COMPARE (CCEQmode,
9853 gen_rtx_IOR (SImode, or1, or2),
9855 compare_result = or_result;
9860 /* We only care about 1 bit (x1xx), so map everything to NE to
9861 maintain rtl sanity. We'll get to the right bit (x1xx) at
9862 code output time. */
9863 if (code == NE || code == LTGT)
9864 /* Do the inverse here because we have no cmpne
9865 instruction. We use the cmpeq instruction and expect
9866 to get a 0 instead. */
9875 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9876 gen_rtx_COMPARE (comp_mode,
9878 rs6000_compare_op1)));
9880 /* Some kinds of FP comparisons need an OR operation;
9881 under flag_finite_math_only we don't bother. */
9882 if (rs6000_compare_fp_p
9883 && ! flag_finite_math_only
9884 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9885 && (code == LE || code == GE
9886 || code == UNEQ || code == LTGT
9887 || code == UNGT || code == UNLT))
9889 enum rtx_code or1, or2;
9890 rtx or1_rtx, or2_rtx, compare2_rtx;
9891 rtx or_result = gen_reg_rtx (CCEQmode);
9895 case LE: or1 = LT; or2 = EQ; break;
9896 case GE: or1 = GT; or2 = EQ; break;
9897 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9898 case LTGT: or1 = LT; or2 = GT; break;
9899 case UNGT: or1 = UNORDERED; or2 = GT; break;
9900 case UNLT: or1 = UNORDERED; or2 = LT; break;
9903 validate_condition_mode (or1, comp_mode);
9904 validate_condition_mode (or2, comp_mode);
9905 or1_rtx = gen_rtx_fmt_ee (or1, SImode, compare_result, const0_rtx);
9906 or2_rtx = gen_rtx_fmt_ee (or2, SImode, compare_result, const0_rtx);
9907 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9908 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9910 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9912 compare_result = or_result;
9916 validate_condition_mode (code, GET_MODE (compare_result));
9918 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
9922 /* Emit the RTL for an sCOND pattern. */
9925 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9928 enum machine_mode op_mode;
9929 enum rtx_code cond_code;
9931 condition_rtx = rs6000_generate_compare (code);
9932 cond_code = GET_CODE (condition_rtx);
9935 || cond_code == GE || cond_code == LE
9936 || cond_code == GEU || cond_code == LEU
9937 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9939 rtx not_result = gen_reg_rtx (CCEQmode);
9940 rtx not_op, rev_cond_rtx;
9941 enum machine_mode cc_mode;
9943 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9945 rev_cond_rtx = gen_rtx_fmt_ee (rs6000_reverse_condition (cc_mode, cond_code),
9946 SImode, XEXP (condition_rtx, 0), const0_rtx);
9947 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9948 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9949 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9952 op_mode = GET_MODE (rs6000_compare_op0);
9953 if (op_mode == VOIDmode)
9954 op_mode = GET_MODE (rs6000_compare_op1);
9956 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9958 PUT_MODE (condition_rtx, DImode);
9959 convert_move (result, condition_rtx, 0);
9963 PUT_MODE (condition_rtx, SImode);
9964 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9968 /* Emit a branch of kind CODE to location LOC. */
9971 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9973 rtx condition_rtx, loc_ref;
9975 condition_rtx = rs6000_generate_compare (code);
9976 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9977 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9978 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9982 /* Return the string to output a conditional branch to LABEL, which is
9983 the operand number of the label, or -1 if the branch is really a
9986 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9987 condition code register and its mode specifies what kind of
9990 REVERSED is nonzero if we should reverse the sense of the comparison.
9992 INSN is the insn. */
9995 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
9997 static char string[64];
9998 enum rtx_code code = GET_CODE (op);
9999 rtx cc_reg = XEXP (op, 0);
10000 enum machine_mode mode = GET_MODE (cc_reg);
10001 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
10002 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
10003 int really_reversed = reversed ^ need_longbranch;
10009 validate_condition_mode (code, mode);
10011 /* Work out which way this really branches. We could use
10012 reverse_condition_maybe_unordered here always but this
10013 makes the resulting assembler clearer. */
10014 if (really_reversed)
10016 /* Reversal of FP compares takes care -- an ordered compare
10017 becomes an unordered compare and vice versa. */
10018 if (mode == CCFPmode)
10019 code = reverse_condition_maybe_unordered (code);
10021 code = reverse_condition (code);
10024 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
10026 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
10029 /* Opposite of GT. */
10031 else if (code == NE)
10039 /* Not all of these are actually distinct opcodes, but
10040 we distinguish them for clarity of the resulting assembler. */
10041 case NE: case LTGT:
10042 ccode = "ne"; break;
10043 case EQ: case UNEQ:
10044 ccode = "eq"; break;
10046 ccode = "ge"; break;
10047 case GT: case GTU: case UNGT:
10048 ccode = "gt"; break;
10050 ccode = "le"; break;
10051 case LT: case LTU: case UNLT:
10052 ccode = "lt"; break;
10053 case UNORDERED: ccode = "un"; break;
10054 case ORDERED: ccode = "nu"; break;
10055 case UNGE: ccode = "nl"; break;
10056 case UNLE: ccode = "ng"; break;
10061 /* Maybe we have a guess as to how likely the branch is.
10062 The old mnemonics don't have a way to specify this information. */
10064 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
10065 if (note != NULL_RTX)
10067 /* PROB is the difference from 50%. */
10068 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
10069 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
10071 /* Only hint for highly probable/improbable branches on newer
10072 cpus as static prediction overrides processor dynamic
10073 prediction. For older cpus we may as well always hint, but
10074 assume not taken for branches that are very close to 50% as a
10075 mispredicted taken branch is more expensive than a
10076 mispredicted not-taken branch. */
10078 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
10080 if (abs (prob) > REG_BR_PROB_BASE / 20
10081 && ((prob > 0) ^ need_longbranch))
10089 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
10091 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
10093 /* We need to escape any '%' characters in the reg_names string.
10094 Assume they'd only be the first character.... */
10095 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
10097 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
10101 /* If the branch distance was too far, we may have to use an
10102 unconditional branch to go the distance. */
10103 if (need_longbranch)
10104 s += sprintf (s, ",$+8\n\tb %s", label);
10106 s += sprintf (s, ",%s", label);
10112 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
10113 operands of the last comparison is nonzero/true, FALSE_COND if it
10114 is zero/false. Return 0 if the hardware has no such operation. */
10117 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10119 enum rtx_code code = GET_CODE (op);
10120 rtx op0 = rs6000_compare_op0;
10121 rtx op1 = rs6000_compare_op1;
10122 REAL_VALUE_TYPE c1;
10123 enum machine_mode compare_mode = GET_MODE (op0);
10124 enum machine_mode result_mode = GET_MODE (dest);
10127 /* These modes should always match. */
10128 if (GET_MODE (op1) != compare_mode
10129 /* In the isel case however, we can use a compare immediate, so
10130 op1 may be a small constant. */
10131 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
10133 if (GET_MODE (true_cond) != result_mode)
10135 if (GET_MODE (false_cond) != result_mode)
10138 /* First, work out if the hardware can do this at all, or
10139 if it's too slow.... */
10140 if (! rs6000_compare_fp_p)
10143 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
10147 /* Eliminate half of the comparisons by switching operands, this
10148 makes the remaining code simpler. */
10149 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
10150 || code == LTGT || code == LT || code == UNLE)
10152 code = reverse_condition_maybe_unordered (code);
10154 true_cond = false_cond;
10158 /* UNEQ and LTGT take four instructions for a comparison with zero,
10159 it'll probably be faster to use a branch here too. */
10160 if (code == UNEQ && HONOR_NANS (compare_mode))
10163 if (GET_CODE (op1) == CONST_DOUBLE)
10164 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
10166 /* We're going to try to implement comparisons by performing
10167 a subtract, then comparing against zero. Unfortunately,
10168 Inf - Inf is NaN which is not zero, and so if we don't
10169 know that the operand is finite and the comparison
10170 would treat EQ different to UNORDERED, we can't do it. */
10171 if (HONOR_INFINITIES (compare_mode)
10172 && code != GT && code != UNGE
10173 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
10174 /* Constructs of the form (a OP b ? a : b) are safe. */
10175 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
10176 || (! rtx_equal_p (op0, true_cond)
10177 && ! rtx_equal_p (op1, true_cond))))
10179 /* At this point we know we can use fsel. */
10181 /* Reduce the comparison to a comparison against zero. */
10182 temp = gen_reg_rtx (compare_mode);
10183 emit_insn (gen_rtx_SET (VOIDmode, temp,
10184 gen_rtx_MINUS (compare_mode, op0, op1)));
10186 op1 = CONST0_RTX (compare_mode);
10188 /* If we don't care about NaNs we can reduce some of the comparisons
10189 down to faster ones. */
10190 if (! HONOR_NANS (compare_mode))
10196 true_cond = false_cond;
10209 /* Now, reduce everything down to a GE. */
10216 temp = gen_reg_rtx (compare_mode);
10217 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10222 temp = gen_reg_rtx (compare_mode);
10223 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
10228 temp = gen_reg_rtx (compare_mode);
10229 emit_insn (gen_rtx_SET (VOIDmode, temp,
10230 gen_rtx_NEG (compare_mode,
10231 gen_rtx_ABS (compare_mode, op0))));
10236 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
10237 temp = gen_reg_rtx (result_mode);
10238 emit_insn (gen_rtx_SET (VOIDmode, temp,
10239 gen_rtx_IF_THEN_ELSE (result_mode,
10240 gen_rtx_GE (VOIDmode,
10242 true_cond, false_cond)));
10243 false_cond = true_cond;
10246 temp = gen_reg_rtx (compare_mode);
10247 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10252 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
10253 temp = gen_reg_rtx (result_mode);
10254 emit_insn (gen_rtx_SET (VOIDmode, temp,
10255 gen_rtx_IF_THEN_ELSE (result_mode,
10256 gen_rtx_GE (VOIDmode,
10258 true_cond, false_cond)));
10259 true_cond = false_cond;
10262 temp = gen_reg_rtx (compare_mode);
10263 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
10271 emit_insn (gen_rtx_SET (VOIDmode, dest,
10272 gen_rtx_IF_THEN_ELSE (result_mode,
10273 gen_rtx_GE (VOIDmode,
10275 true_cond, false_cond)));
10279 /* Same as above, but for ints (isel). */
10282 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
10284 rtx condition_rtx, cr;
10286 /* All isel implementations thus far are 32-bits. */
10287 if (GET_MODE (rs6000_compare_op0) != SImode)
10290 /* We still have to do the compare, because isel doesn't do a
10291 compare, it just looks at the CRx bits set by a previous compare
10293 condition_rtx = rs6000_generate_compare (GET_CODE (op));
10294 cr = XEXP (condition_rtx, 0);
10296 if (GET_MODE (cr) == CCmode)
10297 emit_insn (gen_isel_signed (dest, condition_rtx,
10298 true_cond, false_cond, cr));
10300 emit_insn (gen_isel_unsigned (dest, condition_rtx,
10301 true_cond, false_cond, cr));
10307 output_isel (rtx *operands)
10309 enum rtx_code code;
10311 code = GET_CODE (operands[1]);
10312 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
10314 PUT_CODE (operands[1], reverse_condition (code));
10315 return "isel %0,%3,%2,%j1";
10318 return "isel %0,%2,%3,%j1";
10322 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
10324 enum machine_mode mode = GET_MODE (op0);
10328 if (code == SMAX || code == SMIN)
10333 if (code == SMAX || code == UMAX)
10334 target = emit_conditional_move (dest, c, op0, op1, mode,
10335 op0, op1, mode, 0);
10337 target = emit_conditional_move (dest, c, op0, op1, mode,
10338 op1, op0, mode, 0);
10339 if (target == NULL_RTX)
10341 if (target != dest)
10342 emit_move_insn (dest, target);
10345 /* Emit instructions to move SRC to DST. Called by splitters for
10346 multi-register moves. It will emit at most one instruction for
10347 each register that is accessed; that is, it won't emit li/lis pairs
10348 (or equivalent for 64-bit code). One of SRC or DST must be a hard
10352 rs6000_split_multireg_move (rtx dst, rtx src)
10354 /* The register number of the first register being moved. */
10356 /* The mode that is to be moved. */
10357 enum machine_mode mode;
10358 /* The mode that the move is being done in, and its size. */
10359 enum machine_mode reg_mode;
10361 /* The number of registers that will be moved. */
10364 reg = REG_P (dst) ? REGNO (dst) : REGNO (src);
10365 mode = GET_MODE (dst);
10366 nregs = HARD_REGNO_NREGS (reg, mode);
10367 if (FP_REGNO_P (reg))
10369 else if (ALTIVEC_REGNO_P (reg))
10370 reg_mode = V16QImode;
10372 reg_mode = word_mode;
10373 reg_mode_size = GET_MODE_SIZE (reg_mode);
10375 if (reg_mode_size * nregs != GET_MODE_SIZE (mode))
10378 if (REG_P (src) && REG_P (dst) && (REGNO (src) < REGNO (dst)))
10380 /* Move register range backwards, if we might have destructive
10383 for (i = nregs - 1; i >= 0; i--)
10384 emit_insn (gen_rtx_SET (VOIDmode,
10385 simplify_gen_subreg (reg_mode, dst, mode,
10386 i * reg_mode_size),
10387 simplify_gen_subreg (reg_mode, src, mode,
10388 i * reg_mode_size)));
10394 bool used_update = false;
10396 if (GET_CODE (src) == MEM && INT_REGNO_P (reg))
10400 if (GET_CODE (XEXP (src, 0)) == PRE_INC
10401 || GET_CODE (XEXP (src, 0)) == PRE_DEC)
10404 breg = XEXP (XEXP (src, 0), 0);
10405 delta_rtx = GET_CODE (XEXP (src, 0)) == PRE_INC
10406 ? GEN_INT (GET_MODE_SIZE (GET_MODE (src)))
10407 : GEN_INT (-GET_MODE_SIZE (GET_MODE (src)));
10408 emit_insn (TARGET_32BIT
10409 ? gen_addsi3 (breg, breg, delta_rtx)
10410 : gen_adddi3 (breg, breg, delta_rtx));
10411 src = gen_rtx_MEM (mode, breg);
10414 /* We have now address involving an base register only.
10415 If we use one of the registers to address memory,
10416 we have change that register last. */
10418 breg = (GET_CODE (XEXP (src, 0)) == PLUS
10419 ? XEXP (XEXP (src, 0), 0)
10425 if (REGNO (breg) >= REGNO (dst)
10426 && REGNO (breg) < REGNO (dst) + nregs)
10427 j = REGNO (breg) - REGNO (dst);
10430 if (GET_CODE (dst) == MEM && INT_REGNO_P (reg))
10434 if (GET_CODE (XEXP (dst, 0)) == PRE_INC
10435 || GET_CODE (XEXP (dst, 0)) == PRE_DEC)
10438 breg = XEXP (XEXP (dst, 0), 0);
10439 delta_rtx = GET_CODE (XEXP (dst, 0)) == PRE_INC
10440 ? GEN_INT (GET_MODE_SIZE (GET_MODE (dst)))
10441 : GEN_INT (-GET_MODE_SIZE (GET_MODE (dst)));
10443 /* We have to update the breg before doing the store.
10444 Use store with update, if available. */
10448 rtx nsrc = simplify_gen_subreg (reg_mode, src, mode, 0);
10449 emit_insn (TARGET_32BIT
10450 ? gen_movsi_update (breg, breg, delta_rtx, nsrc)
10451 : gen_movdi_update (breg, breg, delta_rtx, nsrc));
10452 used_update = true;
10455 emit_insn (TARGET_32BIT
10456 ? gen_addsi3 (breg, breg, delta_rtx)
10457 : gen_adddi3 (breg, breg, delta_rtx));
10458 dst = gen_rtx_MEM (mode, breg);
10462 for (i = 0; i < nregs; i++)
10464 /* Calculate index to next subword. */
10469 /* If compiler already emited move of first word by
10470 store with update, no need to do anything. */
10471 if (j == 0 && used_update)
10474 emit_insn (gen_rtx_SET (VOIDmode,
10475 simplify_gen_subreg (reg_mode, dst, mode,
10476 j * reg_mode_size),
10477 simplify_gen_subreg (reg_mode, src, mode,
10478 j * reg_mode_size)));
10484 /* This page contains routines that are used to determine what the
10485 function prologue and epilogue code will do and write them out. */
10487 /* Return the first fixed-point register that is required to be
10488 saved. 32 if none. */
10491 first_reg_to_save (void)
10495 /* Find lowest numbered live register. */
10496 for (first_reg = 13; first_reg <= 31; first_reg++)
10497 if (regs_ever_live[first_reg]
10498 && (! call_used_regs[first_reg]
10499 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
10500 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
10501 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
10506 && current_function_uses_pic_offset_table
10507 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
10508 return RS6000_PIC_OFFSET_TABLE_REGNUM;
10514 /* Similar, for FP regs. */
10517 first_fp_reg_to_save (void)
10521 /* Find lowest numbered live register. */
10522 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
10523 if (regs_ever_live[first_reg])
10529 /* Similar, for AltiVec regs. */
10532 first_altivec_reg_to_save (void)
10536 /* Stack frame remains as is unless we are in AltiVec ABI. */
10537 if (! TARGET_ALTIVEC_ABI)
10538 return LAST_ALTIVEC_REGNO + 1;
10540 /* Find lowest numbered live register. */
10541 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
10542 if (regs_ever_live[i])
10548 /* Return a 32-bit mask of the AltiVec registers we need to set in
10549 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
10550 the 32-bit word is 0. */
10552 static unsigned int
10553 compute_vrsave_mask (void)
10555 unsigned int i, mask = 0;
10557 /* First, find out if we use _any_ altivec registers. */
10558 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10559 if (regs_ever_live[i])
10560 mask |= ALTIVEC_REG_BIT (i);
10565 /* Next, remove the argument registers from the set. These must
10566 be in the VRSAVE mask set by the caller, so we don't need to add
10567 them in again. More importantly, the mask we compute here is
10568 used to generate CLOBBERs in the set_vrsave insn, and we do not
10569 wish the argument registers to die. */
10570 for (i = cfun->args_info.vregno - 1; i >= ALTIVEC_ARG_MIN_REG; --i)
10571 mask &= ~ALTIVEC_REG_BIT (i);
10573 /* Similarly, remove the return value from the set. */
10576 diddle_return_value (is_altivec_return_reg, &yes);
10578 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
10585 is_altivec_return_reg (rtx reg, void *xyes)
10587 bool *yes = (bool *) xyes;
10588 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
10593 /* Calculate the stack information for the current function. This is
10594 complicated by having two separate calling sequences, the AIX calling
10595 sequence and the V.4 calling sequence.
10597 AIX (and Darwin/Mac OS X) stack frames look like:
10599 SP----> +---------------------------------------+
10600 | back chain to caller | 0 0
10601 +---------------------------------------+
10602 | saved CR | 4 8 (8-11)
10603 +---------------------------------------+
10605 +---------------------------------------+
10606 | reserved for compilers | 12 24
10607 +---------------------------------------+
10608 | reserved for binders | 16 32
10609 +---------------------------------------+
10610 | saved TOC pointer | 20 40
10611 +---------------------------------------+
10612 | Parameter save area (P) | 24 48
10613 +---------------------------------------+
10614 | Alloca space (A) | 24+P etc.
10615 +---------------------------------------+
10616 | Local variable space (L) | 24+P+A
10617 +---------------------------------------+
10618 | Float/int conversion temporary (X) | 24+P+A+L
10619 +---------------------------------------+
10620 | Save area for AltiVec registers (W) | 24+P+A+L+X
10621 +---------------------------------------+
10622 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
10623 +---------------------------------------+
10624 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
10625 +---------------------------------------+
10626 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
10627 +---------------------------------------+
10628 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
10629 +---------------------------------------+
10630 old SP->| back chain to caller's caller |
10631 +---------------------------------------+
10633 The required alignment for AIX configurations is two words (i.e., 8
10637 V.4 stack frames look like:
10639 SP----> +---------------------------------------+
10640 | back chain to caller | 0
10641 +---------------------------------------+
10642 | caller's saved LR | 4
10643 +---------------------------------------+
10644 | Parameter save area (P) | 8
10645 +---------------------------------------+
10646 | Alloca space (A) | 8+P
10647 +---------------------------------------+
10648 | Varargs save area (V) | 8+P+A
10649 +---------------------------------------+
10650 | Local variable space (L) | 8+P+A+V
10651 +---------------------------------------+
10652 | Float/int conversion temporary (X) | 8+P+A+V+L
10653 +---------------------------------------+
10654 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
10655 +---------------------------------------+
10656 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
10657 +---------------------------------------+
10658 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
10659 +---------------------------------------+
10660 | SPE: area for 64-bit GP registers |
10661 +---------------------------------------+
10662 | SPE alignment padding |
10663 +---------------------------------------+
10664 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
10665 +---------------------------------------+
10666 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
10667 +---------------------------------------+
10668 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
10669 +---------------------------------------+
10670 old SP->| back chain to caller's caller |
10671 +---------------------------------------+
10673 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
10674 given. (But note below and in sysv4.h that we require only 8 and
10675 may round up the size of our stack frame anyways. The historical
10676 reason is early versions of powerpc-linux which didn't properly
10677 align the stack at program startup. A happy side-effect is that
10678 -mno-eabi libraries can be used with -meabi programs.)
10680 The EABI configuration defaults to the V.4 layout. However,
10681 the stack alignment requirements may differ. If -mno-eabi is not
10682 given, the required stack alignment is 8 bytes; if -mno-eabi is
10683 given, the required alignment is 16 bytes. (But see V.4 comment
10686 #ifndef ABI_STACK_BOUNDARY
10687 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
10690 static rs6000_stack_t *
10691 rs6000_stack_info (void)
10693 static rs6000_stack_t info, zero_info;
10694 rs6000_stack_t *info_ptr = &info;
10695 int reg_size = TARGET_32BIT ? 4 : 8;
10697 HOST_WIDE_INT total_raw_size;
10699 /* Zero all fields portably. */
10704 /* Cache value so we don't rescan instruction chain over and over. */
10705 if (cfun->machine->insn_chain_scanned_p == 0)
10707 cfun->machine->insn_chain_scanned_p = 1;
10708 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
10712 /* Select which calling sequence. */
10713 info_ptr->abi = DEFAULT_ABI;
10715 /* Calculate which registers need to be saved & save area size. */
10716 info_ptr->first_gp_reg_save = first_reg_to_save ();
10717 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
10718 even if it currently looks like we won't. */
10719 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
10720 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
10721 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
10722 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
10723 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
10725 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
10727 /* For the SPE, we have an additional upper 32-bits on each GPR.
10728 Ideally we should save the entire 64-bits only when the upper
10729 half is used in SIMD instructions. Since we only record
10730 registers live (not the size they are used in), this proves
10731 difficult because we'd have to traverse the instruction chain at
10732 the right time, taking reload into account. This is a real pain,
10733 so we opt to save the GPRs in 64-bits always if but one register
10734 gets used in 64-bits. Otherwise, all the registers in the frame
10735 get saved in 32-bits.
10737 So... since when we save all GPRs (except the SP) in 64-bits, the
10738 traditional GP save area will be empty. */
10739 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10740 info_ptr->gp_size = 0;
10742 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
10743 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
10745 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
10746 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
10747 - info_ptr->first_altivec_reg_save);
10749 /* Does this function call anything? */
10750 info_ptr->calls_p = (! current_function_is_leaf
10751 || cfun->machine->ra_needs_full_frame);
10753 /* Determine if we need to save the link register. */
10754 if (rs6000_ra_ever_killed ()
10755 || (DEFAULT_ABI == ABI_AIX
10756 && current_function_profile
10757 && !TARGET_PROFILE_KERNEL)
10758 #ifdef TARGET_RELOCATABLE
10759 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
10761 || (info_ptr->first_fp_reg_save != 64
10762 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
10763 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
10764 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
10765 || (DEFAULT_ABI == ABI_DARWIN
10767 && current_function_uses_pic_offset_table)
10768 || info_ptr->calls_p)
10770 info_ptr->lr_save_p = 1;
10771 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
10774 /* Determine if we need to save the condition code registers. */
10775 if (regs_ever_live[CR2_REGNO]
10776 || regs_ever_live[CR3_REGNO]
10777 || regs_ever_live[CR4_REGNO])
10779 info_ptr->cr_save_p = 1;
10780 if (DEFAULT_ABI == ABI_V4)
10781 info_ptr->cr_size = reg_size;
10784 /* If the current function calls __builtin_eh_return, then we need
10785 to allocate stack space for registers that will hold data for
10786 the exception handler. */
10787 if (current_function_calls_eh_return)
10790 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
10793 /* SPE saves EH registers in 64-bits. */
10794 ehrd_size = i * (TARGET_SPE_ABI
10795 && info_ptr->spe_64bit_regs_used != 0
10796 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
10801 /* Determine various sizes. */
10802 info_ptr->reg_size = reg_size;
10803 info_ptr->fixed_size = RS6000_SAVE_AREA;
10804 info_ptr->varargs_size = RS6000_VARARGS_AREA;
10805 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
10806 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
10809 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10810 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
10812 info_ptr->spe_gp_size = 0;
10814 if (TARGET_ALTIVEC_ABI)
10815 info_ptr->vrsave_mask = compute_vrsave_mask ();
10817 info_ptr->vrsave_mask = 0;
10819 if (TARGET_ALTIVEC_VRSAVE && info_ptr->vrsave_mask)
10820 info_ptr->vrsave_size = 4;
10822 info_ptr->vrsave_size = 0;
10824 /* Calculate the offsets. */
10825 switch (DEFAULT_ABI)
10833 info_ptr->fp_save_offset = - info_ptr->fp_size;
10834 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10836 if (TARGET_ALTIVEC_ABI)
10838 info_ptr->vrsave_save_offset
10839 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10841 /* Align stack so vector save area is on a quadword boundary. */
10842 if (info_ptr->altivec_size != 0)
10843 info_ptr->altivec_padding_size
10844 = 16 - (-info_ptr->vrsave_save_offset % 16);
10846 info_ptr->altivec_padding_size = 0;
10848 info_ptr->altivec_save_offset
10849 = info_ptr->vrsave_save_offset
10850 - info_ptr->altivec_padding_size
10851 - info_ptr->altivec_size;
10853 /* Adjust for AltiVec case. */
10854 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10857 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10858 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10859 info_ptr->lr_save_offset = 2*reg_size;
10863 info_ptr->fp_save_offset = - info_ptr->fp_size;
10864 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10865 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10867 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10869 /* Align stack so SPE GPR save area is aligned on a
10870 double-word boundary. */
10871 if (info_ptr->spe_gp_size != 0)
10872 info_ptr->spe_padding_size
10873 = 8 - (-info_ptr->cr_save_offset % 8);
10875 info_ptr->spe_padding_size = 0;
10877 info_ptr->spe_gp_save_offset
10878 = info_ptr->cr_save_offset
10879 - info_ptr->spe_padding_size
10880 - info_ptr->spe_gp_size;
10882 /* Adjust for SPE case. */
10883 info_ptr->toc_save_offset
10884 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10886 else if (TARGET_ALTIVEC_ABI)
10888 info_ptr->vrsave_save_offset
10889 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10891 /* Align stack so vector save area is on a quadword boundary. */
10892 if (info_ptr->altivec_size != 0)
10893 info_ptr->altivec_padding_size
10894 = 16 - (-info_ptr->vrsave_save_offset % 16);
10896 info_ptr->altivec_padding_size = 0;
10898 info_ptr->altivec_save_offset
10899 = info_ptr->vrsave_save_offset
10900 - info_ptr->altivec_padding_size
10901 - info_ptr->altivec_size;
10903 /* Adjust for AltiVec case. */
10904 info_ptr->toc_save_offset
10905 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10908 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10909 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10910 info_ptr->lr_save_offset = reg_size;
10914 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10915 + info_ptr->gp_size
10916 + info_ptr->altivec_size
10917 + info_ptr->altivec_padding_size
10918 + info_ptr->spe_gp_size
10919 + info_ptr->spe_padding_size
10921 + info_ptr->cr_size
10922 + info_ptr->lr_size
10923 + info_ptr->vrsave_size
10924 + info_ptr->toc_size,
10925 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10928 total_raw_size = (info_ptr->vars_size
10929 + info_ptr->parm_size
10930 + info_ptr->save_size
10931 + info_ptr->varargs_size
10932 + info_ptr->fixed_size);
10934 info_ptr->total_size =
10935 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10937 /* Determine if we need to allocate any stack frame:
10939 For AIX we need to push the stack if a frame pointer is needed
10940 (because the stack might be dynamically adjusted), if we are
10941 debugging, if we make calls, or if the sum of fp_save, gp_save,
10942 and local variables are more than the space needed to save all
10943 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10944 + 18*8 = 288 (GPR13 reserved).
10946 For V.4 we don't have the stack cushion that AIX uses, but assume
10947 that the debugger can handle stackless frames. */
10949 if (info_ptr->calls_p)
10950 info_ptr->push_p = 1;
10952 else if (DEFAULT_ABI == ABI_V4)
10953 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10955 else if (frame_pointer_needed)
10956 info_ptr->push_p = 1;
10958 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10959 info_ptr->push_p = 1;
10963 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10965 /* Zero offsets if we're not saving those registers. */
10966 if (info_ptr->fp_size == 0)
10967 info_ptr->fp_save_offset = 0;
10969 if (info_ptr->gp_size == 0)
10970 info_ptr->gp_save_offset = 0;
10972 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10973 info_ptr->altivec_save_offset = 0;
10975 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10976 info_ptr->vrsave_save_offset = 0;
10978 if (! TARGET_SPE_ABI
10979 || info_ptr->spe_64bit_regs_used == 0
10980 || info_ptr->spe_gp_size == 0)
10981 info_ptr->spe_gp_save_offset = 0;
10983 if (! info_ptr->lr_save_p)
10984 info_ptr->lr_save_offset = 0;
10986 if (! info_ptr->cr_save_p)
10987 info_ptr->cr_save_offset = 0;
10989 if (! info_ptr->toc_save_p)
10990 info_ptr->toc_save_offset = 0;
10995 /* Return true if the current function uses any GPRs in 64-bit SIMD
10999 spe_func_has_64bit_regs_p (void)
11003 /* Functions that save and restore all the call-saved registers will
11004 need to save/restore the registers in 64-bits. */
11005 if (current_function_calls_eh_return
11006 || current_function_calls_setjmp
11007 || current_function_has_nonlocal_goto)
11010 insns = get_insns ();
11012 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
11018 i = PATTERN (insn);
11019 if (GET_CODE (i) == SET
11020 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
11029 debug_stack_info (rs6000_stack_t *info)
11031 const char *abi_string;
11034 info = rs6000_stack_info ();
11036 fprintf (stderr, "\nStack information for function %s:\n",
11037 ((current_function_decl && DECL_NAME (current_function_decl))
11038 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
11043 default: abi_string = "Unknown"; break;
11044 case ABI_NONE: abi_string = "NONE"; break;
11045 case ABI_AIX: abi_string = "AIX"; break;
11046 case ABI_DARWIN: abi_string = "Darwin"; break;
11047 case ABI_V4: abi_string = "V.4"; break;
11050 fprintf (stderr, "\tABI = %5s\n", abi_string);
11052 if (TARGET_ALTIVEC_ABI)
11053 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
11055 if (TARGET_SPE_ABI)
11056 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
11058 if (info->first_gp_reg_save != 32)
11059 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
11061 if (info->first_fp_reg_save != 64)
11062 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
11064 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
11065 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
11066 info->first_altivec_reg_save);
11068 if (info->lr_save_p)
11069 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
11071 if (info->cr_save_p)
11072 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
11074 if (info->toc_save_p)
11075 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
11077 if (info->vrsave_mask)
11078 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
11081 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
11084 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
11086 if (info->gp_save_offset)
11087 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
11089 if (info->fp_save_offset)
11090 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
11092 if (info->altivec_save_offset)
11093 fprintf (stderr, "\taltivec_save_offset = %5d\n",
11094 info->altivec_save_offset);
11096 if (info->spe_gp_save_offset)
11097 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
11098 info->spe_gp_save_offset);
11100 if (info->vrsave_save_offset)
11101 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
11102 info->vrsave_save_offset);
11104 if (info->lr_save_offset)
11105 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
11107 if (info->cr_save_offset)
11108 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
11110 if (info->toc_save_offset)
11111 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
11113 if (info->varargs_save_offset)
11114 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
11116 if (info->total_size)
11117 fprintf (stderr, "\ttotal_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11120 if (info->varargs_size)
11121 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
11123 if (info->vars_size)
11124 fprintf (stderr, "\tvars_size = "HOST_WIDE_INT_PRINT_DEC"\n",
11127 if (info->parm_size)
11128 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
11130 if (info->fixed_size)
11131 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
11134 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
11136 if (info->spe_gp_size)
11137 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
11140 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
11142 if (info->altivec_size)
11143 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
11145 if (info->vrsave_size)
11146 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
11148 if (info->altivec_padding_size)
11149 fprintf (stderr, "\taltivec_padding_size= %5d\n",
11150 info->altivec_padding_size);
11152 if (info->spe_padding_size)
11153 fprintf (stderr, "\tspe_padding_size = %5d\n",
11154 info->spe_padding_size);
11157 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
11160 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
11162 if (info->toc_size)
11163 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
11165 if (info->save_size)
11166 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
11168 if (info->reg_size != 4)
11169 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
11171 fprintf (stderr, "\n");
11175 rs6000_return_addr (int count, rtx frame)
11177 /* Currently we don't optimize very well between prolog and body
11178 code and for PIC code the code can be actually quite bad, so
11179 don't try to be too clever here. */
11180 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
11182 cfun->machine->ra_needs_full_frame = 1;
11189 plus_constant (copy_to_reg
11190 (gen_rtx_MEM (Pmode,
11191 memory_address (Pmode, frame))),
11192 RETURN_ADDRESS_OFFSET)));
11195 cfun->machine->ra_need_lr = 1;
11196 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
11199 /* Say whether a function is a candidate for sibcall handling or not.
11200 We do not allow indirect calls to be optimized into sibling calls.
11201 Also, we can't do it if there are any vector parameters; there's
11202 nowhere to put the VRsave code so it works; note that functions with
11203 vector parameters are required to have a prototype, so the argument
11204 type info must be available here. (The tail recursion case can work
11205 with vector parameters, but there's no way to distinguish here.) */
11207 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
11212 if (TARGET_ALTIVEC_VRSAVE)
11214 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
11215 type; type = TREE_CHAIN (type))
11217 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
11221 if (DEFAULT_ABI == ABI_DARWIN
11222 || (*targetm.binds_local_p) (decl))
11224 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
11226 if (!lookup_attribute ("longcall", attr_list)
11227 || lookup_attribute ("shortcall", attr_list))
11235 rs6000_ra_ever_killed (void)
11241 if (current_function_is_thunk)
11244 /* regs_ever_live has LR marked as used if any sibcalls are present,
11245 but this should not force saving and restoring in the
11246 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
11247 clobbers LR, so that is inappropriate. */
11249 /* Also, the prologue can generate a store into LR that
11250 doesn't really count, like this:
11253 bcl to set PIC register
11257 When we're called from the epilogue, we need to avoid counting
11258 this as a store. */
11260 push_topmost_sequence ();
11261 top = get_insns ();
11262 pop_topmost_sequence ();
11263 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11265 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
11269 if (FIND_REG_INC_NOTE (insn, reg))
11271 else if (GET_CODE (insn) == CALL_INSN
11272 && !SIBLING_CALL_P (insn))
11274 else if (set_of (reg, insn) != NULL_RTX
11275 && !prologue_epilogue_contains (insn))
11282 /* Add a REG_MAYBE_DEAD note to the insn. */
11284 rs6000_maybe_dead (rtx insn)
11286 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
11291 /* Emit instructions needed to load the TOC register.
11292 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
11293 a constant pool; or for SVR4 -fpic. */
11296 rs6000_emit_load_toc_table (int fromprolog)
11299 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
11301 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
11303 rtx temp = (fromprolog
11304 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11305 : gen_reg_rtx (Pmode));
11306 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
11308 rs6000_maybe_dead (insn);
11309 insn = emit_move_insn (dest, temp);
11311 rs6000_maybe_dead (insn);
11313 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
11316 rtx tempLR = (fromprolog
11317 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
11318 : gen_reg_rtx (Pmode));
11319 rtx temp0 = (fromprolog
11320 ? gen_rtx_REG (Pmode, 0)
11321 : gen_reg_rtx (Pmode));
11324 /* possibly create the toc section */
11325 if (! toc_initialized)
11328 function_section (current_function_decl);
11335 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
11336 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11338 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
11339 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11341 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
11343 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
11344 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
11351 static int reload_toc_labelno = 0;
11353 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
11355 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
11356 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11358 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
11359 emit_move_insn (dest, tempLR);
11360 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
11362 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
11364 rs6000_maybe_dead (insn);
11366 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
11368 /* This is for AIX code running in non-PIC ELF32. */
11371 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
11372 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
11374 insn = emit_insn (gen_elf_high (dest, realsym));
11376 rs6000_maybe_dead (insn);
11377 insn = emit_insn (gen_elf_low (dest, dest, realsym));
11379 rs6000_maybe_dead (insn);
11381 else if (DEFAULT_ABI == ABI_AIX)
11384 insn = emit_insn (gen_load_toc_aix_si (dest));
11386 insn = emit_insn (gen_load_toc_aix_di (dest));
11388 rs6000_maybe_dead (insn);
11394 /* Emit instructions to restore the link register after determining where
11395 its value has been stored. */
11398 rs6000_emit_eh_reg_restore (rtx source, rtx scratch)
11400 rs6000_stack_t *info = rs6000_stack_info ();
11403 operands[0] = source;
11404 operands[1] = scratch;
11406 if (info->lr_save_p)
11408 rtx frame_rtx = stack_pointer_rtx;
11409 HOST_WIDE_INT sp_offset = 0;
11412 if (frame_pointer_needed
11413 || current_function_calls_alloca
11414 || info->total_size > 32767)
11416 emit_move_insn (operands[1], gen_rtx_MEM (Pmode, frame_rtx));
11417 frame_rtx = operands[1];
11419 else if (info->push_p)
11420 sp_offset = info->total_size;
11422 tmp = plus_constant (frame_rtx, info->lr_save_offset + sp_offset);
11423 tmp = gen_rtx_MEM (Pmode, tmp);
11424 emit_move_insn (tmp, operands[0]);
11427 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM), operands[0]);
11430 static GTY(()) int set = -1;
11433 get_TOC_alias_set (void)
11436 set = new_alias_set ();
11440 /* This returns nonzero if the current function uses the TOC. This is
11441 determined by the presence of (use (unspec ... UNSPEC_TOC)), which
11442 is generated by the ABI_V4 load_toc_* patterns. */
11449 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
11452 rtx pat = PATTERN (insn);
11455 if (GET_CODE (pat) == PARALLEL)
11456 for (i = 0; i < XVECLEN (pat, 0); i++)
11458 rtx sub = XVECEXP (pat, 0, i);
11459 if (GET_CODE (sub) == USE)
11461 sub = XEXP (sub, 0);
11462 if (GET_CODE (sub) == UNSPEC
11463 && XINT (sub, 1) == UNSPEC_TOC)
11473 create_TOC_reference (rtx symbol)
11475 return gen_rtx_PLUS (Pmode,
11476 gen_rtx_REG (Pmode, TOC_REGISTER),
11477 gen_rtx_CONST (Pmode,
11478 gen_rtx_MINUS (Pmode, symbol,
11479 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
11482 /* If _Unwind_* has been called from within the same module,
11483 toc register is not guaranteed to be saved to 40(1) on function
11484 entry. Save it there in that case. */
11487 rs6000_aix_emit_builtin_unwind_init (void)
11490 rtx stack_top = gen_reg_rtx (Pmode);
11491 rtx opcode_addr = gen_reg_rtx (Pmode);
11492 rtx opcode = gen_reg_rtx (SImode);
11493 rtx tocompare = gen_reg_rtx (SImode);
11494 rtx no_toc_save_needed = gen_label_rtx ();
11496 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
11497 emit_move_insn (stack_top, mem);
11499 mem = gen_rtx_MEM (Pmode,
11500 gen_rtx_PLUS (Pmode, stack_top,
11501 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
11502 emit_move_insn (opcode_addr, mem);
11503 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
11504 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
11505 : 0xE8410028, SImode));
11507 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
11508 SImode, NULL_RTX, NULL_RTX,
11509 no_toc_save_needed);
11511 mem = gen_rtx_MEM (Pmode,
11512 gen_rtx_PLUS (Pmode, stack_top,
11513 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
11514 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
11515 emit_label (no_toc_save_needed);
11518 /* This ties together stack memory (MEM with an alias set of
11519 rs6000_sr_alias_set) and the change to the stack pointer. */
11522 rs6000_emit_stack_tie (void)
11524 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
11526 set_mem_alias_set (mem, rs6000_sr_alias_set);
11527 emit_insn (gen_stack_tie (mem));
11530 /* Emit the correct code for allocating stack space, as insns.
11531 If COPY_R12, make sure a copy of the old frame is left in r12.
11532 The generated code may use hard register 0 as a temporary. */
11535 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
11538 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11539 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
11540 rtx todec = GEN_INT (-size);
11542 if (current_function_limit_stack)
11544 if (REG_P (stack_limit_rtx)
11545 && REGNO (stack_limit_rtx) > 1
11546 && REGNO (stack_limit_rtx) <= 31)
11548 emit_insn (TARGET_32BIT
11549 ? gen_addsi3 (tmp_reg,
11552 : gen_adddi3 (tmp_reg,
11556 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11559 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
11561 && DEFAULT_ABI == ABI_V4)
11563 rtx toload = gen_rtx_CONST (VOIDmode,
11564 gen_rtx_PLUS (Pmode,
11568 emit_insn (gen_elf_high (tmp_reg, toload));
11569 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
11570 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
11574 warning ("stack limit expression is not supported");
11577 if (copy_r12 || ! TARGET_UPDATE)
11578 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
11584 /* Need a note here so that try_split doesn't get confused. */
11585 if (get_last_insn() == NULL_RTX)
11586 emit_note (NOTE_INSN_DELETED);
11587 insn = emit_move_insn (tmp_reg, todec);
11588 try_split (PATTERN (insn), insn, 0);
11592 insn = emit_insn (TARGET_32BIT
11593 ? gen_movsi_update (stack_reg, stack_reg,
11595 : gen_movdi_update (stack_reg, stack_reg,
11596 todec, stack_reg));
11600 insn = emit_insn (TARGET_32BIT
11601 ? gen_addsi3 (stack_reg, stack_reg, todec)
11602 : gen_adddi3 (stack_reg, stack_reg, todec));
11603 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
11604 gen_rtx_REG (Pmode, 12));
11607 RTX_FRAME_RELATED_P (insn) = 1;
11609 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11610 gen_rtx_SET (VOIDmode, stack_reg,
11611 gen_rtx_PLUS (Pmode, stack_reg,
11616 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
11617 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
11618 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
11619 deduce these equivalences by itself so it wasn't necessary to hold
11620 its hand so much. */
11623 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
11624 rtx reg2, rtx rreg)
11628 /* copy_rtx will not make unique copies of registers, so we need to
11629 ensure we don't have unwanted sharing here. */
11631 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11634 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
11636 real = copy_rtx (PATTERN (insn));
11638 if (reg2 != NULL_RTX)
11639 real = replace_rtx (real, reg2, rreg);
11641 real = replace_rtx (real, reg,
11642 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
11643 STACK_POINTER_REGNUM),
11646 /* We expect that 'real' is either a SET or a PARALLEL containing
11647 SETs (and possibly other stuff). In a PARALLEL, all the SETs
11648 are important so they all have to be marked RTX_FRAME_RELATED_P. */
11650 if (GET_CODE (real) == SET)
11654 temp = simplify_rtx (SET_SRC (set));
11656 SET_SRC (set) = temp;
11657 temp = simplify_rtx (SET_DEST (set));
11659 SET_DEST (set) = temp;
11660 if (GET_CODE (SET_DEST (set)) == MEM)
11662 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11664 XEXP (SET_DEST (set), 0) = temp;
11667 else if (GET_CODE (real) == PARALLEL)
11670 for (i = 0; i < XVECLEN (real, 0); i++)
11671 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
11673 rtx set = XVECEXP (real, 0, i);
11675 temp = simplify_rtx (SET_SRC (set));
11677 SET_SRC (set) = temp;
11678 temp = simplify_rtx (SET_DEST (set));
11680 SET_DEST (set) = temp;
11681 if (GET_CODE (SET_DEST (set)) == MEM)
11683 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
11685 XEXP (SET_DEST (set), 0) = temp;
11687 RTX_FRAME_RELATED_P (set) = 1;
11694 real = spe_synthesize_frame_save (real);
11696 RTX_FRAME_RELATED_P (insn) = 1;
11697 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
11702 /* Given an SPE frame note, return a PARALLEL of SETs with the
11703 original note, plus a synthetic register save. */
11706 spe_synthesize_frame_save (rtx real)
11708 rtx synth, offset, reg, real2;
11710 if (GET_CODE (real) != SET
11711 || GET_MODE (SET_SRC (real)) != V2SImode)
11714 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
11715 frame related note. The parallel contains a set of the register
11716 being saved, and another set to a synthetic register (n+1200).
11717 This is so we can differentiate between 64-bit and 32-bit saves.
11718 Words cannot describe this nastiness. */
11720 if (GET_CODE (SET_DEST (real)) != MEM
11721 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
11722 || GET_CODE (SET_SRC (real)) != REG)
11726 (set (mem (plus (reg x) (const y)))
11729 (set (mem (plus (reg x) (const y+4)))
11733 real2 = copy_rtx (real);
11734 PUT_MODE (SET_DEST (real2), SImode);
11735 reg = SET_SRC (real2);
11736 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
11737 synth = copy_rtx (real2);
11739 if (BYTES_BIG_ENDIAN)
11741 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
11742 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
11745 reg = SET_SRC (synth);
11747 synth = replace_rtx (synth, reg,
11748 gen_rtx_REG (SImode, REGNO (reg) + 1200));
11750 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
11751 synth = replace_rtx (synth, offset,
11752 GEN_INT (INTVAL (offset)
11753 + (BYTES_BIG_ENDIAN ? 0 : 4)));
11755 RTX_FRAME_RELATED_P (synth) = 1;
11756 RTX_FRAME_RELATED_P (real2) = 1;
11757 if (BYTES_BIG_ENDIAN)
11758 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
11760 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
11765 /* Returns an insn that has a vrsave set operation with the
11766 appropriate CLOBBERs. */
11769 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
11772 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
11773 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11776 = gen_rtx_SET (VOIDmode,
11778 gen_rtx_UNSPEC_VOLATILE (SImode,
11779 gen_rtvec (2, reg, vrsave),
11784 /* We need to clobber the registers in the mask so the scheduler
11785 does not move sets to VRSAVE before sets of AltiVec registers.
11787 However, if the function receives nonlocal gotos, reload will set
11788 all call saved registers live. We will end up with:
11790 (set (reg 999) (mem))
11791 (parallel [ (set (reg vrsave) (unspec blah))
11792 (clobber (reg 999))])
11794 The clobber will cause the store into reg 999 to be dead, and
11795 flow will attempt to delete an epilogue insn. In this case, we
11796 need an unspec use/set of the register. */
11798 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11799 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11801 if (!epiloguep || call_used_regs [i])
11802 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11803 gen_rtx_REG (V4SImode, i));
11806 rtx reg = gen_rtx_REG (V4SImode, i);
11809 = gen_rtx_SET (VOIDmode,
11811 gen_rtx_UNSPEC (V4SImode,
11812 gen_rtvec (1, reg), 27));
11816 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11818 for (i = 0; i < nclobs; ++i)
11819 XVECEXP (insn, 0, i) = clobs[i];
11824 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11825 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11828 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
11829 unsigned int regno, int offset, HOST_WIDE_INT total_size)
11831 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11832 rtx replacea, replaceb;
11834 int_rtx = GEN_INT (offset);
11836 /* Some cases that need register indexed addressing. */
11837 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11839 && SPE_VECTOR_MODE (mode)
11840 && !SPE_CONST_OFFSET_OK (offset)))
11842 /* Whomever calls us must make sure r11 is available in the
11843 flow path of instructions in the prologue. */
11844 offset_rtx = gen_rtx_REG (Pmode, 11);
11845 emit_move_insn (offset_rtx, int_rtx);
11847 replacea = offset_rtx;
11848 replaceb = int_rtx;
11852 offset_rtx = int_rtx;
11853 replacea = NULL_RTX;
11854 replaceb = NULL_RTX;
11857 reg = gen_rtx_REG (mode, regno);
11858 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11859 mem = gen_rtx_MEM (mode, addr);
11860 set_mem_alias_set (mem, rs6000_sr_alias_set);
11862 insn = emit_move_insn (mem, reg);
11864 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11867 /* Emit an offset memory reference suitable for a frame store, while
11868 converting to a valid addressing mode. */
11871 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11873 rtx int_rtx, offset_rtx;
11875 int_rtx = GEN_INT (offset);
11877 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11879 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11880 emit_move_insn (offset_rtx, int_rtx);
11883 offset_rtx = int_rtx;
11885 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11888 /* Emit function prologue as insns. */
11891 rs6000_emit_prologue (void)
11893 rs6000_stack_t *info = rs6000_stack_info ();
11894 enum machine_mode reg_mode = Pmode;
11895 int reg_size = TARGET_32BIT ? 4 : 8;
11896 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11897 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11898 rtx frame_reg_rtx = sp_reg_rtx;
11899 rtx cr_save_rtx = NULL_RTX;
11901 int saving_FPRs_inline;
11902 int using_store_multiple;
11903 HOST_WIDE_INT sp_offset = 0;
11905 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11907 reg_mode = V2SImode;
11911 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11912 && (!TARGET_SPE_ABI
11913 || info->spe_64bit_regs_used == 0)
11914 && info->first_gp_reg_save < 31);
11915 saving_FPRs_inline = (info->first_fp_reg_save == 64
11916 || FP_SAVE_INLINE (info->first_fp_reg_save)
11917 || current_function_calls_eh_return
11918 || cfun->machine->ra_need_lr);
11920 /* For V.4, update stack before we do any saving and set back pointer. */
11922 && (DEFAULT_ABI == ABI_V4
11923 || current_function_calls_eh_return))
11925 if (info->total_size < 32767)
11926 sp_offset = info->total_size;
11928 frame_reg_rtx = frame_ptr_rtx;
11929 rs6000_emit_allocate_stack (info->total_size,
11930 (frame_reg_rtx != sp_reg_rtx
11931 && (info->cr_save_p
11933 || info->first_fp_reg_save < 64
11934 || info->first_gp_reg_save < 32
11936 if (frame_reg_rtx != sp_reg_rtx)
11937 rs6000_emit_stack_tie ();
11940 /* Save AltiVec registers if needed. */
11941 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11945 /* There should be a non inline version of this, for when we
11946 are saving lots of vector registers. */
11947 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11948 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11950 rtx areg, savereg, mem;
11953 offset = info->altivec_save_offset + sp_offset
11954 + 16 * (i - info->first_altivec_reg_save);
11956 savereg = gen_rtx_REG (V4SImode, i);
11958 areg = gen_rtx_REG (Pmode, 0);
11959 emit_move_insn (areg, GEN_INT (offset));
11961 /* AltiVec addressing mode is [reg+reg]. */
11962 mem = gen_rtx_MEM (V4SImode,
11963 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11965 set_mem_alias_set (mem, rs6000_sr_alias_set);
11967 insn = emit_move_insn (mem, savereg);
11969 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11970 areg, GEN_INT (offset));
11974 /* VRSAVE is a bit vector representing which AltiVec registers
11975 are used. The OS uses this to determine which vector
11976 registers to save on a context switch. We need to save
11977 VRSAVE on the stack frame, add whatever AltiVec registers we
11978 used in this function, and do the corresponding magic in the
11981 if (TARGET_ALTIVEC && TARGET_ALTIVEC_VRSAVE
11982 && info->vrsave_mask != 0)
11984 rtx reg, mem, vrsave;
11987 /* Get VRSAVE onto a GPR. */
11988 reg = gen_rtx_REG (SImode, 12);
11989 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11991 emit_insn (gen_get_vrsave_internal (reg));
11993 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11996 offset = info->vrsave_save_offset + sp_offset;
11998 = gen_rtx_MEM (SImode,
11999 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
12000 set_mem_alias_set (mem, rs6000_sr_alias_set);
12001 insn = emit_move_insn (mem, reg);
12003 /* Include the registers in the mask. */
12004 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
12006 insn = emit_insn (generate_set_vrsave (reg, info, 0));
12009 /* If we use the link register, get it into r0. */
12010 if (info->lr_save_p)
12011 emit_move_insn (gen_rtx_REG (Pmode, 0),
12012 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12014 /* If we need to save CR, put it into r12. */
12015 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
12017 cr_save_rtx = gen_rtx_REG (SImode, 12);
12018 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12021 /* Do any required saving of fpr's. If only one or two to save, do
12022 it ourselves. Otherwise, call function. */
12023 if (saving_FPRs_inline)
12026 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12027 if ((regs_ever_live[info->first_fp_reg_save+i]
12028 && ! call_used_regs[info->first_fp_reg_save+i]))
12029 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
12030 info->first_fp_reg_save + i,
12031 info->fp_save_offset + sp_offset + 8 * i,
12034 else if (info->first_fp_reg_save != 64)
12038 const char *alloc_rname;
12040 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
12042 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
12043 gen_rtx_REG (Pmode,
12044 LINK_REGISTER_REGNUM));
12045 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
12046 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
12047 alloc_rname = ggc_strdup (rname);
12048 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12049 gen_rtx_SYMBOL_REF (Pmode,
12051 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12053 rtx addr, reg, mem;
12054 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
12055 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12056 GEN_INT (info->fp_save_offset
12057 + sp_offset + 8*i));
12058 mem = gen_rtx_MEM (DFmode, addr);
12059 set_mem_alias_set (mem, rs6000_sr_alias_set);
12061 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
12063 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12064 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12065 NULL_RTX, NULL_RTX);
12068 /* Save GPRs. This is done as a PARALLEL if we are using
12069 the store-multiple instructions. */
12070 if (using_store_multiple)
12074 p = rtvec_alloc (32 - info->first_gp_reg_save);
12075 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12077 rtx addr, reg, mem;
12078 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12079 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12080 GEN_INT (info->gp_save_offset
12083 mem = gen_rtx_MEM (reg_mode, addr);
12084 set_mem_alias_set (mem, rs6000_sr_alias_set);
12086 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
12088 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12089 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12090 NULL_RTX, NULL_RTX);
12095 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12096 if ((regs_ever_live[info->first_gp_reg_save+i]
12097 && ! call_used_regs[info->first_gp_reg_save+i])
12098 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12099 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12100 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12102 rtx addr, reg, mem;
12103 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
12105 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12107 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12110 if (!SPE_CONST_OFFSET_OK (offset))
12112 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12113 emit_move_insn (b, GEN_INT (offset));
12116 b = GEN_INT (offset);
12118 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12119 mem = gen_rtx_MEM (V2SImode, addr);
12120 set_mem_alias_set (mem, rs6000_sr_alias_set);
12121 insn = emit_move_insn (mem, reg);
12123 if (GET_CODE (b) == CONST_INT)
12124 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12125 NULL_RTX, NULL_RTX);
12127 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12128 b, GEN_INT (offset));
12132 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12133 GEN_INT (info->gp_save_offset
12136 mem = gen_rtx_MEM (reg_mode, addr);
12137 set_mem_alias_set (mem, rs6000_sr_alias_set);
12139 insn = emit_move_insn (mem, reg);
12140 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12141 NULL_RTX, NULL_RTX);
12146 /* ??? There's no need to emit actual instructions here, but it's the
12147 easiest way to get the frame unwind information emitted. */
12148 if (current_function_calls_eh_return)
12150 unsigned int i, regno;
12152 /* In AIX ABI we need to pretend we save r2 here. */
12155 rtx addr, reg, mem;
12157 reg = gen_rtx_REG (reg_mode, 2);
12158 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12159 GEN_INT (sp_offset + 5 * reg_size));
12160 mem = gen_rtx_MEM (reg_mode, addr);
12161 set_mem_alias_set (mem, rs6000_sr_alias_set);
12163 insn = emit_move_insn (mem, reg);
12164 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12165 NULL_RTX, NULL_RTX);
12166 PATTERN (insn) = gen_blockage ();
12171 regno = EH_RETURN_DATA_REGNO (i);
12172 if (regno == INVALID_REGNUM)
12175 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
12176 info->ehrd_offset + sp_offset
12177 + reg_size * (int) i,
12182 /* Save lr if we used it. */
12183 if (info->lr_save_p)
12185 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12186 GEN_INT (info->lr_save_offset + sp_offset));
12187 rtx reg = gen_rtx_REG (Pmode, 0);
12188 rtx mem = gen_rtx_MEM (Pmode, addr);
12189 /* This should not be of rs6000_sr_alias_set, because of
12190 __builtin_return_address. */
12192 insn = emit_move_insn (mem, reg);
12193 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12194 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
12197 /* Save CR if we use any that must be preserved. */
12198 if (info->cr_save_p)
12200 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12201 GEN_INT (info->cr_save_offset + sp_offset));
12202 rtx mem = gen_rtx_MEM (SImode, addr);
12204 set_mem_alias_set (mem, rs6000_sr_alias_set);
12206 /* If r12 was used to hold the original sp, copy cr into r0 now
12208 if (REGNO (frame_reg_rtx) == 12)
12210 cr_save_rtx = gen_rtx_REG (SImode, 0);
12211 emit_insn (gen_movesi_from_cr (cr_save_rtx));
12213 insn = emit_move_insn (mem, cr_save_rtx);
12215 /* Now, there's no way that dwarf2out_frame_debug_expr is going
12216 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
12217 But that's OK. All we have to do is specify that _one_ condition
12218 code register is saved in this stack slot. The thrower's epilogue
12219 will then restore all the call-saved registers.
12220 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
12221 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
12222 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
12225 /* Update stack and set back pointer unless this is V.4,
12226 for which it was done previously. */
12228 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
12229 rs6000_emit_allocate_stack (info->total_size, FALSE);
12231 /* Set frame pointer, if needed. */
12232 if (frame_pointer_needed)
12234 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
12236 RTX_FRAME_RELATED_P (insn) = 1;
12239 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
12240 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
12241 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
12242 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
12244 /* If emit_load_toc_table will use the link register, we need to save
12245 it. We use R12 for this purpose because emit_load_toc_table
12246 can use register 0. This allows us to use a plain 'blr' to return
12247 from the procedure more often. */
12248 int save_LR_around_toc_setup = (TARGET_ELF
12249 && DEFAULT_ABI != ABI_AIX
12251 && ! info->lr_save_p
12252 && EXIT_BLOCK_PTR->pred != NULL);
12253 if (save_LR_around_toc_setup)
12255 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12256 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
12257 rs6000_emit_load_toc_table (TRUE);
12258 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
12261 rs6000_emit_load_toc_table (TRUE);
12265 if (DEFAULT_ABI == ABI_DARWIN
12266 && flag_pic && current_function_uses_pic_offset_table)
12268 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
12269 const char *picbase = machopic_function_base_name ();
12270 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
12272 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
12274 rs6000_maybe_dead (
12275 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
12276 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
12281 /* Write function prologue. */
12284 rs6000_output_function_prologue (FILE *file,
12285 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12287 rs6000_stack_t *info = rs6000_stack_info ();
12289 if (TARGET_DEBUG_STACK)
12290 debug_stack_info (info);
12292 /* Write .extern for any function we will call to save and restore
12294 if (info->first_fp_reg_save < 64
12295 && !FP_SAVE_INLINE (info->first_fp_reg_save))
12296 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
12297 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
12298 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
12299 RESTORE_FP_SUFFIX);
12301 /* Write .extern for AIX common mode routines, if needed. */
12302 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
12304 fputs ("\t.extern __mulh\n", file);
12305 fputs ("\t.extern __mull\n", file);
12306 fputs ("\t.extern __divss\n", file);
12307 fputs ("\t.extern __divus\n", file);
12308 fputs ("\t.extern __quoss\n", file);
12309 fputs ("\t.extern __quous\n", file);
12310 common_mode_defined = 1;
12313 if (! HAVE_prologue)
12317 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
12318 the "toplevel" insn chain. */
12319 emit_note (NOTE_INSN_DELETED);
12320 rs6000_emit_prologue ();
12321 emit_note (NOTE_INSN_DELETED);
12323 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12327 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12329 INSN_ADDRESSES_NEW (insn, addr);
12334 if (TARGET_DEBUG_STACK)
12335 debug_rtx_list (get_insns (), 100);
12336 final (get_insns (), file, FALSE, FALSE);
12340 rs6000_pic_labelno++;
12343 /* Emit function epilogue as insns.
12345 At present, dwarf2out_frame_debug_expr doesn't understand
12346 register restores, so we don't bother setting RTX_FRAME_RELATED_P
12347 anywhere in the epilogue. Most of the insns below would in any case
12348 need special notes to explain where r11 is in relation to the stack. */
12351 rs6000_emit_epilogue (int sibcall)
12353 rs6000_stack_t *info;
12354 int restoring_FPRs_inline;
12355 int using_load_multiple;
12356 int using_mfcr_multiple;
12357 int use_backchain_to_restore_sp;
12359 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
12360 rtx frame_reg_rtx = sp_reg_rtx;
12361 enum machine_mode reg_mode = Pmode;
12362 int reg_size = TARGET_32BIT ? 4 : 8;
12365 info = rs6000_stack_info ();
12367 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12369 reg_mode = V2SImode;
12373 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
12374 && (!TARGET_SPE_ABI
12375 || info->spe_64bit_regs_used == 0)
12376 && info->first_gp_reg_save < 31);
12377 restoring_FPRs_inline = (sibcall
12378 || current_function_calls_eh_return
12379 || info->first_fp_reg_save == 64
12380 || FP_SAVE_INLINE (info->first_fp_reg_save));
12381 use_backchain_to_restore_sp = (frame_pointer_needed
12382 || current_function_calls_alloca
12383 || info->total_size > 32767);
12384 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
12385 || rs6000_cpu == PROCESSOR_PPC603
12386 || rs6000_cpu == PROCESSOR_PPC750
12389 /* If we have a frame pointer, a call to alloca, or a large stack
12390 frame, restore the old stack pointer using the backchain. Otherwise,
12391 we know what size to update it with. */
12392 if (use_backchain_to_restore_sp)
12394 /* Under V.4, don't reset the stack pointer until after we're done
12395 loading the saved registers. */
12396 if (DEFAULT_ABI == ABI_V4)
12397 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
12399 emit_move_insn (frame_reg_rtx,
12400 gen_rtx_MEM (Pmode, sp_reg_rtx));
12403 else if (info->push_p)
12405 if (DEFAULT_ABI == ABI_V4
12406 || current_function_calls_eh_return)
12407 sp_offset = info->total_size;
12410 emit_insn (TARGET_32BIT
12411 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12412 GEN_INT (info->total_size))
12413 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12414 GEN_INT (info->total_size)));
12418 /* Restore AltiVec registers if needed. */
12419 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
12423 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
12424 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
12426 rtx addr, areg, mem;
12428 areg = gen_rtx_REG (Pmode, 0);
12430 (areg, GEN_INT (info->altivec_save_offset
12432 + 16 * (i - info->first_altivec_reg_save)));
12434 /* AltiVec addressing mode is [reg+reg]. */
12435 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
12436 mem = gen_rtx_MEM (V4SImode, addr);
12437 set_mem_alias_set (mem, rs6000_sr_alias_set);
12439 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
12443 /* Restore VRSAVE if needed. */
12444 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE
12445 && info->vrsave_mask != 0)
12447 rtx addr, mem, reg;
12449 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12450 GEN_INT (info->vrsave_save_offset + sp_offset));
12451 mem = gen_rtx_MEM (SImode, addr);
12452 set_mem_alias_set (mem, rs6000_sr_alias_set);
12453 reg = gen_rtx_REG (SImode, 12);
12454 emit_move_insn (reg, mem);
12456 emit_insn (generate_set_vrsave (reg, info, 1));
12459 /* Get the old lr if we saved it. */
12460 if (info->lr_save_p)
12462 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
12463 info->lr_save_offset + sp_offset);
12465 set_mem_alias_set (mem, rs6000_sr_alias_set);
12467 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
12470 /* Get the old cr if we saved it. */
12471 if (info->cr_save_p)
12473 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12474 GEN_INT (info->cr_save_offset + sp_offset));
12475 rtx mem = gen_rtx_MEM (SImode, addr);
12477 set_mem_alias_set (mem, rs6000_sr_alias_set);
12479 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
12482 /* Set LR here to try to overlap restores below. */
12483 if (info->lr_save_p)
12484 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
12485 gen_rtx_REG (Pmode, 0));
12487 /* Load exception handler data registers, if needed. */
12488 if (current_function_calls_eh_return)
12490 unsigned int i, regno;
12494 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12495 GEN_INT (sp_offset + 5 * reg_size));
12496 rtx mem = gen_rtx_MEM (reg_mode, addr);
12498 set_mem_alias_set (mem, rs6000_sr_alias_set);
12500 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
12507 regno = EH_RETURN_DATA_REGNO (i);
12508 if (regno == INVALID_REGNUM)
12511 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
12512 info->ehrd_offset + sp_offset
12513 + reg_size * (int) i);
12514 set_mem_alias_set (mem, rs6000_sr_alias_set);
12516 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
12520 /* Restore GPRs. This is done as a PARALLEL if we are using
12521 the load-multiple instructions. */
12522 if (using_load_multiple)
12525 p = rtvec_alloc (32 - info->first_gp_reg_save);
12526 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12528 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12529 GEN_INT (info->gp_save_offset
12532 rtx mem = gen_rtx_MEM (reg_mode, addr);
12534 set_mem_alias_set (mem, rs6000_sr_alias_set);
12537 gen_rtx_SET (VOIDmode,
12538 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
12541 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12544 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
12545 if ((regs_ever_live[info->first_gp_reg_save+i]
12546 && ! call_used_regs[info->first_gp_reg_save+i])
12547 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
12548 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
12549 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
12551 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12552 GEN_INT (info->gp_save_offset
12555 rtx mem = gen_rtx_MEM (reg_mode, addr);
12557 /* Restore 64-bit quantities for SPE. */
12558 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
12560 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
12563 if (!SPE_CONST_OFFSET_OK (offset))
12565 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
12566 emit_move_insn (b, GEN_INT (offset));
12569 b = GEN_INT (offset);
12571 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
12572 mem = gen_rtx_MEM (V2SImode, addr);
12575 set_mem_alias_set (mem, rs6000_sr_alias_set);
12577 emit_move_insn (gen_rtx_REG (reg_mode,
12578 info->first_gp_reg_save + i), mem);
12581 /* Restore fpr's if we need to do it without calling a function. */
12582 if (restoring_FPRs_inline)
12583 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12584 if ((regs_ever_live[info->first_fp_reg_save+i]
12585 && ! call_used_regs[info->first_fp_reg_save+i]))
12588 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
12589 GEN_INT (info->fp_save_offset
12592 mem = gen_rtx_MEM (DFmode, addr);
12593 set_mem_alias_set (mem, rs6000_sr_alias_set);
12595 emit_move_insn (gen_rtx_REG (DFmode,
12596 info->first_fp_reg_save + i),
12600 /* If we saved cr, restore it here. Just those that were used. */
12601 if (info->cr_save_p)
12603 rtx r12_rtx = gen_rtx_REG (SImode, 12);
12606 if (using_mfcr_multiple)
12608 for (i = 0; i < 8; i++)
12609 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12615 if (using_mfcr_multiple && count > 1)
12620 p = rtvec_alloc (count);
12623 for (i = 0; i < 8; i++)
12624 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12626 rtvec r = rtvec_alloc (2);
12627 RTVEC_ELT (r, 0) = r12_rtx;
12628 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
12629 RTVEC_ELT (p, ndx) =
12630 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
12631 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
12634 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
12639 for (i = 0; i < 8; i++)
12640 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
12642 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
12648 /* If this is V.4, unwind the stack pointer after all of the loads
12649 have been done. We need to emit a block here so that sched
12650 doesn't decide to move the sp change before the register restores
12651 (which may not have any obvious dependency on the stack). This
12652 doesn't hurt performance, because there is no scheduling that can
12653 be done after this point. */
12654 if (DEFAULT_ABI == ABI_V4
12655 || current_function_calls_eh_return)
12657 if (frame_reg_rtx != sp_reg_rtx)
12658 rs6000_emit_stack_tie ();
12660 if (use_backchain_to_restore_sp)
12662 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
12664 else if (sp_offset != 0)
12666 emit_insn (TARGET_32BIT
12667 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
12668 GEN_INT (sp_offset))
12669 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
12670 GEN_INT (sp_offset)));
12674 if (current_function_calls_eh_return)
12676 rtx sa = EH_RETURN_STACKADJ_RTX;
12677 emit_insn (TARGET_32BIT
12678 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
12679 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
12685 if (! restoring_FPRs_inline)
12686 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
12688 p = rtvec_alloc (2);
12690 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
12691 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
12692 gen_rtx_REG (Pmode,
12693 LINK_REGISTER_REGNUM));
12695 /* If we have to restore more than two FP registers, branch to the
12696 restore function. It will return to our caller. */
12697 if (! restoring_FPRs_inline)
12701 const char *alloc_rname;
12703 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
12704 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
12705 alloc_rname = ggc_strdup (rname);
12706 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
12707 gen_rtx_SYMBOL_REF (Pmode,
12710 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
12713 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
12714 GEN_INT (info->fp_save_offset + 8*i));
12715 mem = gen_rtx_MEM (DFmode, addr);
12716 set_mem_alias_set (mem, rs6000_sr_alias_set);
12718 RTVEC_ELT (p, i+3) =
12719 gen_rtx_SET (VOIDmode,
12720 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
12725 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
12729 /* Write function epilogue. */
12732 rs6000_output_function_epilogue (FILE *file,
12733 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
12735 rs6000_stack_t *info = rs6000_stack_info ();
12737 if (! HAVE_epilogue)
12739 rtx insn = get_last_insn ();
12740 /* If the last insn was a BARRIER, we don't have to write anything except
12741 the trace table. */
12742 if (GET_CODE (insn) == NOTE)
12743 insn = prev_nonnote_insn (insn);
12744 if (insn == 0 || GET_CODE (insn) != BARRIER)
12746 /* This is slightly ugly, but at least we don't have two
12747 copies of the epilogue-emitting code. */
12750 /* A NOTE_INSN_DELETED is supposed to be at the start
12751 and end of the "toplevel" insn chain. */
12752 emit_note (NOTE_INSN_DELETED);
12753 rs6000_emit_epilogue (FALSE);
12754 emit_note (NOTE_INSN_DELETED);
12756 /* Expand INSN_ADDRESSES so final() doesn't crash. */
12760 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
12762 INSN_ADDRESSES_NEW (insn, addr);
12767 if (TARGET_DEBUG_STACK)
12768 debug_rtx_list (get_insns (), 100);
12769 final (get_insns (), file, FALSE, FALSE);
12775 macho_branch_islands ();
12776 /* Mach-O doesn't support labels at the end of objects, so if
12777 it looks like we might want one, insert a NOP. */
12779 rtx insn = get_last_insn ();
12782 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
12783 insn = PREV_INSN (insn);
12787 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
12788 fputs ("\tnop\n", file);
12792 /* Output a traceback table here. See /usr/include/sys/debug.h for info
12795 We don't output a traceback table if -finhibit-size-directive was
12796 used. The documentation for -finhibit-size-directive reads
12797 ``don't output a @code{.size} assembler directive, or anything
12798 else that would cause trouble if the function is split in the
12799 middle, and the two halves are placed at locations far apart in
12800 memory.'' The traceback table has this property, since it
12801 includes the offset from the start of the function to the
12802 traceback table itself.
12804 System V.4 Powerpc's (and the embedded ABI derived from it) use a
12805 different traceback table. */
12806 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
12807 && rs6000_traceback != traceback_none)
12809 const char *fname = NULL;
12810 const char *language_string = lang_hooks.name;
12811 int fixed_parms = 0, float_parms = 0, parm_info = 0;
12813 int optional_tbtab;
12815 if (rs6000_traceback == traceback_full)
12816 optional_tbtab = 1;
12817 else if (rs6000_traceback == traceback_part)
12818 optional_tbtab = 0;
12820 optional_tbtab = !optimize_size && !TARGET_ELF;
12822 if (optional_tbtab)
12824 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12825 while (*fname == '.') /* V.4 encodes . in the name */
12828 /* Need label immediately before tbtab, so we can compute
12829 its offset from the function start. */
12830 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12831 ASM_OUTPUT_LABEL (file, fname);
12834 /* The .tbtab pseudo-op can only be used for the first eight
12835 expressions, since it can't handle the possibly variable
12836 length fields that follow. However, if you omit the optional
12837 fields, the assembler outputs zeros for all optional fields
12838 anyways, giving each variable length field is minimum length
12839 (as defined in sys/debug.h). Thus we can not use the .tbtab
12840 pseudo-op at all. */
12842 /* An all-zero word flags the start of the tbtab, for debuggers
12843 that have to find it by searching forward from the entry
12844 point or from the current pc. */
12845 fputs ("\t.long 0\n", file);
12847 /* Tbtab format type. Use format type 0. */
12848 fputs ("\t.byte 0,", file);
12850 /* Language type. Unfortunately, there does not seem to be any
12851 official way to discover the language being compiled, so we
12852 use language_string.
12853 C is 0. Fortran is 1. Pascal is 2. Ada is 3. C++ is 9.
12854 Java is 13. Objective-C is 14. */
12855 if (! strcmp (language_string, "GNU C"))
12857 else if (! strcmp (language_string, "GNU F77"))
12859 else if (! strcmp (language_string, "GNU Pascal"))
12861 else if (! strcmp (language_string, "GNU Ada"))
12863 else if (! strcmp (language_string, "GNU C++"))
12865 else if (! strcmp (language_string, "GNU Java"))
12867 else if (! strcmp (language_string, "GNU Objective-C"))
12871 fprintf (file, "%d,", i);
12873 /* 8 single bit fields: global linkage (not set for C extern linkage,
12874 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12875 from start of procedure stored in tbtab, internal function, function
12876 has controlled storage, function has no toc, function uses fp,
12877 function logs/aborts fp operations. */
12878 /* Assume that fp operations are used if any fp reg must be saved. */
12879 fprintf (file, "%d,",
12880 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12882 /* 6 bitfields: function is interrupt handler, name present in
12883 proc table, function calls alloca, on condition directives
12884 (controls stack walks, 3 bits), saves condition reg, saves
12886 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12887 set up as a frame pointer, even when there is no alloca call. */
12888 fprintf (file, "%d,",
12889 ((optional_tbtab << 6)
12890 | ((optional_tbtab & frame_pointer_needed) << 5)
12891 | (info->cr_save_p << 1)
12892 | (info->lr_save_p)));
12894 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12896 fprintf (file, "%d,",
12897 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12899 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12900 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12902 if (optional_tbtab)
12904 /* Compute the parameter info from the function decl argument
12907 int next_parm_info_bit = 31;
12909 for (decl = DECL_ARGUMENTS (current_function_decl);
12910 decl; decl = TREE_CHAIN (decl))
12912 rtx parameter = DECL_INCOMING_RTL (decl);
12913 enum machine_mode mode = GET_MODE (parameter);
12915 if (GET_CODE (parameter) == REG)
12917 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12923 if (mode == SFmode)
12925 else if (mode == DFmode || mode == TFmode)
12930 /* If only one bit will fit, don't or in this entry. */
12931 if (next_parm_info_bit > 0)
12932 parm_info |= (bits << (next_parm_info_bit - 1));
12933 next_parm_info_bit -= 2;
12937 fixed_parms += ((GET_MODE_SIZE (mode)
12938 + (UNITS_PER_WORD - 1))
12940 next_parm_info_bit -= 1;
12946 /* Number of fixed point parameters. */
12947 /* This is actually the number of words of fixed point parameters; thus
12948 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12949 fprintf (file, "%d,", fixed_parms);
12951 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12953 /* This is actually the number of fp registers that hold parameters;
12954 and thus the maximum value is 13. */
12955 /* Set parameters on stack bit if parameters are not in their original
12956 registers, regardless of whether they are on the stack? Xlc
12957 seems to set the bit when not optimizing. */
12958 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12960 if (! optional_tbtab)
12963 /* Optional fields follow. Some are variable length. */
12965 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12966 11 double float. */
12967 /* There is an entry for each parameter in a register, in the order that
12968 they occur in the parameter list. Any intervening arguments on the
12969 stack are ignored. If the list overflows a long (max possible length
12970 34 bits) then completely leave off all elements that don't fit. */
12971 /* Only emit this long if there was at least one parameter. */
12972 if (fixed_parms || float_parms)
12973 fprintf (file, "\t.long %d\n", parm_info);
12975 /* Offset from start of code to tb table. */
12976 fputs ("\t.long ", file);
12977 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12979 RS6000_OUTPUT_BASENAME (file, fname);
12981 assemble_name (file, fname);
12983 fputs ("-.", file);
12985 RS6000_OUTPUT_BASENAME (file, fname);
12987 assemble_name (file, fname);
12991 /* Interrupt handler mask. */
12992 /* Omit this long, since we never set the interrupt handler bit
12995 /* Number of CTL (controlled storage) anchors. */
12996 /* Omit this long, since the has_ctl bit is never set above. */
12998 /* Displacement into stack of each CTL anchor. */
12999 /* Omit this list of longs, because there are no CTL anchors. */
13001 /* Length of function name. */
13004 fprintf (file, "\t.short %d\n", (int) strlen (fname));
13006 /* Function name. */
13007 assemble_string (fname, strlen (fname));
13009 /* Register for alloca automatic storage; this is always reg 31.
13010 Only emit this if the alloca bit was set above. */
13011 if (frame_pointer_needed)
13012 fputs ("\t.byte 31\n", file);
13014 fputs ("\t.align 2\n", file);
13018 /* A C compound statement that outputs the assembler code for a thunk
13019 function, used to implement C++ virtual function calls with
13020 multiple inheritance. The thunk acts as a wrapper around a virtual
13021 function, adjusting the implicit object parameter before handing
13022 control off to the real function.
13024 First, emit code to add the integer DELTA to the location that
13025 contains the incoming first argument. Assume that this argument
13026 contains a pointer, and is the one used to pass the `this' pointer
13027 in C++. This is the incoming argument *before* the function
13028 prologue, e.g. `%o0' on a sparc. The addition must preserve the
13029 values of all other incoming arguments.
13031 After the addition, emit code to jump to FUNCTION, which is a
13032 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
13033 not touch the return address. Hence returning from FUNCTION will
13034 return to whoever called the current `thunk'.
13036 The effect must be as if FUNCTION had been called directly with the
13037 adjusted first argument. This macro is responsible for emitting
13038 all of the code for a thunk function; output_function_prologue()
13039 and output_function_epilogue() are not invoked.
13041 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
13042 been extracted from it.) It might possibly be useful on some
13043 targets, but probably not.
13045 If you do not define this macro, the target-independent code in the
13046 C++ frontend will generate a less efficient heavyweight thunk that
13047 calls FUNCTION instead of jumping to it. The generic approach does
13048 not support varargs. */
13051 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
13052 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
13055 rtx this, insn, funexp;
13057 reload_completed = 1;
13058 epilogue_completed = 1;
13059 no_new_pseudos = 1;
13061 /* Mark the end of the (empty) prologue. */
13062 emit_note (NOTE_INSN_PROLOGUE_END);
13064 /* Find the "this" pointer. If the function returns a structure,
13065 the structure return pointer is in r3. */
13066 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
13067 this = gen_rtx_REG (Pmode, 4);
13069 this = gen_rtx_REG (Pmode, 3);
13071 /* Apply the constant offset, if required. */
13074 rtx delta_rtx = GEN_INT (delta);
13075 emit_insn (TARGET_32BIT
13076 ? gen_addsi3 (this, this, delta_rtx)
13077 : gen_adddi3 (this, this, delta_rtx));
13080 /* Apply the offset from the vtable, if required. */
13083 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
13084 rtx tmp = gen_rtx_REG (Pmode, 12);
13086 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
13087 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
13089 emit_insn (TARGET_32BIT
13090 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
13091 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
13092 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
13096 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
13098 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
13100 emit_insn (TARGET_32BIT
13101 ? gen_addsi3 (this, this, tmp)
13102 : gen_adddi3 (this, this, tmp));
13105 /* Generate a tail call to the target function. */
13106 if (!TREE_USED (function))
13108 assemble_external (function);
13109 TREE_USED (function) = 1;
13111 funexp = XEXP (DECL_RTL (function), 0);
13112 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
13115 if (MACHOPIC_INDIRECT)
13116 funexp = machopic_indirect_call_target (funexp);
13119 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
13120 generate sibcall RTL explicitly to avoid constraint abort. */
13121 insn = emit_call_insn (
13122 gen_rtx_PARALLEL (VOIDmode,
13124 gen_rtx_CALL (VOIDmode,
13125 funexp, const0_rtx),
13126 gen_rtx_USE (VOIDmode, const0_rtx),
13127 gen_rtx_USE (VOIDmode,
13128 gen_rtx_REG (SImode,
13129 LINK_REGISTER_REGNUM)),
13130 gen_rtx_RETURN (VOIDmode))));
13131 SIBLING_CALL_P (insn) = 1;
13134 /* Run just enough of rest_of_compilation to get the insns emitted.
13135 There's not really enough bulk here to make other passes such as
13136 instruction scheduling worth while. Note that use_thunk calls
13137 assemble_start_function and assemble_end_function. */
13138 insn = get_insns ();
13139 insn_locators_initialize ();
13140 shorten_branches (insn);
13141 final_start_function (insn, file, 1);
13142 final (insn, file, 1, 0);
13143 final_end_function ();
13145 reload_completed = 0;
13146 epilogue_completed = 0;
13147 no_new_pseudos = 0;
13150 /* A quick summary of the various types of 'constant-pool tables'
13153 Target Flags Name One table per
13154 AIX (none) AIX TOC object file
13155 AIX -mfull-toc AIX TOC object file
13156 AIX -mminimal-toc AIX minimal TOC translation unit
13157 SVR4/EABI (none) SVR4 SDATA object file
13158 SVR4/EABI -fpic SVR4 pic object file
13159 SVR4/EABI -fPIC SVR4 PIC translation unit
13160 SVR4/EABI -mrelocatable EABI TOC function
13161 SVR4/EABI -maix AIX TOC object file
13162 SVR4/EABI -maix -mminimal-toc
13163 AIX minimal TOC translation unit
13165 Name Reg. Set by entries contains:
13166 made by addrs? fp? sum?
13168 AIX TOC 2 crt0 as Y option option
13169 AIX minimal TOC 30 prolog gcc Y Y option
13170 SVR4 SDATA 13 crt0 gcc N Y N
13171 SVR4 pic 30 prolog ld Y not yet N
13172 SVR4 PIC 30 prolog gcc Y option option
13173 EABI TOC 30 prolog gcc Y option option
13177 /* Hash functions for the hash table. */
13180 rs6000_hash_constant (rtx k)
13182 enum rtx_code code = GET_CODE (k);
13183 enum machine_mode mode = GET_MODE (k);
13184 unsigned result = (code << 3) ^ mode;
13185 const char *format;
13188 format = GET_RTX_FORMAT (code);
13189 flen = strlen (format);
13195 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
13198 if (mode != VOIDmode)
13199 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
13211 for (; fidx < flen; fidx++)
13212 switch (format[fidx])
13217 const char *str = XSTR (k, fidx);
13218 len = strlen (str);
13219 result = result * 613 + len;
13220 for (i = 0; i < len; i++)
13221 result = result * 613 + (unsigned) str[i];
13226 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
13230 result = result * 613 + (unsigned) XINT (k, fidx);
13233 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
13234 result = result * 613 + (unsigned) XWINT (k, fidx);
13238 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
13239 result = result * 613 + (unsigned) (XWINT (k, fidx)
13253 toc_hash_function (const void *hash_entry)
13255 const struct toc_hash_struct *thc =
13256 (const struct toc_hash_struct *) hash_entry;
13257 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
13260 /* Compare H1 and H2 for equivalence. */
13263 toc_hash_eq (const void *h1, const void *h2)
13265 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
13266 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
13268 if (((const struct toc_hash_struct *) h1)->key_mode
13269 != ((const struct toc_hash_struct *) h2)->key_mode)
13272 return rtx_equal_p (r1, r2);
13275 /* These are the names given by the C++ front-end to vtables, and
13276 vtable-like objects. Ideally, this logic should not be here;
13277 instead, there should be some programmatic way of inquiring as
13278 to whether or not an object is a vtable. */
13280 #define VTABLE_NAME_P(NAME) \
13281 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
13282 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
13283 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
13284 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
13287 rs6000_output_symbol_ref (FILE *file, rtx x)
13289 /* Currently C++ toc references to vtables can be emitted before it
13290 is decided whether the vtable is public or private. If this is
13291 the case, then the linker will eventually complain that there is
13292 a reference to an unknown section. Thus, for vtables only,
13293 we emit the TOC reference to reference the symbol and not the
13295 const char *name = XSTR (x, 0);
13297 if (VTABLE_NAME_P (name))
13299 RS6000_OUTPUT_BASENAME (file, name);
13302 assemble_name (file, name);
13305 /* Output a TOC entry. We derive the entry name from what is being
13309 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
13312 const char *name = buf;
13313 const char *real_name;
13320 /* When the linker won't eliminate them, don't output duplicate
13321 TOC entries (this happens on AIX if there is any kind of TOC,
13322 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
13324 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
13326 struct toc_hash_struct *h;
13329 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
13330 time because GGC is not initialized at that point. */
13331 if (toc_hash_table == NULL)
13332 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
13333 toc_hash_eq, NULL);
13335 h = ggc_alloc (sizeof (*h));
13337 h->key_mode = mode;
13338 h->labelno = labelno;
13340 found = htab_find_slot (toc_hash_table, h, 1);
13341 if (*found == NULL)
13343 else /* This is indeed a duplicate.
13344 Set this label equal to that label. */
13346 fputs ("\t.set ", file);
13347 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13348 fprintf (file, "%d,", labelno);
13349 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
13350 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
13356 /* If we're going to put a double constant in the TOC, make sure it's
13357 aligned properly when strict alignment is on. */
13358 if (GET_CODE (x) == CONST_DOUBLE
13359 && STRICT_ALIGNMENT
13360 && GET_MODE_BITSIZE (mode) >= 64
13361 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
13362 ASM_OUTPUT_ALIGN (file, 3);
13365 (*targetm.asm_out.internal_label) (file, "LC", labelno);
13367 /* Handle FP constants specially. Note that if we have a minimal
13368 TOC, things we put here aren't actually in the TOC, so we can allow
13370 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
13372 REAL_VALUE_TYPE rv;
13375 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13376 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
13380 if (TARGET_MINIMAL_TOC)
13381 fputs (DOUBLE_INT_ASM_OP, file);
13383 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13384 k[0] & 0xffffffff, k[1] & 0xffffffff,
13385 k[2] & 0xffffffff, k[3] & 0xffffffff);
13386 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
13387 k[0] & 0xffffffff, k[1] & 0xffffffff,
13388 k[2] & 0xffffffff, k[3] & 0xffffffff);
13393 if (TARGET_MINIMAL_TOC)
13394 fputs ("\t.long ", file);
13396 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
13397 k[0] & 0xffffffff, k[1] & 0xffffffff,
13398 k[2] & 0xffffffff, k[3] & 0xffffffff);
13399 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
13400 k[0] & 0xffffffff, k[1] & 0xffffffff,
13401 k[2] & 0xffffffff, k[3] & 0xffffffff);
13405 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
13407 REAL_VALUE_TYPE rv;
13410 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13411 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
13415 if (TARGET_MINIMAL_TOC)
13416 fputs (DOUBLE_INT_ASM_OP, file);
13418 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13419 k[0] & 0xffffffff, k[1] & 0xffffffff);
13420 fprintf (file, "0x%lx%08lx\n",
13421 k[0] & 0xffffffff, k[1] & 0xffffffff);
13426 if (TARGET_MINIMAL_TOC)
13427 fputs ("\t.long ", file);
13429 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
13430 k[0] & 0xffffffff, k[1] & 0xffffffff);
13431 fprintf (file, "0x%lx,0x%lx\n",
13432 k[0] & 0xffffffff, k[1] & 0xffffffff);
13436 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
13438 REAL_VALUE_TYPE rv;
13441 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
13442 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
13446 if (TARGET_MINIMAL_TOC)
13447 fputs (DOUBLE_INT_ASM_OP, file);
13449 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13450 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
13455 if (TARGET_MINIMAL_TOC)
13456 fputs ("\t.long ", file);
13458 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
13459 fprintf (file, "0x%lx\n", l & 0xffffffff);
13463 else if (GET_MODE (x) == VOIDmode
13464 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
13466 unsigned HOST_WIDE_INT low;
13467 HOST_WIDE_INT high;
13469 if (GET_CODE (x) == CONST_DOUBLE)
13471 low = CONST_DOUBLE_LOW (x);
13472 high = CONST_DOUBLE_HIGH (x);
13475 #if HOST_BITS_PER_WIDE_INT == 32
13478 high = (low & 0x80000000) ? ~0 : 0;
13482 low = INTVAL (x) & 0xffffffff;
13483 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
13487 /* TOC entries are always Pmode-sized, but since this
13488 is a bigendian machine then if we're putting smaller
13489 integer constants in the TOC we have to pad them.
13490 (This is still a win over putting the constants in
13491 a separate constant pool, because then we'd have
13492 to have both a TOC entry _and_ the actual constant.)
13494 For a 32-bit target, CONST_INT values are loaded and shifted
13495 entirely within `low' and can be stored in one TOC entry. */
13497 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
13498 abort ();/* It would be easy to make this work, but it doesn't now. */
13500 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
13502 #if HOST_BITS_PER_WIDE_INT == 32
13503 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
13504 POINTER_SIZE, &low, &high, 0);
13507 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
13508 high = (HOST_WIDE_INT) low >> 32;
13515 if (TARGET_MINIMAL_TOC)
13516 fputs (DOUBLE_INT_ASM_OP, file);
13518 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13519 (long) high & 0xffffffff, (long) low & 0xffffffff);
13520 fprintf (file, "0x%lx%08lx\n",
13521 (long) high & 0xffffffff, (long) low & 0xffffffff);
13526 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
13528 if (TARGET_MINIMAL_TOC)
13529 fputs ("\t.long ", file);
13531 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
13532 (long) high & 0xffffffff, (long) low & 0xffffffff);
13533 fprintf (file, "0x%lx,0x%lx\n",
13534 (long) high & 0xffffffff, (long) low & 0xffffffff);
13538 if (TARGET_MINIMAL_TOC)
13539 fputs ("\t.long ", file);
13541 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
13542 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
13548 if (GET_CODE (x) == CONST)
13550 if (GET_CODE (XEXP (x, 0)) != PLUS)
13553 base = XEXP (XEXP (x, 0), 0);
13554 offset = INTVAL (XEXP (XEXP (x, 0), 1));
13557 if (GET_CODE (base) == SYMBOL_REF)
13558 name = XSTR (base, 0);
13559 else if (GET_CODE (base) == LABEL_REF)
13560 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
13561 else if (GET_CODE (base) == CODE_LABEL)
13562 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
13566 real_name = (*targetm.strip_name_encoding) (name);
13567 if (TARGET_MINIMAL_TOC)
13568 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
13571 fprintf (file, "\t.tc %s", real_name);
13574 fprintf (file, ".N%d", - offset);
13576 fprintf (file, ".P%d", offset);
13578 fputs ("[TC],", file);
13581 /* Currently C++ toc references to vtables can be emitted before it
13582 is decided whether the vtable is public or private. If this is
13583 the case, then the linker will eventually complain that there is
13584 a TOC reference to an unknown section. Thus, for vtables only,
13585 we emit the TOC reference to reference the symbol and not the
13587 if (VTABLE_NAME_P (name))
13589 RS6000_OUTPUT_BASENAME (file, name);
13591 fprintf (file, "%d", offset);
13592 else if (offset > 0)
13593 fprintf (file, "+%d", offset);
13596 output_addr_const (file, x);
13600 /* Output an assembler pseudo-op to write an ASCII string of N characters
13601 starting at P to FILE.
13603 On the RS/6000, we have to do this using the .byte operation and
13604 write out special characters outside the quoted string.
13605 Also, the assembler is broken; very long strings are truncated,
13606 so we must artificially break them up early. */
13609 output_ascii (FILE *file, const char *p, int n)
13612 int i, count_string;
13613 const char *for_string = "\t.byte \"";
13614 const char *for_decimal = "\t.byte ";
13615 const char *to_close = NULL;
13618 for (i = 0; i < n; i++)
13621 if (c >= ' ' && c < 0177)
13624 fputs (for_string, file);
13627 /* Write two quotes to get one. */
13635 for_decimal = "\"\n\t.byte ";
13639 if (count_string >= 512)
13641 fputs (to_close, file);
13643 for_string = "\t.byte \"";
13644 for_decimal = "\t.byte ";
13652 fputs (for_decimal, file);
13653 fprintf (file, "%d", c);
13655 for_string = "\n\t.byte \"";
13656 for_decimal = ", ";
13662 /* Now close the string if we have written one. Then end the line. */
13664 fputs (to_close, file);
13667 /* Generate a unique section name for FILENAME for a section type
13668 represented by SECTION_DESC. Output goes into BUF.
13670 SECTION_DESC can be any string, as long as it is different for each
13671 possible section type.
13673 We name the section in the same manner as xlc. The name begins with an
13674 underscore followed by the filename (after stripping any leading directory
13675 names) with the last period replaced by the string SECTION_DESC. If
13676 FILENAME does not contain a period, SECTION_DESC is appended to the end of
13680 rs6000_gen_section_name (char **buf, const char *filename,
13681 const char *section_desc)
13683 const char *q, *after_last_slash, *last_period = 0;
13687 after_last_slash = filename;
13688 for (q = filename; *q; q++)
13691 after_last_slash = q + 1;
13692 else if (*q == '.')
13696 len = strlen (after_last_slash) + strlen (section_desc) + 2;
13697 *buf = (char *) xmalloc (len);
13702 for (q = after_last_slash; *q; q++)
13704 if (q == last_period)
13706 strcpy (p, section_desc);
13707 p += strlen (section_desc);
13711 else if (ISALNUM (*q))
13715 if (last_period == 0)
13716 strcpy (p, section_desc);
13721 /* Emit profile function. */
13724 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
13726 if (TARGET_PROFILE_KERNEL)
13729 if (DEFAULT_ABI == ABI_AIX)
13731 #ifndef NO_PROFILE_COUNTERS
13732 # define NO_PROFILE_COUNTERS 0
13734 if (NO_PROFILE_COUNTERS)
13735 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
13739 const char *label_name;
13742 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13743 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
13744 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
13746 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
13750 else if (DEFAULT_ABI == ABI_DARWIN)
13752 const char *mcount_name = RS6000_MCOUNT;
13753 int caller_addr_regno = LINK_REGISTER_REGNUM;
13755 /* Be conservative and always set this, at least for now. */
13756 current_function_uses_pic_offset_table = 1;
13759 /* For PIC code, set up a stub and collect the caller's address
13760 from r0, which is where the prologue puts it. */
13761 if (MACHOPIC_INDIRECT)
13763 mcount_name = machopic_stub_name (mcount_name);
13764 if (current_function_uses_pic_offset_table)
13765 caller_addr_regno = 0;
13768 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
13770 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
13774 /* Write function profiler code. */
13777 output_function_profiler (FILE *file, int labelno)
13782 switch (DEFAULT_ABI)
13791 warning ("no profiling of 64-bit code for this ABI");
13794 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
13795 fprintf (file, "\tmflr %s\n", reg_names[0]);
13798 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
13799 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13800 reg_names[0], save_lr, reg_names[1]);
13801 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
13802 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
13803 assemble_name (file, buf);
13804 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
13806 else if (flag_pic > 1)
13808 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13809 reg_names[0], save_lr, reg_names[1]);
13810 /* Now, we need to get the address of the label. */
13811 fputs ("\tbl 1f\n\t.long ", file);
13812 assemble_name (file, buf);
13813 fputs ("-.\n1:", file);
13814 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13815 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13816 reg_names[0], reg_names[11]);
13817 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13818 reg_names[0], reg_names[0], reg_names[11]);
13822 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13823 assemble_name (file, buf);
13824 fputs ("@ha\n", file);
13825 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13826 reg_names[0], save_lr, reg_names[1]);
13827 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13828 assemble_name (file, buf);
13829 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13832 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13833 fprintf (file, "\tbl %s%s\n",
13834 RS6000_MCOUNT, flag_pic ? "@plt" : "");
13839 if (!TARGET_PROFILE_KERNEL)
13841 /* Don't do anything, done in output_profile_hook (). */
13848 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13849 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13851 if (current_function_needs_context)
13853 asm_fprintf (file, "\tstd %s,24(%s)\n",
13854 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13855 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13856 asm_fprintf (file, "\tld %s,24(%s)\n",
13857 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13860 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13868 rs6000_use_dfa_pipeline_interface (void)
13873 /* Power4 load update and store update instructions are cracked into a
13874 load or store and an integer insn which are executed in the same cycle.
13875 Branches have their own dispatch slot which does not count against the
13876 GCC issue rate, but it changes the program flow so there are no other
13877 instructions to issue in this cycle. */
13880 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13881 int verbose ATTRIBUTE_UNUSED,
13882 rtx insn, int more)
13884 if (GET_CODE (PATTERN (insn)) == USE
13885 || GET_CODE (PATTERN (insn)) == CLOBBER)
13888 if (rs6000_cpu == PROCESSOR_POWER4)
13890 if (is_microcoded_insn (insn))
13892 else if (is_cracked_insn (insn))
13893 return more > 2 ? more - 2 : 0;
13899 /* Adjust the cost of a scheduling dependency. Return the new cost of
13900 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13903 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13906 if (! recog_memoized (insn))
13909 if (REG_NOTE_KIND (link) != 0)
13912 if (REG_NOTE_KIND (link) == 0)
13914 /* Data dependency; DEP_INSN writes a register that INSN reads
13915 some cycles later. */
13916 switch (get_attr_type (insn))
13919 /* Tell the first scheduling pass about the latency between
13920 a mtctr and bctr (and mtlr and br/blr). The first
13921 scheduling pass will not know about this latency since
13922 the mtctr instruction, which has the latency associated
13923 to it, will be generated by reload. */
13924 return TARGET_POWER ? 5 : 4;
13926 /* Leave some extra cycles between a compare and its
13927 dependent branch, to inhibit expensive mispredicts. */
13928 if ((rs6000_cpu_attr == CPU_PPC603
13929 || rs6000_cpu_attr == CPU_PPC604
13930 || rs6000_cpu_attr == CPU_PPC604E
13931 || rs6000_cpu_attr == CPU_PPC620
13932 || rs6000_cpu_attr == CPU_PPC630
13933 || rs6000_cpu_attr == CPU_PPC750
13934 || rs6000_cpu_attr == CPU_PPC7400
13935 || rs6000_cpu_attr == CPU_PPC7450
13936 || rs6000_cpu_attr == CPU_POWER4)
13937 && recog_memoized (dep_insn)
13938 && (INSN_CODE (dep_insn) >= 0)
13939 && (get_attr_type (dep_insn) == TYPE_CMP
13940 || get_attr_type (dep_insn) == TYPE_COMPARE
13941 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13942 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13943 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13944 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13945 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13946 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13951 /* Fall out to return default cost. */
13957 /* The function returns a true if INSN is microcoded.
13958 Return false otherwise. */
13961 is_microcoded_insn (rtx insn)
13963 if (!insn || !INSN_P (insn)
13964 || GET_CODE (PATTERN (insn)) == USE
13965 || GET_CODE (PATTERN (insn)) == CLOBBER)
13968 if (rs6000_cpu == PROCESSOR_POWER4)
13970 enum attr_type type = get_attr_type (insn);
13971 if (type == TYPE_LOAD_EXT_U
13972 || type == TYPE_LOAD_EXT_UX
13973 || type == TYPE_LOAD_UX
13974 || type == TYPE_STORE_UX
13975 || type == TYPE_MFCR)
13982 /* The function returns a nonzero value if INSN can be scheduled only
13983 as the first insn in a dispatch group ("dispatch-slot restricted").
13984 In this case, the returned value indicates how many dispatch slots
13985 the insn occupies (at the beginning of the group).
13986 Return 0 otherwise. */
13989 is_dispatch_slot_restricted (rtx insn)
13991 enum attr_type type;
13993 if (rs6000_cpu != PROCESSOR_POWER4)
13997 || insn == NULL_RTX
13998 || GET_CODE (insn) == NOTE
13999 || GET_CODE (PATTERN (insn)) == USE
14000 || GET_CODE (PATTERN (insn)) == CLOBBER)
14003 type = get_attr_type (insn);
14009 case TYPE_DELAYED_CR:
14010 case TYPE_CR_LOGICAL:
14022 /* The function returns true if INSN is cracked into 2 instructions
14023 by the processor (and therefore occupies 2 issue slots). */
14026 is_cracked_insn (rtx insn)
14028 if (!insn || !INSN_P (insn)
14029 || GET_CODE (PATTERN (insn)) == USE
14030 || GET_CODE (PATTERN (insn)) == CLOBBER)
14033 if (rs6000_cpu == PROCESSOR_POWER4)
14035 enum attr_type type = get_attr_type (insn);
14036 if (type == TYPE_LOAD_U || type == TYPE_STORE_U
14037 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
14038 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
14039 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
14040 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
14041 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
14042 || type == TYPE_IDIV || type == TYPE_LDIV
14043 || type == TYPE_INSERT_WORD)
14050 /* The function returns true if INSN can be issued only from
14051 the branch slot. */
14054 is_branch_slot_insn (rtx insn)
14056 if (!insn || !INSN_P (insn)
14057 || GET_CODE (PATTERN (insn)) == USE
14058 || GET_CODE (PATTERN (insn)) == CLOBBER)
14061 if (rs6000_cpu == PROCESSOR_POWER4)
14063 enum attr_type type = get_attr_type (insn);
14064 if (type == TYPE_BRANCH || type == TYPE_JMPREG)
14072 /* A C statement (sans semicolon) to update the integer scheduling
14073 priority INSN_PRIORITY (INSN). Increase the priority to execute the
14074 INSN earlier, reduce the priority to execute INSN later. Do not
14075 define this macro if you do not need to adjust the scheduling
14076 priorities of insns. */
14079 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
14081 /* On machines (like the 750) which have asymmetric integer units,
14082 where one integer unit can do multiply and divides and the other
14083 can't, reduce the priority of multiply/divide so it is scheduled
14084 before other integer operations. */
14087 if (! INSN_P (insn))
14090 if (GET_CODE (PATTERN (insn)) == USE)
14093 switch (rs6000_cpu_attr) {
14095 switch (get_attr_type (insn))
14102 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
14103 priority, priority);
14104 if (priority >= 0 && priority < 0x01000000)
14111 if (is_dispatch_slot_restricted (insn)
14112 && reload_completed
14113 && current_sched_info->sched_max_insns_priority
14114 && rs6000_sched_restricted_insns_priority)
14117 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
14118 if (rs6000_sched_restricted_insns_priority == 1)
14119 /* Attach highest priority to insn. This means that in
14120 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
14121 precede 'priority' (critical path) considerations. */
14122 return current_sched_info->sched_max_insns_priority;
14123 else if (rs6000_sched_restricted_insns_priority == 2)
14124 /* Increase priority of insn by a minimal amount. This means that in
14125 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
14126 precede dispatch-slot restriction considerations. */
14127 return (priority + 1);
14133 /* Return how many instructions the machine can issue per cycle. */
14136 rs6000_issue_rate (void)
14138 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
14139 if (!reload_completed)
14142 switch (rs6000_cpu_attr) {
14143 case CPU_RIOS1: /* ? */
14145 case CPU_PPC601: /* ? */
14167 /* Return how many instructions to look ahead for better insn
14171 rs6000_use_sched_lookahead (void)
14173 if (rs6000_cpu_attr == CPU_PPC8540)
14178 /* Determine is PAT refers to memory. */
14181 is_mem_ref (rtx pat)
14187 if (GET_CODE (pat) == MEM)
14190 /* Recursively process the pattern. */
14191 fmt = GET_RTX_FORMAT (GET_CODE (pat));
14193 for (i = GET_RTX_LENGTH (GET_CODE (pat)) - 1; i >= 0 && !ret; i--)
14196 ret |= is_mem_ref (XEXP (pat, i));
14197 else if (fmt[i] == 'E')
14198 for (j = XVECLEN (pat, i) - 1; j >= 0; j--)
14199 ret |= is_mem_ref (XVECEXP (pat, i, j));
14205 /* Determine if PAT is a PATTERN of a load insn. */
14208 is_load_insn1 (rtx pat)
14210 if (!pat || pat == NULL_RTX)
14213 if (GET_CODE (pat) == SET)
14214 return is_mem_ref (SET_SRC (pat));
14216 if (GET_CODE (pat) == PARALLEL)
14220 for (i = 0; i < XVECLEN (pat, 0); i++)
14221 if (is_load_insn1 (XVECEXP (pat, 0, i)))
14228 /* Determine if INSN loads from memory. */
14231 is_load_insn (rtx insn)
14233 if (!insn || !INSN_P (insn))
14236 if (GET_CODE (insn) == CALL_INSN)
14239 return is_load_insn1 (PATTERN (insn));
14242 /* Determine if PAT is a PATTERN of a store insn. */
14245 is_store_insn1 (rtx pat)
14247 if (!pat || pat == NULL_RTX)
14250 if (GET_CODE (pat) == SET)
14251 return is_mem_ref (SET_DEST (pat));
14253 if (GET_CODE (pat) == PARALLEL)
14257 for (i = 0; i < XVECLEN (pat, 0); i++)
14258 if (is_store_insn1 (XVECEXP (pat, 0, i)))
14265 /* Determine if INSN stores to memory. */
14268 is_store_insn (rtx insn)
14270 if (!insn || !INSN_P (insn))
14273 return is_store_insn1 (PATTERN (insn));
14276 /* Returns whether the dependence between INSN and NEXT is considered
14277 costly by the given target. */
14280 rs6000_is_costly_dependence (rtx insn, rtx next, rtx link, int cost, int distance)
14282 /* If the flag is not enbled - no dependence is considered costly;
14283 allow all dependent insns in the same group.
14284 This is the most aggressive option. */
14285 if (rs6000_sched_costly_dep == no_dep_costly)
14288 /* If the flag is set to 1 - a dependence is always considered costly;
14289 do not allow dependent instructions in the same group.
14290 This is the most conservative option. */
14291 if (rs6000_sched_costly_dep == all_deps_costly)
14294 if (rs6000_sched_costly_dep == store_to_load_dep_costly
14295 && is_load_insn (next)
14296 && is_store_insn (insn))
14297 /* Prevent load after store in the same group. */
14300 if (rs6000_sched_costly_dep == true_store_to_load_dep_costly
14301 && is_load_insn (next)
14302 && is_store_insn (insn)
14303 && (!link || (int) REG_NOTE_KIND (link) == 0))
14304 /* Prevent load after store in the same group if it is a true dependence. */
14307 /* The flag is set to X; dependences with latency >= X are considered costly,
14308 and will not be scheduled in the same group. */
14309 if (rs6000_sched_costly_dep <= max_dep_latency
14310 && ((cost - distance) >= (int)rs6000_sched_costly_dep))
14316 /* Return the next insn after INSN that is found before TAIL is reached,
14317 skipping any "non-active" insns - insns that will not actually occupy
14318 an issue slot. Return NULL_RTX if such an insn is not found. */
14321 get_next_active_insn (rtx insn, rtx tail)
14325 if (!insn || insn == tail)
14328 next_insn = NEXT_INSN (insn);
14331 && next_insn != tail
14332 && (GET_CODE(next_insn) == NOTE
14333 || GET_CODE (PATTERN (next_insn)) == USE
14334 || GET_CODE (PATTERN (next_insn)) == CLOBBER))
14336 next_insn = NEXT_INSN (next_insn);
14339 if (!next_insn || next_insn == tail)
14345 /* Return whether the presence of INSN causes a dispatch group termination
14346 of group WHICH_GROUP.
14348 If WHICH_GROUP == current_group, this function will return true if INSN
14349 causes the termination of the current group (i.e, the dispatch group to
14350 which INSN belongs). This means that INSN will be the last insn in the
14351 group it belongs to.
14353 If WHICH_GROUP == previous_group, this function will return true if INSN
14354 causes the termination of the previous group (i.e, the dispatch group that
14355 precedes the group to which INSN belongs). This means that INSN will be
14356 the first insn in the group it belongs to). */
14359 insn_terminates_group_p (rtx insn, enum group_termination which_group)
14361 enum attr_type type;
14366 type = get_attr_type (insn);
14368 if (is_microcoded_insn (insn))
14371 if (which_group == current_group)
14373 if (is_branch_slot_insn (insn))
14377 else if (which_group == previous_group)
14379 if (is_dispatch_slot_restricted (insn))
14387 /* Return true if it is recommended to keep NEXT_INSN "far" (in a separate
14388 dispatch group) from the insns in GROUP_INSNS. Return false otherwise. */
14391 is_costly_group (rtx *group_insns, rtx next_insn)
14396 int issue_rate = rs6000_issue_rate ();
14398 for (i = 0; i < issue_rate; i++)
14400 rtx insn = group_insns[i];
14403 for (link = INSN_DEPEND (insn); link != 0; link = XEXP (link, 1))
14405 rtx next = XEXP (link, 0);
14406 if (next == next_insn)
14408 cost = insn_cost (insn, link, next_insn);
14409 if (rs6000_is_costly_dependence (insn, next_insn, link, cost, 0))
14418 /* Utility of the function redefine_groups.
14419 Check if it is too costly to schedule NEXT_INSN together with GROUP_INSNS
14420 in the same dispatch group. If so, insert nops before NEXT_INSN, in order
14421 to keep it "far" (in a separate group) from GROUP_INSNS, following
14422 one of the following schemes, depending on the value of the flag
14423 -minsert_sched_nops = X:
14424 (1) X == sched_finish_regroup_exact: insert exactly as many nops as needed
14425 in order to force NEXT_INSN into a separate group.
14426 (2) X < sched_finish_regroup_exact: insert exactly X nops.
14427 GROUP_END, CAN_ISSUE_MORE and GROUP_COUNT record the state after nop
14428 insertion (has a group just ended, how many vacant issue slots remain in the
14429 last group, and how many dispatch groups were encountered so far). */
14432 force_new_group (int sched_verbose, FILE *dump, rtx *group_insns, rtx next_insn,
14433 bool *group_end, int can_issue_more, int *group_count)
14437 int issue_rate = rs6000_issue_rate ();
14438 bool end = *group_end;
14441 if (next_insn == NULL_RTX)
14442 return can_issue_more;
14444 if (rs6000_sched_insert_nops > sched_finish_regroup_exact)
14445 return can_issue_more;
14447 force = is_costly_group (group_insns, next_insn);
14449 return can_issue_more;
14451 if (sched_verbose > 6)
14452 fprintf (dump,"force: group count = %d, can_issue_more = %d\n",
14453 *group_count ,can_issue_more);
14455 if (rs6000_sched_insert_nops == sched_finish_regroup_exact)
14458 can_issue_more = 0;
14460 /* Since only a branch can be issued in the last issue_slot, it is
14461 sufficient to insert 'can_issue_more - 1' nops if next_insn is not
14462 a branch. If next_insn is a branch, we insert 'can_issue_more' nops;
14463 in this case the last nop will start a new group and the branch will be
14464 forced to the new group. */
14465 if (can_issue_more && !is_branch_slot_insn (next_insn))
14468 while (can_issue_more > 0)
14471 emit_insn_before (nop, next_insn);
14479 if (rs6000_sched_insert_nops < sched_finish_regroup_exact)
14481 int n_nops = rs6000_sched_insert_nops;
14483 /* Nops can't be issued from the branch slot, so the effective
14484 issue_rate for nops is 'issue_rate - 1'. */
14485 if (can_issue_more == 0)
14486 can_issue_more = issue_rate;
14488 if (can_issue_more == 0)
14490 can_issue_more = issue_rate - 1;
14493 for (i = 0; i < issue_rate; i++)
14495 group_insns[i] = 0;
14502 emit_insn_before (nop, next_insn);
14503 if (can_issue_more == issue_rate - 1) /* new group begins */
14506 if (can_issue_more == 0)
14508 can_issue_more = issue_rate - 1;
14511 for (i = 0; i < issue_rate; i++)
14513 group_insns[i] = 0;
14519 /* Scale back relative to 'issue_rate' (instead of 'issue_rate - 1'). */
14522 *group_end = /* Is next_insn going to start a new group? */
14524 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14525 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14526 || (can_issue_more < issue_rate &&
14527 insn_terminates_group_p (next_insn, previous_group)));
14528 if (*group_end && end)
14531 if (sched_verbose > 6)
14532 fprintf (dump, "done force: group count = %d, can_issue_more = %d\n",
14533 *group_count, can_issue_more);
14534 return can_issue_more;
14537 return can_issue_more;
14540 /* This function tries to synch the dispatch groups that the compiler "sees"
14541 with the dispatch groups that the processor dispatcher is expected to
14542 form in practice. It tries to achieve this synchronization by forcing the
14543 estimated processor grouping on the compiler (as opposed to the function
14544 'pad_goups' which tries to force the scheduler's grouping on the processor).
14546 The function scans the insn sequence between PREV_HEAD_INSN and TAIL and
14547 examines the (estimated) dispatch groups that will be formed by the processor
14548 dispatcher. It marks these group boundaries to reflect the estimated
14549 processor grouping, overriding the grouping that the scheduler had marked.
14550 Depending on the value of the flag '-minsert-sched-nops' this function can
14551 force certain insns into separate groups or force a certain distance between
14552 them by inserting nops, for example, if there exists a "costly dependence"
14555 The function estimates the group boundaries that the processor will form as
14556 folllows: It keeps track of how many vacant issue slots are available after
14557 each insn. A subsequent insn will start a new group if one of the following
14559 - no more vacant issue slots remain in the current dispatch group.
14560 - only the last issue slot, which is the branch slot, is vacant, but the next
14561 insn is not a branch.
14562 - only the last 2 or less issue slots, including the branch slot, are vacant,
14563 which means that a cracked insn (which occupies two issue slots) can't be
14564 issued in this group.
14565 - less than 'issue_rate' slots are vacant, and the next insn always needs to
14566 start a new group. */
14569 redefine_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14571 rtx insn, next_insn;
14573 int can_issue_more;
14576 int group_count = 0;
14580 issue_rate = rs6000_issue_rate ();
14581 group_insns = alloca (issue_rate * sizeof (rtx));
14582 for (i = 0; i < issue_rate; i++)
14584 group_insns[i] = 0;
14586 can_issue_more = issue_rate;
14588 insn = get_next_active_insn (prev_head_insn, tail);
14591 while (insn != NULL_RTX)
14593 slot = (issue_rate - can_issue_more);
14594 group_insns[slot] = insn;
14596 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14597 if (insn_terminates_group_p (insn, current_group))
14598 can_issue_more = 0;
14600 next_insn = get_next_active_insn (insn, tail);
14601 if (next_insn == NULL_RTX)
14602 return group_count + 1;
14604 group_end = /* Is next_insn going to start a new group? */
14605 (can_issue_more == 0
14606 || (can_issue_more == 1 && !is_branch_slot_insn (next_insn))
14607 || (can_issue_more <= 2 && is_cracked_insn (next_insn))
14608 || (can_issue_more < issue_rate &&
14609 insn_terminates_group_p (next_insn, previous_group)));
14611 can_issue_more = force_new_group (sched_verbose, dump, group_insns,
14612 next_insn, &group_end, can_issue_more, &group_count);
14617 can_issue_more = 0;
14618 for (i = 0; i < issue_rate; i++)
14620 group_insns[i] = 0;
14624 if (GET_MODE (next_insn) == TImode && can_issue_more)
14625 PUT_MODE(next_insn, VOIDmode);
14626 else if (!can_issue_more && GET_MODE (next_insn) != TImode)
14627 PUT_MODE (next_insn, TImode);
14630 if (can_issue_more == 0)
14631 can_issue_more = issue_rate;
14634 return group_count;
14637 /* Scan the insn sequence between PREV_HEAD_INSN and TAIL and examine the
14638 dispatch group boundaries that the scheduler had marked. Pad with nops
14639 any dispatch groups which have vacant issue slots, in order to force the
14640 scheduler's grouping on the processor dispatcher. The function
14641 returns the number of dispatch groups found. */
14644 pad_groups (FILE *dump, int sched_verbose, rtx prev_head_insn, rtx tail)
14646 rtx insn, next_insn;
14649 int can_issue_more;
14651 int group_count = 0;
14653 /* Initialize issue_rate. */
14654 issue_rate = rs6000_issue_rate ();
14655 can_issue_more = issue_rate;
14657 insn = get_next_active_insn (prev_head_insn, tail);
14658 next_insn = get_next_active_insn (insn, tail);
14660 while (insn != NULL_RTX)
14663 rs6000_variable_issue (dump, sched_verbose, insn, can_issue_more);
14665 group_end = (next_insn == NULL_RTX || GET_MODE (next_insn) == TImode);
14667 if (next_insn == NULL_RTX)
14672 /* If the scheduler had marked group termination at this location
14673 (between insn and next_indn), and neither insn nor next_insn will
14674 force group termination, pad the group with nops to force group
14677 && (rs6000_sched_insert_nops == sched_finish_pad_groups)
14678 && !insn_terminates_group_p (insn, current_group)
14679 && !insn_terminates_group_p (next_insn, previous_group))
14681 if (!is_branch_slot_insn(next_insn))
14684 while (can_issue_more)
14687 emit_insn_before (nop, next_insn);
14692 can_issue_more = issue_rate;
14697 next_insn = get_next_active_insn (insn, tail);
14700 return group_count;
14703 /* The following function is called at the end of scheduling BB.
14704 After reload, it inserts nops at insn group bundling. */
14707 rs6000_sched_finish (FILE *dump, int sched_verbose)
14712 fprintf (dump, "=== Finishing schedule.\n");
14714 if (reload_completed && rs6000_cpu == PROCESSOR_POWER4)
14716 if (rs6000_sched_insert_nops == sched_finish_none)
14719 if (rs6000_sched_insert_nops == sched_finish_pad_groups)
14720 n_groups = pad_groups (dump, sched_verbose,
14721 current_sched_info->prev_head,
14722 current_sched_info->next_tail);
14724 n_groups = redefine_groups (dump, sched_verbose,
14725 current_sched_info->prev_head,
14726 current_sched_info->next_tail);
14728 if (sched_verbose >= 6)
14730 fprintf (dump, "ngroups = %d\n", n_groups);
14731 print_rtl (dump, current_sched_info->prev_head);
14732 fprintf (dump, "Done finish_sched\n");
14737 /* Length in units of the trampoline for entering a nested function. */
14740 rs6000_trampoline_size (void)
14744 switch (DEFAULT_ABI)
14750 ret = (TARGET_32BIT) ? 12 : 24;
14755 ret = (TARGET_32BIT) ? 40 : 48;
14762 /* Emit RTL insns to initialize the variable parts of a trampoline.
14763 FNADDR is an RTX for the address of the function's pure code.
14764 CXT is an RTX for the static chain value for the function. */
14767 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
14769 enum machine_mode pmode = Pmode;
14770 int regsize = (TARGET_32BIT) ? 4 : 8;
14771 rtx ctx_reg = force_reg (pmode, cxt);
14773 switch (DEFAULT_ABI)
14778 /* Macros to shorten the code expansions below. */
14779 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
14780 #define MEM_PLUS(addr,offset) \
14781 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
14783 /* Under AIX, just build the 3 word function descriptor */
14786 rtx fn_reg = gen_reg_rtx (pmode);
14787 rtx toc_reg = gen_reg_rtx (pmode);
14788 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
14789 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
14790 emit_move_insn (MEM_DEREF (addr), fn_reg);
14791 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
14792 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
14796 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
14799 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
14800 FALSE, VOIDmode, 4,
14802 GEN_INT (rs6000_trampoline_size ()), SImode,
14812 /* Table of valid machine attributes. */
14814 const struct attribute_spec rs6000_attribute_table[] =
14816 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
14817 { "altivec", 1, 1, false, true, false, rs6000_handle_altivec_attribute },
14818 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14819 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
14820 { NULL, 0, 0, false, false, false, NULL }
14823 /* Handle the "altivec" attribute. The attribute may have
14824 arguments as follows:
14826 __attribute__((altivec(vector__)))
14827 __attribute__((altivec(pixel__))) (always followed by 'unsigned short')
14828 __attribute__((altivec(bool__))) (always followed by 'unsigned')
14830 and may appear more than once (e.g., 'vector bool char') in a
14831 given declaration. */
14834 rs6000_handle_altivec_attribute (tree *node, tree name, tree args,
14835 int flags ATTRIBUTE_UNUSED,
14836 bool *no_add_attrs)
14838 tree type = *node, result = NULL_TREE;
14839 enum machine_mode mode;
14842 = ((args && TREE_CODE (args) == TREE_LIST && TREE_VALUE (args)
14843 && TREE_CODE (TREE_VALUE (args)) == IDENTIFIER_NODE)
14844 ? *IDENTIFIER_POINTER (TREE_VALUE (args))
14847 while (POINTER_TYPE_P (type)
14848 || TREE_CODE (type) == FUNCTION_TYPE
14849 || TREE_CODE (type) == METHOD_TYPE
14850 || TREE_CODE (type) == ARRAY_TYPE)
14851 type = TREE_TYPE (type);
14853 mode = TYPE_MODE (type);
14855 if (rs6000_warn_altivec_long
14856 && (type == long_unsigned_type_node || type == long_integer_type_node))
14857 warning ("use of 'long' in AltiVec types is deprecated; use 'int'");
14859 switch (altivec_type)
14862 unsigned_p = TREE_UNSIGNED (type);
14866 result = (unsigned_p ? unsigned_V4SI_type_node : V4SI_type_node);
14869 result = (unsigned_p ? unsigned_V8HI_type_node : V8HI_type_node);
14872 result = (unsigned_p ? unsigned_V16QI_type_node : V16QI_type_node);
14874 case SFmode: result = V4SF_type_node; break;
14875 /* If the user says 'vector int bool', we may be handed the 'bool'
14876 attribute _before_ the 'vector' attribute, and so select the proper
14877 type in the 'b' case below. */
14878 case V4SImode: case V8HImode: case V16QImode: result = type;
14885 case SImode: case V4SImode: result = bool_V4SI_type_node; break;
14886 case HImode: case V8HImode: result = bool_V8HI_type_node; break;
14887 case QImode: case V16QImode: result = bool_V16QI_type_node;
14894 case V8HImode: result = pixel_V8HI_type_node;
14900 *no_add_attrs = true; /* No need to hang on to the attribute. */
14903 warning ("`%s' attribute ignored", IDENTIFIER_POINTER (name));
14905 *node = reconstruct_complex_type (*node, result);
14910 /* AltiVec defines four built-in scalar types that serve as vector
14911 elements; we must teach the compiler how to mangle them. */
14913 static const char *
14914 rs6000_mangle_fundamental_type (tree type)
14916 if (type == bool_char_type_node) return "U6__boolc";
14917 if (type == bool_short_type_node) return "U6__bools";
14918 if (type == pixel_type_node) return "u7__pixel";
14919 if (type == bool_int_type_node) return "U6__booli";
14921 /* For all other types, use normal C++ mangling. */
14925 /* Handle a "longcall" or "shortcall" attribute; arguments as in
14926 struct attribute_spec.handler. */
14929 rs6000_handle_longcall_attribute (tree *node, tree name,
14930 tree args ATTRIBUTE_UNUSED,
14931 int flags ATTRIBUTE_UNUSED,
14932 bool *no_add_attrs)
14934 if (TREE_CODE (*node) != FUNCTION_TYPE
14935 && TREE_CODE (*node) != FIELD_DECL
14936 && TREE_CODE (*node) != TYPE_DECL)
14938 warning ("`%s' attribute only applies to functions",
14939 IDENTIFIER_POINTER (name));
14940 *no_add_attrs = true;
14946 /* Set longcall attributes on all functions declared when
14947 rs6000_default_long_calls is true. */
14949 rs6000_set_default_type_attributes (tree type)
14951 if (rs6000_default_long_calls
14952 && (TREE_CODE (type) == FUNCTION_TYPE
14953 || TREE_CODE (type) == METHOD_TYPE))
14954 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
14956 TYPE_ATTRIBUTES (type));
14959 /* Return a reference suitable for calling a function with the
14960 longcall attribute. */
14963 rs6000_longcall_ref (rtx call_ref)
14965 const char *call_name;
14968 if (GET_CODE (call_ref) != SYMBOL_REF)
14971 /* System V adds '.' to the internal name, so skip them. */
14972 call_name = XSTR (call_ref, 0);
14973 if (*call_name == '.')
14975 while (*call_name == '.')
14978 node = get_identifier (call_name);
14979 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
14982 return force_reg (Pmode, call_ref);
14985 #ifdef USING_ELFOS_H
14987 /* A C statement or statements to switch to the appropriate section
14988 for output of RTX in mode MODE. You can assume that RTX is some
14989 kind of constant in RTL. The argument MODE is redundant except in
14990 the case of a `const_int' rtx. Select the section by calling
14991 `text_section' or one of the alternatives for other sections.
14993 Do not define this macro if you put all constants in the read-only
14997 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
14998 unsigned HOST_WIDE_INT align)
15000 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15003 default_elf_select_rtx_section (mode, x, align);
15006 /* A C statement or statements to switch to the appropriate
15007 section for output of DECL. DECL is either a `VAR_DECL' node
15008 or a constant of some sort. RELOC indicates whether forming
15009 the initial value of DECL requires link-time relocations. */
15012 rs6000_elf_select_section (tree decl, int reloc,
15013 unsigned HOST_WIDE_INT align)
15015 /* Pretend that we're always building for a shared library when
15016 ABI_AIX, because otherwise we end up with dynamic relocations
15017 in read-only sections. This happens for function pointers,
15018 references to vtables in typeinfo, and probably other cases. */
15019 default_elf_select_section_1 (decl, reloc, align,
15020 flag_pic || DEFAULT_ABI == ABI_AIX);
15023 /* A C statement to build up a unique section name, expressed as a
15024 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
15025 RELOC indicates whether the initial value of EXP requires
15026 link-time relocations. If you do not define this macro, GCC will use
15027 the symbol name prefixed by `.' as the section name. Note - this
15028 macro can now be called for uninitialized data items as well as
15029 initialized data and functions. */
15032 rs6000_elf_unique_section (tree decl, int reloc)
15034 /* As above, pretend that we're always building for a shared library
15035 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
15036 default_unique_section_1 (decl, reloc,
15037 flag_pic || DEFAULT_ABI == ABI_AIX);
15040 /* For a SYMBOL_REF, set generic flags and then perform some
15041 target-specific processing.
15043 When the AIX ABI is requested on a non-AIX system, replace the
15044 function name with the real name (with a leading .) rather than the
15045 function descriptor name. This saves a lot of overriding code to
15046 read the prefixes. */
15049 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
15051 default_encode_section_info (decl, rtl, first);
15054 && TREE_CODE (decl) == FUNCTION_DECL
15056 && DEFAULT_ABI == ABI_AIX)
15058 rtx sym_ref = XEXP (rtl, 0);
15059 size_t len = strlen (XSTR (sym_ref, 0));
15060 char *str = alloca (len + 2);
15062 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
15063 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
15068 rs6000_elf_in_small_data_p (tree decl)
15070 if (rs6000_sdata == SDATA_NONE)
15073 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
15075 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
15076 if (strcmp (section, ".sdata") == 0
15077 || strcmp (section, ".sdata2") == 0
15078 || strcmp (section, ".sbss") == 0
15079 || strcmp (section, ".sbss2") == 0
15080 || strcmp (section, ".PPC.EMB.sdata0") == 0
15081 || strcmp (section, ".PPC.EMB.sbss0") == 0)
15086 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
15089 && (unsigned HOST_WIDE_INT) size <= g_switch_value
15090 /* If it's not public, and we're not going to reference it there,
15091 there's no need to put it in the small data section. */
15092 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
15099 #endif /* USING_ELFOS_H */
15102 /* Return a REG that occurs in ADDR with coefficient 1.
15103 ADDR can be effectively incremented by incrementing REG.
15105 r0 is special and we must not select it as an address
15106 register by this routine since our caller will try to
15107 increment the returned register via an "la" instruction. */
15110 find_addr_reg (rtx addr)
15112 while (GET_CODE (addr) == PLUS)
15114 if (GET_CODE (XEXP (addr, 0)) == REG
15115 && REGNO (XEXP (addr, 0)) != 0)
15116 addr = XEXP (addr, 0);
15117 else if (GET_CODE (XEXP (addr, 1)) == REG
15118 && REGNO (XEXP (addr, 1)) != 0)
15119 addr = XEXP (addr, 1);
15120 else if (CONSTANT_P (XEXP (addr, 0)))
15121 addr = XEXP (addr, 1);
15122 else if (CONSTANT_P (XEXP (addr, 1)))
15123 addr = XEXP (addr, 0);
15127 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
15133 rs6000_fatal_bad_address (rtx op)
15135 fatal_insn ("bad address", op);
15141 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
15142 reference and a constant. */
15145 symbolic_operand (rtx op)
15147 switch (GET_CODE (op))
15154 return (GET_CODE (op) == SYMBOL_REF ||
15155 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
15156 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
15157 && GET_CODE (XEXP (op, 1)) == CONST_INT);
15166 static tree branch_island_list = 0;
15168 /* Remember to generate a branch island for far calls to the given
15172 add_compiler_branch_island (tree label_name, tree function_name, int line_number)
15174 tree branch_island = build_tree_list (function_name, label_name);
15175 TREE_TYPE (branch_island) = build_int_2 (line_number, 0);
15176 TREE_CHAIN (branch_island) = branch_island_list;
15177 branch_island_list = branch_island;
15180 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND)
15181 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND)
15182 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \
15183 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND))
15185 /* Generate far-jump branch islands for everything on the
15186 branch_island_list. Invoked immediately after the last instruction
15187 of the epilogue has been emitted; the branch-islands must be
15188 appended to, and contiguous with, the function body. Mach-O stubs
15189 are generated in machopic_output_stub(). */
15192 macho_branch_islands (void)
15195 tree branch_island;
15197 for (branch_island = branch_island_list;
15199 branch_island = TREE_CHAIN (branch_island))
15201 const char *label =
15202 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
15204 darwin_strip_name_encoding (
15205 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island)));
15206 char name_buf[512];
15207 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
15208 if (name[0] == '*' || name[0] == '&')
15209 strcpy (name_buf, name+1);
15213 strcpy (name_buf+1, name);
15215 strcpy (tmp_buf, "\n");
15216 strcat (tmp_buf, label);
15217 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15218 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15219 fprintf (asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15220 BRANCH_ISLAND_LINE_NUMBER(branch_island));
15221 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15224 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
15225 strcat (tmp_buf, label);
15226 strcat (tmp_buf, "_pic\n");
15227 strcat (tmp_buf, label);
15228 strcat (tmp_buf, "_pic:\n\tmflr r11\n");
15230 strcat (tmp_buf, "\taddis r11,r11,ha16(");
15231 strcat (tmp_buf, name_buf);
15232 strcat (tmp_buf, " - ");
15233 strcat (tmp_buf, label);
15234 strcat (tmp_buf, "_pic)\n");
15236 strcat (tmp_buf, "\tmtlr r0\n");
15238 strcat (tmp_buf, "\taddi r12,r11,lo16(");
15239 strcat (tmp_buf, name_buf);
15240 strcat (tmp_buf, " - ");
15241 strcat (tmp_buf, label);
15242 strcat (tmp_buf, "_pic)\n");
15244 strcat (tmp_buf, "\tmtctr r12\n\tbctr\n");
15248 strcat (tmp_buf, ":\nlis r12,hi16(");
15249 strcat (tmp_buf, name_buf);
15250 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
15251 strcat (tmp_buf, name_buf);
15252 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
15254 output_asm_insn (tmp_buf, 0);
15255 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
15256 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
15257 fprintf(asm_out_file, "\t.stabd 68,0," HOST_WIDE_INT_PRINT_UNSIGNED "\n",
15258 BRANCH_ISLAND_LINE_NUMBER (branch_island));
15259 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
15262 branch_island_list = 0;
15265 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
15266 already there or not. */
15269 no_previous_def (tree function_name)
15271 tree branch_island;
15272 for (branch_island = branch_island_list;
15274 branch_island = TREE_CHAIN (branch_island))
15275 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15280 /* GET_PREV_LABEL gets the label name from the previous definition of
15284 get_prev_label (tree function_name)
15286 tree branch_island;
15287 for (branch_island = branch_island_list;
15289 branch_island = TREE_CHAIN (branch_island))
15290 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island))
15291 return BRANCH_ISLAND_LABEL_NAME (branch_island);
15295 /* INSN is either a function call or a millicode call. It may have an
15296 unconditional jump in its delay slot.
15298 CALL_DEST is the routine we are calling. */
15301 output_call (rtx insn, rtx *operands, int dest_operand_number, int cookie_operand_number)
15303 static char buf[256];
15304 if (GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
15305 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
15308 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
15310 if (no_previous_def (funname))
15312 int line_number = 0;
15313 rtx label_rtx = gen_label_rtx ();
15314 char *label_buf, temp_buf[256];
15315 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
15316 CODE_LABEL_NUMBER (label_rtx));
15317 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
15318 labelname = get_identifier (label_buf);
15319 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
15321 line_number = NOTE_LINE_NUMBER (insn);
15322 add_compiler_branch_island (labelname, funname, line_number);
15325 labelname = get_prev_label (funname);
15327 /* "jbsr foo, L42" is Mach-O for "Link as 'bl foo' if a 'bl'
15328 instruction will reach 'foo', otherwise link as 'bl L42'".
15329 "L42" should be a 'branch island', that will do a far jump to
15330 'foo'. Branch islands are generated in
15331 macho_branch_islands(). */
15332 sprintf (buf, "jbsr %%z%d,%.246s",
15333 dest_operand_number, IDENTIFIER_POINTER (labelname));
15336 sprintf (buf, "bl %%z%d", dest_operand_number);
15340 #endif /* TARGET_MACHO */
15342 /* Generate PIC and indirect symbol stubs. */
15345 machopic_output_stub (FILE *file, const char *symb, const char *stub)
15347 unsigned int length;
15348 char *symbol_name, *lazy_ptr_name;
15349 char *local_label_0;
15350 static int label = 0;
15352 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
15353 symb = (*targetm.strip_name_encoding) (symb);
15356 length = strlen (symb);
15357 symbol_name = alloca (length + 32);
15358 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
15360 lazy_ptr_name = alloca (length + 32);
15361 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
15364 machopic_picsymbol_stub1_section ();
15366 machopic_symbol_stub1_section ();
15367 fprintf (file, "\t.align 2\n");
15369 fprintf (file, "%s:\n", stub);
15370 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15375 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
15376 sprintf (local_label_0, "\"L%011d$spb\"", label);
15378 fprintf (file, "\tmflr r0\n");
15379 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
15380 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
15381 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
15382 lazy_ptr_name, local_label_0);
15383 fprintf (file, "\tmtlr r0\n");
15384 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
15385 lazy_ptr_name, local_label_0);
15386 fprintf (file, "\tmtctr r12\n");
15387 fprintf (file, "\tbctr\n");
15391 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
15392 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
15393 fprintf (file, "\tmtctr r12\n");
15394 fprintf (file, "\tbctr\n");
15397 machopic_lazy_symbol_ptr_section ();
15398 fprintf (file, "%s:\n", lazy_ptr_name);
15399 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
15400 fprintf (file, "\t.long dyld_stub_binding_helper\n");
15403 /* Legitimize PIC addresses. If the address is already
15404 position-independent, we return ORIG. Newly generated
15405 position-independent addresses go into a reg. This is REG if non
15406 zero, otherwise we allocate register(s) as necessary. */
15408 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
15411 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
15416 if (reg == NULL && ! reload_in_progress && ! reload_completed)
15417 reg = gen_reg_rtx (Pmode);
15419 if (GET_CODE (orig) == CONST)
15421 if (GET_CODE (XEXP (orig, 0)) == PLUS
15422 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
15425 if (GET_CODE (XEXP (orig, 0)) == PLUS)
15427 /* Use a different reg for the intermediate value, as
15428 it will be marked UNCHANGING. */
15429 rtx reg_temp = no_new_pseudos ? reg : gen_reg_rtx (Pmode);
15432 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
15435 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
15441 if (GET_CODE (offset) == CONST_INT)
15443 if (SMALL_INT (offset))
15444 return plus_constant (base, INTVAL (offset));
15445 else if (! reload_in_progress && ! reload_completed)
15446 offset = force_reg (Pmode, offset);
15449 rtx mem = force_const_mem (Pmode, orig);
15450 return machopic_legitimize_pic_address (mem, Pmode, reg);
15453 return gen_rtx_PLUS (Pmode, base, offset);
15456 /* Fall back on generic machopic code. */
15457 return machopic_legitimize_pic_address (orig, mode, reg);
15460 /* This is just a placeholder to make linking work without having to
15461 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
15462 ever needed for Darwin (not too likely!) this would have to get a
15463 real definition. */
15470 #endif /* TARGET_MACHO */
15473 static unsigned int
15474 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
15476 return default_section_type_flags_1 (decl, name, reloc,
15477 flag_pic || DEFAULT_ABI == ABI_AIX);
15480 /* Record an element in the table of global constructors. SYMBOL is
15481 a SYMBOL_REF of the function to be called; PRIORITY is a number
15482 between 0 and MAX_INIT_PRIORITY.
15484 This differs from default_named_section_asm_out_constructor in
15485 that we have special handling for -mrelocatable. */
15488 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
15490 const char *section = ".ctors";
15493 if (priority != DEFAULT_INIT_PRIORITY)
15495 sprintf (buf, ".ctors.%.5u",
15496 /* Invert the numbering so the linker puts us in the proper
15497 order; constructors are run from right to left, and the
15498 linker sorts in increasing order. */
15499 MAX_INIT_PRIORITY - priority);
15503 named_section_flags (section, SECTION_WRITE);
15504 assemble_align (POINTER_SIZE);
15506 if (TARGET_RELOCATABLE)
15508 fputs ("\t.long (", asm_out_file);
15509 output_addr_const (asm_out_file, symbol);
15510 fputs (")@fixup\n", asm_out_file);
15513 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15517 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
15519 const char *section = ".dtors";
15522 if (priority != DEFAULT_INIT_PRIORITY)
15524 sprintf (buf, ".dtors.%.5u",
15525 /* Invert the numbering so the linker puts us in the proper
15526 order; constructors are run from right to left, and the
15527 linker sorts in increasing order. */
15528 MAX_INIT_PRIORITY - priority);
15532 named_section_flags (section, SECTION_WRITE);
15533 assemble_align (POINTER_SIZE);
15535 if (TARGET_RELOCATABLE)
15537 fputs ("\t.long (", asm_out_file);
15538 output_addr_const (asm_out_file, symbol);
15539 fputs (")@fixup\n", asm_out_file);
15542 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
15546 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
15550 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
15551 ASM_OUTPUT_LABEL (file, name);
15552 fputs (DOUBLE_INT_ASM_OP, file);
15554 assemble_name (file, name);
15555 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
15556 assemble_name (file, name);
15557 fputs (",24\n\t.type\t.", file);
15558 assemble_name (file, name);
15559 fputs (",@function\n", file);
15560 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
15562 fputs ("\t.globl\t.", file);
15563 assemble_name (file, name);
15566 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15568 ASM_OUTPUT_LABEL (file, name);
15572 if (TARGET_RELOCATABLE
15573 && (get_pool_size () != 0 || current_function_profile)
15578 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
15580 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
15581 fprintf (file, "\t.long ");
15582 assemble_name (file, buf);
15584 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
15585 assemble_name (file, buf);
15589 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
15590 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
15592 if (DEFAULT_ABI == ABI_AIX)
15594 const char *desc_name, *orig_name;
15596 orig_name = (*targetm.strip_name_encoding) (name);
15597 desc_name = orig_name;
15598 while (*desc_name == '.')
15601 if (TREE_PUBLIC (decl))
15602 fprintf (file, "\t.globl %s\n", desc_name);
15604 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
15605 fprintf (file, "%s:\n", desc_name);
15606 fprintf (file, "\t.long %s\n", orig_name);
15607 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
15608 if (DEFAULT_ABI == ABI_AIX)
15609 fputs ("\t.long 0\n", file);
15610 fprintf (file, "\t.previous\n");
15612 ASM_OUTPUT_LABEL (file, name);
15618 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
15620 fputs (GLOBAL_ASM_OP, stream);
15621 RS6000_OUTPUT_BASENAME (stream, name);
15622 putc ('\n', stream);
15626 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
15629 static const char * const suffix[3] = { "PR", "RO", "RW" };
15631 if (flags & SECTION_CODE)
15633 else if (flags & SECTION_WRITE)
15638 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
15639 (flags & SECTION_CODE) ? "." : "",
15640 name, suffix[smclass], flags & SECTION_ENTSIZE);
15644 rs6000_xcoff_select_section (tree decl, int reloc,
15645 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15647 if (decl_readonly_section_1 (decl, reloc, 1))
15649 if (TREE_PUBLIC (decl))
15650 read_only_data_section ();
15652 read_only_private_data_section ();
15656 if (TREE_PUBLIC (decl))
15659 private_data_section ();
15664 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
15668 /* Use select_section for private and uninitialized data. */
15669 if (!TREE_PUBLIC (decl)
15670 || DECL_COMMON (decl)
15671 || DECL_INITIAL (decl) == NULL_TREE
15672 || DECL_INITIAL (decl) == error_mark_node
15673 || (flag_zero_initialized_in_bss
15674 && initializer_zerop (DECL_INITIAL (decl))))
15677 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
15678 name = (*targetm.strip_name_encoding) (name);
15679 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
15682 /* Select section for constant in constant pool.
15684 On RS/6000, all constants are in the private read-only data area.
15685 However, if this is being placed in the TOC it must be output as a
15689 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
15690 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
15692 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
15695 read_only_private_data_section ();
15698 /* Remove any trailing [DS] or the like from the symbol name. */
15700 static const char *
15701 rs6000_xcoff_strip_name_encoding (const char *name)
15706 len = strlen (name);
15707 if (name[len - 1] == ']')
15708 return ggc_alloc_string (name, len - 4);
15713 /* Section attributes. AIX is always PIC. */
15715 static unsigned int
15716 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
15718 unsigned int align;
15719 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
15721 /* Align to at least UNIT size. */
15722 if (flags & SECTION_CODE)
15723 align = MIN_UNITS_PER_WORD;
15725 /* Increase alignment of large objects if not already stricter. */
15726 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
15727 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
15728 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
15730 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
15733 /* Output at beginning of assembler file.
15735 Initialize the section names for the RS/6000 at this point.
15737 Specify filename, including full path, to assembler.
15739 We want to go into the TOC section so at least one .toc will be emitted.
15740 Also, in order to output proper .bs/.es pairs, we need at least one static
15741 [RW] section emitted.
15743 Finally, declare mcount when profiling to make the assembler happy. */
15746 rs6000_xcoff_file_start (void)
15748 rs6000_gen_section_name (&xcoff_bss_section_name,
15749 main_input_filename, ".bss_");
15750 rs6000_gen_section_name (&xcoff_private_data_section_name,
15751 main_input_filename, ".rw_");
15752 rs6000_gen_section_name (&xcoff_read_only_section_name,
15753 main_input_filename, ".ro_");
15755 fputs ("\t.file\t", asm_out_file);
15756 output_quoted_string (asm_out_file, main_input_filename);
15757 fputc ('\n', asm_out_file);
15759 if (write_symbols != NO_DEBUG)
15760 private_data_section ();
15763 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
15764 rs6000_file_start ();
15767 /* Output at end of assembler file.
15768 On the RS/6000, referencing data should automatically pull in text. */
15771 rs6000_xcoff_file_end (void)
15774 fputs ("_section_.text:\n", asm_out_file);
15776 fputs (TARGET_32BIT
15777 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
15780 #endif /* TARGET_XCOFF */
15783 /* Cross-module name binding. Darwin does not support overriding
15784 functions at dynamic-link time. */
15787 rs6000_binds_local_p (tree decl)
15789 return default_binds_local_p_1 (decl, 0);
15793 /* Compute a (partial) cost for rtx X. Return true if the complete
15794 cost has been computed, and false if subexpressions should be
15795 scanned. In either case, *TOTAL contains the cost result. */
15798 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
15803 /* On the RS/6000, if it is valid in the insn, it is free.
15804 So this always returns 0. */
15815 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15816 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
15817 + 0x8000) >= 0x10000)
15818 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15819 ? COSTS_N_INSNS (2)
15820 : COSTS_N_INSNS (1));
15826 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
15827 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
15828 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
15829 ? COSTS_N_INSNS (2)
15830 : COSTS_N_INSNS (1));
15836 *total = COSTS_N_INSNS (2);
15839 switch (rs6000_cpu)
15841 case PROCESSOR_RIOS1:
15842 case PROCESSOR_PPC405:
15843 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15844 ? COSTS_N_INSNS (5)
15845 : (INTVAL (XEXP (x, 1)) >= -256
15846 && INTVAL (XEXP (x, 1)) <= 255)
15847 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15850 case PROCESSOR_PPC440:
15851 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15852 ? COSTS_N_INSNS (3)
15853 : COSTS_N_INSNS (2));
15856 case PROCESSOR_RS64A:
15857 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15858 ? GET_MODE (XEXP (x, 1)) != DImode
15859 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
15860 : (INTVAL (XEXP (x, 1)) >= -256
15861 && INTVAL (XEXP (x, 1)) <= 255)
15862 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
15865 case PROCESSOR_RIOS2:
15866 case PROCESSOR_MPCCORE:
15867 case PROCESSOR_PPC604e:
15868 *total = COSTS_N_INSNS (2);
15871 case PROCESSOR_PPC601:
15872 *total = COSTS_N_INSNS (5);
15875 case PROCESSOR_PPC603:
15876 case PROCESSOR_PPC7400:
15877 case PROCESSOR_PPC750:
15878 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15879 ? COSTS_N_INSNS (5)
15880 : (INTVAL (XEXP (x, 1)) >= -256
15881 && INTVAL (XEXP (x, 1)) <= 255)
15882 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
15885 case PROCESSOR_PPC7450:
15886 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15887 ? COSTS_N_INSNS (4)
15888 : COSTS_N_INSNS (3));
15891 case PROCESSOR_PPC403:
15892 case PROCESSOR_PPC604:
15893 case PROCESSOR_PPC8540:
15894 *total = COSTS_N_INSNS (4);
15897 case PROCESSOR_PPC620:
15898 case PROCESSOR_PPC630:
15899 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15900 ? GET_MODE (XEXP (x, 1)) != DImode
15901 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
15902 : (INTVAL (XEXP (x, 1)) >= -256
15903 && INTVAL (XEXP (x, 1)) <= 255)
15904 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
15907 case PROCESSOR_POWER4:
15908 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
15909 ? GET_MODE (XEXP (x, 1)) != DImode
15910 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
15911 : COSTS_N_INSNS (2));
15920 if (GET_CODE (XEXP (x, 1)) == CONST_INT
15921 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
15923 *total = COSTS_N_INSNS (2);
15930 switch (rs6000_cpu)
15932 case PROCESSOR_RIOS1:
15933 *total = COSTS_N_INSNS (19);
15936 case PROCESSOR_RIOS2:
15937 *total = COSTS_N_INSNS (13);
15940 case PROCESSOR_RS64A:
15941 *total = (GET_MODE (XEXP (x, 1)) != DImode
15942 ? COSTS_N_INSNS (65)
15943 : COSTS_N_INSNS (67));
15946 case PROCESSOR_MPCCORE:
15947 *total = COSTS_N_INSNS (6);
15950 case PROCESSOR_PPC403:
15951 *total = COSTS_N_INSNS (33);
15954 case PROCESSOR_PPC405:
15955 *total = COSTS_N_INSNS (35);
15958 case PROCESSOR_PPC440:
15959 *total = COSTS_N_INSNS (34);
15962 case PROCESSOR_PPC601:
15963 *total = COSTS_N_INSNS (36);
15966 case PROCESSOR_PPC603:
15967 *total = COSTS_N_INSNS (37);
15970 case PROCESSOR_PPC604:
15971 case PROCESSOR_PPC604e:
15972 *total = COSTS_N_INSNS (20);
15975 case PROCESSOR_PPC620:
15976 case PROCESSOR_PPC630:
15977 *total = (GET_MODE (XEXP (x, 1)) != DImode
15978 ? COSTS_N_INSNS (21)
15979 : COSTS_N_INSNS (37));
15982 case PROCESSOR_PPC750:
15983 case PROCESSOR_PPC8540:
15984 case PROCESSOR_PPC7400:
15985 *total = COSTS_N_INSNS (19);
15988 case PROCESSOR_PPC7450:
15989 *total = COSTS_N_INSNS (23);
15992 case PROCESSOR_POWER4:
15993 *total = (GET_MODE (XEXP (x, 1)) != DImode
15994 ? COSTS_N_INSNS (18)
15995 : COSTS_N_INSNS (34));
16003 *total = COSTS_N_INSNS (4);
16007 /* MEM should be slightly more expensive than (plus (reg) (const)). */
16016 /* A C expression returning the cost of moving data from a register of class
16017 CLASS1 to one of CLASS2. */
16020 rs6000_register_move_cost (enum machine_mode mode,
16021 enum reg_class from, enum reg_class to)
16023 /* Moves from/to GENERAL_REGS. */
16024 if (reg_classes_intersect_p (to, GENERAL_REGS)
16025 || reg_classes_intersect_p (from, GENERAL_REGS))
16027 if (! reg_classes_intersect_p (to, GENERAL_REGS))
16030 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
16031 return (rs6000_memory_move_cost (mode, from, 0)
16032 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
16034 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift.... */
16035 else if (from == CR_REGS)
16039 /* A move will cost one instruction per GPR moved. */
16040 return 2 * HARD_REGNO_NREGS (0, mode);
16043 /* Moving between two similar registers is just one instruction. */
16044 else if (reg_classes_intersect_p (to, from))
16045 return mode == TFmode ? 4 : 2;
16047 /* Everything else has to go through GENERAL_REGS. */
16049 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
16050 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
16053 /* A C expressions returning the cost of moving data of MODE from a register to
16057 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
16058 int in ATTRIBUTE_UNUSED)
16060 if (reg_classes_intersect_p (class, GENERAL_REGS))
16061 return 4 * HARD_REGNO_NREGS (0, mode);
16062 else if (reg_classes_intersect_p (class, FLOAT_REGS))
16063 return 4 * HARD_REGNO_NREGS (32, mode);
16064 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
16065 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
16067 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
16070 /* Return an RTX representing where to find the function value of a
16071 function returning MODE. */
16073 rs6000_complex_function_value (enum machine_mode mode)
16075 unsigned int regno;
16077 enum machine_mode inner = GET_MODE_INNER (mode);
16078 unsigned int inner_bytes = GET_MODE_SIZE (inner);
16080 if (FLOAT_MODE_P (mode))
16081 regno = FP_ARG_RETURN;
16084 regno = GP_ARG_RETURN;
16086 /* 32-bit is OK since it'll go in r3/r4. */
16087 if (TARGET_32BIT && inner_bytes >= 4)
16088 return gen_rtx_REG (mode, regno);
16091 if (inner_bytes >= 8)
16092 return gen_rtx_REG (mode, regno);
16094 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
16096 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
16097 GEN_INT (inner_bytes));
16098 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
16101 /* Define how to find the value returned by a function.
16102 VALTYPE is the data type of the value (as a tree).
16103 If the precise function being called is known, FUNC is its FUNCTION_DECL;
16104 otherwise, FUNC is 0.
16106 On the SPE, both FPs and vectors are returned in r3.
16108 On RS/6000 an integer value is in r3 and a floating-point value is in
16109 fp1, unless -msoft-float. */
16112 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
16114 enum machine_mode mode;
16115 unsigned int regno;
16117 if (TARGET_32BIT && TARGET_POWERPC64 && TYPE_MODE (valtype) == DImode)
16119 /* Long long return value need be split in -mpowerpc64, 32bit ABI. */
16120 return gen_rtx_PARALLEL (DImode,
16122 gen_rtx_EXPR_LIST (VOIDmode,
16123 gen_rtx_REG (SImode, GP_ARG_RETURN),
16125 gen_rtx_EXPR_LIST (VOIDmode,
16126 gen_rtx_REG (SImode,
16127 GP_ARG_RETURN + 1),
16131 if ((INTEGRAL_TYPE_P (valtype)
16132 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
16133 || POINTER_TYPE_P (valtype))
16134 mode = TARGET_32BIT ? SImode : DImode;
16136 mode = TYPE_MODE (valtype);
16138 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
16139 regno = FP_ARG_RETURN;
16140 else if (TREE_CODE (valtype) == COMPLEX_TYPE
16141 && TARGET_HARD_FLOAT
16142 && targetm.calls.split_complex_arg)
16143 return rs6000_complex_function_value (mode);
16144 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
16145 regno = ALTIVEC_ARG_RETURN;
16147 regno = GP_ARG_RETURN;
16149 return gen_rtx_REG (mode, regno);
16152 /* Define how to find the value returned by a library function
16153 assuming the value has mode MODE. */
16155 rs6000_libcall_value (enum machine_mode mode)
16157 unsigned int regno;
16159 if (GET_MODE_CLASS (mode) == MODE_FLOAT
16160 && TARGET_HARD_FLOAT && TARGET_FPRS)
16161 regno = FP_ARG_RETURN;
16162 else if (ALTIVEC_VECTOR_MODE (mode))
16163 regno = ALTIVEC_ARG_RETURN;
16164 else if (COMPLEX_MODE_P (mode) && targetm.calls.split_complex_arg)
16165 return rs6000_complex_function_value (mode);
16167 regno = GP_ARG_RETURN;
16169 return gen_rtx_REG (mode, regno);
16172 /* Define the offset between two registers, FROM to be eliminated and its
16173 replacement TO, at the start of a routine. */
16175 rs6000_initial_elimination_offset (int from, int to)
16177 rs6000_stack_t *info = rs6000_stack_info ();
16178 HOST_WIDE_INT offset;
16180 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16181 offset = info->push_p ? 0 : -info->total_size;
16182 else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
16183 offset = info->total_size;
16184 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
16185 offset = info->push_p ? info->total_size : 0;
16186 else if (from == RS6000_PIC_OFFSET_TABLE_REGNUM)
16194 /* Return true if TYPE is of type __ev64_opaque__. */
16197 is_ev64_opaque_type (tree type)
16200 && (type == opaque_V2SI_type_node
16201 || type == opaque_V2SF_type_node
16202 || type == opaque_p_V2SI_type_node));
16206 rs6000_dwarf_register_span (rtx reg)
16210 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
16213 regno = REGNO (reg);
16215 /* The duality of the SPE register size wreaks all kinds of havoc.
16216 This is a way of distinguishing r0 in 32-bits from r0 in
16219 gen_rtx_PARALLEL (VOIDmode,
16222 gen_rtx_REG (SImode, regno + 1200),
16223 gen_rtx_REG (SImode, regno))
16225 gen_rtx_REG (SImode, regno),
16226 gen_rtx_REG (SImode, regno + 1200)));
16229 /* Map internal gcc register numbers to DWARF2 register numbers. */
16232 rs6000_dbx_register_number (unsigned int regno)
16234 if (regno <= 63 || write_symbols != DWARF2_DEBUG)
16236 if (regno == MQ_REGNO)
16238 if (regno == LINK_REGISTER_REGNUM)
16240 if (regno == COUNT_REGISTER_REGNUM)
16242 if (CR_REGNO_P (regno))
16243 return regno - CR0_REGNO + 86;
16244 if (regno == XER_REGNO)
16246 if (ALTIVEC_REGNO_P (regno))
16247 return regno - FIRST_ALTIVEC_REGNO + 1124;
16248 if (regno == VRSAVE_REGNO)
16250 if (regno == VSCR_REGNO)
16252 if (regno == SPE_ACC_REGNO)
16254 if (regno == SPEFSCR_REGNO)
16256 /* SPE high reg number. We get these values of regno from
16257 rs6000_dwarf_register_span. */
16258 if (regno >= 1200 && regno < 1232)
16264 #include "gt-rs6000.h"