1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
54 #include "sched-int.h"
56 #include "xcoffout.h" /* get declarations of xcoff_*_section_name */
59 #ifndef TARGET_NO_PROTOTYPE
60 #define TARGET_NO_PROTOTYPE 0
63 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
64 && easy_vector_same (x, y))
66 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
68 && easy_vector_same (x, y))
70 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B))
75 enum processor_type rs6000_cpu;
76 struct rs6000_cpu_select rs6000_select[3] =
78 /* switch name, tune arch */
79 { (const char *)0, "--with-cpu=", 1, 1 },
80 { (const char *)0, "-mcpu=", 1, 1 },
81 { (const char *)0, "-mtune=", 1, 0 },
84 /* Support adjust_priority scheduler hook
85 and -mprioritize-restricted-insns= option. */
86 const char *rs6000_sched_restricted_insns_priority_str;
87 int rs6000_sched_restricted_insns_priority;
89 /* Size of long double */
90 const char *rs6000_long_double_size_string;
91 int rs6000_long_double_type_size;
93 /* Whether -mabi=altivec has appeared */
94 int rs6000_altivec_abi;
96 /* Whether VRSAVE instructions should be generated. */
97 int rs6000_altivec_vrsave;
99 /* String from -mvrsave= option. */
100 const char *rs6000_altivec_vrsave_string;
102 /* Nonzero if we want SPE ABI extensions. */
105 /* Whether isel instructions should be generated. */
108 /* Whether SPE simd instructions should be generated. */
111 /* Nonzero if floating point operations are done in the GPRs. */
112 int rs6000_float_gprs = 0;
114 /* String from -mfloat-gprs=. */
115 const char *rs6000_float_gprs_string;
117 /* String from -misel=. */
118 const char *rs6000_isel_string;
120 /* String from -mspe=. */
121 const char *rs6000_spe_string;
123 /* Set to nonzero once AIX common-mode calls have been defined. */
124 static GTY(()) int common_mode_defined;
126 /* Save information from a "cmpxx" operation until the branch or scc is
128 rtx rs6000_compare_op0, rs6000_compare_op1;
129 int rs6000_compare_fp_p;
131 /* Label number of label created for -mrelocatable, to call to so we can
132 get the address of the GOT section */
133 int rs6000_pic_labelno;
136 /* Which abi to adhere to */
137 const char *rs6000_abi_name;
139 /* Semantics of the small data area */
140 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
142 /* Which small data model to use */
143 const char *rs6000_sdata_name = (char *)0;
145 /* Counter for labels which are to be placed in .fixup. */
146 int fixuplabelno = 0;
149 /* Bit size of immediate TLS offsets and string from which it is decoded. */
150 int rs6000_tls_size = 32;
151 const char *rs6000_tls_size_string;
153 /* ABI enumeration available for subtarget to use. */
154 enum rs6000_abi rs6000_current_abi;
156 /* ABI string from -mabi= option. */
157 const char *rs6000_abi_string;
160 const char *rs6000_debug_name;
161 int rs6000_debug_stack; /* debug stack applications */
162 int rs6000_debug_arg; /* debug argument handling */
165 static GTY(()) tree opaque_V2SI_type_node;
166 static GTY(()) tree opaque_V2SF_type_node;
167 static GTY(()) tree opaque_p_V2SI_type_node;
169 const char *rs6000_traceback_name;
171 traceback_default = 0,
177 /* Flag to say the TOC is initialized */
179 char toc_label_name[10];
181 /* Alias set for saves and restores from the rs6000 stack. */
182 static int rs6000_sr_alias_set;
184 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
185 The only place that looks at this is rs6000_set_default_type_attributes;
186 everywhere else should rely on the presence or absence of a longcall
187 attribute on the function declaration. */
188 int rs6000_default_long_calls;
189 const char *rs6000_longcall_switch;
191 /* Control alignment for fields within structures. */
192 /* String from -malign-XXXXX. */
193 const char *rs6000_alignment_string;
194 int rs6000_alignment_flags;
196 struct builtin_description
198 /* mask is not const because we're going to alter it below. This
199 nonsense will go away when we rewrite the -march infrastructure
200 to give us more target flag bits. */
202 const enum insn_code icode;
203 const char *const name;
204 const enum rs6000_builtins code;
207 static bool rs6000_function_ok_for_sibcall (tree, tree);
208 static int num_insns_constant_wide (HOST_WIDE_INT);
209 static void validate_condition_mode (enum rtx_code, enum machine_mode);
210 static rtx rs6000_generate_compare (enum rtx_code);
211 static void rs6000_maybe_dead (rtx);
212 static void rs6000_emit_stack_tie (void);
213 static void rs6000_frame_related (rtx, rtx, HOST_WIDE_INT, rtx, rtx);
214 static rtx spe_synthesize_frame_save (rtx);
215 static bool spe_func_has_64bit_regs_p (void);
216 static void emit_frame_save (rtx, rtx, enum machine_mode,
217 unsigned int, int, int);
218 static rtx gen_frame_mem_offset (enum machine_mode, rtx, int);
219 static void rs6000_emit_allocate_stack (HOST_WIDE_INT, int);
220 static unsigned rs6000_hash_constant (rtx);
221 static unsigned toc_hash_function (const void *);
222 static int toc_hash_eq (const void *, const void *);
223 static int constant_pool_expr_1 (rtx, int *, int *);
224 static bool constant_pool_expr_p (rtx);
225 static bool toc_relative_expr_p (rtx);
226 static bool legitimate_small_data_p (enum machine_mode, rtx);
227 static bool legitimate_offset_address_p (enum machine_mode, rtx, int);
228 static bool legitimate_indexed_address_p (rtx, int);
229 static bool legitimate_indirect_address_p (rtx, int);
230 static bool legitimate_lo_sum_address_p (enum machine_mode, rtx, int);
231 static struct machine_function * rs6000_init_machine_status (void);
232 static bool rs6000_assemble_integer (rtx, unsigned int, int);
233 #ifdef HAVE_GAS_HIDDEN
234 static void rs6000_assemble_visibility (tree, int);
236 static int rs6000_ra_ever_killed (void);
237 static tree rs6000_handle_longcall_attribute (tree *, tree, tree, int, bool *);
238 extern const struct attribute_spec rs6000_attribute_table[];
239 static void rs6000_set_default_type_attributes (tree);
240 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
241 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
242 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
243 HOST_WIDE_INT, tree);
244 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
245 static void rs6000_file_start (void);
247 static unsigned int rs6000_elf_section_type_flags (tree, const char *, int);
248 static void rs6000_elf_asm_out_constructor (rtx, int);
249 static void rs6000_elf_asm_out_destructor (rtx, int);
250 static void rs6000_elf_select_section (tree, int, unsigned HOST_WIDE_INT);
251 static void rs6000_elf_unique_section (tree, int);
252 static void rs6000_elf_select_rtx_section (enum machine_mode, rtx,
253 unsigned HOST_WIDE_INT);
254 static void rs6000_elf_encode_section_info (tree, rtx, int)
256 static bool rs6000_elf_in_small_data_p (tree);
259 static void rs6000_xcoff_asm_globalize_label (FILE *, const char *);
260 static void rs6000_xcoff_asm_named_section (const char *, unsigned int);
261 static void rs6000_xcoff_select_section (tree, int, unsigned HOST_WIDE_INT);
262 static void rs6000_xcoff_unique_section (tree, int);
263 static void rs6000_xcoff_select_rtx_section (enum machine_mode, rtx,
264 unsigned HOST_WIDE_INT);
265 static const char * rs6000_xcoff_strip_name_encoding (const char *);
266 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
267 static void rs6000_xcoff_file_start (void);
268 static void rs6000_xcoff_file_end (void);
271 static bool rs6000_binds_local_p (tree);
273 static int rs6000_use_dfa_pipeline_interface (void);
274 static int rs6000_variable_issue (FILE *, int, rtx, int);
275 static bool rs6000_rtx_costs (rtx, int, int, int *);
276 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
277 static int is_dispatch_slot_restricted (rtx);
278 static int rs6000_adjust_priority (rtx, int);
279 static int rs6000_issue_rate (void);
280 static int rs6000_use_sched_lookahead (void);
282 static void rs6000_init_builtins (void);
283 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
284 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
285 static rtx rs6000_expand_ternop_builtin (enum insn_code, tree, rtx);
286 static rtx rs6000_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
287 static void altivec_init_builtins (void);
288 static void rs6000_common_init_builtins (void);
289 static void rs6000_init_libfuncs (void);
291 static void enable_mask_for_builtins (struct builtin_description *,
292 int, enum rs6000_builtins,
293 enum rs6000_builtins);
294 static void spe_init_builtins (void);
295 static rtx spe_expand_builtin (tree, rtx, bool *);
296 static rtx spe_expand_predicate_builtin (enum insn_code, tree, rtx);
297 static rtx spe_expand_evsel_builtin (enum insn_code, tree, rtx);
298 static int rs6000_emit_int_cmove (rtx, rtx, rtx, rtx);
300 static rtx altivec_expand_builtin (tree, rtx, bool *);
301 static rtx altivec_expand_ld_builtin (tree, rtx, bool *);
302 static rtx altivec_expand_st_builtin (tree, rtx, bool *);
303 static rtx altivec_expand_dst_builtin (tree, rtx, bool *);
304 static rtx altivec_expand_abs_builtin (enum insn_code, tree, rtx);
305 static rtx altivec_expand_predicate_builtin (enum insn_code,
306 const char *, tree, rtx);
307 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
308 static void rs6000_parse_abi_options (void);
309 static void rs6000_parse_alignment_option (void);
310 static void rs6000_parse_tls_size_option (void);
311 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
312 static int first_altivec_reg_to_save (void);
313 static unsigned int compute_vrsave_mask (void);
314 static void is_altivec_return_reg (rtx, void *);
315 static rtx generate_set_vrsave (rtx, rs6000_stack_t *, int);
316 int easy_vector_constant (rtx, enum machine_mode);
317 static int easy_vector_same (rtx, enum machine_mode);
318 static bool is_ev64_opaque_type (tree);
319 static rtx rs6000_dwarf_register_span (rtx);
320 static rtx rs6000_legitimize_tls_address (rtx, enum tls_model);
321 static rtx rs6000_tls_get_addr (void);
322 static rtx rs6000_got_sym (void);
323 static inline int rs6000_tls_symbol_ref_1 (rtx *, void *);
324 static const char *rs6000_get_some_local_dynamic_name (void);
325 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
326 static rtx rs6000_complex_function_value (enum machine_mode);
327 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *,
328 enum machine_mode, tree);
330 /* Hash table stuff for keeping track of TOC entries. */
332 struct toc_hash_struct GTY(())
334 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
335 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
337 enum machine_mode key_mode;
341 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
343 /* Default register names. */
344 char rs6000_reg_names[][8] =
346 "0", "1", "2", "3", "4", "5", "6", "7",
347 "8", "9", "10", "11", "12", "13", "14", "15",
348 "16", "17", "18", "19", "20", "21", "22", "23",
349 "24", "25", "26", "27", "28", "29", "30", "31",
350 "0", "1", "2", "3", "4", "5", "6", "7",
351 "8", "9", "10", "11", "12", "13", "14", "15",
352 "16", "17", "18", "19", "20", "21", "22", "23",
353 "24", "25", "26", "27", "28", "29", "30", "31",
354 "mq", "lr", "ctr","ap",
355 "0", "1", "2", "3", "4", "5", "6", "7",
357 /* AltiVec registers. */
358 "0", "1", "2", "3", "4", "5", "6", "7",
359 "8", "9", "10", "11", "12", "13", "14", "15",
360 "16", "17", "18", "19", "20", "21", "22", "23",
361 "24", "25", "26", "27", "28", "29", "30", "31",
367 #ifdef TARGET_REGNAMES
368 static const char alt_reg_names[][8] =
370 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
371 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
372 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
373 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
374 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
375 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
376 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
377 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
378 "mq", "lr", "ctr", "ap",
379 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
381 /* AltiVec registers. */
382 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
383 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
384 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
385 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
392 #ifndef MASK_STRICT_ALIGN
393 #define MASK_STRICT_ALIGN 0
395 #ifndef TARGET_PROFILE_KERNEL
396 #define TARGET_PROFILE_KERNEL 0
399 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
400 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
402 /* Return 1 for a symbol ref for a thread-local storage symbol. */
403 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
404 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
406 /* Initialize the GCC target structure. */
407 #undef TARGET_ATTRIBUTE_TABLE
408 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
409 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
410 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
412 #undef TARGET_ASM_ALIGNED_DI_OP
413 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
415 /* Default unaligned ops are only provided for ELF. Find the ops needed
416 for non-ELF systems. */
417 #ifndef OBJECT_FORMAT_ELF
419 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
421 #undef TARGET_ASM_UNALIGNED_HI_OP
422 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
423 #undef TARGET_ASM_UNALIGNED_SI_OP
424 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
425 #undef TARGET_ASM_UNALIGNED_DI_OP
426 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
429 #undef TARGET_ASM_UNALIGNED_HI_OP
430 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
431 #undef TARGET_ASM_UNALIGNED_SI_OP
432 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
436 /* This hook deals with fixups for relocatable code and DI-mode objects
438 #undef TARGET_ASM_INTEGER
439 #define TARGET_ASM_INTEGER rs6000_assemble_integer
441 #ifdef HAVE_GAS_HIDDEN
442 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
443 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
446 #undef TARGET_HAVE_TLS
447 #define TARGET_HAVE_TLS HAVE_AS_TLS
449 #undef TARGET_CANNOT_FORCE_CONST_MEM
450 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
452 #undef TARGET_ASM_FUNCTION_PROLOGUE
453 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
454 #undef TARGET_ASM_FUNCTION_EPILOGUE
455 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
457 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
458 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
459 #undef TARGET_SCHED_VARIABLE_ISSUE
460 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
462 #undef TARGET_SCHED_ISSUE_RATE
463 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
464 #undef TARGET_SCHED_ADJUST_COST
465 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
466 #undef TARGET_SCHED_ADJUST_PRIORITY
467 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
469 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
470 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
472 #undef TARGET_INIT_BUILTINS
473 #define TARGET_INIT_BUILTINS rs6000_init_builtins
475 #undef TARGET_EXPAND_BUILTIN
476 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
478 #undef TARGET_INIT_LIBFUNCS
479 #define TARGET_INIT_LIBFUNCS rs6000_init_libfuncs
482 #undef TARGET_BINDS_LOCAL_P
483 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
486 #undef TARGET_ASM_OUTPUT_MI_THUNK
487 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
489 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
490 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
492 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
493 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
495 #undef TARGET_RTX_COSTS
496 #define TARGET_RTX_COSTS rs6000_rtx_costs
497 #undef TARGET_ADDRESS_COST
498 #define TARGET_ADDRESS_COST hook_int_rtx_0
500 #undef TARGET_VECTOR_OPAQUE_P
501 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
503 #undef TARGET_DWARF_REGISTER_SPAN
504 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
506 struct gcc_target targetm = TARGET_INITIALIZER;
508 /* Override command line options. Mostly we process the processor
509 type and sometimes adjust other TARGET_ options. */
512 rs6000_override_options (const char *default_cpu)
515 struct rs6000_cpu_select *ptr;
517 /* Simplify the entries below by making a mask for any POWER
518 variant and any PowerPC variant. */
520 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
521 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
522 | MASK_PPC_GFXOPT | MASK_POWERPC64)
523 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
527 const char *const name; /* Canonical processor name. */
528 const enum processor_type processor; /* Processor type enum value. */
529 const int target_enable; /* Target flags to enable. */
530 const int target_disable; /* Target flags to disable. */
531 } const processor_target_table[]
532 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
533 POWER_MASKS | POWERPC_MASKS},
534 {"power", PROCESSOR_POWER,
535 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
536 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
537 {"power2", PROCESSOR_POWER,
538 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
539 POWERPC_MASKS | MASK_NEW_MNEMONICS},
540 {"power3", PROCESSOR_PPC630,
541 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
543 {"power4", PROCESSOR_POWER4,
544 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
546 {"powerpc", PROCESSOR_POWERPC,
547 MASK_POWERPC | MASK_NEW_MNEMONICS,
548 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
549 {"powerpc64", PROCESSOR_POWERPC64,
550 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
551 POWER_MASKS | POWERPC_OPT_MASKS},
552 {"rios", PROCESSOR_RIOS1,
553 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
554 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
555 {"rios1", PROCESSOR_RIOS1,
556 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
557 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
558 {"rsc", PROCESSOR_PPC601,
559 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
560 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
561 {"rsc1", PROCESSOR_PPC601,
562 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
563 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
564 {"rios2", PROCESSOR_RIOS2,
565 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
566 POWERPC_MASKS | MASK_NEW_MNEMONICS},
567 {"rs64a", PROCESSOR_RS64A,
568 MASK_POWERPC | MASK_NEW_MNEMONICS,
569 POWER_MASKS | POWERPC_OPT_MASKS},
570 {"401", PROCESSOR_PPC403,
571 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
572 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
573 {"403", PROCESSOR_PPC403,
574 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
575 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
576 {"405", PROCESSOR_PPC405,
577 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
578 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
579 {"405fp", PROCESSOR_PPC405,
580 MASK_POWERPC | MASK_NEW_MNEMONICS,
581 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
582 {"440", PROCESSOR_PPC440,
583 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
584 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
585 {"440fp", PROCESSOR_PPC440,
586 MASK_POWERPC | MASK_NEW_MNEMONICS,
587 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
588 {"505", PROCESSOR_MPCCORE,
589 MASK_POWERPC | MASK_NEW_MNEMONICS,
590 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
591 {"601", PROCESSOR_PPC601,
592 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
593 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
594 {"602", PROCESSOR_PPC603,
595 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
596 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
597 {"603", PROCESSOR_PPC603,
598 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
599 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
600 {"603e", PROCESSOR_PPC603,
601 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
602 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
603 {"ec603e", PROCESSOR_PPC603,
604 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
605 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
606 {"604", PROCESSOR_PPC604,
607 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
608 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
609 {"604e", PROCESSOR_PPC604e,
610 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
611 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
612 {"620", PROCESSOR_PPC620,
613 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
615 {"630", PROCESSOR_PPC630,
616 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
618 {"740", PROCESSOR_PPC750,
619 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
620 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
621 {"750", PROCESSOR_PPC750,
622 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
623 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
624 {"7400", PROCESSOR_PPC7400,
625 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
626 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
627 {"7450", PROCESSOR_PPC7450,
628 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
629 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
630 {"8540", PROCESSOR_PPC8540,
631 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
632 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
633 {"801", PROCESSOR_MPCCORE,
634 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
635 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
636 {"821", PROCESSOR_MPCCORE,
637 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
638 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
639 {"823", PROCESSOR_MPCCORE,
640 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
641 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
642 {"860", PROCESSOR_MPCCORE,
643 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
644 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
645 {"970", PROCESSOR_POWER4,
646 MASK_POWERPC | POWERPC_OPT_MASKS | MASK_NEW_MNEMONICS,
649 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
651 /* Save current -mmultiple/-mno-multiple status. */
652 int multiple = TARGET_MULTIPLE;
653 /* Save current -mstring/-mno-string status. */
654 int string = TARGET_STRING;
656 /* Identify the processor type. */
657 rs6000_select[0].string = default_cpu;
658 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
660 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
662 ptr = &rs6000_select[i];
663 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
665 for (j = 0; j < ptt_size; j++)
666 if (! strcmp (ptr->string, processor_target_table[j].name))
669 rs6000_cpu = processor_target_table[j].processor;
673 target_flags |= processor_target_table[j].target_enable;
674 target_flags &= ~processor_target_table[j].target_disable;
680 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
687 /* If we are optimizing big endian systems for space, use the load/store
688 multiple and string instructions. */
689 if (BYTES_BIG_ENDIAN && optimize_size)
690 target_flags |= MASK_MULTIPLE | MASK_STRING;
692 /* If -mmultiple or -mno-multiple was explicitly used, don't
693 override with the processor default */
694 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
695 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
697 /* If -mstring or -mno-string was explicitly used, don't override
698 with the processor default. */
699 if ((target_flags_explicit & MASK_STRING) != 0)
700 target_flags = (target_flags & ~MASK_STRING) | string;
702 /* Don't allow -mmultiple or -mstring on little endian systems
703 unless the cpu is a 750, because the hardware doesn't support the
704 instructions used in little endian mode, and causes an alignment
705 trap. The 750 does not cause an alignment trap (except when the
706 target is unaligned). */
708 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
712 target_flags &= ~MASK_MULTIPLE;
713 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
714 warning ("-mmultiple is not supported on little endian systems");
719 target_flags &= ~MASK_STRING;
720 if ((target_flags_explicit & MASK_STRING) != 0)
721 warning ("-mstring is not supported on little endian systems");
725 /* Set debug flags */
726 if (rs6000_debug_name)
728 if (! strcmp (rs6000_debug_name, "all"))
729 rs6000_debug_stack = rs6000_debug_arg = 1;
730 else if (! strcmp (rs6000_debug_name, "stack"))
731 rs6000_debug_stack = 1;
732 else if (! strcmp (rs6000_debug_name, "arg"))
733 rs6000_debug_arg = 1;
735 error ("unknown -mdebug-%s switch", rs6000_debug_name);
738 if (rs6000_traceback_name)
740 if (! strncmp (rs6000_traceback_name, "full", 4))
741 rs6000_traceback = traceback_full;
742 else if (! strncmp (rs6000_traceback_name, "part", 4))
743 rs6000_traceback = traceback_part;
744 else if (! strncmp (rs6000_traceback_name, "no", 2))
745 rs6000_traceback = traceback_none;
747 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
748 rs6000_traceback_name);
751 /* Set size of long double */
752 rs6000_long_double_type_size = 64;
753 if (rs6000_long_double_size_string)
756 int size = strtol (rs6000_long_double_size_string, &tail, 10);
757 if (*tail != '\0' || (size != 64 && size != 128))
758 error ("Unknown switch -mlong-double-%s",
759 rs6000_long_double_size_string);
761 rs6000_long_double_type_size = size;
764 /* Handle -mabi= options. */
765 rs6000_parse_abi_options ();
767 /* Handle -malign-XXXXX option. */
768 rs6000_parse_alignment_option ();
770 /* Handle generic -mFOO=YES/NO options. */
771 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
772 &rs6000_altivec_vrsave);
773 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
775 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
776 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
779 /* Handle -mtls-size option. */
780 rs6000_parse_tls_size_option ();
782 #ifdef SUBTARGET_OVERRIDE_OPTIONS
783 SUBTARGET_OVERRIDE_OPTIONS;
785 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
786 SUBSUBTARGET_OVERRIDE_OPTIONS;
791 /* The e500 does not have string instructions, and we set
792 MASK_STRING above when optimizing for size. */
793 if ((target_flags & MASK_STRING) != 0)
794 target_flags = target_flags & ~MASK_STRING;
796 /* No SPE means 64-bit long doubles, even if an E500. */
797 if (rs6000_spe_string != 0
798 && !strcmp (rs6000_spe_string, "no"))
799 rs6000_long_double_type_size = 64;
801 else if (rs6000_select[1].string != NULL)
803 /* For the powerpc-eabispe configuration, we set all these by
804 default, so let's unset them if we manually set another
805 CPU that is not the E500. */
806 if (rs6000_abi_string == 0)
808 if (rs6000_spe_string == 0)
810 if (rs6000_float_gprs_string == 0)
811 rs6000_float_gprs = 0;
812 if (rs6000_isel_string == 0)
814 if (rs6000_long_double_size_string == 0)
815 rs6000_long_double_type_size = 64;
818 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
819 using TARGET_OPTIONS to handle a toggle switch, but we're out of
820 bits in target_flags so TARGET_SWITCHES cannot be used.
821 Assumption here is that rs6000_longcall_switch points into the
822 text of the complete option, rather than being a copy, so we can
823 scan back for the presence or absence of the no- modifier. */
824 if (rs6000_longcall_switch)
826 const char *base = rs6000_longcall_switch;
827 while (base[-1] != 'm') base--;
829 if (*rs6000_longcall_switch != '\0')
830 error ("invalid option `%s'", base);
831 rs6000_default_long_calls = (base[0] != 'n');
834 /* Handle -mprioritize-restrcted-insns option. */
835 rs6000_sched_restricted_insns_priority = DEFAULT_RESTRICTED_INSNS_PRIORITY;
836 if (rs6000_sched_restricted_insns_priority_str)
837 rs6000_sched_restricted_insns_priority =
838 atoi (rs6000_sched_restricted_insns_priority_str);
840 #ifdef TARGET_REGNAMES
841 /* If the user desires alternate register names, copy in the
842 alternate names now. */
844 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
847 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
848 If -maix-struct-return or -msvr4-struct-return was explicitly
849 used, don't override with the ABI default. */
850 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
852 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
853 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
855 target_flags |= MASK_AIX_STRUCT_RET;
858 if (TARGET_LONG_DOUBLE_128
859 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
860 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
862 /* Allocate an alias set for register saves & restores from stack. */
863 rs6000_sr_alias_set = new_alias_set ();
866 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
868 /* We can only guarantee the availability of DI pseudo-ops when
869 assembling for 64-bit targets. */
872 targetm.asm_out.aligned_op.di = NULL;
873 targetm.asm_out.unaligned_op.di = NULL;
876 /* Set maximum branch target alignment at two instructions, eight bytes. */
877 align_jumps_max_skip = 8;
878 align_loops_max_skip = 8;
880 /* Arrange to save and restore machine status around nested functions. */
881 init_machine_status = rs6000_init_machine_status;
884 /* Handle generic options of the form -mfoo=yes/no.
885 NAME is the option name.
886 VALUE is the option value.
887 FLAG is the pointer to the flag where to store a 1 or 0, depending on
888 whether the option value is 'yes' or 'no' respectively. */
890 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
894 else if (!strcmp (value, "yes"))
896 else if (!strcmp (value, "no"))
899 error ("unknown -m%s= option specified: '%s'", name, value);
902 /* Handle -mabi= options. */
904 rs6000_parse_abi_options (void)
906 if (rs6000_abi_string == 0)
908 else if (! strcmp (rs6000_abi_string, "altivec"))
909 rs6000_altivec_abi = 1;
910 else if (! strcmp (rs6000_abi_string, "no-altivec"))
911 rs6000_altivec_abi = 0;
912 else if (! strcmp (rs6000_abi_string, "spe"))
916 error ("not configured for ABI: '%s'", rs6000_abi_string);
919 else if (! strcmp (rs6000_abi_string, "no-spe"))
922 error ("unknown ABI specified: '%s'", rs6000_abi_string);
925 /* Handle -malign-XXXXXX options. */
927 rs6000_parse_alignment_option (void)
929 if (rs6000_alignment_string == 0
930 || ! strcmp (rs6000_alignment_string, "power"))
931 rs6000_alignment_flags = MASK_ALIGN_POWER;
932 else if (! strcmp (rs6000_alignment_string, "natural"))
933 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
935 error ("unknown -malign-XXXXX option specified: '%s'",
936 rs6000_alignment_string);
939 /* Validate and record the size specified with the -mtls-size option. */
942 rs6000_parse_tls_size_option (void)
944 if (rs6000_tls_size_string == 0)
946 else if (strcmp (rs6000_tls_size_string, "16") == 0)
947 rs6000_tls_size = 16;
948 else if (strcmp (rs6000_tls_size_string, "32") == 0)
949 rs6000_tls_size = 32;
950 else if (strcmp (rs6000_tls_size_string, "64") == 0)
951 rs6000_tls_size = 64;
953 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
957 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
961 /* Do anything needed at the start of the asm file. */
964 rs6000_file_start (void)
968 const char *start = buffer;
969 struct rs6000_cpu_select *ptr;
970 const char *default_cpu = TARGET_CPU_DEFAULT;
971 FILE *file = asm_out_file;
973 default_file_start ();
975 #ifdef TARGET_BI_ARCH
976 if ((TARGET_DEFAULT ^ target_flags) & MASK_64BIT)
980 if (flag_verbose_asm)
982 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
983 rs6000_select[0].string = default_cpu;
985 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
987 ptr = &rs6000_select[i];
988 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
990 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
996 switch (rs6000_sdata)
998 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
999 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
1000 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
1001 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
1004 if (rs6000_sdata && g_switch_value)
1006 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
1017 /* Return nonzero if this function is known to have a null epilogue. */
1020 direct_return (void)
1022 if (reload_completed)
1024 rs6000_stack_t *info = rs6000_stack_info ();
1026 if (info->first_gp_reg_save == 32
1027 && info->first_fp_reg_save == 64
1028 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1029 && ! info->lr_save_p
1030 && ! info->cr_save_p
1031 && info->vrsave_mask == 0
1039 /* Returns 1 always. */
1042 any_operand (rtx op ATTRIBUTE_UNUSED,
1043 enum machine_mode mode ATTRIBUTE_UNUSED)
1048 /* Returns 1 if op is the count register. */
1050 count_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1052 if (GET_CODE (op) != REG)
1055 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1058 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1064 /* Returns 1 if op is an altivec register. */
1066 altivec_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1069 return (register_operand (op, mode)
1070 && (GET_CODE (op) != REG
1071 || REGNO (op) > FIRST_PSEUDO_REGISTER
1072 || ALTIVEC_REGNO_P (REGNO (op))));
1076 xer_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1078 if (GET_CODE (op) != REG)
1081 if (XER_REGNO_P (REGNO (op)))
1087 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1088 by such constants completes more quickly. */
1091 s8bit_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1093 return ( GET_CODE (op) == CONST_INT
1094 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1097 /* Return 1 if OP is a constant that can fit in a D field. */
1100 short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1102 return (GET_CODE (op) == CONST_INT
1103 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1106 /* Similar for an unsigned D field. */
1109 u_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1111 return (GET_CODE (op) == CONST_INT
1112 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1115 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1118 non_short_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1120 return (GET_CODE (op) == CONST_INT
1121 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1124 /* Returns 1 if OP is a CONST_INT that is a positive value
1125 and an exact power of 2. */
1128 exact_log2_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1130 return (GET_CODE (op) == CONST_INT
1132 && exact_log2 (INTVAL (op)) >= 0);
1135 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1139 gpc_reg_operand (rtx op, enum machine_mode mode)
1141 return (register_operand (op, mode)
1142 && (GET_CODE (op) != REG
1143 || (REGNO (op) >= ARG_POINTER_REGNUM
1144 && !XER_REGNO_P (REGNO (op)))
1145 || REGNO (op) < MQ_REGNO));
1148 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1152 cc_reg_operand (rtx op, enum machine_mode mode)
1154 return (register_operand (op, mode)
1155 && (GET_CODE (op) != REG
1156 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1157 || CR_REGNO_P (REGNO (op))));
1160 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1161 CR field that isn't CR0. */
1164 cc_reg_not_cr0_operand (rtx op, enum machine_mode mode)
1166 return (register_operand (op, mode)
1167 && (GET_CODE (op) != REG
1168 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1169 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1172 /* Returns 1 if OP is either a constant integer valid for a D-field or
1173 a non-special register. If a register, it must be in the proper
1174 mode unless MODE is VOIDmode. */
1177 reg_or_short_operand (rtx op, enum machine_mode mode)
1179 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1182 /* Similar, except check if the negation of the constant would be
1183 valid for a D-field. */
1186 reg_or_neg_short_operand (rtx op, enum machine_mode mode)
1188 if (GET_CODE (op) == CONST_INT)
1189 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1191 return gpc_reg_operand (op, mode);
1194 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1195 a non-special register. If a register, it must be in the proper
1196 mode unless MODE is VOIDmode. */
1199 reg_or_aligned_short_operand (rtx op, enum machine_mode mode)
1201 if (gpc_reg_operand (op, mode))
1203 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1210 /* Return 1 if the operand is either a register or an integer whose
1211 high-order 16 bits are zero. */
1214 reg_or_u_short_operand (rtx op, enum machine_mode mode)
1216 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1219 /* Return 1 is the operand is either a non-special register or ANY
1220 constant integer. */
1223 reg_or_cint_operand (rtx op, enum machine_mode mode)
1225 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1228 /* Return 1 is the operand is either a non-special register or ANY
1229 32-bit signed constant integer. */
1232 reg_or_arith_cint_operand (rtx op, enum machine_mode mode)
1234 return (gpc_reg_operand (op, mode)
1235 || (GET_CODE (op) == CONST_INT
1236 #if HOST_BITS_PER_WIDE_INT != 32
1237 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1238 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1243 /* Return 1 is the operand is either a non-special register or a 32-bit
1244 signed constant integer valid for 64-bit addition. */
1247 reg_or_add_cint64_operand (rtx op, enum machine_mode mode)
1249 return (gpc_reg_operand (op, mode)
1250 || (GET_CODE (op) == CONST_INT
1251 #if HOST_BITS_PER_WIDE_INT == 32
1252 && INTVAL (op) < 0x7fff8000
1254 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1260 /* Return 1 is the operand is either a non-special register or a 32-bit
1261 signed constant integer valid for 64-bit subtraction. */
1264 reg_or_sub_cint64_operand (rtx op, enum machine_mode mode)
1266 return (gpc_reg_operand (op, mode)
1267 || (GET_CODE (op) == CONST_INT
1268 #if HOST_BITS_PER_WIDE_INT == 32
1269 && (- INTVAL (op)) < 0x7fff8000
1271 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1277 /* Return 1 is the operand is either a non-special register or ANY
1278 32-bit unsigned constant integer. */
1281 reg_or_logical_cint_operand (rtx op, enum machine_mode mode)
1283 if (GET_CODE (op) == CONST_INT)
1285 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1287 if (GET_MODE_BITSIZE (mode) <= 32)
1290 if (INTVAL (op) < 0)
1294 return ((INTVAL (op) & GET_MODE_MASK (mode)
1295 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1297 else if (GET_CODE (op) == CONST_DOUBLE)
1299 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1303 return CONST_DOUBLE_HIGH (op) == 0;
1306 return gpc_reg_operand (op, mode);
1309 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1312 got_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1314 return (GET_CODE (op) == SYMBOL_REF
1315 || GET_CODE (op) == CONST
1316 || GET_CODE (op) == LABEL_REF);
1319 /* Return 1 if the operand is a simple references that can be loaded via
1320 the GOT (labels involving addition aren't allowed). */
1323 got_no_const_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1325 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1328 /* Return the number of instructions it takes to form a constant in an
1329 integer register. */
1332 num_insns_constant_wide (HOST_WIDE_INT value)
1334 /* signed constant loadable with {cal|addi} */
1335 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1338 /* constant loadable with {cau|addis} */
1339 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1342 #if HOST_BITS_PER_WIDE_INT == 64
1343 else if (TARGET_POWERPC64)
1345 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1346 HOST_WIDE_INT high = value >> 31;
1348 if (high == 0 || high == -1)
1354 return num_insns_constant_wide (high) + 1;
1356 return (num_insns_constant_wide (high)
1357 + num_insns_constant_wide (low) + 1);
1366 num_insns_constant (rtx op, enum machine_mode mode)
1368 if (GET_CODE (op) == CONST_INT)
1370 #if HOST_BITS_PER_WIDE_INT == 64
1371 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1372 && mask64_operand (op, mode))
1376 return num_insns_constant_wide (INTVAL (op));
1379 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1384 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1385 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1386 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1389 else if (GET_CODE (op) == CONST_DOUBLE)
1395 int endian = (WORDS_BIG_ENDIAN == 0);
1397 if (mode == VOIDmode || mode == DImode)
1399 high = CONST_DOUBLE_HIGH (op);
1400 low = CONST_DOUBLE_LOW (op);
1404 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1405 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1407 low = l[1 - endian];
1411 return (num_insns_constant_wide (low)
1412 + num_insns_constant_wide (high));
1416 if (high == 0 && low >= 0)
1417 return num_insns_constant_wide (low);
1419 else if (high == -1 && low < 0)
1420 return num_insns_constant_wide (low);
1422 else if (mask64_operand (op, mode))
1426 return num_insns_constant_wide (high) + 1;
1429 return (num_insns_constant_wide (high)
1430 + num_insns_constant_wide (low) + 1);
1438 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1439 register with one instruction per word. We only do this if we can
1440 safely read CONST_DOUBLE_{LOW,HIGH}. */
1443 easy_fp_constant (rtx op, enum machine_mode mode)
1445 if (GET_CODE (op) != CONST_DOUBLE
1446 || GET_MODE (op) != mode
1447 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1450 /* Consider all constants with -msoft-float to be easy. */
1451 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1455 /* If we are using V.4 style PIC, consider all constants to be hard. */
1456 if (flag_pic && DEFAULT_ABI == ABI_V4)
1459 #ifdef TARGET_RELOCATABLE
1460 /* Similarly if we are using -mrelocatable, consider all constants
1462 if (TARGET_RELOCATABLE)
1471 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1472 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1474 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1475 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1476 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1477 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1480 else if (mode == DFmode)
1485 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1486 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1488 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1489 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1492 else if (mode == SFmode)
1497 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1498 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1500 return num_insns_constant_wide (l) == 1;
1503 else if (mode == DImode)
1504 return ((TARGET_POWERPC64
1505 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1506 || (num_insns_constant (op, DImode) <= 2));
1508 else if (mode == SImode)
1514 /* Return nonzero if all elements of a vector have the same value. */
1517 easy_vector_same (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1521 units = CONST_VECTOR_NUNITS (op);
1523 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1524 for (i = 1; i < units; ++i)
1525 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1532 /* Return 1 if the operand is a CONST_INT and can be put into a
1533 register without using memory. */
1536 easy_vector_constant (rtx op, enum machine_mode mode)
1540 if (GET_CODE (op) != CONST_VECTOR
1545 if (zero_constant (op, mode)
1546 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1547 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1550 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1553 if (TARGET_SPE && mode == V1DImode)
1556 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1557 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1559 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1561 evmergelo r0, r0, r0
1564 I don't know how efficient it would be to allow bigger constants,
1565 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1566 instructions is better than a 64-bit memory load, but I don't
1567 have the e500 timing specs. */
1568 if (TARGET_SPE && mode == V2SImode
1569 && cst >= -0x7fff && cst <= 0x7fff
1570 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1573 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1576 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1582 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1585 easy_vector_constant_add_self (rtx op, enum machine_mode mode)
1589 if (!easy_vector_constant (op, mode))
1592 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1594 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1598 output_vec_const_move (rtx *operands)
1601 enum machine_mode mode;
1607 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1608 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1609 mode = GET_MODE (dest);
1613 if (zero_constant (vec, mode))
1614 return "vxor %0,%0,%0";
1615 else if (EASY_VECTOR_15 (cst, vec, mode))
1617 operands[1] = GEN_INT (cst);
1621 return "vspltisw %0,%1";
1623 return "vspltish %0,%1";
1625 return "vspltisb %0,%1";
1630 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1638 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1639 pattern of V1DI, V4HI, and V2SF.
1641 FIXME: We should probably return # and add post reload
1642 splitters for these, but this way is so easy ;-).
1644 operands[1] = GEN_INT (cst);
1645 operands[2] = GEN_INT (cst2);
1647 return "li %0,%1\n\tevmergelo %0,%0,%0";
1649 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1655 /* Return 1 if the operand is the constant 0. This works for scalars
1656 as well as vectors. */
1658 zero_constant (rtx op, enum machine_mode mode)
1660 return op == CONST0_RTX (mode);
1663 /* Return 1 if the operand is 0.0. */
1665 zero_fp_constant (rtx op, enum machine_mode mode)
1667 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1670 /* Return 1 if the operand is in volatile memory. Note that during
1671 the RTL generation phase, memory_operand does not return TRUE for
1672 volatile memory references. So this function allows us to
1673 recognize volatile references where its safe. */
1676 volatile_mem_operand (rtx op, enum machine_mode mode)
1678 if (GET_CODE (op) != MEM)
1681 if (!MEM_VOLATILE_P (op))
1684 if (mode != GET_MODE (op))
1687 if (reload_completed)
1688 return memory_operand (op, mode);
1690 if (reload_in_progress)
1691 return strict_memory_address_p (mode, XEXP (op, 0));
1693 return memory_address_p (mode, XEXP (op, 0));
1696 /* Return 1 if the operand is an offsettable memory operand. */
1699 offsettable_mem_operand (rtx op, enum machine_mode mode)
1701 return ((GET_CODE (op) == MEM)
1702 && offsettable_address_p (reload_completed || reload_in_progress,
1703 mode, XEXP (op, 0)));
1706 /* Return 1 if the operand is either an easy FP constant (see above) or
1710 mem_or_easy_const_operand (rtx op, enum machine_mode mode)
1712 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1715 /* Return 1 if the operand is either a non-special register or an item
1716 that can be used as the operand of a `mode' add insn. */
1719 add_operand (rtx op, enum machine_mode mode)
1721 if (GET_CODE (op) == CONST_INT)
1722 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1723 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1725 return gpc_reg_operand (op, mode);
1728 /* Return 1 if OP is a constant but not a valid add_operand. */
1731 non_add_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1733 return (GET_CODE (op) == CONST_INT
1734 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1735 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1738 /* Return 1 if the operand is a non-special register or a constant that
1739 can be used as the operand of an OR or XOR insn on the RS/6000. */
1742 logical_operand (rtx op, enum machine_mode mode)
1744 HOST_WIDE_INT opl, oph;
1746 if (gpc_reg_operand (op, mode))
1749 if (GET_CODE (op) == CONST_INT)
1751 opl = INTVAL (op) & GET_MODE_MASK (mode);
1753 #if HOST_BITS_PER_WIDE_INT <= 32
1754 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1758 else if (GET_CODE (op) == CONST_DOUBLE)
1760 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1763 opl = CONST_DOUBLE_LOW (op);
1764 oph = CONST_DOUBLE_HIGH (op);
1771 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1772 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1775 /* Return 1 if C is a constant that is not a logical operand (as
1776 above), but could be split into one. */
1779 non_logical_cint_operand (rtx op, enum machine_mode mode)
1781 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1782 && ! logical_operand (op, mode)
1783 && reg_or_logical_cint_operand (op, mode));
1786 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1787 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1788 Reject all ones and all zeros, since these should have been optimized
1789 away and confuse the making of MB and ME. */
1792 mask_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1794 HOST_WIDE_INT c, lsb;
1796 if (GET_CODE (op) != CONST_INT)
1801 /* Fail in 64-bit mode if the mask wraps around because the upper
1802 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1803 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1806 /* We don't change the number of transitions by inverting,
1807 so make sure we start with the LS bit zero. */
1811 /* Reject all zeros or all ones. */
1815 /* Find the first transition. */
1818 /* Invert to look for a second transition. */
1821 /* Erase first transition. */
1824 /* Find the second transition (if any). */
1827 /* Match if all the bits above are 1's (or c is zero). */
1831 /* Return 1 for the PowerPC64 rlwinm corner case. */
1834 mask_operand_wrap (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1836 HOST_WIDE_INT c, lsb;
1838 if (GET_CODE (op) != CONST_INT)
1843 if ((c & 0x80000001) != 0x80000001)
1857 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1858 It is if there are no more than one 1->0 or 0->1 transitions.
1859 Reject all zeros, since zero should have been optimized away and
1860 confuses the making of MB and ME. */
1863 mask64_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1865 if (GET_CODE (op) == CONST_INT)
1867 HOST_WIDE_INT c, lsb;
1871 /* Reject all zeros. */
1875 /* We don't change the number of transitions by inverting,
1876 so make sure we start with the LS bit zero. */
1880 /* Find the transition, and check that all bits above are 1's. */
1883 /* Match if all the bits above are 1's (or c is zero). */
1889 /* Like mask64_operand, but allow up to three transitions. This
1890 predicate is used by insn patterns that generate two rldicl or
1891 rldicr machine insns. */
1894 mask64_2_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1896 if (GET_CODE (op) == CONST_INT)
1898 HOST_WIDE_INT c, lsb;
1902 /* Disallow all zeros. */
1906 /* We don't change the number of transitions by inverting,
1907 so make sure we start with the LS bit zero. */
1911 /* Find the first transition. */
1914 /* Invert to look for a second transition. */
1917 /* Erase first transition. */
1920 /* Find the second transition. */
1923 /* Invert to look for a third transition. */
1926 /* Erase second transition. */
1929 /* Find the third transition (if any). */
1932 /* Match if all the bits above are 1's (or c is zero). */
1938 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1939 implement ANDing by the mask IN. */
1941 build_mask64_2_operands (rtx in, rtx *out)
1943 #if HOST_BITS_PER_WIDE_INT >= 64
1944 unsigned HOST_WIDE_INT c, lsb, m1, m2;
1947 if (GET_CODE (in) != CONST_INT)
1953 /* Assume c initially something like 0x00fff000000fffff. The idea
1954 is to rotate the word so that the middle ^^^^^^ group of zeros
1955 is at the MS end and can be cleared with an rldicl mask. We then
1956 rotate back and clear off the MS ^^ group of zeros with a
1958 c = ~c; /* c == 0xff000ffffff00000 */
1959 lsb = c & -c; /* lsb == 0x0000000000100000 */
1960 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
1961 c = ~c; /* c == 0x00fff000000fffff */
1962 c &= -lsb; /* c == 0x00fff00000000000 */
1963 lsb = c & -c; /* lsb == 0x0000100000000000 */
1964 c = ~c; /* c == 0xff000fffffffffff */
1965 c &= -lsb; /* c == 0xff00000000000000 */
1967 while ((lsb >>= 1) != 0)
1968 shift++; /* shift == 44 on exit from loop */
1969 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
1970 m1 = ~m1; /* m1 == 0x000000ffffffffff */
1971 m2 = ~c; /* m2 == 0x00ffffffffffffff */
1975 /* Assume c initially something like 0xff000f0000000000. The idea
1976 is to rotate the word so that the ^^^ middle group of zeros
1977 is at the LS end and can be cleared with an rldicr mask. We then
1978 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
1980 lsb = c & -c; /* lsb == 0x0000010000000000 */
1981 m2 = -lsb; /* m2 == 0xffffff0000000000 */
1982 c = ~c; /* c == 0x00fff0ffffffffff */
1983 c &= -lsb; /* c == 0x00fff00000000000 */
1984 lsb = c & -c; /* lsb == 0x0000100000000000 */
1985 c = ~c; /* c == 0xff000fffffffffff */
1986 c &= -lsb; /* c == 0xff00000000000000 */
1988 while ((lsb >>= 1) != 0)
1989 shift++; /* shift == 44 on exit from loop */
1990 m1 = ~c; /* m1 == 0x00ffffffffffffff */
1991 m1 >>= shift; /* m1 == 0x0000000000000fff */
1992 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
1995 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
1996 masks will be all 1's. We are guaranteed more than one transition. */
1997 out[0] = GEN_INT (64 - shift);
1998 out[1] = GEN_INT (m1);
1999 out[2] = GEN_INT (shift);
2000 out[3] = GEN_INT (m2);
2008 /* Return 1 if the operand is either a non-special register or a constant
2009 that can be used as the operand of a PowerPC64 logical AND insn. */
2012 and64_operand (rtx op, enum machine_mode mode)
2014 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2015 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2017 return (logical_operand (op, mode) || mask64_operand (op, mode));
2020 /* Like the above, but also match constants that can be implemented
2021 with two rldicl or rldicr insns. */
2024 and64_2_operand (rtx op, enum machine_mode mode)
2026 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2027 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2029 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2032 /* Return 1 if the operand is either a non-special register or a
2033 constant that can be used as the operand of an RS/6000 logical AND insn. */
2036 and_operand (rtx op, enum machine_mode mode)
2038 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2039 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2041 return (logical_operand (op, mode) || mask_operand (op, mode));
2044 /* Return 1 if the operand is a general register or memory operand. */
2047 reg_or_mem_operand (rtx op, enum machine_mode mode)
2049 return (gpc_reg_operand (op, mode)
2050 || memory_operand (op, mode)
2051 || volatile_mem_operand (op, mode));
2054 /* Return 1 if the operand is a general register or memory operand without
2055 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2059 lwa_operand (rtx op, enum machine_mode mode)
2063 if (reload_completed && GET_CODE (inner) == SUBREG)
2064 inner = SUBREG_REG (inner);
2066 return gpc_reg_operand (inner, mode)
2067 || (memory_operand (inner, mode)
2068 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2069 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2070 && (GET_CODE (XEXP (inner, 0)) != PLUS
2071 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2072 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2075 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2078 symbol_ref_operand (rtx op, enum machine_mode mode)
2080 if (mode != VOIDmode && GET_MODE (op) != mode)
2083 return (GET_CODE (op) == SYMBOL_REF
2084 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2087 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2088 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2091 call_operand (rtx op, enum machine_mode mode)
2093 if (mode != VOIDmode && GET_MODE (op) != mode)
2096 return (GET_CODE (op) == SYMBOL_REF
2097 || (GET_CODE (op) == REG
2098 && (REGNO (op) == LINK_REGISTER_REGNUM
2099 || REGNO (op) == COUNT_REGISTER_REGNUM
2100 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2103 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2107 current_file_function_operand (rtx op,
2108 enum machine_mode mode ATTRIBUTE_UNUSED)
2110 return (GET_CODE (op) == SYMBOL_REF
2111 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2112 && (SYMBOL_REF_LOCAL_P (op)
2113 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2116 /* Return 1 if this operand is a valid input for a move insn. */
2119 input_operand (rtx op, enum machine_mode mode)
2121 /* Memory is always valid. */
2122 if (memory_operand (op, mode))
2125 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2126 if (GET_CODE (op) == CONSTANT_P_RTX)
2129 /* For floating-point, easy constants are valid. */
2130 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2132 && easy_fp_constant (op, mode))
2135 /* Allow any integer constant. */
2136 if (GET_MODE_CLASS (mode) == MODE_INT
2137 && (GET_CODE (op) == CONST_INT
2138 || GET_CODE (op) == CONST_DOUBLE))
2141 /* Allow easy vector constants. */
2142 if (GET_CODE (op) == CONST_VECTOR
2143 && easy_vector_constant (op, mode))
2146 /* For floating-point or multi-word mode, the only remaining valid type
2148 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2149 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2150 return register_operand (op, mode);
2152 /* The only cases left are integral modes one word or smaller (we
2153 do not get called for MODE_CC values). These can be in any
2155 if (register_operand (op, mode))
2158 /* A SYMBOL_REF referring to the TOC is valid. */
2159 if (legitimate_constant_pool_address_p (op))
2162 /* A constant pool expression (relative to the TOC) is valid */
2163 if (toc_relative_expr_p (op))
2166 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2168 if (DEFAULT_ABI == ABI_V4
2169 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2170 && small_data_operand (op, Pmode))
2176 /* Return 1 for an operand in small memory on V.4/eabi. */
2179 small_data_operand (rtx op ATTRIBUTE_UNUSED,
2180 enum machine_mode mode ATTRIBUTE_UNUSED)
2185 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2188 if (DEFAULT_ABI != ABI_V4)
2191 if (GET_CODE (op) == SYMBOL_REF)
2194 else if (GET_CODE (op) != CONST
2195 || GET_CODE (XEXP (op, 0)) != PLUS
2196 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2197 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2202 rtx sum = XEXP (op, 0);
2203 HOST_WIDE_INT summand;
2205 /* We have to be careful here, because it is the referenced address
2206 that must be 32k from _SDA_BASE_, not just the symbol. */
2207 summand = INTVAL (XEXP (sum, 1));
2208 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2211 sym_ref = XEXP (sum, 0);
2214 return SYMBOL_REF_SMALL_P (sym_ref);
2220 /* Return true if either operand is a general purpose register. */
2223 gpr_or_gpr_p (rtx op0, rtx op1)
2225 return ((REG_P (op0) && INT_REGNO_P (REGNO (op0)))
2226 || (REG_P (op1) && INT_REGNO_P (REGNO (op1))));
2230 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2233 constant_pool_expr_1 (rtx op, int *have_sym, int *have_toc)
2235 switch (GET_CODE(op))
2238 if (RS6000_SYMBOL_REF_TLS_P (op))
2240 else if (CONSTANT_POOL_ADDRESS_P (op))
2242 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2250 else if (! strcmp (XSTR (op, 0), toc_label_name))
2259 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2260 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2262 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2271 constant_pool_expr_p (rtx op)
2275 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2279 toc_relative_expr_p (rtx op)
2283 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2286 /* SPE offset addressing is limited to 5-bits worth of double words. */
2287 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2290 legitimate_constant_pool_address_p (rtx x)
2293 && GET_CODE (x) == PLUS
2294 && GET_CODE (XEXP (x, 0)) == REG
2295 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2296 && constant_pool_expr_p (XEXP (x, 1)));
2300 legitimate_small_data_p (enum machine_mode mode, rtx x)
2302 return (DEFAULT_ABI == ABI_V4
2303 && !flag_pic && !TARGET_TOC
2304 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2305 && small_data_operand (x, mode));
2309 legitimate_offset_address_p (enum machine_mode mode, rtx x, int strict)
2311 unsigned HOST_WIDE_INT offset, extra;
2313 if (GET_CODE (x) != PLUS)
2315 if (GET_CODE (XEXP (x, 0)) != REG)
2317 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2319 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2322 offset = INTVAL (XEXP (x, 1));
2330 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2331 which leaves the only valid constant offset of zero, which by
2332 canonicalization rules is also invalid. */
2339 /* SPE vector modes. */
2340 return SPE_CONST_OFFSET_OK (offset);
2346 else if (offset & 3)
2354 else if (offset & 3)
2364 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2368 legitimate_indexed_address_p (rtx x, int strict)
2372 if (GET_CODE (x) != PLUS)
2377 if (!REG_P (op0) || !REG_P (op1))
2380 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2381 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2382 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2383 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2387 legitimate_indirect_address_p (rtx x, int strict)
2389 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2393 legitimate_lo_sum_address_p (enum machine_mode mode, rtx x, int strict)
2395 if (GET_CODE (x) != LO_SUM)
2397 if (GET_CODE (XEXP (x, 0)) != REG)
2399 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2405 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2409 if (GET_MODE_NUNITS (mode) != 1)
2411 if (GET_MODE_BITSIZE (mode) > 32
2412 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2415 return CONSTANT_P (x);
2422 /* Try machine-dependent ways of modifying an illegitimate address
2423 to be legitimate. If we find one, return the new, valid address.
2424 This is used from only one place: `memory_address' in explow.c.
2426 OLDX is the address as it was before break_out_memory_refs was
2427 called. In some cases it is useful to look at this to decide what
2430 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2432 It is always safe for this function to do nothing. It exists to
2433 recognize opportunities to optimize the output.
2435 On RS/6000, first check for the sum of a register with a constant
2436 integer that is out of range. If so, generate code to add the
2437 constant with the low-order 16 bits masked to the register and force
2438 this result into another register (this can be done with `cau').
2439 Then generate an address of REG+(CONST&0xffff), allowing for the
2440 possibility of bit 16 being a one.
2442 Then check for the sum of a register and something not constant, try to
2443 load the other things into a register and return the sum. */
2446 rs6000_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
2447 enum machine_mode mode)
2449 if (GET_CODE (x) == SYMBOL_REF)
2451 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2453 return rs6000_legitimize_tls_address (x, model);
2456 if (GET_CODE (x) == PLUS
2457 && GET_CODE (XEXP (x, 0)) == REG
2458 && GET_CODE (XEXP (x, 1)) == CONST_INT
2459 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2461 HOST_WIDE_INT high_int, low_int;
2463 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2464 high_int = INTVAL (XEXP (x, 1)) - low_int;
2465 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2466 GEN_INT (high_int)), 0);
2467 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2469 else if (GET_CODE (x) == PLUS
2470 && GET_CODE (XEXP (x, 0)) == REG
2471 && GET_CODE (XEXP (x, 1)) != CONST_INT
2472 && GET_MODE_NUNITS (mode) == 1
2473 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2475 || (mode != DFmode && mode != TFmode))
2476 && (TARGET_POWERPC64 || mode != DImode)
2479 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2480 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2482 else if (ALTIVEC_VECTOR_MODE (mode))
2486 /* Make sure both operands are registers. */
2487 if (GET_CODE (x) == PLUS)
2488 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2489 force_reg (Pmode, XEXP (x, 1)));
2491 reg = force_reg (Pmode, x);
2494 else if (SPE_VECTOR_MODE (mode))
2496 /* We accept [reg + reg] and [reg + OFFSET]. */
2498 if (GET_CODE (x) == PLUS)
2500 rtx op1 = XEXP (x, 0);
2501 rtx op2 = XEXP (x, 1);
2503 op1 = force_reg (Pmode, op1);
2505 if (GET_CODE (op2) != REG
2506 && (GET_CODE (op2) != CONST_INT
2507 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2508 op2 = force_reg (Pmode, op2);
2510 return gen_rtx_PLUS (Pmode, op1, op2);
2513 return force_reg (Pmode, x);
2519 && GET_CODE (x) != CONST_INT
2520 && GET_CODE (x) != CONST_DOUBLE
2522 && GET_MODE_NUNITS (mode) == 1
2523 && (GET_MODE_BITSIZE (mode) <= 32
2524 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2526 rtx reg = gen_reg_rtx (Pmode);
2527 emit_insn (gen_elf_high (reg, (x)));
2528 return gen_rtx_LO_SUM (Pmode, reg, (x));
2530 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2533 && ! MACHO_DYNAMIC_NO_PIC_P
2535 && GET_CODE (x) != CONST_INT
2536 && GET_CODE (x) != CONST_DOUBLE
2538 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2542 rtx reg = gen_reg_rtx (Pmode);
2543 emit_insn (gen_macho_high (reg, (x)));
2544 return gen_rtx_LO_SUM (Pmode, reg, (x));
2547 && constant_pool_expr_p (x)
2548 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2550 return create_TOC_reference (x);
2556 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2558 static GTY(()) rtx rs6000_tls_symbol;
2560 rs6000_tls_get_addr (void)
2562 if (!rs6000_tls_symbol)
2563 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2565 return rs6000_tls_symbol;
2568 /* Construct the SYMBOL_REF for TLS GOT references. */
2570 static GTY(()) rtx rs6000_got_symbol;
2572 rs6000_got_sym (void)
2574 if (!rs6000_got_symbol)
2576 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2577 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2578 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2581 return rs6000_got_symbol;
2584 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2585 this (thread-local) address. */
2588 rs6000_legitimize_tls_address (rtx addr, enum tls_model model)
2592 dest = gen_reg_rtx (Pmode);
2593 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2599 tlsreg = gen_rtx_REG (Pmode, 13);
2600 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2604 tlsreg = gen_rtx_REG (Pmode, 2);
2605 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2609 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2613 tmp = gen_reg_rtx (Pmode);
2616 tlsreg = gen_rtx_REG (Pmode, 13);
2617 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2621 tlsreg = gen_rtx_REG (Pmode, 2);
2622 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2626 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2628 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2633 rtx r3, got, tga, tmp1, tmp2, eqv;
2636 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2640 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2643 rtx gsym = rs6000_got_sym ();
2644 got = gen_reg_rtx (Pmode);
2646 rs6000_emit_move (got, gsym, Pmode);
2650 static int tls_got_labelno = 0;
2651 rtx tempLR, lab, tmp3, mem;
2654 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2655 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2656 tempLR = gen_reg_rtx (Pmode);
2657 tmp1 = gen_reg_rtx (Pmode);
2658 tmp2 = gen_reg_rtx (Pmode);
2659 tmp3 = gen_reg_rtx (Pmode);
2660 mem = gen_rtx_MEM (Pmode, tmp1);
2661 RTX_UNCHANGING_P (mem) = 1;
2663 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2665 emit_move_insn (tmp1, tempLR);
2666 emit_move_insn (tmp2, mem);
2667 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2668 last = emit_move_insn (got, tmp3);
2669 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2671 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2673 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2679 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2681 r3 = gen_rtx_REG (Pmode, 3);
2683 insn = gen_tls_gd_64 (r3, got, addr);
2685 insn = gen_tls_gd_32 (r3, got, addr);
2688 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2689 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2690 insn = emit_call_insn (insn);
2691 CONST_OR_PURE_CALL_P (insn) = 1;
2692 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2693 insn = get_insns ();
2695 emit_libcall_block (insn, dest, r3, addr);
2697 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2699 r3 = gen_rtx_REG (Pmode, 3);
2701 insn = gen_tls_ld_64 (r3, got);
2703 insn = gen_tls_ld_32 (r3, got);
2706 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2707 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2708 insn = emit_call_insn (insn);
2709 CONST_OR_PURE_CALL_P (insn) = 1;
2710 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2711 insn = get_insns ();
2713 tmp1 = gen_reg_rtx (Pmode);
2714 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2716 emit_libcall_block (insn, tmp1, r3, eqv);
2717 if (rs6000_tls_size == 16)
2720 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2722 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2724 else if (rs6000_tls_size == 32)
2726 tmp2 = gen_reg_rtx (Pmode);
2728 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2730 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2733 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2735 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2739 tmp2 = gen_reg_rtx (Pmode);
2741 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2743 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2745 insn = gen_rtx_SET (Pmode, dest,
2746 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2752 /* IE, or 64 bit offset LE. */
2753 tmp2 = gen_reg_rtx (Pmode);
2755 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2757 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2760 insn = gen_tls_tls_64 (dest, tmp2, addr);
2762 insn = gen_tls_tls_32 (dest, tmp2, addr);
2770 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2771 instruction definitions. */
2774 rs6000_tls_symbol_ref (rtx x, enum machine_mode mode ATTRIBUTE_UNUSED)
2776 return RS6000_SYMBOL_REF_TLS_P (x);
2779 /* Return 1 if X contains a thread-local symbol. */
2782 rs6000_tls_referenced_p (rtx x)
2784 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2787 /* Return 1 if *X is a thread-local symbol. This is the same as
2788 rs6000_tls_symbol_ref except for the type of the unused argument. */
2791 rs6000_tls_symbol_ref_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
2793 return RS6000_SYMBOL_REF_TLS_P (*x);
2796 /* The convention appears to be to define this wherever it is used.
2797 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2798 is now used here. */
2799 #ifndef REG_MODE_OK_FOR_BASE_P
2800 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2803 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2804 replace the input X, or the original X if no replacement is called for.
2805 The output parameter *WIN is 1 if the calling macro should goto WIN,
2808 For RS/6000, we wish to handle large displacements off a base
2809 register by splitting the addend across an addiu/addis and the mem insn.
2810 This cuts number of extra insns needed from 3 to 1.
2812 On Darwin, we use this to generate code for floating point constants.
2813 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2814 The Darwin code is inside #if TARGET_MACHO because only then is
2815 machopic_function_base_name() defined. */
2817 rs6000_legitimize_reload_address (rtx x, enum machine_mode mode,
2818 int opnum, int type, int ind_levels ATTRIBUTE_UNUSED, int *win)
2820 /* We must recognize output that we have already generated ourselves. */
2821 if (GET_CODE (x) == PLUS
2822 && GET_CODE (XEXP (x, 0)) == PLUS
2823 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2824 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2825 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2827 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2828 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2829 opnum, (enum reload_type)type);
2835 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2836 && GET_CODE (x) == LO_SUM
2837 && GET_CODE (XEXP (x, 0)) == PLUS
2838 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2839 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2840 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2841 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2842 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2843 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2844 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2846 /* Result of previous invocation of this function on Darwin
2847 floating point constant. */
2848 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2849 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2850 opnum, (enum reload_type)type);
2855 if (GET_CODE (x) == PLUS
2856 && GET_CODE (XEXP (x, 0)) == REG
2857 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2858 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2859 && GET_CODE (XEXP (x, 1)) == CONST_INT
2860 && !SPE_VECTOR_MODE (mode)
2861 && !ALTIVEC_VECTOR_MODE (mode))
2863 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2864 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2866 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2868 /* Check for 32-bit overflow. */
2869 if (high + low != val)
2875 /* Reload the high part into a base reg; leave the low part
2876 in the mem directly. */
2878 x = gen_rtx_PLUS (GET_MODE (x),
2879 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2883 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2884 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2885 opnum, (enum reload_type)type);
2890 if (GET_CODE (x) == SYMBOL_REF
2891 && DEFAULT_ABI == ABI_DARWIN
2892 && !ALTIVEC_VECTOR_MODE (mode)
2895 /* Darwin load of floating point constant. */
2896 rtx offset = gen_rtx (CONST, Pmode,
2897 gen_rtx (MINUS, Pmode, x,
2898 gen_rtx (SYMBOL_REF, Pmode,
2899 machopic_function_base_name ())));
2900 x = gen_rtx (LO_SUM, GET_MODE (x),
2901 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
2902 gen_rtx (HIGH, Pmode, offset)), offset);
2903 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2904 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2905 opnum, (enum reload_type)type);
2909 if (GET_CODE (x) == SYMBOL_REF
2910 && DEFAULT_ABI == ABI_DARWIN
2911 && !ALTIVEC_VECTOR_MODE (mode)
2912 && MACHO_DYNAMIC_NO_PIC_P)
2914 /* Darwin load of floating point constant. */
2915 x = gen_rtx (LO_SUM, GET_MODE (x),
2916 gen_rtx (HIGH, Pmode, x), x);
2917 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2918 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2919 opnum, (enum reload_type)type);
2925 && constant_pool_expr_p (x)
2926 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
2928 (x) = create_TOC_reference (x);
2936 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
2937 that is a valid memory address for an instruction.
2938 The MODE argument is the machine mode for the MEM expression
2939 that wants to use this address.
2941 On the RS/6000, there are four valid address: a SYMBOL_REF that
2942 refers to a constant pool entry of an address (or the sum of it
2943 plus a constant), a short (16-bit signed) constant plus a register,
2944 the sum of two registers, or a register indirect, possibly with an
2945 auto-increment. For DFmode and DImode with a constant plus register,
2946 we must ensure that both words are addressable or PowerPC64 with offset
2949 For modes spanning multiple registers (DFmode in 32-bit GPRs,
2950 32-bit DImode, TImode), indexed addressing cannot be used because
2951 adjacent memory cells are accessed by adding word-sized offsets
2952 during assembly output. */
2954 rs6000_legitimate_address (enum machine_mode mode, rtx x, int reg_ok_strict)
2956 if (RS6000_SYMBOL_REF_TLS_P (x))
2958 if (legitimate_indirect_address_p (x, reg_ok_strict))
2960 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2961 && !ALTIVEC_VECTOR_MODE (mode)
2962 && !SPE_VECTOR_MODE (mode)
2964 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
2966 if (legitimate_small_data_p (mode, x))
2968 if (legitimate_constant_pool_address_p (x))
2970 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
2972 && GET_CODE (x) == PLUS
2973 && GET_CODE (XEXP (x, 0)) == REG
2974 && XEXP (x, 0) == virtual_stack_vars_rtx
2975 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2977 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
2980 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2982 || (mode != DFmode && mode != TFmode))
2983 && (TARGET_POWERPC64 || mode != DImode)
2984 && legitimate_indexed_address_p (x, reg_ok_strict))
2986 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
2991 /* Go to LABEL if ADDR (a legitimate address expression)
2992 has an effect that depends on the machine mode it is used for.
2994 On the RS/6000 this is true of all integral offsets (since AltiVec
2995 modes don't allow them) or is a pre-increment or decrement.
2997 ??? Except that due to conceptual problems in offsettable_address_p
2998 we can't really report the problems of integral offsets. So leave
2999 this assuming that the adjustable offset must be valid for the
3000 sub-words of a TFmode operand, which is what we had before. */
3003 rs6000_mode_dependent_address (rtx addr)
3005 switch (GET_CODE (addr))
3008 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3010 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3011 return val + 12 + 0x8000 >= 0x10000;
3020 return TARGET_UPDATE;
3029 /* Try to output insns to set TARGET equal to the constant C if it can
3030 be done in less than N insns. Do all computations in MODE.
3031 Returns the place where the output has been placed if it can be
3032 done and the insns have been emitted. If it would take more than N
3033 insns, zero is returned and no insns and emitted. */
3036 rs6000_emit_set_const (rtx dest, enum machine_mode mode,
3037 rtx source, int n ATTRIBUTE_UNUSED)
3039 rtx result, insn, set;
3040 HOST_WIDE_INT c0, c1;
3042 if (mode == QImode || mode == HImode)
3045 dest = gen_reg_rtx (mode);
3046 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3049 else if (mode == SImode)
3051 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3053 emit_insn (gen_rtx_SET (VOIDmode, result,
3054 GEN_INT (INTVAL (source)
3055 & (~ (HOST_WIDE_INT) 0xffff))));
3056 emit_insn (gen_rtx_SET (VOIDmode, dest,
3057 gen_rtx_IOR (SImode, result,
3058 GEN_INT (INTVAL (source) & 0xffff))));
3061 else if (mode == DImode)
3063 if (GET_CODE (source) == CONST_INT)
3065 c0 = INTVAL (source);
3068 else if (GET_CODE (source) == CONST_DOUBLE)
3070 #if HOST_BITS_PER_WIDE_INT >= 64
3071 c0 = CONST_DOUBLE_LOW (source);
3074 c0 = CONST_DOUBLE_LOW (source);
3075 c1 = CONST_DOUBLE_HIGH (source);
3081 result = rs6000_emit_set_long_const (dest, c0, c1);
3086 insn = get_last_insn ();
3087 set = single_set (insn);
3088 if (! CONSTANT_P (SET_SRC (set)))
3089 set_unique_reg_note (insn, REG_EQUAL, source);
3094 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3095 fall back to a straight forward decomposition. We do this to avoid
3096 exponential run times encountered when looking for longer sequences
3097 with rs6000_emit_set_const. */
3099 rs6000_emit_set_long_const (rtx dest, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
3101 if (!TARGET_POWERPC64)
3103 rtx operand1, operand2;
3105 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3107 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3109 emit_move_insn (operand1, GEN_INT (c1));
3110 emit_move_insn (operand2, GEN_INT (c2));
3114 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3117 ud2 = (c1 & 0xffff0000) >> 16;
3118 #if HOST_BITS_PER_WIDE_INT >= 64
3122 ud4 = (c2 & 0xffff0000) >> 16;
3124 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3125 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3128 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3130 emit_move_insn (dest, GEN_INT (ud1));
3133 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3134 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3137 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3140 emit_move_insn (dest, GEN_INT (ud2 << 16));
3142 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3144 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3145 || (ud4 == 0 && ! (ud3 & 0x8000)))
3148 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3151 emit_move_insn (dest, GEN_INT (ud3 << 16));
3154 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3155 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3157 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3162 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3165 emit_move_insn (dest, GEN_INT (ud4 << 16));
3168 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3170 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3172 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3173 GEN_INT (ud2 << 16)));
3175 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3181 /* Emit a move from SOURCE to DEST in mode MODE. */
3183 rs6000_emit_move (rtx dest, rtx source, enum machine_mode mode)
3187 operands[1] = source;
3189 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3190 if (GET_CODE (operands[1]) == CONST_DOUBLE
3191 && ! FLOAT_MODE_P (mode)
3192 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3194 /* FIXME. This should never happen. */
3195 /* Since it seems that it does, do the safe thing and convert
3197 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3199 if (GET_CODE (operands[1]) == CONST_DOUBLE
3200 && ! FLOAT_MODE_P (mode)
3201 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3202 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3203 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3204 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3207 /* Check if GCC is setting up a block move that will end up using FP
3208 registers as temporaries. We must make sure this is acceptable. */
3209 if (GET_CODE (operands[0]) == MEM
3210 && GET_CODE (operands[1]) == MEM
3212 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3213 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3214 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3215 ? 32 : MEM_ALIGN (operands[0])))
3216 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3218 : MEM_ALIGN (operands[1]))))
3219 && ! MEM_VOLATILE_P (operands [0])
3220 && ! MEM_VOLATILE_P (operands [1]))
3222 emit_move_insn (adjust_address (operands[0], SImode, 0),
3223 adjust_address (operands[1], SImode, 0));
3224 emit_move_insn (adjust_address (operands[0], SImode, 4),
3225 adjust_address (operands[1], SImode, 4));
3229 if (!no_new_pseudos)
3231 if (GET_CODE (operands[1]) == MEM && optimize > 0
3232 && (mode == QImode || mode == HImode || mode == SImode)
3233 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3235 rtx reg = gen_reg_rtx (word_mode);
3237 emit_insn (gen_rtx_SET (word_mode, reg,
3238 gen_rtx_ZERO_EXTEND (word_mode,
3240 operands[1] = gen_lowpart (mode, reg);
3242 if (GET_CODE (operands[0]) != REG)
3243 operands[1] = force_reg (mode, operands[1]);
3246 if (mode == SFmode && ! TARGET_POWERPC
3247 && TARGET_HARD_FLOAT && TARGET_FPRS
3248 && GET_CODE (operands[0]) == MEM)
3252 if (reload_in_progress || reload_completed)
3253 regnum = true_regnum (operands[1]);
3254 else if (GET_CODE (operands[1]) == REG)
3255 regnum = REGNO (operands[1]);
3259 /* If operands[1] is a register, on POWER it may have
3260 double-precision data in it, so truncate it to single
3262 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3265 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3266 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3267 operands[1] = newreg;
3271 /* Recognize the case where operand[1] is a reference to thread-local
3272 data and load its address to a register. */
3273 if (GET_CODE (operands[1]) == SYMBOL_REF)
3275 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3277 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3280 /* Handle the case where reload calls us with an invalid address. */
3281 if (reload_in_progress && mode == Pmode
3282 && (! general_operand (operands[1], mode)
3283 || ! nonimmediate_operand (operands[0], mode)))
3286 /* Handle the case of CONSTANT_P_RTX. */
3287 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3290 /* FIXME: In the long term, this switch statement should go away
3291 and be replaced by a sequence of tests based on things like
3297 if (CONSTANT_P (operands[1])
3298 && GET_CODE (operands[1]) != CONST_INT)
3299 operands[1] = force_const_mem (mode, operands[1]);
3305 if (CONSTANT_P (operands[1])
3306 && ! easy_fp_constant (operands[1], mode))
3307 operands[1] = force_const_mem (mode, operands[1]);
3318 if (CONSTANT_P (operands[1])
3319 && !easy_vector_constant (operands[1], mode))
3320 operands[1] = force_const_mem (mode, operands[1]);
3325 /* Use default pattern for address of ELF small data */
3328 && DEFAULT_ABI == ABI_V4
3329 && (GET_CODE (operands[1]) == SYMBOL_REF
3330 || GET_CODE (operands[1]) == CONST)
3331 && small_data_operand (operands[1], mode))
3333 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3337 if (DEFAULT_ABI == ABI_V4
3338 && mode == Pmode && mode == SImode
3339 && flag_pic == 1 && got_operand (operands[1], mode))
3341 emit_insn (gen_movsi_got (operands[0], operands[1]));
3345 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3349 && CONSTANT_P (operands[1])
3350 && GET_CODE (operands[1]) != HIGH
3351 && GET_CODE (operands[1]) != CONST_INT)
3353 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3355 /* If this is a function address on -mcall-aixdesc,
3356 convert it to the address of the descriptor. */
3357 if (DEFAULT_ABI == ABI_AIX
3358 && GET_CODE (operands[1]) == SYMBOL_REF
3359 && XSTR (operands[1], 0)[0] == '.')
3361 const char *name = XSTR (operands[1], 0);
3363 while (*name == '.')
3365 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3366 CONSTANT_POOL_ADDRESS_P (new_ref)
3367 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3368 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3369 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3370 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3371 operands[1] = new_ref;
3374 if (DEFAULT_ABI == ABI_DARWIN)
3377 if (MACHO_DYNAMIC_NO_PIC_P)
3379 /* Take care of any required data indirection. */
3380 operands[1] = rs6000_machopic_legitimize_pic_address (
3381 operands[1], mode, operands[0]);
3382 if (operands[0] != operands[1])
3383 emit_insn (gen_rtx_SET (VOIDmode,
3384 operands[0], operands[1]));
3388 emit_insn (gen_macho_high (target, operands[1]));
3389 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3393 emit_insn (gen_elf_high (target, operands[1]));
3394 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3398 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3399 and we have put it in the TOC, we just need to make a TOC-relative
3402 && GET_CODE (operands[1]) == SYMBOL_REF
3403 && constant_pool_expr_p (operands[1])
3404 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3405 get_pool_mode (operands[1])))
3407 operands[1] = create_TOC_reference (operands[1]);
3409 else if (mode == Pmode
3410 && CONSTANT_P (operands[1])
3411 && ((GET_CODE (operands[1]) != CONST_INT
3412 && ! easy_fp_constant (operands[1], mode))
3413 || (GET_CODE (operands[1]) == CONST_INT
3414 && num_insns_constant (operands[1], mode) > 2)
3415 || (GET_CODE (operands[0]) == REG
3416 && FP_REGNO_P (REGNO (operands[0]))))
3417 && GET_CODE (operands[1]) != HIGH
3418 && ! legitimate_constant_pool_address_p (operands[1])
3419 && ! toc_relative_expr_p (operands[1]))
3421 /* Emit a USE operation so that the constant isn't deleted if
3422 expensive optimizations are turned on because nobody
3423 references it. This should only be done for operands that
3424 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3425 This should not be done for operands that contain LABEL_REFs.
3426 For now, we just handle the obvious case. */
3427 if (GET_CODE (operands[1]) != LABEL_REF)
3428 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3431 /* Darwin uses a special PIC legitimizer. */
3432 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3435 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3437 if (operands[0] != operands[1])
3438 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3443 /* If we are to limit the number of things we put in the TOC and
3444 this is a symbol plus a constant we can add in one insn,
3445 just put the symbol in the TOC and add the constant. Don't do
3446 this if reload is in progress. */
3447 if (GET_CODE (operands[1]) == CONST
3448 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3449 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3450 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3451 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3452 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3453 && ! side_effects_p (operands[0]))
3456 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3457 rtx other = XEXP (XEXP (operands[1], 0), 1);
3459 sym = force_reg (mode, sym);
3461 emit_insn (gen_addsi3 (operands[0], sym, other));
3463 emit_insn (gen_adddi3 (operands[0], sym, other));
3467 operands[1] = force_const_mem (mode, operands[1]);
3470 && constant_pool_expr_p (XEXP (operands[1], 0))
3471 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3472 get_pool_constant (XEXP (operands[1], 0)),
3473 get_pool_mode (XEXP (operands[1], 0))))
3476 = gen_rtx_MEM (mode,
3477 create_TOC_reference (XEXP (operands[1], 0)));
3478 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3479 RTX_UNCHANGING_P (operands[1]) = 1;
3485 if (GET_CODE (operands[0]) == MEM
3486 && GET_CODE (XEXP (operands[0], 0)) != REG
3487 && ! reload_in_progress)
3489 = replace_equiv_address (operands[0],
3490 copy_addr_to_reg (XEXP (operands[0], 0)));
3492 if (GET_CODE (operands[1]) == MEM
3493 && GET_CODE (XEXP (operands[1], 0)) != REG
3494 && ! reload_in_progress)
3496 = replace_equiv_address (operands[1],
3497 copy_addr_to_reg (XEXP (operands[1], 0)));
3500 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3502 gen_rtx_SET (VOIDmode,
3503 operands[0], operands[1]),
3504 gen_rtx_CLOBBER (VOIDmode,
3505 gen_rtx_SCRATCH (SImode)))));
3514 /* Above, we may have called force_const_mem which may have returned
3515 an invalid address. If we can, fix this up; otherwise, reload will
3516 have to deal with it. */
3517 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3518 operands[1] = validize_mem (operands[1]);
3521 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3524 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3525 for a call to a function whose data type is FNTYPE.
3526 For a library call, FNTYPE is 0.
3528 For incoming args we set the number of arguments in the prototype large
3529 so we never return a PARALLEL. */
3532 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
3533 rtx libname ATTRIBUTE_UNUSED, int incoming, int libcall)
3535 static CUMULATIVE_ARGS zero_cumulative;
3537 *cum = zero_cumulative;
3539 cum->fregno = FP_ARG_MIN_REG;
3540 cum->vregno = ALTIVEC_ARG_MIN_REG;
3541 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3542 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
3543 ? CALL_LIBCALL : CALL_NORMAL);
3544 cum->sysv_gregno = GP_ARG_MIN_REG;
3545 cum->stdarg = fntype
3546 && (TYPE_ARG_TYPES (fntype) != 0
3547 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3548 != void_type_node));
3551 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3553 else if (cum->prototype)
3554 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3555 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3556 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3559 cum->nargs_prototype = 0;
3561 /* Check for a longcall attribute. */
3563 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3564 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3565 cum->call_cookie = CALL_LONG;
3567 if (TARGET_DEBUG_ARG)
3569 fprintf (stderr, "\ninit_cumulative_args:");
3572 tree ret_type = TREE_TYPE (fntype);
3573 fprintf (stderr, " ret code = %s,",
3574 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3577 if (cum->call_cookie & CALL_LONG)
3578 fprintf (stderr, " longcall,");
3580 fprintf (stderr, " proto = %d, nargs = %d\n",
3581 cum->prototype, cum->nargs_prototype);
3585 /* If defined, a C expression which determines whether, and in which
3586 direction, to pad out an argument with extra space. The value
3587 should be of type `enum direction': either `upward' to pad above
3588 the argument, `downward' to pad below, or `none' to inhibit
3591 For the AIX ABI structs are always stored left shifted in their
3595 function_arg_padding (enum machine_mode mode, tree type)
3597 #ifndef AGGREGATE_PADDING_FIXED
3598 #define AGGREGATE_PADDING_FIXED 0
3600 #ifndef AGGREGATES_PAD_UPWARD_ALWAYS
3601 #define AGGREGATES_PAD_UPWARD_ALWAYS 0
3604 if (!AGGREGATE_PADDING_FIXED)
3606 /* GCC used to pass structures of the same size as integer types as
3607 if they were in fact integers, ignoring FUNCTION_ARG_PADDING.
3608 ie. Structures of size 1 or 2 (or 4 when TARGET_64BIT) were
3609 passed padded downward, except that -mstrict-align further
3610 muddied the water in that multi-component structures of 2 and 4
3611 bytes in size were passed padded upward.
3613 The following arranges for best compatibility with previous
3614 versions of gcc, but removes the -mstrict-align dependency. */
3615 if (BYTES_BIG_ENDIAN)
3617 HOST_WIDE_INT size = 0;
3619 if (mode == BLKmode)
3621 if (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
3622 size = int_size_in_bytes (type);
3625 size = GET_MODE_SIZE (mode);
3627 if (size == 1 || size == 2 || size == 4)
3633 if (AGGREGATES_PAD_UPWARD_ALWAYS)
3635 if (type != 0 && AGGREGATE_TYPE_P (type))
3639 /* Fall back to the default. */
3640 return DEFAULT_FUNCTION_ARG_PADDING (mode, type);
3643 /* If defined, a C expression that gives the alignment boundary, in bits,
3644 of an argument with the specified mode and type. If it is not defined,
3645 PARM_BOUNDARY is used for all arguments.
3647 V.4 wants long longs to be double word aligned. */
3650 function_arg_boundary (enum machine_mode mode, tree type ATTRIBUTE_UNUSED)
3652 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3654 else if (SPE_VECTOR_MODE (mode))
3656 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3659 return PARM_BOUNDARY;
3662 /* Update the data in CUM to advance over an argument
3663 of mode MODE and data type TYPE.
3664 (TYPE is null for libcalls where that information may not be available.) */
3667 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3668 tree type, int named)
3670 cum->nargs_prototype--;
3672 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3674 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3677 cum->words += RS6000_ARG_SIZE (mode, type);
3679 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3681 && cum->sysv_gregno <= GP_ARG_MAX_REG)
3683 else if (DEFAULT_ABI == ABI_V4)
3685 if (TARGET_HARD_FLOAT && TARGET_FPRS
3686 && (mode == SFmode || mode == DFmode))
3688 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3693 cum->words += cum->words & 1;
3694 cum->words += RS6000_ARG_SIZE (mode, type);
3700 int gregno = cum->sysv_gregno;
3702 /* Aggregates and IEEE quad get passed by reference. */
3703 if ((type && AGGREGATE_TYPE_P (type))
3707 n_words = RS6000_ARG_SIZE (mode, type);
3709 /* Long long and SPE vectors are put in odd registers. */
3710 if (n_words == 2 && (gregno & 1) == 0)
3713 /* Long long and SPE vectors are not split between registers
3715 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3717 /* Long long is aligned on the stack. */
3719 cum->words += cum->words & 1;
3720 cum->words += n_words;
3723 /* Note: continuing to accumulate gregno past when we've started
3724 spilling to the stack indicates the fact that we've started
3725 spilling to the stack to expand_builtin_saveregs. */
3726 cum->sysv_gregno = gregno + n_words;
3729 if (TARGET_DEBUG_ARG)
3731 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3732 cum->words, cum->fregno);
3733 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3734 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3735 fprintf (stderr, "mode = %4s, named = %d\n",
3736 GET_MODE_NAME (mode), named);
3741 int align = (TARGET_32BIT && (cum->words & 1) != 0
3742 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3744 cum->words += align + RS6000_ARG_SIZE (mode, type);
3746 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3747 && TARGET_HARD_FLOAT && TARGET_FPRS)
3748 cum->fregno += (mode == TFmode ? 2 : 1);
3750 if (TARGET_DEBUG_ARG)
3752 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3753 cum->words, cum->fregno);
3754 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3755 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3756 fprintf (stderr, "named = %d, align = %d\n", named, align);
3761 /* Determine where to put a SIMD argument on the SPE. */
3763 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3768 int gregno = cum->sysv_gregno;
3769 int n_words = RS6000_ARG_SIZE (mode, type);
3771 /* SPE vectors are put in odd registers. */
3772 if (n_words == 2 && (gregno & 1) == 0)
3775 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3778 enum machine_mode m = SImode;
3780 r1 = gen_rtx_REG (m, gregno);
3781 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3782 r2 = gen_rtx_REG (m, gregno + 1);
3783 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3784 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3791 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3792 return gen_rtx_REG (mode, cum->sysv_gregno);
3798 /* Determine where to put an argument to a function.
3799 Value is zero to push the argument on the stack,
3800 or a hard register in which to store the argument.
3802 MODE is the argument's machine mode.
3803 TYPE is the data type of the argument (as a tree).
3804 This is null for libcalls where that information may
3806 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3807 the preceding args and about the function being called.
3808 NAMED is nonzero if this argument is a named parameter
3809 (otherwise it is an extra parameter matching an ellipsis).
3811 On RS/6000 the first eight words of non-FP are normally in registers
3812 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3813 Under V.4, the first 8 FP args are in registers.
3815 If this is floating-point and no prototype is specified, we use
3816 both an FP and integer register (or possibly FP reg and stack). Library
3817 functions (when CALL_LIBCALL is set) always have the proper types for args,
3818 so we can pass the FP value just in one register. emit_library_function
3819 doesn't support PARALLEL anyway. */
3822 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3823 tree type, int named)
3825 enum rs6000_abi abi = DEFAULT_ABI;
3827 /* Return a marker to indicate whether CR1 needs to set or clear the
3828 bit that V.4 uses to say fp args were passed in registers.
3829 Assume that we don't need the marker for software floating point,
3830 or compiler generated library calls. */
3831 if (mode == VOIDmode)
3834 && cum->nargs_prototype < 0
3835 && (cum->call_cookie & CALL_LIBCALL) == 0
3836 && (cum->prototype || TARGET_NO_PROTOTYPE))
3838 /* For the SPE, we need to crxor CR6 always. */
3840 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3841 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3842 return GEN_INT (cum->call_cookie
3843 | ((cum->fregno == FP_ARG_MIN_REG)
3844 ? CALL_V4_SET_FP_ARGS
3845 : CALL_V4_CLEAR_FP_ARGS));
3848 return GEN_INT (cum->call_cookie);
3851 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3853 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3854 return gen_rtx_REG (mode, cum->vregno);
3858 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
3859 return rs6000_spe_function_arg (cum, mode, type);
3860 else if (abi == ABI_V4)
3862 if (TARGET_HARD_FLOAT && TARGET_FPRS
3863 && (mode == SFmode || mode == DFmode))
3865 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3866 return gen_rtx_REG (mode, cum->fregno);
3873 int gregno = cum->sysv_gregno;
3875 /* Aggregates and IEEE quad get passed by reference. */
3876 if ((type && AGGREGATE_TYPE_P (type))
3880 n_words = RS6000_ARG_SIZE (mode, type);
3882 /* Long long and SPE vectors are put in odd registers. */
3883 if (n_words == 2 && (gregno & 1) == 0)
3886 /* Long long do not split between registers and stack. */
3887 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3888 return gen_rtx_REG (mode, gregno);
3895 int align = (TARGET_32BIT && (cum->words & 1) != 0
3896 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3897 int align_words = cum->words + align;
3899 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3902 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3905 || ((cum->nargs_prototype > 0)
3906 /* IBM AIX extended its linkage convention definition always
3907 to require FP args after register save area hole on the
3909 && (DEFAULT_ABI != ABI_AIX
3911 || (align_words < GP_ARG_NUM_REG))))
3912 return gen_rtx_REG (mode, cum->fregno);
3914 return gen_rtx_PARALLEL (mode,
3916 gen_rtx_EXPR_LIST (VOIDmode,
3917 ((align_words >= GP_ARG_NUM_REG)
3920 + RS6000_ARG_SIZE (mode, type)
3922 /* If this is partially on the stack, then
3923 we only include the portion actually
3924 in registers here. */
3925 ? gen_rtx_REG (SImode,
3926 GP_ARG_MIN_REG + align_words)
3927 : gen_rtx_REG (mode,
3928 GP_ARG_MIN_REG + align_words))),
3930 gen_rtx_EXPR_LIST (VOIDmode,
3931 gen_rtx_REG (mode, cum->fregno),
3934 else if (align_words < GP_ARG_NUM_REG)
3935 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
3941 /* For an arg passed partly in registers and partly in memory,
3942 this is the number of registers used.
3943 For args passed entirely in registers or entirely in memory, zero. */
3946 function_arg_partial_nregs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
3947 tree type, int named ATTRIBUTE_UNUSED)
3949 if (DEFAULT_ABI == ABI_V4)
3952 if (USE_FP_FOR_ARG_P (*cum, mode, type)
3953 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
3955 if (cum->nargs_prototype >= 0)
3959 if (cum->words < GP_ARG_NUM_REG
3960 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
3962 int ret = GP_ARG_NUM_REG - cum->words;
3963 if (ret && TARGET_DEBUG_ARG)
3964 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
3972 /* A C expression that indicates when an argument must be passed by
3973 reference. If nonzero for an argument, a copy of that argument is
3974 made in memory and a pointer to the argument is passed instead of
3975 the argument itself. The pointer is passed in whatever way is
3976 appropriate for passing a pointer to that type.
3978 Under V.4, structures and unions are passed by reference.
3980 As an extension to all ABIs, variable sized types are passed by
3984 function_arg_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
3985 enum machine_mode mode ATTRIBUTE_UNUSED,
3986 tree type, int named ATTRIBUTE_UNUSED)
3988 if (DEFAULT_ABI == ABI_V4
3989 && ((type && AGGREGATE_TYPE_P (type))
3992 if (TARGET_DEBUG_ARG)
3993 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
3997 return type && int_size_in_bytes (type) < 0;
4000 /* Perform any needed actions needed for a function that is receiving a
4001 variable number of arguments.
4005 MODE and TYPE are the mode and type of the current parameter.
4007 PRETEND_SIZE is a variable that should be set to the amount of stack
4008 that must be pushed by the prolog to pretend that our caller pushed
4011 Normally, this macro will push all remaining incoming registers on the
4012 stack and set PRETEND_SIZE to the length of the registers pushed. */
4015 setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
4016 tree type, int *pretend_size ATTRIBUTE_UNUSED, int no_rtl)
4018 CUMULATIVE_ARGS next_cum;
4019 int reg_size = TARGET_32BIT ? 4 : 8;
4020 rtx save_area = NULL_RTX, mem;
4021 int first_reg_offset, set;
4025 fntype = TREE_TYPE (current_function_decl);
4026 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4027 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4028 != void_type_node));
4030 /* For varargs, we do not want to skip the dummy va_dcl argument.
4031 For stdargs, we do want to skip the last named argument. */
4034 function_arg_advance (&next_cum, mode, type, 1);
4036 if (DEFAULT_ABI == ABI_V4)
4038 /* Indicate to allocate space on the stack for varargs save area. */
4039 cfun->machine->sysv_varargs_p = 1;
4041 save_area = plus_constant (virtual_stack_vars_rtx,
4042 - RS6000_VARARGS_SIZE);
4044 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4048 first_reg_offset = next_cum.words;
4049 save_area = virtual_incoming_args_rtx;
4050 cfun->machine->sysv_varargs_p = 0;
4052 if (MUST_PASS_IN_STACK (mode, type))
4053 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4056 set = get_varargs_alias_set ();
4057 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4059 mem = gen_rtx_MEM (BLKmode,
4060 plus_constant (save_area,
4061 first_reg_offset * reg_size)),
4062 set_mem_alias_set (mem, set);
4063 set_mem_align (mem, BITS_PER_WORD);
4065 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4066 GP_ARG_NUM_REG - first_reg_offset);
4069 /* Save FP registers if needed. */
4070 if (DEFAULT_ABI == ABI_V4
4071 && TARGET_HARD_FLOAT && TARGET_FPRS
4073 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4075 int fregno = next_cum.fregno;
4076 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4077 rtx lab = gen_label_rtx ();
4078 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4080 emit_jump_insn (gen_rtx_SET (VOIDmode,
4082 gen_rtx_IF_THEN_ELSE (VOIDmode,
4083 gen_rtx_NE (VOIDmode, cr1,
4085 gen_rtx_LABEL_REF (VOIDmode, lab),
4088 while (fregno <= FP_ARG_V4_MAX_REG)
4090 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4091 set_mem_alias_set (mem, set);
4092 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4101 /* Create the va_list data type. */
4104 rs6000_build_va_list (void)
4106 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4108 /* For AIX, prefer 'char *' because that's what the system
4109 header files like. */
4110 if (DEFAULT_ABI != ABI_V4)
4111 return build_pointer_type (char_type_node);
4113 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4114 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4116 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4117 unsigned_char_type_node);
4118 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4119 unsigned_char_type_node);
4120 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4122 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4123 short_unsigned_type_node);
4124 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4126 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4129 DECL_FIELD_CONTEXT (f_gpr) = record;
4130 DECL_FIELD_CONTEXT (f_fpr) = record;
4131 DECL_FIELD_CONTEXT (f_res) = record;
4132 DECL_FIELD_CONTEXT (f_ovf) = record;
4133 DECL_FIELD_CONTEXT (f_sav) = record;
4135 TREE_CHAIN (record) = type_decl;
4136 TYPE_NAME (record) = type_decl;
4137 TYPE_FIELDS (record) = f_gpr;
4138 TREE_CHAIN (f_gpr) = f_fpr;
4139 TREE_CHAIN (f_fpr) = f_res;
4140 TREE_CHAIN (f_res) = f_ovf;
4141 TREE_CHAIN (f_ovf) = f_sav;
4143 layout_type (record);
4145 /* The correct type is an array type of one element. */
4146 return build_array_type (record, build_index_type (size_zero_node));
4149 /* Implement va_start. */
4152 rs6000_va_start (tree valist, rtx nextarg)
4154 HOST_WIDE_INT words, n_gpr, n_fpr;
4155 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4156 tree gpr, fpr, ovf, sav, t;
4158 /* Only SVR4 needs something special. */
4159 if (DEFAULT_ABI != ABI_V4)
4161 std_expand_builtin_va_start (valist, nextarg);
4165 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4166 f_fpr = TREE_CHAIN (f_gpr);
4167 f_res = TREE_CHAIN (f_fpr);
4168 f_ovf = TREE_CHAIN (f_res);
4169 f_sav = TREE_CHAIN (f_ovf);
4171 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4172 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4173 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4174 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4175 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4177 /* Count number of gp and fp argument registers used. */
4178 words = current_function_args_info.words;
4179 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4180 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4182 if (TARGET_DEBUG_ARG)
4183 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4184 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4185 words, n_gpr, n_fpr);
4187 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4188 TREE_SIDE_EFFECTS (t) = 1;
4189 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4191 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4192 TREE_SIDE_EFFECTS (t) = 1;
4193 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4195 /* Find the overflow area. */
4196 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4198 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4199 build_int_2 (words * UNITS_PER_WORD, 0));
4200 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4201 TREE_SIDE_EFFECTS (t) = 1;
4202 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4204 /* Find the register save area. */
4205 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4206 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4207 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4208 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4209 TREE_SIDE_EFFECTS (t) = 1;
4210 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4213 /* Implement va_arg. */
4216 rs6000_va_arg (tree valist, tree type)
4218 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4219 tree gpr, fpr, ovf, sav, reg, t, u;
4220 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4221 rtx lab_false, lab_over, addr_rtx, r;
4223 if (DEFAULT_ABI != ABI_V4)
4225 /* Variable sized types are passed by reference. */
4226 if (int_size_in_bytes (type) < 0)
4228 u = build_pointer_type (type);
4230 /* Args grow upward. */
4231 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4232 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4233 TREE_SIDE_EFFECTS (t) = 1;
4235 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4236 TREE_SIDE_EFFECTS (t) = 1;
4238 t = build1 (INDIRECT_REF, u, t);
4239 TREE_SIDE_EFFECTS (t) = 1;
4241 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4244 return std_expand_builtin_va_arg (valist, type);
4247 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4248 f_fpr = TREE_CHAIN (f_gpr);
4249 f_res = TREE_CHAIN (f_fpr);
4250 f_ovf = TREE_CHAIN (f_res);
4251 f_sav = TREE_CHAIN (f_ovf);
4253 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4254 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4255 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4256 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4257 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4259 size = int_size_in_bytes (type);
4260 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4262 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4264 /* Aggregates and long doubles are passed by reference. */
4270 size = UNITS_PER_WORD;
4273 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4275 /* FP args go in FP registers, if present. */
4284 /* Otherwise into GP registers. */
4292 /* Pull the value out of the saved registers ... */
4294 lab_false = gen_label_rtx ();
4295 lab_over = gen_label_rtx ();
4296 addr_rtx = gen_reg_rtx (Pmode);
4298 /* AltiVec vectors never go in registers. */
4299 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4301 TREE_THIS_VOLATILE (reg) = 1;
4302 emit_cmp_and_jump_insns
4303 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4304 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4307 /* Long long is aligned in the registers. */
4310 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4311 build_int_2 (n_reg - 1, 0));
4312 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4313 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4314 TREE_SIDE_EFFECTS (u) = 1;
4315 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4319 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4323 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4324 build_int_2 (n_reg, 0));
4325 TREE_SIDE_EFFECTS (u) = 1;
4327 u = build1 (CONVERT_EXPR, integer_type_node, u);
4328 TREE_SIDE_EFFECTS (u) = 1;
4330 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4331 TREE_SIDE_EFFECTS (u) = 1;
4333 t = build (PLUS_EXPR, ptr_type_node, t, u);
4334 TREE_SIDE_EFFECTS (t) = 1;
4336 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4338 emit_move_insn (addr_rtx, r);
4340 emit_jump_insn (gen_jump (lab_over));
4344 emit_label (lab_false);
4346 /* ... otherwise out of the overflow area. */
4348 /* Make sure we don't find reg 7 for the next int arg.
4350 All AltiVec vectors go in the overflow area. So in the AltiVec
4351 case we need to get the vectors from the overflow area, but
4352 remember where the GPRs and FPRs are. */
4353 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4354 || !TARGET_ALTIVEC))
4356 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4357 TREE_SIDE_EFFECTS (t) = 1;
4358 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4361 /* Care for on-stack alignment if needed. */
4368 /* AltiVec vectors are 16 byte aligned. */
4369 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4374 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4375 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4379 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4381 emit_move_insn (addr_rtx, r);
4383 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4384 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4385 TREE_SIDE_EFFECTS (t) = 1;
4386 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4388 emit_label (lab_over);
4392 r = gen_rtx_MEM (Pmode, addr_rtx);
4393 set_mem_alias_set (r, get_varargs_alias_set ());
4394 emit_move_insn (addr_rtx, r);
4402 #define def_builtin(MASK, NAME, TYPE, CODE) \
4404 if ((MASK) & target_flags) \
4405 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4409 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4411 static const struct builtin_description bdesc_3arg[] =
4413 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4414 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4415 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4416 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4417 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4418 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4419 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4420 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4421 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4422 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4423 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4424 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4425 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4426 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4427 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4428 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4429 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4430 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4431 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4432 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4433 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4434 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4435 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4438 /* DST operations: void foo (void *, const int, const char). */
4440 static const struct builtin_description bdesc_dst[] =
4442 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4443 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4444 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4445 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4448 /* Simple binary operations: VECc = foo (VECa, VECb). */
4450 static struct builtin_description bdesc_2arg[] =
4452 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4453 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4454 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4455 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4456 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4457 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4458 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4459 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4460 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4461 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4462 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4463 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4464 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4465 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4466 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4467 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4468 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4469 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4470 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4471 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4472 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4473 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4474 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4475 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4476 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4477 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4478 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4479 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4480 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4481 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4482 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4483 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4484 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4485 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4486 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4487 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4488 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4489 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4490 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4491 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4492 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4493 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4494 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4495 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4496 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4497 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4498 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4499 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4500 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4501 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4502 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4503 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4504 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4505 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4506 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4507 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4508 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4509 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4510 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4511 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4512 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4513 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4514 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4515 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4516 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4517 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4518 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4519 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4520 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4521 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4522 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4523 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4524 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4525 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4526 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4527 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4528 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4529 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4530 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4531 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4532 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4533 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4534 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4535 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4536 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4537 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4538 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4539 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4540 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4541 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4542 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4543 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4544 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4545 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4546 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4547 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4548 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4549 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4550 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4551 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4552 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4553 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4554 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4555 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4556 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4557 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4558 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4559 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4560 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4561 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4562 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4563 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4564 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4566 /* Place holder, leave as first spe builtin. */
4567 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4568 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4569 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4570 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4571 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4572 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4573 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4574 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4575 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4576 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4577 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4578 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4579 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4580 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4581 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4582 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4583 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4584 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4585 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4586 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4587 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4588 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4589 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4590 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4591 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4592 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4593 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4594 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4595 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4596 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4597 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4598 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4599 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4600 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4601 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4602 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4603 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4604 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4605 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4606 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4607 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4608 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4609 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4610 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4611 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4612 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4613 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4614 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4615 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4616 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4617 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4618 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4619 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4620 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4621 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4622 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4623 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4624 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4625 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4626 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4627 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4628 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4629 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4630 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4631 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4632 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4633 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4634 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4635 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4636 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4637 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4638 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4639 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4640 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4641 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4642 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4643 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4644 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4645 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4646 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4647 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4648 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4649 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4650 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4651 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4652 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4653 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4654 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4655 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4656 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4657 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4658 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4659 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4660 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4661 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4662 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4663 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4664 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4665 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4666 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4667 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4668 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4669 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4670 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4671 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4672 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4673 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4674 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4675 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4677 /* SPE binary operations expecting a 5-bit unsigned literal. */
4678 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4680 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4681 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4682 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4683 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4684 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4685 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4686 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4687 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4688 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4689 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4690 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4691 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4692 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4693 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4694 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4695 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4696 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4697 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4698 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4699 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4700 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4701 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4702 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4703 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4704 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4705 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4707 /* Place-holder. Leave as last binary SPE builtin. */
4708 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4711 /* AltiVec predicates. */
4713 struct builtin_description_predicates
4715 const unsigned int mask;
4716 const enum insn_code icode;
4718 const char *const name;
4719 const enum rs6000_builtins code;
4722 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4724 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4725 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4726 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4727 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4728 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4729 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4730 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4731 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4732 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4733 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4734 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4735 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4736 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4739 /* SPE predicates. */
4740 static struct builtin_description bdesc_spe_predicates[] =
4742 /* Place-holder. Leave as first. */
4743 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4744 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4745 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4746 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4747 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4748 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4749 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4750 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4751 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4752 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4753 /* Place-holder. Leave as last. */
4754 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4757 /* SPE evsel predicates. */
4758 static struct builtin_description bdesc_spe_evsel[] =
4760 /* Place-holder. Leave as first. */
4761 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4762 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4763 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4764 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4765 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4766 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4767 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4768 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4769 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4770 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4771 /* Place-holder. Leave as last. */
4772 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4775 /* ABS* operations. */
4777 static const struct builtin_description bdesc_abs[] =
4779 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4780 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4781 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4782 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4783 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4784 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4785 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4788 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4791 static struct builtin_description bdesc_1arg[] =
4793 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4794 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4795 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4796 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4797 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4798 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4799 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4800 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4801 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4802 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4803 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4804 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4805 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4806 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4807 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4808 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4809 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4811 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4812 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4813 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4814 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4815 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4816 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4817 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4818 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4819 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4820 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4821 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4822 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4823 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4824 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4825 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4826 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4827 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4828 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4829 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4830 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4831 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4832 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4833 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4834 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4835 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4836 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4837 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4838 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4839 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4840 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4842 /* Place-holder. Leave as last unary SPE builtin. */
4843 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4847 rs6000_expand_unop_builtin (enum insn_code icode, tree arglist, rtx target)
4850 tree arg0 = TREE_VALUE (arglist);
4851 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4852 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4853 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4855 if (icode == CODE_FOR_nothing)
4856 /* Builtin not supported on this processor. */
4859 /* If we got invalid arguments bail out before generating bad rtl. */
4860 if (arg0 == error_mark_node)
4863 if (icode == CODE_FOR_altivec_vspltisb
4864 || icode == CODE_FOR_altivec_vspltish
4865 || icode == CODE_FOR_altivec_vspltisw
4866 || icode == CODE_FOR_spe_evsplatfi
4867 || icode == CODE_FOR_spe_evsplati)
4869 /* Only allow 5-bit *signed* literals. */
4870 if (GET_CODE (op0) != CONST_INT
4871 || INTVAL (op0) > 0x1f
4872 || INTVAL (op0) < -0x1f)
4874 error ("argument 1 must be a 5-bit signed literal");
4880 || GET_MODE (target) != tmode
4881 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4882 target = gen_reg_rtx (tmode);
4884 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4885 op0 = copy_to_mode_reg (mode0, op0);
4887 pat = GEN_FCN (icode) (target, op0);
4896 altivec_expand_abs_builtin (enum insn_code icode, tree arglist, rtx target)
4898 rtx pat, scratch1, scratch2;
4899 tree arg0 = TREE_VALUE (arglist);
4900 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4901 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4902 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4904 /* If we have invalid arguments, bail out before generating bad rtl. */
4905 if (arg0 == error_mark_node)
4909 || GET_MODE (target) != tmode
4910 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4911 target = gen_reg_rtx (tmode);
4913 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4914 op0 = copy_to_mode_reg (mode0, op0);
4916 scratch1 = gen_reg_rtx (mode0);
4917 scratch2 = gen_reg_rtx (mode0);
4919 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
4928 rs6000_expand_binop_builtin (enum insn_code icode, tree arglist, rtx target)
4931 tree arg0 = TREE_VALUE (arglist);
4932 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
4933 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4934 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
4935 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4936 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4937 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
4939 if (icode == CODE_FOR_nothing)
4940 /* Builtin not supported on this processor. */
4943 /* If we got invalid arguments bail out before generating bad rtl. */
4944 if (arg0 == error_mark_node || arg1 == error_mark_node)
4947 if (icode == CODE_FOR_altivec_vcfux
4948 || icode == CODE_FOR_altivec_vcfsx
4949 || icode == CODE_FOR_altivec_vctsxs
4950 || icode == CODE_FOR_altivec_vctuxs
4951 || icode == CODE_FOR_altivec_vspltb
4952 || icode == CODE_FOR_altivec_vsplth
4953 || icode == CODE_FOR_altivec_vspltw
4954 || icode == CODE_FOR_spe_evaddiw
4955 || icode == CODE_FOR_spe_evldd
4956 || icode == CODE_FOR_spe_evldh
4957 || icode == CODE_FOR_spe_evldw
4958 || icode == CODE_FOR_spe_evlhhesplat
4959 || icode == CODE_FOR_spe_evlhhossplat
4960 || icode == CODE_FOR_spe_evlhhousplat
4961 || icode == CODE_FOR_spe_evlwhe
4962 || icode == CODE_FOR_spe_evlwhos
4963 || icode == CODE_FOR_spe_evlwhou
4964 || icode == CODE_FOR_spe_evlwhsplat
4965 || icode == CODE_FOR_spe_evlwwsplat
4966 || icode == CODE_FOR_spe_evrlwi
4967 || icode == CODE_FOR_spe_evslwi
4968 || icode == CODE_FOR_spe_evsrwis
4969 || icode == CODE_FOR_spe_evsubifw
4970 || icode == CODE_FOR_spe_evsrwiu)
4972 /* Only allow 5-bit unsigned literals. */
4973 if (TREE_CODE (arg1) != INTEGER_CST
4974 || TREE_INT_CST_LOW (arg1) & ~0x1f)
4976 error ("argument 2 must be a 5-bit unsigned literal");
4982 || GET_MODE (target) != tmode
4983 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4984 target = gen_reg_rtx (tmode);
4986 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4987 op0 = copy_to_mode_reg (mode0, op0);
4988 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
4989 op1 = copy_to_mode_reg (mode1, op1);
4991 pat = GEN_FCN (icode) (target, op0, op1);
5000 altivec_expand_predicate_builtin (enum insn_code icode, const char *opcode,
5001 tree arglist, rtx target)
5004 tree cr6_form = TREE_VALUE (arglist);
5005 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5006 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5007 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5008 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5009 enum machine_mode tmode = SImode;
5010 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5011 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5014 if (TREE_CODE (cr6_form) != INTEGER_CST)
5016 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5020 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5025 /* If we have invalid arguments, bail out before generating bad rtl. */
5026 if (arg0 == error_mark_node || arg1 == error_mark_node)
5030 || GET_MODE (target) != tmode
5031 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5032 target = gen_reg_rtx (tmode);
5034 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5035 op0 = copy_to_mode_reg (mode0, op0);
5036 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5037 op1 = copy_to_mode_reg (mode1, op1);
5039 scratch = gen_reg_rtx (mode0);
5041 pat = GEN_FCN (icode) (scratch, op0, op1,
5042 gen_rtx (SYMBOL_REF, Pmode, opcode));
5047 /* The vec_any* and vec_all* predicates use the same opcodes for two
5048 different operations, but the bits in CR6 will be different
5049 depending on what information we want. So we have to play tricks
5050 with CR6 to get the right bits out.
5052 If you think this is disgusting, look at the specs for the
5053 AltiVec predicates. */
5055 switch (cr6_form_int)
5058 emit_insn (gen_cr6_test_for_zero (target));
5061 emit_insn (gen_cr6_test_for_zero_reverse (target));
5064 emit_insn (gen_cr6_test_for_lt (target));
5067 emit_insn (gen_cr6_test_for_lt_reverse (target));
5070 error ("argument 1 of __builtin_altivec_predicate is out of range");
5078 altivec_expand_stv_builtin (enum insn_code icode, tree arglist)
5080 tree arg0 = TREE_VALUE (arglist);
5081 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5082 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5083 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5084 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5085 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5087 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5088 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5089 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5091 /* Invalid arguments. Bail before doing anything stoopid! */
5092 if (arg0 == error_mark_node
5093 || arg1 == error_mark_node
5094 || arg2 == error_mark_node)
5097 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5098 op0 = copy_to_mode_reg (mode2, op0);
5099 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5100 op1 = copy_to_mode_reg (mode0, op1);
5101 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5102 op2 = copy_to_mode_reg (mode1, op2);
5104 pat = GEN_FCN (icode) (op1, op2, op0);
5111 rs6000_expand_ternop_builtin (enum insn_code icode, tree arglist, rtx target)
5114 tree arg0 = TREE_VALUE (arglist);
5115 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5116 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5117 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5118 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5119 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5120 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5121 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5122 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5123 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5125 if (icode == CODE_FOR_nothing)
5126 /* Builtin not supported on this processor. */
5129 /* If we got invalid arguments bail out before generating bad rtl. */
5130 if (arg0 == error_mark_node
5131 || arg1 == error_mark_node
5132 || arg2 == error_mark_node)
5135 if (icode == CODE_FOR_altivec_vsldoi_4sf
5136 || icode == CODE_FOR_altivec_vsldoi_4si
5137 || icode == CODE_FOR_altivec_vsldoi_8hi
5138 || icode == CODE_FOR_altivec_vsldoi_16qi)
5140 /* Only allow 4-bit unsigned literals. */
5141 if (TREE_CODE (arg2) != INTEGER_CST
5142 || TREE_INT_CST_LOW (arg2) & ~0xf)
5144 error ("argument 3 must be a 4-bit unsigned literal");
5150 || GET_MODE (target) != tmode
5151 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5152 target = gen_reg_rtx (tmode);
5154 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5155 op0 = copy_to_mode_reg (mode0, op0);
5156 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5157 op1 = copy_to_mode_reg (mode1, op1);
5158 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5159 op2 = copy_to_mode_reg (mode2, op2);
5161 pat = GEN_FCN (icode) (target, op0, op1, op2);
5169 /* Expand the lvx builtins. */
5171 altivec_expand_ld_builtin (tree exp, rtx target, bool *expandedp)
5173 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5174 tree arglist = TREE_OPERAND (exp, 1);
5175 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5177 enum machine_mode tmode, mode0;
5179 enum insn_code icode;
5183 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5184 icode = CODE_FOR_altivec_lvx_16qi;
5186 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5187 icode = CODE_FOR_altivec_lvx_8hi;
5189 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5190 icode = CODE_FOR_altivec_lvx_4si;
5192 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5193 icode = CODE_FOR_altivec_lvx_4sf;
5202 arg0 = TREE_VALUE (arglist);
5203 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5204 tmode = insn_data[icode].operand[0].mode;
5205 mode0 = insn_data[icode].operand[1].mode;
5208 || GET_MODE (target) != tmode
5209 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5210 target = gen_reg_rtx (tmode);
5212 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5213 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5215 pat = GEN_FCN (icode) (target, op0);
5222 /* Expand the stvx builtins. */
5224 altivec_expand_st_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5227 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5228 tree arglist = TREE_OPERAND (exp, 1);
5229 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5231 enum machine_mode mode0, mode1;
5233 enum insn_code icode;
5237 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5238 icode = CODE_FOR_altivec_stvx_16qi;
5240 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5241 icode = CODE_FOR_altivec_stvx_8hi;
5243 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5244 icode = CODE_FOR_altivec_stvx_4si;
5246 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5247 icode = CODE_FOR_altivec_stvx_4sf;
5254 arg0 = TREE_VALUE (arglist);
5255 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5256 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5257 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5258 mode0 = insn_data[icode].operand[0].mode;
5259 mode1 = insn_data[icode].operand[1].mode;
5261 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5262 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5263 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5264 op1 = copy_to_mode_reg (mode1, op1);
5266 pat = GEN_FCN (icode) (op0, op1);
5274 /* Expand the dst builtins. */
5276 altivec_expand_dst_builtin (tree exp, rtx target ATTRIBUTE_UNUSED,
5279 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5280 tree arglist = TREE_OPERAND (exp, 1);
5281 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5282 tree arg0, arg1, arg2;
5283 enum machine_mode mode0, mode1, mode2;
5284 rtx pat, op0, op1, op2;
5285 struct builtin_description *d;
5290 /* Handle DST variants. */
5291 d = (struct builtin_description *) bdesc_dst;
5292 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5293 if (d->code == fcode)
5295 arg0 = TREE_VALUE (arglist);
5296 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5297 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5298 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5299 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5300 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5301 mode0 = insn_data[d->icode].operand[0].mode;
5302 mode1 = insn_data[d->icode].operand[1].mode;
5303 mode2 = insn_data[d->icode].operand[2].mode;
5305 /* Invalid arguments, bail out before generating bad rtl. */
5306 if (arg0 == error_mark_node
5307 || arg1 == error_mark_node
5308 || arg2 == error_mark_node)
5311 if (TREE_CODE (arg2) != INTEGER_CST
5312 || TREE_INT_CST_LOW (arg2) & ~0x3)
5314 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5318 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5319 op0 = copy_to_mode_reg (mode0, op0);
5320 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5321 op1 = copy_to_mode_reg (mode1, op1);
5323 pat = GEN_FCN (d->icode) (op0, op1, op2);
5334 /* Expand the builtin in EXP and store the result in TARGET. Store
5335 true in *EXPANDEDP if we found a builtin to expand. */
5337 altivec_expand_builtin (tree exp, rtx target, bool *expandedp)
5339 struct builtin_description *d;
5340 struct builtin_description_predicates *dp;
5342 enum insn_code icode;
5343 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5344 tree arglist = TREE_OPERAND (exp, 1);
5347 enum machine_mode tmode, mode0;
5348 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5350 target = altivec_expand_ld_builtin (exp, target, expandedp);
5354 target = altivec_expand_st_builtin (exp, target, expandedp);
5358 target = altivec_expand_dst_builtin (exp, target, expandedp);
5366 case ALTIVEC_BUILTIN_STVX:
5367 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5368 case ALTIVEC_BUILTIN_STVEBX:
5369 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5370 case ALTIVEC_BUILTIN_STVEHX:
5371 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5372 case ALTIVEC_BUILTIN_STVEWX:
5373 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5374 case ALTIVEC_BUILTIN_STVXL:
5375 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5377 case ALTIVEC_BUILTIN_MFVSCR:
5378 icode = CODE_FOR_altivec_mfvscr;
5379 tmode = insn_data[icode].operand[0].mode;
5382 || GET_MODE (target) != tmode
5383 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5384 target = gen_reg_rtx (tmode);
5386 pat = GEN_FCN (icode) (target);
5392 case ALTIVEC_BUILTIN_MTVSCR:
5393 icode = CODE_FOR_altivec_mtvscr;
5394 arg0 = TREE_VALUE (arglist);
5395 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5396 mode0 = insn_data[icode].operand[0].mode;
5398 /* If we got invalid arguments bail out before generating bad rtl. */
5399 if (arg0 == error_mark_node)
5402 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5403 op0 = copy_to_mode_reg (mode0, op0);
5405 pat = GEN_FCN (icode) (op0);
5410 case ALTIVEC_BUILTIN_DSSALL:
5411 emit_insn (gen_altivec_dssall ());
5414 case ALTIVEC_BUILTIN_DSS:
5415 icode = CODE_FOR_altivec_dss;
5416 arg0 = TREE_VALUE (arglist);
5417 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5418 mode0 = insn_data[icode].operand[0].mode;
5420 /* If we got invalid arguments bail out before generating bad rtl. */
5421 if (arg0 == error_mark_node)
5424 if (TREE_CODE (arg0) != INTEGER_CST
5425 || TREE_INT_CST_LOW (arg0) & ~0x3)
5427 error ("argument to dss must be a 2-bit unsigned literal");
5431 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5432 op0 = copy_to_mode_reg (mode0, op0);
5434 emit_insn (gen_altivec_dss (op0));
5438 /* Expand abs* operations. */
5439 d = (struct builtin_description *) bdesc_abs;
5440 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5441 if (d->code == fcode)
5442 return altivec_expand_abs_builtin (d->icode, arglist, target);
5444 /* Expand the AltiVec predicates. */
5445 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5446 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5447 if (dp->code == fcode)
5448 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5450 /* LV* are funky. We initialized them differently. */
5453 case ALTIVEC_BUILTIN_LVSL:
5454 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5456 case ALTIVEC_BUILTIN_LVSR:
5457 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5459 case ALTIVEC_BUILTIN_LVEBX:
5460 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5462 case ALTIVEC_BUILTIN_LVEHX:
5463 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5465 case ALTIVEC_BUILTIN_LVEWX:
5466 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5468 case ALTIVEC_BUILTIN_LVXL:
5469 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5471 case ALTIVEC_BUILTIN_LVX:
5472 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5483 /* Binops that need to be initialized manually, but can be expanded
5484 automagically by rs6000_expand_binop_builtin. */
5485 static struct builtin_description bdesc_2arg_spe[] =
5487 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5488 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5489 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5490 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5491 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5492 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5493 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5494 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5495 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5496 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5497 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5498 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5499 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5500 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5501 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5502 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5503 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5504 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5505 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5506 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5507 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5508 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5511 /* Expand the builtin in EXP and store the result in TARGET. Store
5512 true in *EXPANDEDP if we found a builtin to expand.
5514 This expands the SPE builtins that are not simple unary and binary
5517 spe_expand_builtin (tree exp, rtx target, bool *expandedp)
5519 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5520 tree arglist = TREE_OPERAND (exp, 1);
5522 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5523 enum insn_code icode;
5524 enum machine_mode tmode, mode0;
5526 struct builtin_description *d;
5531 /* Syntax check for a 5-bit unsigned immediate. */
5534 case SPE_BUILTIN_EVSTDD:
5535 case SPE_BUILTIN_EVSTDH:
5536 case SPE_BUILTIN_EVSTDW:
5537 case SPE_BUILTIN_EVSTWHE:
5538 case SPE_BUILTIN_EVSTWHO:
5539 case SPE_BUILTIN_EVSTWWE:
5540 case SPE_BUILTIN_EVSTWWO:
5541 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5542 if (TREE_CODE (arg1) != INTEGER_CST
5543 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5545 error ("argument 2 must be a 5-bit unsigned literal");
5553 /* The evsplat*i instructions are not quite generic. */
5556 case SPE_BUILTIN_EVSPLATFI:
5557 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplatfi,
5559 case SPE_BUILTIN_EVSPLATI:
5560 return rs6000_expand_unop_builtin (CODE_FOR_spe_evsplati,
5566 d = (struct builtin_description *) bdesc_2arg_spe;
5567 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5568 if (d->code == fcode)
5569 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5571 d = (struct builtin_description *) bdesc_spe_predicates;
5572 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5573 if (d->code == fcode)
5574 return spe_expand_predicate_builtin (d->icode, arglist, target);
5576 d = (struct builtin_description *) bdesc_spe_evsel;
5577 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5578 if (d->code == fcode)
5579 return spe_expand_evsel_builtin (d->icode, arglist, target);
5583 case SPE_BUILTIN_EVSTDDX:
5584 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5585 case SPE_BUILTIN_EVSTDHX:
5586 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5587 case SPE_BUILTIN_EVSTDWX:
5588 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5589 case SPE_BUILTIN_EVSTWHEX:
5590 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5591 case SPE_BUILTIN_EVSTWHOX:
5592 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5593 case SPE_BUILTIN_EVSTWWEX:
5594 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5595 case SPE_BUILTIN_EVSTWWOX:
5596 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5597 case SPE_BUILTIN_EVSTDD:
5598 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5599 case SPE_BUILTIN_EVSTDH:
5600 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5601 case SPE_BUILTIN_EVSTDW:
5602 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5603 case SPE_BUILTIN_EVSTWHE:
5604 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5605 case SPE_BUILTIN_EVSTWHO:
5606 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5607 case SPE_BUILTIN_EVSTWWE:
5608 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5609 case SPE_BUILTIN_EVSTWWO:
5610 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5611 case SPE_BUILTIN_MFSPEFSCR:
5612 icode = CODE_FOR_spe_mfspefscr;
5613 tmode = insn_data[icode].operand[0].mode;
5616 || GET_MODE (target) != tmode
5617 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5618 target = gen_reg_rtx (tmode);
5620 pat = GEN_FCN (icode) (target);
5625 case SPE_BUILTIN_MTSPEFSCR:
5626 icode = CODE_FOR_spe_mtspefscr;
5627 arg0 = TREE_VALUE (arglist);
5628 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5629 mode0 = insn_data[icode].operand[0].mode;
5631 if (arg0 == error_mark_node)
5634 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5635 op0 = copy_to_mode_reg (mode0, op0);
5637 pat = GEN_FCN (icode) (op0);
5650 spe_expand_predicate_builtin (enum insn_code icode, tree arglist, rtx target)
5652 rtx pat, scratch, tmp;
5653 tree form = TREE_VALUE (arglist);
5654 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5655 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5656 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5657 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5658 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5659 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5663 if (TREE_CODE (form) != INTEGER_CST)
5665 error ("argument 1 of __builtin_spe_predicate must be a constant");
5669 form_int = TREE_INT_CST_LOW (form);
5674 if (arg0 == error_mark_node || arg1 == error_mark_node)
5678 || GET_MODE (target) != SImode
5679 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5680 target = gen_reg_rtx (SImode);
5682 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5683 op0 = copy_to_mode_reg (mode0, op0);
5684 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5685 op1 = copy_to_mode_reg (mode1, op1);
5687 scratch = gen_reg_rtx (CCmode);
5689 pat = GEN_FCN (icode) (scratch, op0, op1);
5694 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5695 _lower_. We use one compare, but look in different bits of the
5696 CR for each variant.
5698 There are 2 elements in each SPE simd type (upper/lower). The CR
5699 bits are set as follows:
5701 BIT0 | BIT 1 | BIT 2 | BIT 3
5702 U | L | (U | L) | (U & L)
5704 So, for an "all" relationship, BIT 3 would be set.
5705 For an "any" relationship, BIT 2 would be set. Etc.
5707 Following traditional nomenclature, these bits map to:
5709 BIT0 | BIT 1 | BIT 2 | BIT 3
5712 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5717 /* All variant. OV bit. */
5719 /* We need to get to the OV bit, which is the ORDERED bit. We
5720 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5721 that's ugly and will trigger a validate_condition_mode abort.
5722 So let's just use another pattern. */
5723 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5725 /* Any variant. EQ bit. */
5729 /* Upper variant. LT bit. */
5733 /* Lower variant. GT bit. */
5738 error ("argument 1 of __builtin_spe_predicate is out of range");
5742 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5743 emit_move_insn (target, tmp);
5748 /* The evsel builtins look like this:
5750 e = __builtin_spe_evsel_OP (a, b, c, d);
5754 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5755 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5759 spe_expand_evsel_builtin (enum insn_code icode, tree arglist, rtx target)
5762 tree arg0 = TREE_VALUE (arglist);
5763 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5764 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5765 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5766 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5767 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5768 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5769 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5770 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5771 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5776 if (arg0 == error_mark_node || arg1 == error_mark_node
5777 || arg2 == error_mark_node || arg3 == error_mark_node)
5781 || GET_MODE (target) != mode0
5782 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5783 target = gen_reg_rtx (mode0);
5785 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5786 op0 = copy_to_mode_reg (mode0, op0);
5787 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5788 op1 = copy_to_mode_reg (mode0, op1);
5789 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5790 op2 = copy_to_mode_reg (mode0, op2);
5791 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5792 op3 = copy_to_mode_reg (mode0, op3);
5794 /* Generate the compare. */
5795 scratch = gen_reg_rtx (CCmode);
5796 pat = GEN_FCN (icode) (scratch, op0, op1);
5801 if (mode0 == V2SImode)
5802 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5804 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5809 /* Expand an expression EXP that calls a built-in function,
5810 with result going to TARGET if that's convenient
5811 (and in mode MODE if that's convenient).
5812 SUBTARGET may be used as the target for computing one of EXP's operands.
5813 IGNORE is nonzero if the value is to be ignored. */
5816 rs6000_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
5817 enum machine_mode mode ATTRIBUTE_UNUSED,
5818 int ignore ATTRIBUTE_UNUSED)
5820 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5821 tree arglist = TREE_OPERAND (exp, 1);
5822 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5823 struct builtin_description *d;
5830 ret = altivec_expand_builtin (exp, target, &success);
5837 ret = spe_expand_builtin (exp, target, &success);
5843 if (TARGET_ALTIVEC || TARGET_SPE)
5845 /* Handle simple unary operations. */
5846 d = (struct builtin_description *) bdesc_1arg;
5847 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5848 if (d->code == fcode)
5849 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5851 /* Handle simple binary operations. */
5852 d = (struct builtin_description *) bdesc_2arg;
5853 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5854 if (d->code == fcode)
5855 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5857 /* Handle simple ternary operations. */
5858 d = (struct builtin_description *) bdesc_3arg;
5859 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5860 if (d->code == fcode)
5861 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
5869 rs6000_init_builtins (void)
5871 opaque_V2SI_type_node = copy_node (V2SI_type_node);
5872 opaque_V2SF_type_node = copy_node (V2SF_type_node);
5873 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
5876 spe_init_builtins ();
5878 altivec_init_builtins ();
5879 if (TARGET_ALTIVEC || TARGET_SPE)
5880 rs6000_common_init_builtins ();
5883 /* Search through a set of builtins and enable the mask bits.
5884 DESC is an array of builtins.
5885 SIZE is the total number of builtins.
5886 START is the builtin enum at which to start.
5887 END is the builtin enum at which to end. */
5889 enable_mask_for_builtins (struct builtin_description *desc, int size,
5890 enum rs6000_builtins start,
5891 enum rs6000_builtins end)
5895 for (i = 0; i < size; ++i)
5896 if (desc[i].code == start)
5902 for (; i < size; ++i)
5904 /* Flip all the bits on. */
5905 desc[i].mask = target_flags;
5906 if (desc[i].code == end)
5912 spe_init_builtins (void)
5914 tree endlink = void_list_node;
5915 tree puint_type_node = build_pointer_type (unsigned_type_node);
5916 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
5917 struct builtin_description *d;
5920 tree v2si_ftype_4_v2si
5921 = build_function_type
5922 (opaque_V2SI_type_node,
5923 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5924 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5925 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5926 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5929 tree v2sf_ftype_4_v2sf
5930 = build_function_type
5931 (opaque_V2SF_type_node,
5932 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5933 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5934 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5935 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5938 tree int_ftype_int_v2si_v2si
5939 = build_function_type
5941 tree_cons (NULL_TREE, integer_type_node,
5942 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5943 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5946 tree int_ftype_int_v2sf_v2sf
5947 = build_function_type
5949 tree_cons (NULL_TREE, integer_type_node,
5950 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5951 tree_cons (NULL_TREE, opaque_V2SF_type_node,
5954 tree void_ftype_v2si_puint_int
5955 = build_function_type (void_type_node,
5956 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5957 tree_cons (NULL_TREE, puint_type_node,
5958 tree_cons (NULL_TREE,
5962 tree void_ftype_v2si_puint_char
5963 = build_function_type (void_type_node,
5964 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5965 tree_cons (NULL_TREE, puint_type_node,
5966 tree_cons (NULL_TREE,
5970 tree void_ftype_v2si_pv2si_int
5971 = build_function_type (void_type_node,
5972 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5973 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
5974 tree_cons (NULL_TREE,
5978 tree void_ftype_v2si_pv2si_char
5979 = build_function_type (void_type_node,
5980 tree_cons (NULL_TREE, opaque_V2SI_type_node,
5981 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
5982 tree_cons (NULL_TREE,
5987 = build_function_type (void_type_node,
5988 tree_cons (NULL_TREE, integer_type_node, endlink));
5991 = build_function_type (integer_type_node, endlink);
5993 tree v2si_ftype_pv2si_int
5994 = build_function_type (opaque_V2SI_type_node,
5995 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
5996 tree_cons (NULL_TREE, integer_type_node,
5999 tree v2si_ftype_puint_int
6000 = build_function_type (opaque_V2SI_type_node,
6001 tree_cons (NULL_TREE, puint_type_node,
6002 tree_cons (NULL_TREE, integer_type_node,
6005 tree v2si_ftype_pushort_int
6006 = build_function_type (opaque_V2SI_type_node,
6007 tree_cons (NULL_TREE, pushort_type_node,
6008 tree_cons (NULL_TREE, integer_type_node,
6011 tree v2si_ftype_signed_char
6012 = build_function_type (opaque_V2SI_type_node,
6013 tree_cons (NULL_TREE, signed_char_type_node,
6016 /* The initialization of the simple binary and unary builtins is
6017 done in rs6000_common_init_builtins, but we have to enable the
6018 mask bits here manually because we have run out of `target_flags'
6019 bits. We really need to redesign this mask business. */
6021 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6022 ARRAY_SIZE (bdesc_2arg),
6025 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6026 ARRAY_SIZE (bdesc_1arg),
6028 SPE_BUILTIN_EVSUBFUSIAAW);
6029 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6030 ARRAY_SIZE (bdesc_spe_predicates),
6031 SPE_BUILTIN_EVCMPEQ,
6032 SPE_BUILTIN_EVFSTSTLT);
6033 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6034 ARRAY_SIZE (bdesc_spe_evsel),
6035 SPE_BUILTIN_EVSEL_CMPGTS,
6036 SPE_BUILTIN_EVSEL_FSTSTEQ);
6038 (*lang_hooks.decls.pushdecl)
6039 (build_decl (TYPE_DECL, get_identifier ("__ev64_opaque__"),
6040 opaque_V2SI_type_node));
6042 /* Initialize irregular SPE builtins. */
6044 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6045 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6046 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6047 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6048 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6049 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6050 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6051 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6052 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6053 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6054 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6055 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6056 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6057 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6058 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6059 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6060 def_builtin (target_flags, "__builtin_spe_evsplatfi", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATFI);
6061 def_builtin (target_flags, "__builtin_spe_evsplati", v2si_ftype_signed_char, SPE_BUILTIN_EVSPLATI);
6064 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6065 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6066 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6067 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6068 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6069 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6070 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6071 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6072 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6073 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6074 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6075 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6076 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6077 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6078 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6079 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6080 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6081 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6082 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6083 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6084 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6085 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6088 d = (struct builtin_description *) bdesc_spe_predicates;
6089 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6093 switch (insn_data[d->icode].operand[1].mode)
6096 type = int_ftype_int_v2si_v2si;
6099 type = int_ftype_int_v2sf_v2sf;
6105 def_builtin (d->mask, d->name, type, d->code);
6108 /* Evsel predicates. */
6109 d = (struct builtin_description *) bdesc_spe_evsel;
6110 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6114 switch (insn_data[d->icode].operand[1].mode)
6117 type = v2si_ftype_4_v2si;
6120 type = v2sf_ftype_4_v2sf;
6126 def_builtin (d->mask, d->name, type, d->code);
6131 altivec_init_builtins (void)
6133 struct builtin_description *d;
6134 struct builtin_description_predicates *dp;
6136 tree pfloat_type_node = build_pointer_type (float_type_node);
6137 tree pint_type_node = build_pointer_type (integer_type_node);
6138 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6139 tree pchar_type_node = build_pointer_type (char_type_node);
6141 tree pvoid_type_node = build_pointer_type (void_type_node);
6143 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6144 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6145 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6146 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6148 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6150 tree int_ftype_int_v4si_v4si
6151 = build_function_type_list (integer_type_node,
6152 integer_type_node, V4SI_type_node,
6153 V4SI_type_node, NULL_TREE);
6154 tree v4sf_ftype_pcfloat
6155 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6156 tree void_ftype_pfloat_v4sf
6157 = build_function_type_list (void_type_node,
6158 pfloat_type_node, V4SF_type_node, NULL_TREE);
6159 tree v4si_ftype_pcint
6160 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6161 tree void_ftype_pint_v4si
6162 = build_function_type_list (void_type_node,
6163 pint_type_node, V4SI_type_node, NULL_TREE);
6164 tree v8hi_ftype_pcshort
6165 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6166 tree void_ftype_pshort_v8hi
6167 = build_function_type_list (void_type_node,
6168 pshort_type_node, V8HI_type_node, NULL_TREE);
6169 tree v16qi_ftype_pcchar
6170 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6171 tree void_ftype_pchar_v16qi
6172 = build_function_type_list (void_type_node,
6173 pchar_type_node, V16QI_type_node, NULL_TREE);
6174 tree void_ftype_v4si
6175 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6176 tree v8hi_ftype_void
6177 = build_function_type (V8HI_type_node, void_list_node);
6178 tree void_ftype_void
6179 = build_function_type (void_type_node, void_list_node);
6181 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6183 tree v16qi_ftype_int_pcvoid
6184 = build_function_type_list (V16QI_type_node,
6185 integer_type_node, pcvoid_type_node, NULL_TREE);
6186 tree v8hi_ftype_int_pcvoid
6187 = build_function_type_list (V8HI_type_node,
6188 integer_type_node, pcvoid_type_node, NULL_TREE);
6189 tree v4si_ftype_int_pcvoid
6190 = build_function_type_list (V4SI_type_node,
6191 integer_type_node, pcvoid_type_node, NULL_TREE);
6193 tree void_ftype_v4si_int_pvoid
6194 = build_function_type_list (void_type_node,
6195 V4SI_type_node, integer_type_node,
6196 pvoid_type_node, NULL_TREE);
6197 tree void_ftype_v16qi_int_pvoid
6198 = build_function_type_list (void_type_node,
6199 V16QI_type_node, integer_type_node,
6200 pvoid_type_node, NULL_TREE);
6201 tree void_ftype_v8hi_int_pvoid
6202 = build_function_type_list (void_type_node,
6203 V8HI_type_node, integer_type_node,
6204 pvoid_type_node, NULL_TREE);
6205 tree int_ftype_int_v8hi_v8hi
6206 = build_function_type_list (integer_type_node,
6207 integer_type_node, V8HI_type_node,
6208 V8HI_type_node, NULL_TREE);
6209 tree int_ftype_int_v16qi_v16qi
6210 = build_function_type_list (integer_type_node,
6211 integer_type_node, V16QI_type_node,
6212 V16QI_type_node, NULL_TREE);
6213 tree int_ftype_int_v4sf_v4sf
6214 = build_function_type_list (integer_type_node,
6215 integer_type_node, V4SF_type_node,
6216 V4SF_type_node, NULL_TREE);
6217 tree v4si_ftype_v4si
6218 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6219 tree v8hi_ftype_v8hi
6220 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6221 tree v16qi_ftype_v16qi
6222 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6223 tree v4sf_ftype_v4sf
6224 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6225 tree void_ftype_pcvoid_int_char
6226 = build_function_type_list (void_type_node,
6227 pcvoid_type_node, integer_type_node,
6228 char_type_node, NULL_TREE);
6230 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6231 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6232 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6233 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6234 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6235 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6236 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6237 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6238 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6239 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6240 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6241 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6242 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6243 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6244 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6245 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6246 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6247 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6248 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6249 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6250 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6251 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6252 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6253 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6254 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6255 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6256 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6257 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6258 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6259 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6260 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6261 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6263 /* Add the DST variants. */
6264 d = (struct builtin_description *) bdesc_dst;
6265 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6266 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6268 /* Initialize the predicates. */
6269 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6270 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6272 enum machine_mode mode1;
6275 mode1 = insn_data[dp->icode].operand[1].mode;
6280 type = int_ftype_int_v4si_v4si;
6283 type = int_ftype_int_v8hi_v8hi;
6286 type = int_ftype_int_v16qi_v16qi;
6289 type = int_ftype_int_v4sf_v4sf;
6295 def_builtin (dp->mask, dp->name, type, dp->code);
6298 /* Initialize the abs* operators. */
6299 d = (struct builtin_description *) bdesc_abs;
6300 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6302 enum machine_mode mode0;
6305 mode0 = insn_data[d->icode].operand[0].mode;
6310 type = v4si_ftype_v4si;
6313 type = v8hi_ftype_v8hi;
6316 type = v16qi_ftype_v16qi;
6319 type = v4sf_ftype_v4sf;
6325 def_builtin (d->mask, d->name, type, d->code);
6330 rs6000_common_init_builtins (void)
6332 struct builtin_description *d;
6335 tree v4sf_ftype_v4sf_v4sf_v16qi
6336 = build_function_type_list (V4SF_type_node,
6337 V4SF_type_node, V4SF_type_node,
6338 V16QI_type_node, NULL_TREE);
6339 tree v4si_ftype_v4si_v4si_v16qi
6340 = build_function_type_list (V4SI_type_node,
6341 V4SI_type_node, V4SI_type_node,
6342 V16QI_type_node, NULL_TREE);
6343 tree v8hi_ftype_v8hi_v8hi_v16qi
6344 = build_function_type_list (V8HI_type_node,
6345 V8HI_type_node, V8HI_type_node,
6346 V16QI_type_node, NULL_TREE);
6347 tree v16qi_ftype_v16qi_v16qi_v16qi
6348 = build_function_type_list (V16QI_type_node,
6349 V16QI_type_node, V16QI_type_node,
6350 V16QI_type_node, NULL_TREE);
6351 tree v4si_ftype_char
6352 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6353 tree v8hi_ftype_char
6354 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6355 tree v16qi_ftype_char
6356 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6357 tree v8hi_ftype_v16qi
6358 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6359 tree v4sf_ftype_v4sf
6360 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6362 tree v2si_ftype_v2si_v2si
6363 = build_function_type_list (opaque_V2SI_type_node,
6364 opaque_V2SI_type_node,
6365 opaque_V2SI_type_node, NULL_TREE);
6367 tree v2sf_ftype_v2sf_v2sf
6368 = build_function_type_list (opaque_V2SF_type_node,
6369 opaque_V2SF_type_node,
6370 opaque_V2SF_type_node, NULL_TREE);
6372 tree v2si_ftype_int_int
6373 = build_function_type_list (opaque_V2SI_type_node,
6374 integer_type_node, integer_type_node,
6377 tree v2si_ftype_v2si
6378 = build_function_type_list (opaque_V2SI_type_node,
6379 opaque_V2SI_type_node, NULL_TREE);
6381 tree v2sf_ftype_v2sf
6382 = build_function_type_list (opaque_V2SF_type_node,
6383 opaque_V2SF_type_node, NULL_TREE);
6385 tree v2sf_ftype_v2si
6386 = build_function_type_list (opaque_V2SF_type_node,
6387 opaque_V2SI_type_node, NULL_TREE);
6389 tree v2si_ftype_v2sf
6390 = build_function_type_list (opaque_V2SI_type_node,
6391 opaque_V2SF_type_node, NULL_TREE);
6393 tree v2si_ftype_v2si_char
6394 = build_function_type_list (opaque_V2SI_type_node,
6395 opaque_V2SI_type_node,
6396 char_type_node, NULL_TREE);
6398 tree v2si_ftype_int_char
6399 = build_function_type_list (opaque_V2SI_type_node,
6400 integer_type_node, char_type_node, NULL_TREE);
6402 tree v2si_ftype_char
6403 = build_function_type_list (opaque_V2SI_type_node,
6404 char_type_node, NULL_TREE);
6406 tree int_ftype_int_int
6407 = build_function_type_list (integer_type_node,
6408 integer_type_node, integer_type_node,
6411 tree v4si_ftype_v4si_v4si
6412 = build_function_type_list (V4SI_type_node,
6413 V4SI_type_node, V4SI_type_node, NULL_TREE);
6414 tree v4sf_ftype_v4si_char
6415 = build_function_type_list (V4SF_type_node,
6416 V4SI_type_node, char_type_node, NULL_TREE);
6417 tree v4si_ftype_v4sf_char
6418 = build_function_type_list (V4SI_type_node,
6419 V4SF_type_node, char_type_node, NULL_TREE);
6420 tree v4si_ftype_v4si_char
6421 = build_function_type_list (V4SI_type_node,
6422 V4SI_type_node, char_type_node, NULL_TREE);
6423 tree v8hi_ftype_v8hi_char
6424 = build_function_type_list (V8HI_type_node,
6425 V8HI_type_node, char_type_node, NULL_TREE);
6426 tree v16qi_ftype_v16qi_char
6427 = build_function_type_list (V16QI_type_node,
6428 V16QI_type_node, char_type_node, NULL_TREE);
6429 tree v16qi_ftype_v16qi_v16qi_char
6430 = build_function_type_list (V16QI_type_node,
6431 V16QI_type_node, V16QI_type_node,
6432 char_type_node, NULL_TREE);
6433 tree v8hi_ftype_v8hi_v8hi_char
6434 = build_function_type_list (V8HI_type_node,
6435 V8HI_type_node, V8HI_type_node,
6436 char_type_node, NULL_TREE);
6437 tree v4si_ftype_v4si_v4si_char
6438 = build_function_type_list (V4SI_type_node,
6439 V4SI_type_node, V4SI_type_node,
6440 char_type_node, NULL_TREE);
6441 tree v4sf_ftype_v4sf_v4sf_char
6442 = build_function_type_list (V4SF_type_node,
6443 V4SF_type_node, V4SF_type_node,
6444 char_type_node, NULL_TREE);
6445 tree v4sf_ftype_v4sf_v4sf
6446 = build_function_type_list (V4SF_type_node,
6447 V4SF_type_node, V4SF_type_node, NULL_TREE);
6448 tree v4sf_ftype_v4sf_v4sf_v4si
6449 = build_function_type_list (V4SF_type_node,
6450 V4SF_type_node, V4SF_type_node,
6451 V4SI_type_node, NULL_TREE);
6452 tree v4sf_ftype_v4sf_v4sf_v4sf
6453 = build_function_type_list (V4SF_type_node,
6454 V4SF_type_node, V4SF_type_node,
6455 V4SF_type_node, NULL_TREE);
6456 tree v4si_ftype_v4si_v4si_v4si
6457 = build_function_type_list (V4SI_type_node,
6458 V4SI_type_node, V4SI_type_node,
6459 V4SI_type_node, NULL_TREE);
6460 tree v8hi_ftype_v8hi_v8hi
6461 = build_function_type_list (V8HI_type_node,
6462 V8HI_type_node, V8HI_type_node, NULL_TREE);
6463 tree v8hi_ftype_v8hi_v8hi_v8hi
6464 = build_function_type_list (V8HI_type_node,
6465 V8HI_type_node, V8HI_type_node,
6466 V8HI_type_node, NULL_TREE);
6467 tree v4si_ftype_v8hi_v8hi_v4si
6468 = build_function_type_list (V4SI_type_node,
6469 V8HI_type_node, V8HI_type_node,
6470 V4SI_type_node, NULL_TREE);
6471 tree v4si_ftype_v16qi_v16qi_v4si
6472 = build_function_type_list (V4SI_type_node,
6473 V16QI_type_node, V16QI_type_node,
6474 V4SI_type_node, NULL_TREE);
6475 tree v16qi_ftype_v16qi_v16qi
6476 = build_function_type_list (V16QI_type_node,
6477 V16QI_type_node, V16QI_type_node, NULL_TREE);
6478 tree v4si_ftype_v4sf_v4sf
6479 = build_function_type_list (V4SI_type_node,
6480 V4SF_type_node, V4SF_type_node, NULL_TREE);
6481 tree v8hi_ftype_v16qi_v16qi
6482 = build_function_type_list (V8HI_type_node,
6483 V16QI_type_node, V16QI_type_node, NULL_TREE);
6484 tree v4si_ftype_v8hi_v8hi
6485 = build_function_type_list (V4SI_type_node,
6486 V8HI_type_node, V8HI_type_node, NULL_TREE);
6487 tree v8hi_ftype_v4si_v4si
6488 = build_function_type_list (V8HI_type_node,
6489 V4SI_type_node, V4SI_type_node, NULL_TREE);
6490 tree v16qi_ftype_v8hi_v8hi
6491 = build_function_type_list (V16QI_type_node,
6492 V8HI_type_node, V8HI_type_node, NULL_TREE);
6493 tree v4si_ftype_v16qi_v4si
6494 = build_function_type_list (V4SI_type_node,
6495 V16QI_type_node, V4SI_type_node, NULL_TREE);
6496 tree v4si_ftype_v16qi_v16qi
6497 = build_function_type_list (V4SI_type_node,
6498 V16QI_type_node, V16QI_type_node, NULL_TREE);
6499 tree v4si_ftype_v8hi_v4si
6500 = build_function_type_list (V4SI_type_node,
6501 V8HI_type_node, V4SI_type_node, NULL_TREE);
6502 tree v4si_ftype_v8hi
6503 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6504 tree int_ftype_v4si_v4si
6505 = build_function_type_list (integer_type_node,
6506 V4SI_type_node, V4SI_type_node, NULL_TREE);
6507 tree int_ftype_v4sf_v4sf
6508 = build_function_type_list (integer_type_node,
6509 V4SF_type_node, V4SF_type_node, NULL_TREE);
6510 tree int_ftype_v16qi_v16qi
6511 = build_function_type_list (integer_type_node,
6512 V16QI_type_node, V16QI_type_node, NULL_TREE);
6513 tree int_ftype_v8hi_v8hi
6514 = build_function_type_list (integer_type_node,
6515 V8HI_type_node, V8HI_type_node, NULL_TREE);
6517 /* Add the simple ternary operators. */
6518 d = (struct builtin_description *) bdesc_3arg;
6519 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6522 enum machine_mode mode0, mode1, mode2, mode3;
6525 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6528 mode0 = insn_data[d->icode].operand[0].mode;
6529 mode1 = insn_data[d->icode].operand[1].mode;
6530 mode2 = insn_data[d->icode].operand[2].mode;
6531 mode3 = insn_data[d->icode].operand[3].mode;
6533 /* When all four are of the same mode. */
6534 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6539 type = v4si_ftype_v4si_v4si_v4si;
6542 type = v4sf_ftype_v4sf_v4sf_v4sf;
6545 type = v8hi_ftype_v8hi_v8hi_v8hi;
6548 type = v16qi_ftype_v16qi_v16qi_v16qi;
6554 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6559 type = v4si_ftype_v4si_v4si_v16qi;
6562 type = v4sf_ftype_v4sf_v4sf_v16qi;
6565 type = v8hi_ftype_v8hi_v8hi_v16qi;
6568 type = v16qi_ftype_v16qi_v16qi_v16qi;
6574 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6575 && mode3 == V4SImode)
6576 type = v4si_ftype_v16qi_v16qi_v4si;
6577 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6578 && mode3 == V4SImode)
6579 type = v4si_ftype_v8hi_v8hi_v4si;
6580 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6581 && mode3 == V4SImode)
6582 type = v4sf_ftype_v4sf_v4sf_v4si;
6584 /* vchar, vchar, vchar, 4 bit literal. */
6585 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6587 type = v16qi_ftype_v16qi_v16qi_char;
6589 /* vshort, vshort, vshort, 4 bit literal. */
6590 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6592 type = v8hi_ftype_v8hi_v8hi_char;
6594 /* vint, vint, vint, 4 bit literal. */
6595 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6597 type = v4si_ftype_v4si_v4si_char;
6599 /* vfloat, vfloat, vfloat, 4 bit literal. */
6600 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6602 type = v4sf_ftype_v4sf_v4sf_char;
6607 def_builtin (d->mask, d->name, type, d->code);
6610 /* Add the simple binary operators. */
6611 d = (struct builtin_description *) bdesc_2arg;
6612 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6614 enum machine_mode mode0, mode1, mode2;
6617 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6620 mode0 = insn_data[d->icode].operand[0].mode;
6621 mode1 = insn_data[d->icode].operand[1].mode;
6622 mode2 = insn_data[d->icode].operand[2].mode;
6624 /* When all three operands are of the same mode. */
6625 if (mode0 == mode1 && mode1 == mode2)
6630 type = v4sf_ftype_v4sf_v4sf;
6633 type = v4si_ftype_v4si_v4si;
6636 type = v16qi_ftype_v16qi_v16qi;
6639 type = v8hi_ftype_v8hi_v8hi;
6642 type = v2si_ftype_v2si_v2si;
6645 type = v2sf_ftype_v2sf_v2sf;
6648 type = int_ftype_int_int;
6655 /* A few other combos we really don't want to do manually. */
6657 /* vint, vfloat, vfloat. */
6658 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6659 type = v4si_ftype_v4sf_v4sf;
6661 /* vshort, vchar, vchar. */
6662 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6663 type = v8hi_ftype_v16qi_v16qi;
6665 /* vint, vshort, vshort. */
6666 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6667 type = v4si_ftype_v8hi_v8hi;
6669 /* vshort, vint, vint. */
6670 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6671 type = v8hi_ftype_v4si_v4si;
6673 /* vchar, vshort, vshort. */
6674 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6675 type = v16qi_ftype_v8hi_v8hi;
6677 /* vint, vchar, vint. */
6678 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6679 type = v4si_ftype_v16qi_v4si;
6681 /* vint, vchar, vchar. */
6682 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6683 type = v4si_ftype_v16qi_v16qi;
6685 /* vint, vshort, vint. */
6686 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6687 type = v4si_ftype_v8hi_v4si;
6689 /* vint, vint, 5 bit literal. */
6690 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6691 type = v4si_ftype_v4si_char;
6693 /* vshort, vshort, 5 bit literal. */
6694 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6695 type = v8hi_ftype_v8hi_char;
6697 /* vchar, vchar, 5 bit literal. */
6698 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6699 type = v16qi_ftype_v16qi_char;
6701 /* vfloat, vint, 5 bit literal. */
6702 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6703 type = v4sf_ftype_v4si_char;
6705 /* vint, vfloat, 5 bit literal. */
6706 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6707 type = v4si_ftype_v4sf_char;
6709 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6710 type = v2si_ftype_int_int;
6712 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6713 type = v2si_ftype_v2si_char;
6715 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6716 type = v2si_ftype_int_char;
6719 else if (mode0 == SImode)
6724 type = int_ftype_v4si_v4si;
6727 type = int_ftype_v4sf_v4sf;
6730 type = int_ftype_v16qi_v16qi;
6733 type = int_ftype_v8hi_v8hi;
6743 def_builtin (d->mask, d->name, type, d->code);
6746 /* Add the simple unary operators. */
6747 d = (struct builtin_description *) bdesc_1arg;
6748 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6750 enum machine_mode mode0, mode1;
6753 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6756 mode0 = insn_data[d->icode].operand[0].mode;
6757 mode1 = insn_data[d->icode].operand[1].mode;
6759 if (mode0 == V4SImode && mode1 == QImode)
6760 type = v4si_ftype_char;
6761 else if (mode0 == V8HImode && mode1 == QImode)
6762 type = v8hi_ftype_char;
6763 else if (mode0 == V16QImode && mode1 == QImode)
6764 type = v16qi_ftype_char;
6765 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6766 type = v4sf_ftype_v4sf;
6767 else if (mode0 == V8HImode && mode1 == V16QImode)
6768 type = v8hi_ftype_v16qi;
6769 else if (mode0 == V4SImode && mode1 == V8HImode)
6770 type = v4si_ftype_v8hi;
6771 else if (mode0 == V2SImode && mode1 == V2SImode)
6772 type = v2si_ftype_v2si;
6773 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6774 type = v2sf_ftype_v2sf;
6775 else if (mode0 == V2SFmode && mode1 == V2SImode)
6776 type = v2sf_ftype_v2si;
6777 else if (mode0 == V2SImode && mode1 == V2SFmode)
6778 type = v2si_ftype_v2sf;
6779 else if (mode0 == V2SImode && mode1 == QImode)
6780 type = v2si_ftype_char;
6784 def_builtin (d->mask, d->name, type, d->code);
6789 rs6000_init_libfuncs (void)
6791 if (!TARGET_HARD_FLOAT)
6794 if (DEFAULT_ABI != ABI_V4)
6796 if (TARGET_XCOFF && ! TARGET_POWER2 && ! TARGET_POWERPC)
6798 /* AIX library routines for float->int conversion. */
6799 set_conv_libfunc (sfix_optab, SImode, DFmode, "__itrunc");
6800 set_conv_libfunc (ufix_optab, SImode, DFmode, "__uitrunc");
6803 /* Standard AIX/Darwin/64-bit SVR4 quad floating point routines. */
6804 set_optab_libfunc (add_optab, TFmode, "_xlqadd");
6805 set_optab_libfunc (sub_optab, TFmode, "_xlqsub");
6806 set_optab_libfunc (smul_optab, TFmode, "_xlqmul");
6807 set_optab_libfunc (sdiv_optab, TFmode, "_xlqdiv");
6811 /* 32-bit SVR4 quad floating point routines. */
6813 set_optab_libfunc (add_optab, TFmode, "_q_add");
6814 set_optab_libfunc (sub_optab, TFmode, "_q_sub");
6815 set_optab_libfunc (neg_optab, TFmode, "_q_neg");
6816 set_optab_libfunc (smul_optab, TFmode, "_q_mul");
6817 set_optab_libfunc (sdiv_optab, TFmode, "_q_div");
6818 if (TARGET_PPC_GPOPT || TARGET_POWER2)
6819 set_optab_libfunc (sqrt_optab, TFmode, "_q_sqrt");
6821 set_optab_libfunc (eq_optab, TFmode, "_q_feq");
6822 set_optab_libfunc (ne_optab, TFmode, "_q_fne");
6823 set_optab_libfunc (gt_optab, TFmode, "_q_fgt");
6824 set_optab_libfunc (ge_optab, TFmode, "_q_fge");
6825 set_optab_libfunc (lt_optab, TFmode, "_q_flt");
6826 set_optab_libfunc (le_optab, TFmode, "_q_fle");
6828 set_conv_libfunc (sext_optab, TFmode, SFmode, "_q_stoq");
6829 set_conv_libfunc (sext_optab, TFmode, DFmode, "_q_dtoq");
6830 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_q_qtos");
6831 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_q_qtod");
6832 set_conv_libfunc (sfix_optab, SImode, TFmode, "_q_qtoi");
6833 set_conv_libfunc (ufix_optab, SImode, TFmode, "_q_qtou");
6834 set_conv_libfunc (sfloat_optab, TFmode, SImode, "_q_itoq");
6838 /* Expand a block move operation, and return 1 if successful. Return 0
6839 if we should let the compiler generate normal code.
6841 operands[0] is the destination
6842 operands[1] is the source
6843 operands[2] is the length
6844 operands[3] is the alignment */
6846 #define MAX_MOVE_REG 4
6849 expand_block_move (rtx operands[])
6851 rtx orig_dest = operands[0];
6852 rtx orig_src = operands[1];
6853 rtx bytes_rtx = operands[2];
6854 rtx align_rtx = operands[3];
6855 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6860 rtx stores[MAX_MOVE_REG];
6863 /* If this is not a fixed size move, just call memcpy */
6867 /* If this is not a fixed size alignment, abort */
6868 if (GET_CODE (align_rtx) != CONST_INT)
6870 align = INTVAL (align_rtx);
6872 /* Anything to move? */
6873 bytes = INTVAL (bytes_rtx);
6877 /* store_one_arg depends on expand_block_move to handle at least the size of
6878 reg_parm_stack_space. */
6879 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6882 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6885 rtx (*movstrsi) (rtx, rtx, rtx, rtx);
6886 rtx (*mov) (rtx, rtx);
6888 enum machine_mode mode = BLKmode;
6892 && bytes > 24 /* move up to 32 bytes at a time */
6900 && ! fixed_regs[12])
6902 move_bytes = (bytes > 32) ? 32 : bytes;
6903 gen_func.movstrsi = gen_movstrsi_8reg;
6905 else if (TARGET_STRING
6906 && bytes > 16 /* move up to 24 bytes at a time */
6912 && ! fixed_regs[10])
6914 move_bytes = (bytes > 24) ? 24 : bytes;
6915 gen_func.movstrsi = gen_movstrsi_6reg;
6917 else if (TARGET_STRING
6918 && bytes > 8 /* move up to 16 bytes at a time */
6924 move_bytes = (bytes > 16) ? 16 : bytes;
6925 gen_func.movstrsi = gen_movstrsi_4reg;
6927 else if (bytes >= 8 && TARGET_POWERPC64
6928 /* 64-bit loads and stores require word-aligned
6930 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
6934 gen_func.mov = gen_movdi;
6936 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
6937 { /* move up to 8 bytes at a time */
6938 move_bytes = (bytes > 8) ? 8 : bytes;
6939 gen_func.movstrsi = gen_movstrsi_2reg;
6941 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
6942 { /* move 4 bytes */
6945 gen_func.mov = gen_movsi;
6947 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
6948 { /* move 2 bytes */
6951 gen_func.mov = gen_movhi;
6953 else if (TARGET_STRING && bytes > 1)
6954 { /* move up to 4 bytes at a time */
6955 move_bytes = (bytes > 4) ? 4 : bytes;
6956 gen_func.movstrsi = gen_movstrsi_1reg;
6958 else /* move 1 byte at a time */
6962 gen_func.mov = gen_movqi;
6965 src = adjust_address (orig_src, mode, offset);
6966 dest = adjust_address (orig_dest, mode, offset);
6968 if (mode != BLKmode)
6970 rtx tmp_reg = gen_reg_rtx (mode);
6972 emit_insn ((*gen_func.mov) (tmp_reg, src));
6973 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
6976 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
6979 for (i = 0; i < num_reg; i++)
6980 emit_insn (stores[i]);
6984 if (mode == BLKmode)
6986 /* Move the address into scratch registers. The movstrsi
6987 patterns require zero offset. */
6988 if (!REG_P (XEXP (src, 0)))
6990 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
6991 src = replace_equiv_address (src, src_reg);
6993 set_mem_size (src, GEN_INT (move_bytes));
6995 if (!REG_P (XEXP (dest, 0)))
6997 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
6998 dest = replace_equiv_address (dest, dest_reg);
7000 set_mem_size (dest, GEN_INT (move_bytes));
7002 emit_insn ((*gen_func.movstrsi) (dest, src,
7003 GEN_INT (move_bytes & 31),
7012 /* Return 1 if OP is a load multiple operation. It is known to be a
7013 PARALLEL and the first section will be tested. */
7016 load_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7018 int count = XVECLEN (op, 0);
7019 unsigned int dest_regno;
7023 /* Perform a quick check so we don't blow up below. */
7025 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7026 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7027 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7030 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7031 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7033 for (i = 1; i < count; i++)
7035 rtx elt = XVECEXP (op, 0, i);
7037 if (GET_CODE (elt) != SET
7038 || GET_CODE (SET_DEST (elt)) != REG
7039 || GET_MODE (SET_DEST (elt)) != SImode
7040 || REGNO (SET_DEST (elt)) != dest_regno + i
7041 || GET_CODE (SET_SRC (elt)) != MEM
7042 || GET_MODE (SET_SRC (elt)) != SImode
7043 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7044 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7045 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7046 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7053 /* Similar, but tests for store multiple. Here, the second vector element
7054 is a CLOBBER. It will be tested later. */
7057 store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7059 int count = XVECLEN (op, 0) - 1;
7060 unsigned int src_regno;
7064 /* Perform a quick check so we don't blow up below. */
7066 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7067 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7068 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7071 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7072 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7074 for (i = 1; i < count; i++)
7076 rtx elt = XVECEXP (op, 0, i + 1);
7078 if (GET_CODE (elt) != SET
7079 || GET_CODE (SET_SRC (elt)) != REG
7080 || GET_MODE (SET_SRC (elt)) != SImode
7081 || REGNO (SET_SRC (elt)) != src_regno + i
7082 || GET_CODE (SET_DEST (elt)) != MEM
7083 || GET_MODE (SET_DEST (elt)) != SImode
7084 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7085 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7086 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7087 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7094 /* Return a string to perform a load_multiple operation.
7095 operands[0] is the vector.
7096 operands[1] is the source address.
7097 operands[2] is the first destination register. */
7100 rs6000_output_load_multiple (rtx operands[3])
7102 /* We have to handle the case where the pseudo used to contain the address
7103 is assigned to one of the output registers. */
7105 int words = XVECLEN (operands[0], 0);
7108 if (XVECLEN (operands[0], 0) == 1)
7109 return "{l|lwz} %2,0(%1)";
7111 for (i = 0; i < words; i++)
7112 if (refers_to_regno_p (REGNO (operands[2]) + i,
7113 REGNO (operands[2]) + i + 1, operands[1], 0))
7117 xop[0] = GEN_INT (4 * (words-1));
7118 xop[1] = operands[1];
7119 xop[2] = operands[2];
7120 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7125 xop[0] = GEN_INT (4 * (words-1));
7126 xop[1] = operands[1];
7127 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7128 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7133 for (j = 0; j < words; j++)
7136 xop[0] = GEN_INT (j * 4);
7137 xop[1] = operands[1];
7138 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7139 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7141 xop[0] = GEN_INT (i * 4);
7142 xop[1] = operands[1];
7143 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7148 return "{lsi|lswi} %2,%1,%N0";
7151 /* Return 1 for a parallel vrsave operation. */
7154 vrsave_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7156 int count = XVECLEN (op, 0);
7157 unsigned int dest_regno, src_regno;
7161 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7162 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7163 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7166 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7167 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7169 if (dest_regno != VRSAVE_REGNO
7170 && src_regno != VRSAVE_REGNO)
7173 for (i = 1; i < count; i++)
7175 rtx elt = XVECEXP (op, 0, i);
7177 if (GET_CODE (elt) != CLOBBER
7178 && GET_CODE (elt) != SET)
7185 /* Return 1 for an PARALLEL suitable for mfcr. */
7188 mfcr_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7190 int count = XVECLEN (op, 0);
7193 /* Perform a quick check so we don't blow up below. */
7195 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7196 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7197 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7200 for (i = 0; i < count; i++)
7202 rtx exp = XVECEXP (op, 0, i);
7207 src_reg = XVECEXP (SET_SRC (exp), 0, 0);
7209 if (GET_CODE (src_reg) != REG
7210 || GET_MODE (src_reg) != CCmode
7211 || ! CR_REGNO_P (REGNO (src_reg)))
7214 if (GET_CODE (exp) != SET
7215 || GET_CODE (SET_DEST (exp)) != REG
7216 || GET_MODE (SET_DEST (exp)) != SImode
7217 || ! INT_REGNO_P (REGNO (SET_DEST (exp))))
7219 unspec = SET_SRC (exp);
7220 maskval = 1 << (MAX_CR_REGNO - REGNO (src_reg));
7222 if (GET_CODE (unspec) != UNSPEC
7223 || XINT (unspec, 1) != UNSPEC_MOVESI_FROM_CR
7224 || XVECLEN (unspec, 0) != 2
7225 || XVECEXP (unspec, 0, 0) != src_reg
7226 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7227 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7233 /* Return 1 for an PARALLEL suitable for mtcrf. */
7236 mtcrf_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7238 int count = XVECLEN (op, 0);
7242 /* Perform a quick check so we don't blow up below. */
7244 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7245 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7246 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7248 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7250 if (GET_CODE (src_reg) != REG
7251 || GET_MODE (src_reg) != SImode
7252 || ! INT_REGNO_P (REGNO (src_reg)))
7255 for (i = 0; i < count; i++)
7257 rtx exp = XVECEXP (op, 0, i);
7261 if (GET_CODE (exp) != SET
7262 || GET_CODE (SET_DEST (exp)) != REG
7263 || GET_MODE (SET_DEST (exp)) != CCmode
7264 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7266 unspec = SET_SRC (exp);
7267 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7269 if (GET_CODE (unspec) != UNSPEC
7270 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7271 || XVECLEN (unspec, 0) != 2
7272 || XVECEXP (unspec, 0, 0) != src_reg
7273 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7274 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7280 /* Return 1 for an PARALLEL suitable for lmw. */
7283 lmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7285 int count = XVECLEN (op, 0);
7286 unsigned int dest_regno;
7288 unsigned int base_regno;
7289 HOST_WIDE_INT offset;
7292 /* Perform a quick check so we don't blow up below. */
7294 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7295 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7296 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7299 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7300 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7303 || count != 32 - (int) dest_regno)
7306 if (legitimate_indirect_address_p (src_addr, 0))
7309 base_regno = REGNO (src_addr);
7310 if (base_regno == 0)
7313 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7315 offset = INTVAL (XEXP (src_addr, 1));
7316 base_regno = REGNO (XEXP (src_addr, 0));
7321 for (i = 0; i < count; i++)
7323 rtx elt = XVECEXP (op, 0, i);
7326 HOST_WIDE_INT newoffset;
7328 if (GET_CODE (elt) != SET
7329 || GET_CODE (SET_DEST (elt)) != REG
7330 || GET_MODE (SET_DEST (elt)) != SImode
7331 || REGNO (SET_DEST (elt)) != dest_regno + i
7332 || GET_CODE (SET_SRC (elt)) != MEM
7333 || GET_MODE (SET_SRC (elt)) != SImode)
7335 newaddr = XEXP (SET_SRC (elt), 0);
7336 if (legitimate_indirect_address_p (newaddr, 0))
7341 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7343 addr_reg = XEXP (newaddr, 0);
7344 newoffset = INTVAL (XEXP (newaddr, 1));
7348 if (REGNO (addr_reg) != base_regno
7349 || newoffset != offset + 4 * i)
7356 /* Return 1 for an PARALLEL suitable for stmw. */
7359 stmw_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7361 int count = XVECLEN (op, 0);
7362 unsigned int src_regno;
7364 unsigned int base_regno;
7365 HOST_WIDE_INT offset;
7368 /* Perform a quick check so we don't blow up below. */
7370 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7371 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7372 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7375 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7376 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7379 || count != 32 - (int) src_regno)
7382 if (legitimate_indirect_address_p (dest_addr, 0))
7385 base_regno = REGNO (dest_addr);
7386 if (base_regno == 0)
7389 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7391 offset = INTVAL (XEXP (dest_addr, 1));
7392 base_regno = REGNO (XEXP (dest_addr, 0));
7397 for (i = 0; i < count; i++)
7399 rtx elt = XVECEXP (op, 0, i);
7402 HOST_WIDE_INT newoffset;
7404 if (GET_CODE (elt) != SET
7405 || GET_CODE (SET_SRC (elt)) != REG
7406 || GET_MODE (SET_SRC (elt)) != SImode
7407 || REGNO (SET_SRC (elt)) != src_regno + i
7408 || GET_CODE (SET_DEST (elt)) != MEM
7409 || GET_MODE (SET_DEST (elt)) != SImode)
7411 newaddr = XEXP (SET_DEST (elt), 0);
7412 if (legitimate_indirect_address_p (newaddr, 0))
7417 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7419 addr_reg = XEXP (newaddr, 0);
7420 newoffset = INTVAL (XEXP (newaddr, 1));
7424 if (REGNO (addr_reg) != base_regno
7425 || newoffset != offset + 4 * i)
7432 /* A validation routine: say whether CODE, a condition code, and MODE
7433 match. The other alternatives either don't make sense or should
7434 never be generated. */
7437 validate_condition_mode (enum rtx_code code, enum machine_mode mode)
7439 if (GET_RTX_CLASS (code) != '<'
7440 || GET_MODE_CLASS (mode) != MODE_CC)
7443 /* These don't make sense. */
7444 if ((code == GT || code == LT || code == GE || code == LE)
7445 && mode == CCUNSmode)
7448 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7449 && mode != CCUNSmode)
7452 if (mode != CCFPmode
7453 && (code == ORDERED || code == UNORDERED
7454 || code == UNEQ || code == LTGT
7455 || code == UNGT || code == UNLT
7456 || code == UNGE || code == UNLE))
7459 /* These should never be generated except for
7460 flag_finite_math_only. */
7461 if (mode == CCFPmode
7462 && ! flag_finite_math_only
7463 && (code == LE || code == GE
7464 || code == UNEQ || code == LTGT
7465 || code == UNGT || code == UNLT))
7468 /* These are invalid; the information is not there. */
7469 if (mode == CCEQmode
7470 && code != EQ && code != NE)
7474 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7475 We only check the opcode against the mode of the CC value here. */
7478 branch_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7480 enum rtx_code code = GET_CODE (op);
7481 enum machine_mode cc_mode;
7483 if (GET_RTX_CLASS (code) != '<')
7486 cc_mode = GET_MODE (XEXP (op, 0));
7487 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7490 validate_condition_mode (code, cc_mode);
7495 /* Return 1 if OP is a comparison operation that is valid for a branch
7496 insn and which is true if the corresponding bit in the CC register
7500 branch_positive_comparison_operator (rtx op, enum machine_mode mode)
7504 if (! branch_comparison_operator (op, mode))
7507 code = GET_CODE (op);
7508 return (code == EQ || code == LT || code == GT
7509 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7510 || code == LTU || code == GTU
7511 || code == UNORDERED);
7514 /* Return 1 if OP is a comparison operation that is valid for an scc
7515 insn: it must be a positive comparison. */
7518 scc_comparison_operator (rtx op, enum machine_mode mode)
7520 return branch_positive_comparison_operator (op, mode);
7524 trap_comparison_operator (rtx op, enum machine_mode mode)
7526 if (mode != VOIDmode && mode != GET_MODE (op))
7528 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7532 boolean_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7534 enum rtx_code code = GET_CODE (op);
7535 return (code == AND || code == IOR || code == XOR);
7539 boolean_or_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7541 enum rtx_code code = GET_CODE (op);
7542 return (code == IOR || code == XOR);
7546 min_max_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
7548 enum rtx_code code = GET_CODE (op);
7549 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7552 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7553 mask required to convert the result of a rotate insn into a shift
7554 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7557 includes_lshift_p (rtx shiftop, rtx andop)
7559 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7561 shift_mask <<= INTVAL (shiftop);
7563 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7566 /* Similar, but for right shift. */
7569 includes_rshift_p (rtx shiftop, rtx andop)
7571 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7573 shift_mask >>= INTVAL (shiftop);
7575 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7578 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7579 to perform a left shift. It must have exactly SHIFTOP least
7580 significant 0's, then one or more 1's, then zero or more 0's. */
7583 includes_rldic_lshift_p (rtx shiftop, rtx andop)
7585 if (GET_CODE (andop) == CONST_INT)
7587 HOST_WIDE_INT c, lsb, shift_mask;
7590 if (c == 0 || c == ~0)
7594 shift_mask <<= INTVAL (shiftop);
7596 /* Find the least significant one bit. */
7599 /* It must coincide with the LSB of the shift mask. */
7600 if (-lsb != shift_mask)
7603 /* Invert to look for the next transition (if any). */
7606 /* Remove the low group of ones (originally low group of zeros). */
7609 /* Again find the lsb, and check we have all 1's above. */
7613 else if (GET_CODE (andop) == CONST_DOUBLE
7614 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7616 HOST_WIDE_INT low, high, lsb;
7617 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7619 low = CONST_DOUBLE_LOW (andop);
7620 if (HOST_BITS_PER_WIDE_INT < 64)
7621 high = CONST_DOUBLE_HIGH (andop);
7623 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7624 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7627 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7629 shift_mask_high = ~0;
7630 if (INTVAL (shiftop) > 32)
7631 shift_mask_high <<= INTVAL (shiftop) - 32;
7635 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7642 return high == -lsb;
7645 shift_mask_low = ~0;
7646 shift_mask_low <<= INTVAL (shiftop);
7650 if (-lsb != shift_mask_low)
7653 if (HOST_BITS_PER_WIDE_INT < 64)
7658 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7661 return high == -lsb;
7665 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7671 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7672 to perform a left shift. It must have SHIFTOP or more least
7673 significant 0's, with the remainder of the word 1's. */
7676 includes_rldicr_lshift_p (rtx shiftop, rtx andop)
7678 if (GET_CODE (andop) == CONST_INT)
7680 HOST_WIDE_INT c, lsb, shift_mask;
7683 shift_mask <<= INTVAL (shiftop);
7686 /* Find the least significant one bit. */
7689 /* It must be covered by the shift mask.
7690 This test also rejects c == 0. */
7691 if ((lsb & shift_mask) == 0)
7694 /* Check we have all 1's above the transition, and reject all 1's. */
7695 return c == -lsb && lsb != 1;
7697 else if (GET_CODE (andop) == CONST_DOUBLE
7698 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7700 HOST_WIDE_INT low, lsb, shift_mask_low;
7702 low = CONST_DOUBLE_LOW (andop);
7704 if (HOST_BITS_PER_WIDE_INT < 64)
7706 HOST_WIDE_INT high, shift_mask_high;
7708 high = CONST_DOUBLE_HIGH (andop);
7712 shift_mask_high = ~0;
7713 if (INTVAL (shiftop) > 32)
7714 shift_mask_high <<= INTVAL (shiftop) - 32;
7718 if ((lsb & shift_mask_high) == 0)
7721 return high == -lsb;
7727 shift_mask_low = ~0;
7728 shift_mask_low <<= INTVAL (shiftop);
7732 if ((lsb & shift_mask_low) == 0)
7735 return low == -lsb && lsb != 1;
7741 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7742 for lfq and stfq insns.
7744 Note reg1 and reg2 *must* be hard registers. To be sure we will
7745 abort if we are passed pseudo registers. */
7748 registers_ok_for_quad_peep (rtx reg1, rtx reg2)
7750 /* We might have been passed a SUBREG. */
7751 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7754 return (REGNO (reg1) == REGNO (reg2) - 1);
7757 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7758 addr1 and addr2 must be in consecutive memory locations
7759 (addr2 == addr1 + 8). */
7762 addrs_ok_for_quad_peep (rtx addr1, rtx addr2)
7767 /* Extract an offset (if used) from the first addr. */
7768 if (GET_CODE (addr1) == PLUS)
7770 /* If not a REG, return zero. */
7771 if (GET_CODE (XEXP (addr1, 0)) != REG)
7775 reg1 = REGNO (XEXP (addr1, 0));
7776 /* The offset must be constant! */
7777 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7779 offset1 = INTVAL (XEXP (addr1, 1));
7782 else if (GET_CODE (addr1) != REG)
7786 reg1 = REGNO (addr1);
7787 /* This was a simple (mem (reg)) expression. Offset is 0. */
7791 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7792 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7793 register as addr1. */
7794 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7796 if (GET_CODE (addr2) != PLUS)
7799 if (GET_CODE (XEXP (addr2, 0)) != REG
7800 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7803 if (reg1 != REGNO (XEXP (addr2, 0)))
7806 /* The offset for the second addr must be 8 more than the first addr. */
7807 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7810 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7815 /* Return the register class of a scratch register needed to copy IN into
7816 or out of a register in CLASS in MODE. If it can be done directly,
7817 NO_REGS is returned. */
7820 secondary_reload_class (enum reg_class class,
7821 enum machine_mode mode ATTRIBUTE_UNUSED, rtx in)
7825 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7827 && MACHOPIC_INDIRECT
7831 /* We cannot copy a symbolic operand directly into anything
7832 other than BASE_REGS for TARGET_ELF. So indicate that a
7833 register from BASE_REGS is needed as an intermediate
7836 On Darwin, pic addresses require a load from memory, which
7837 needs a base register. */
7838 if (class != BASE_REGS
7839 && (GET_CODE (in) == SYMBOL_REF
7840 || GET_CODE (in) == HIGH
7841 || GET_CODE (in) == LABEL_REF
7842 || GET_CODE (in) == CONST))
7846 if (GET_CODE (in) == REG)
7849 if (regno >= FIRST_PSEUDO_REGISTER)
7851 regno = true_regnum (in);
7852 if (regno >= FIRST_PSEUDO_REGISTER)
7856 else if (GET_CODE (in) == SUBREG)
7858 regno = true_regnum (in);
7859 if (regno >= FIRST_PSEUDO_REGISTER)
7865 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7867 if (class == GENERAL_REGS || class == BASE_REGS
7868 || (regno >= 0 && INT_REGNO_P (regno)))
7871 /* Constants, memory, and FP registers can go into FP registers. */
7872 if ((regno == -1 || FP_REGNO_P (regno))
7873 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7876 /* Memory, and AltiVec registers can go into AltiVec registers. */
7877 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7878 && class == ALTIVEC_REGS)
7881 /* We can copy among the CR registers. */
7882 if ((class == CR_REGS || class == CR0_REGS)
7883 && regno >= 0 && CR_REGNO_P (regno))
7886 /* Otherwise, we need GENERAL_REGS. */
7887 return GENERAL_REGS;
7890 /* Given a comparison operation, return the bit number in CCR to test. We
7891 know this is a valid comparison.
7893 SCC_P is 1 if this is for an scc. That means that %D will have been
7894 used instead of %C, so the bits will be in different places.
7896 Return -1 if OP isn't a valid comparison for some reason. */
7899 ccr_bit (rtx op, int scc_p)
7901 enum rtx_code code = GET_CODE (op);
7902 enum machine_mode cc_mode;
7907 if (GET_RTX_CLASS (code) != '<')
7912 if (GET_CODE (reg) != REG
7913 || ! CR_REGNO_P (REGNO (reg)))
7916 cc_mode = GET_MODE (reg);
7917 cc_regnum = REGNO (reg);
7918 base_bit = 4 * (cc_regnum - CR0_REGNO);
7920 validate_condition_mode (code, cc_mode);
7922 /* When generating a sCOND operation, only positive conditions are
7924 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
7925 && code != GTU && code != LTU)
7931 if (TARGET_E500 && !TARGET_FPRS
7932 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7933 return base_bit + 1;
7934 return scc_p ? base_bit + 3 : base_bit + 2;
7936 if (TARGET_E500 && !TARGET_FPRS
7937 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
7938 return base_bit + 1;
7939 return base_bit + 2;
7940 case GT: case GTU: case UNLE:
7941 return base_bit + 1;
7942 case LT: case LTU: case UNGE:
7944 case ORDERED: case UNORDERED:
7945 return base_bit + 3;
7948 /* If scc, we will have done a cror to put the bit in the
7949 unordered position. So test that bit. For integer, this is ! LT
7950 unless this is an scc insn. */
7951 return scc_p ? base_bit + 3 : base_bit;
7954 return scc_p ? base_bit + 3 : base_bit + 1;
7961 /* Return the GOT register. */
7964 rs6000_got_register (rtx value ATTRIBUTE_UNUSED)
7966 /* The second flow pass currently (June 1999) can't update
7967 regs_ever_live without disturbing other parts of the compiler, so
7968 update it here to make the prolog/epilogue code happy. */
7969 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
7970 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
7972 current_function_uses_pic_offset_table = 1;
7974 return pic_offset_table_rtx;
7977 /* Function to init struct machine_function.
7978 This will be called, via a pointer variable,
7979 from push_function_context. */
7981 static struct machine_function *
7982 rs6000_init_machine_status (void)
7984 return ggc_alloc_cleared (sizeof (machine_function));
7987 /* These macros test for integers and extract the low-order bits. */
7989 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
7990 && GET_MODE (X) == VOIDmode)
7992 #define INT_LOWPART(X) \
7993 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
7999 unsigned long val = INT_LOWPART (op);
8001 /* If the high bit is zero, the value is the first 1 bit we find
8003 if ((val & 0x80000000) == 0)
8005 if ((val & 0xffffffff) == 0)
8009 while (((val <<= 1) & 0x80000000) == 0)
8014 /* If the high bit is set and the low bit is not, or the mask is all
8015 1's, the value is zero. */
8016 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8019 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8022 while (((val >>= 1) & 1) != 0)
8032 unsigned long val = INT_LOWPART (op);
8034 /* If the low bit is zero, the value is the first 1 bit we find from
8038 if ((val & 0xffffffff) == 0)
8042 while (((val >>= 1) & 1) == 0)
8048 /* If the low bit is set and the high bit is not, or the mask is all
8049 1's, the value is 31. */
8050 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8053 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8056 while (((val <<= 1) & 0x80000000) != 0)
8062 /* Locate some local-dynamic symbol still in use by this function
8063 so that we can print its name in some tls_ld pattern. */
8066 rs6000_get_some_local_dynamic_name (void)
8070 if (cfun->machine->some_ld_name)
8071 return cfun->machine->some_ld_name;
8073 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8075 && for_each_rtx (&PATTERN (insn),
8076 rs6000_get_some_local_dynamic_name_1, 0))
8077 return cfun->machine->some_ld_name;
8082 /* Helper function for rs6000_get_some_local_dynamic_name. */
8085 rs6000_get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
8089 if (GET_CODE (x) == SYMBOL_REF)
8091 const char *str = XSTR (x, 0);
8092 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8094 cfun->machine->some_ld_name = str;
8102 /* Print an operand. Recognize special options, documented below. */
8105 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8106 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8108 #define SMALL_DATA_RELOC "sda21"
8109 #define SMALL_DATA_REG 0
8113 print_operand (FILE *file, rtx x, int code)
8117 unsigned HOST_WIDE_INT uval;
8122 /* Write out an instruction after the call which may be replaced
8123 with glue code by the loader. This depends on the AIX version. */
8124 asm_fprintf (file, RS6000_CALL_GLUE);
8127 /* %a is output_address. */
8130 /* If X is a constant integer whose low-order 5 bits are zero,
8131 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8132 in the AIX assembler where "sri" with a zero shift count
8133 writes a trash instruction. */
8134 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8141 /* If constant, low-order 16 bits of constant, unsigned.
8142 Otherwise, write normally. */
8144 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8146 print_operand (file, x, 0);
8150 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8151 for 64-bit mask direction. */
8152 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8155 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8159 /* X is a CR register. Print the number of the EQ bit of the CR */
8160 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8161 output_operand_lossage ("invalid %%E value");
8163 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8167 /* X is a CR register. Print the shift count needed to move it
8168 to the high-order four bits. */
8169 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8170 output_operand_lossage ("invalid %%f value");
8172 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8176 /* Similar, but print the count for the rotate in the opposite
8178 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8179 output_operand_lossage ("invalid %%F value");
8181 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8185 /* X is a constant integer. If it is negative, print "m",
8186 otherwise print "z". This is to make an aze or ame insn. */
8187 if (GET_CODE (x) != CONST_INT)
8188 output_operand_lossage ("invalid %%G value");
8189 else if (INTVAL (x) >= 0)
8196 /* If constant, output low-order five bits. Otherwise, write
8199 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8201 print_operand (file, x, 0);
8205 /* If constant, output low-order six bits. Otherwise, write
8208 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8210 print_operand (file, x, 0);
8214 /* Print `i' if this is a constant, else nothing. */
8220 /* Write the bit number in CCR for jump. */
8223 output_operand_lossage ("invalid %%j code");
8225 fprintf (file, "%d", i);
8229 /* Similar, but add one for shift count in rlinm for scc and pass
8230 scc flag to `ccr_bit'. */
8233 output_operand_lossage ("invalid %%J code");
8235 /* If we want bit 31, write a shift count of zero, not 32. */
8236 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8240 /* X must be a constant. Write the 1's complement of the
8243 output_operand_lossage ("invalid %%k value");
8245 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8249 /* X must be a symbolic constant on ELF. Write an
8250 expression suitable for an 'addi' that adds in the low 16
8252 if (GET_CODE (x) != CONST)
8254 print_operand_address (file, x);
8259 if (GET_CODE (XEXP (x, 0)) != PLUS
8260 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8261 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8262 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8263 output_operand_lossage ("invalid %%K value");
8264 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8266 /* For GNU as, there must be a non-alphanumeric character
8267 between 'l' and the number. The '-' is added by
8268 print_operand() already. */
8269 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8271 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8275 /* %l is output_asm_label. */
8278 /* Write second word of DImode or DFmode reference. Works on register
8279 or non-indexed memory only. */
8280 if (GET_CODE (x) == REG)
8281 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8282 else if (GET_CODE (x) == MEM)
8284 /* Handle possible auto-increment. Since it is pre-increment and
8285 we have already done it, we can just use an offset of word. */
8286 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8287 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8288 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8291 output_address (XEXP (adjust_address_nv (x, SImode,
8295 if (small_data_operand (x, GET_MODE (x)))
8296 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8297 reg_names[SMALL_DATA_REG]);
8302 /* MB value for a mask operand. */
8303 if (! mask_operand (x, SImode))
8304 output_operand_lossage ("invalid %%m value");
8306 fprintf (file, "%d", extract_MB (x));
8310 /* ME value for a mask operand. */
8311 if (! mask_operand (x, SImode))
8312 output_operand_lossage ("invalid %%M value");
8314 fprintf (file, "%d", extract_ME (x));
8317 /* %n outputs the negative of its operand. */
8320 /* Write the number of elements in the vector times 4. */
8321 if (GET_CODE (x) != PARALLEL)
8322 output_operand_lossage ("invalid %%N value");
8324 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8328 /* Similar, but subtract 1 first. */
8329 if (GET_CODE (x) != PARALLEL)
8330 output_operand_lossage ("invalid %%O value");
8332 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8336 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8338 || INT_LOWPART (x) < 0
8339 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8340 output_operand_lossage ("invalid %%p value");
8342 fprintf (file, "%d", i);
8346 /* The operand must be an indirect memory reference. The result
8347 is the register number. */
8348 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8349 || REGNO (XEXP (x, 0)) >= 32)
8350 output_operand_lossage ("invalid %%P value");
8352 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8356 /* This outputs the logical code corresponding to a boolean
8357 expression. The expression may have one or both operands
8358 negated (if one, only the first one). For condition register
8359 logical operations, it will also treat the negated
8360 CR codes as NOTs, but not handle NOTs of them. */
8362 const char *const *t = 0;
8364 enum rtx_code code = GET_CODE (x);
8365 static const char * const tbl[3][3] = {
8366 { "and", "andc", "nor" },
8367 { "or", "orc", "nand" },
8368 { "xor", "eqv", "xor" } };
8372 else if (code == IOR)
8374 else if (code == XOR)
8377 output_operand_lossage ("invalid %%q value");
8379 if (GET_CODE (XEXP (x, 0)) != NOT)
8383 if (GET_CODE (XEXP (x, 1)) == NOT)
8401 /* X is a CR register. Print the mask for `mtcrf'. */
8402 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8403 output_operand_lossage ("invalid %%R value");
8405 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8409 /* Low 5 bits of 32 - value */
8411 output_operand_lossage ("invalid %%s value");
8413 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8417 /* PowerPC64 mask position. All 0's is excluded.
8418 CONST_INT 32-bit mask is considered sign-extended so any
8419 transition must occur within the CONST_INT, not on the boundary. */
8420 if (! mask64_operand (x, DImode))
8421 output_operand_lossage ("invalid %%S value");
8423 uval = INT_LOWPART (x);
8425 if (uval & 1) /* Clear Left */
8427 #if HOST_BITS_PER_WIDE_INT > 64
8428 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8432 else /* Clear Right */
8435 #if HOST_BITS_PER_WIDE_INT > 64
8436 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8444 fprintf (file, "%d", i);
8448 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8449 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8452 /* Bit 3 is OV bit. */
8453 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8455 /* If we want bit 31, write a shift count of zero, not 32. */
8456 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8460 /* Print the symbolic name of a branch target register. */
8461 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8462 && REGNO (x) != COUNT_REGISTER_REGNUM))
8463 output_operand_lossage ("invalid %%T value");
8464 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8465 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8467 fputs ("ctr", file);
8471 /* High-order 16 bits of constant for use in unsigned operand. */
8473 output_operand_lossage ("invalid %%u value");
8475 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8476 (INT_LOWPART (x) >> 16) & 0xffff);
8480 /* High-order 16 bits of constant for use in signed operand. */
8482 output_operand_lossage ("invalid %%v value");
8484 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8485 (INT_LOWPART (x) >> 16) & 0xffff);
8489 /* Print `u' if this has an auto-increment or auto-decrement. */
8490 if (GET_CODE (x) == MEM
8491 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8492 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8497 /* Print the trap code for this operand. */
8498 switch (GET_CODE (x))
8501 fputs ("eq", file); /* 4 */
8504 fputs ("ne", file); /* 24 */
8507 fputs ("lt", file); /* 16 */
8510 fputs ("le", file); /* 20 */
8513 fputs ("gt", file); /* 8 */
8516 fputs ("ge", file); /* 12 */
8519 fputs ("llt", file); /* 2 */
8522 fputs ("lle", file); /* 6 */
8525 fputs ("lgt", file); /* 1 */
8528 fputs ("lge", file); /* 5 */
8536 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8539 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8540 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8542 print_operand (file, x, 0);
8546 /* MB value for a PowerPC64 rldic operand. */
8547 val = (GET_CODE (x) == CONST_INT
8548 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8553 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8554 if ((val <<= 1) < 0)
8557 #if HOST_BITS_PER_WIDE_INT == 32
8558 if (GET_CODE (x) == CONST_INT && i >= 0)
8559 i += 32; /* zero-extend high-part was all 0's */
8560 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8562 val = CONST_DOUBLE_LOW (x);
8569 for ( ; i < 64; i++)
8570 if ((val <<= 1) < 0)
8575 fprintf (file, "%d", i + 1);
8579 if (GET_CODE (x) == MEM
8580 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8585 /* Like 'L', for third word of TImode */
8586 if (GET_CODE (x) == REG)
8587 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8588 else if (GET_CODE (x) == MEM)
8590 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8591 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8592 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8594 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8595 if (small_data_operand (x, GET_MODE (x)))
8596 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8597 reg_names[SMALL_DATA_REG]);
8602 /* X is a SYMBOL_REF. Write out the name preceded by a
8603 period and without any trailing data in brackets. Used for function
8604 names. If we are configured for System V (or the embedded ABI) on
8605 the PowerPC, do not emit the period, since those systems do not use
8606 TOCs and the like. */
8607 if (GET_CODE (x) != SYMBOL_REF)
8610 if (XSTR (x, 0)[0] != '.')
8612 switch (DEFAULT_ABI)
8627 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8629 assemble_name (file, XSTR (x, 0));
8633 /* Like 'L', for last word of TImode. */
8634 if (GET_CODE (x) == REG)
8635 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8636 else if (GET_CODE (x) == MEM)
8638 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8639 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8640 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8642 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8643 if (small_data_operand (x, GET_MODE (x)))
8644 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8645 reg_names[SMALL_DATA_REG]);
8649 /* Print AltiVec or SPE memory operand. */
8654 if (GET_CODE (x) != MEM)
8662 if (GET_CODE (tmp) == REG)
8664 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8667 /* Handle [reg+UIMM]. */
8668 else if (GET_CODE (tmp) == PLUS &&
8669 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8673 if (GET_CODE (XEXP (tmp, 0)) != REG)
8676 x = INTVAL (XEXP (tmp, 1));
8677 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8681 /* Fall through. Must be [reg+reg]. */
8683 if (GET_CODE (tmp) == REG)
8684 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8685 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8687 if (REGNO (XEXP (tmp, 0)) == 0)
8688 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8689 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8691 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8692 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8700 if (GET_CODE (x) == REG)
8701 fprintf (file, "%s", reg_names[REGNO (x)]);
8702 else if (GET_CODE (x) == MEM)
8704 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8705 know the width from the mode. */
8706 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8707 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8708 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8709 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8710 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8711 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8713 output_address (XEXP (x, 0));
8716 output_addr_const (file, x);
8720 assemble_name (file, rs6000_get_some_local_dynamic_name ());
8724 output_operand_lossage ("invalid %%xn code");
8728 /* Print the address of an operand. */
8731 print_operand_address (FILE *file, rtx x)
8733 if (GET_CODE (x) == REG)
8734 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8735 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8736 || GET_CODE (x) == LABEL_REF)
8738 output_addr_const (file, x);
8739 if (small_data_operand (x, GET_MODE (x)))
8740 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8741 reg_names[SMALL_DATA_REG]);
8742 else if (TARGET_TOC)
8745 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8747 if (REGNO (XEXP (x, 0)) == 0)
8748 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8749 reg_names[ REGNO (XEXP (x, 0)) ]);
8751 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8752 reg_names[ REGNO (XEXP (x, 1)) ]);
8754 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8755 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
8756 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
8758 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8759 && CONSTANT_P (XEXP (x, 1)))
8761 output_addr_const (file, XEXP (x, 1));
8762 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8766 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8767 && CONSTANT_P (XEXP (x, 1)))
8769 fprintf (file, "lo16(");
8770 output_addr_const (file, XEXP (x, 1));
8771 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8774 else if (legitimate_constant_pool_address_p (x))
8776 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8778 rtx contains_minus = XEXP (x, 1);
8782 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8783 turn it into (sym) for output_addr_const. */
8784 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8785 contains_minus = XEXP (contains_minus, 0);
8787 minus = XEXP (contains_minus, 0);
8788 symref = XEXP (minus, 0);
8789 XEXP (contains_minus, 0) = symref;
8794 name = XSTR (symref, 0);
8795 newname = alloca (strlen (name) + sizeof ("@toc"));
8796 strcpy (newname, name);
8797 strcat (newname, "@toc");
8798 XSTR (symref, 0) = newname;
8800 output_addr_const (file, XEXP (x, 1));
8802 XSTR (symref, 0) = name;
8803 XEXP (contains_minus, 0) = minus;
8806 output_addr_const (file, XEXP (x, 1));
8808 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8814 /* Target hook for assembling integer objects. The PowerPC version has
8815 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8816 is defined. It also needs to handle DI-mode objects on 64-bit
8820 rs6000_assemble_integer (rtx x, unsigned int size, int aligned_p)
8822 #ifdef RELOCATABLE_NEEDS_FIXUP
8823 /* Special handling for SI values. */
8824 if (size == 4 && aligned_p)
8826 extern int in_toc_section (void);
8827 static int recurse = 0;
8829 /* For -mrelocatable, we mark all addresses that need to be fixed up
8830 in the .fixup section. */
8831 if (TARGET_RELOCATABLE
8832 && !in_toc_section ()
8833 && !in_text_section ()
8835 && GET_CODE (x) != CONST_INT
8836 && GET_CODE (x) != CONST_DOUBLE
8842 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8844 ASM_OUTPUT_LABEL (asm_out_file, buf);
8845 fprintf (asm_out_file, "\t.long\t(");
8846 output_addr_const (asm_out_file, x);
8847 fprintf (asm_out_file, ")@fixup\n");
8848 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8849 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8850 fprintf (asm_out_file, "\t.long\t");
8851 assemble_name (asm_out_file, buf);
8852 fprintf (asm_out_file, "\n\t.previous\n");
8856 /* Remove initial .'s to turn a -mcall-aixdesc function
8857 address into the address of the descriptor, not the function
8859 else if (GET_CODE (x) == SYMBOL_REF
8860 && XSTR (x, 0)[0] == '.'
8861 && DEFAULT_ABI == ABI_AIX)
8863 const char *name = XSTR (x, 0);
8864 while (*name == '.')
8867 fprintf (asm_out_file, "\t.long\t%s\n", name);
8871 #endif /* RELOCATABLE_NEEDS_FIXUP */
8872 return default_assemble_integer (x, size, aligned_p);
8875 #ifdef HAVE_GAS_HIDDEN
8876 /* Emit an assembler directive to set symbol visibility for DECL to
8880 rs6000_assemble_visibility (tree decl, int vis)
8882 /* Functions need to have their entry point symbol visibility set as
8883 well as their descriptor symbol visibility. */
8884 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8886 static const char * const visibility_types[] = {
8887 NULL, "internal", "hidden", "protected"
8890 const char *name, *type;
8892 name = ((* targetm.strip_name_encoding)
8893 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8894 type = visibility_types[vis];
8896 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8897 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8900 default_assemble_visibility (decl, vis);
8905 rs6000_reverse_condition (enum machine_mode mode, enum rtx_code code)
8907 /* Reversal of FP compares takes care -- an ordered compare
8908 becomes an unordered compare and vice versa. */
8909 if (mode == CCFPmode
8910 && (!flag_finite_math_only
8911 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
8912 || code == UNEQ || code == LTGT))
8913 return reverse_condition_maybe_unordered (code);
8915 return reverse_condition (code);
8918 /* Generate a compare for CODE. Return a brand-new rtx that
8919 represents the result of the compare. */
8922 rs6000_generate_compare (enum rtx_code code)
8924 enum machine_mode comp_mode;
8927 if (rs6000_compare_fp_p)
8928 comp_mode = CCFPmode;
8929 else if (code == GTU || code == LTU
8930 || code == GEU || code == LEU)
8931 comp_mode = CCUNSmode;
8935 /* First, the compare. */
8936 compare_result = gen_reg_rtx (comp_mode);
8938 /* SPE FP compare instructions on the GPRs. Yuck! */
8939 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
8940 && rs6000_compare_fp_p)
8942 rtx cmp, or1, or2, or_result, compare_result2;
8950 cmp = flag_finite_math_only
8951 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
8953 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
8954 rs6000_compare_op1);
8962 cmp = flag_finite_math_only
8963 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
8965 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
8966 rs6000_compare_op1);
8974 cmp = flag_finite_math_only
8975 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
8977 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
8978 rs6000_compare_op1);
8984 /* Synthesize LE and GE from LT/GT || EQ. */
8985 if (code == LE || code == GE || code == LEU || code == GEU)
8987 /* Synthesize GE/LE frome GT/LT || EQ. */
8993 case LE: code = LT; break;
8994 case GE: code = GT; break;
8995 case LEU: code = LT; break;
8996 case GEU: code = GT; break;
9000 or1 = gen_reg_rtx (SImode);
9001 or2 = gen_reg_rtx (SImode);
9002 or_result = gen_reg_rtx (CCEQmode);
9003 compare_result2 = gen_reg_rtx (CCFPmode);
9006 cmp = flag_finite_math_only
9007 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9009 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9010 rs6000_compare_op1);
9013 /* The MC8540 FP compare instructions set the CR bits
9014 differently than other PPC compare instructions. For
9015 that matter, there is no generic test instruction, but a
9016 testgt, testlt, and testeq. For a true condition, bit 2
9017 is set (x1xx) in the CR. Following the traditional CR
9023 ... bit 2 would be a GT CR alias, so later on we
9024 look in the GT bits for the branch instructions.
9025 However, we must be careful to emit correct RTL in
9026 the meantime, so optimizations don't get confused. */
9028 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9029 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9031 /* OR them together. */
9032 cmp = gen_rtx_SET (VOIDmode, or_result,
9033 gen_rtx_COMPARE (CCEQmode,
9034 gen_rtx_IOR (SImode, or1, or2),
9036 compare_result = or_result;
9041 /* We only care about 1 bit (x1xx), so map everything to NE to
9042 maintain rtl sanity. We'll get to the right bit (x1xx) at
9043 code output time. */
9044 if (code == NE || code == LTGT)
9045 /* Do the inverse here because we have no cmpne
9046 instruction. We use the cmpeq instruction and expect
9047 to get a 0 instead. */
9056 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9057 gen_rtx_COMPARE (comp_mode,
9059 rs6000_compare_op1)));
9061 /* Some kinds of FP comparisons need an OR operation;
9062 under flag_finite_math_only we don't bother. */
9063 if (rs6000_compare_fp_p
9064 && ! flag_finite_math_only
9065 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9066 && (code == LE || code == GE
9067 || code == UNEQ || code == LTGT
9068 || code == UNGT || code == UNLT))
9070 enum rtx_code or1, or2;
9071 rtx or1_rtx, or2_rtx, compare2_rtx;
9072 rtx or_result = gen_reg_rtx (CCEQmode);
9076 case LE: or1 = LT; or2 = EQ; break;
9077 case GE: or1 = GT; or2 = EQ; break;
9078 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9079 case LTGT: or1 = LT; or2 = GT; break;
9080 case UNGT: or1 = UNORDERED; or2 = GT; break;
9081 case UNLT: or1 = UNORDERED; or2 = LT; break;
9084 validate_condition_mode (or1, comp_mode);
9085 validate_condition_mode (or2, comp_mode);
9086 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9087 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9088 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9089 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9091 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9093 compare_result = or_result;
9097 validate_condition_mode (code, GET_MODE (compare_result));
9099 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9103 /* Emit the RTL for an sCOND pattern. */
9106 rs6000_emit_sCOND (enum rtx_code code, rtx result)
9109 enum machine_mode op_mode;
9110 enum rtx_code cond_code;
9112 condition_rtx = rs6000_generate_compare (code);
9113 cond_code = GET_CODE (condition_rtx);
9116 || cond_code == GE || cond_code == LE
9117 || cond_code == GEU || cond_code == LEU
9118 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9120 rtx not_result = gen_reg_rtx (CCEQmode);
9121 rtx not_op, rev_cond_rtx;
9122 enum machine_mode cc_mode;
9124 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9126 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9127 SImode, XEXP (condition_rtx, 0), const0_rtx);
9128 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9129 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9130 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9133 op_mode = GET_MODE (rs6000_compare_op0);
9134 if (op_mode == VOIDmode)
9135 op_mode = GET_MODE (rs6000_compare_op1);
9137 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9139 PUT_MODE (condition_rtx, DImode);
9140 convert_move (result, condition_rtx, 0);
9144 PUT_MODE (condition_rtx, SImode);
9145 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9149 /* Emit a branch of kind CODE to location LOC. */
9152 rs6000_emit_cbranch (enum rtx_code code, rtx loc)
9154 rtx condition_rtx, loc_ref;
9156 condition_rtx = rs6000_generate_compare (code);
9157 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9158 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9159 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9163 /* Return the string to output a conditional branch to LABEL, which is
9164 the operand number of the label, or -1 if the branch is really a
9167 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9168 condition code register and its mode specifies what kind of
9171 REVERSED is nonzero if we should reverse the sense of the comparison.
9173 INSN is the insn. */
9176 output_cbranch (rtx op, const char *label, int reversed, rtx insn)
9178 static char string[64];
9179 enum rtx_code code = GET_CODE (op);
9180 rtx cc_reg = XEXP (op, 0);
9181 enum machine_mode mode = GET_MODE (cc_reg);
9182 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9183 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9184 int really_reversed = reversed ^ need_longbranch;
9190 validate_condition_mode (code, mode);
9192 /* Work out which way this really branches. We could use
9193 reverse_condition_maybe_unordered here always but this
9194 makes the resulting assembler clearer. */
9195 if (really_reversed)
9197 /* Reversal of FP compares takes care -- an ordered compare
9198 becomes an unordered compare and vice versa. */
9199 if (mode == CCFPmode)
9200 code = reverse_condition_maybe_unordered (code);
9202 code = reverse_condition (code);
9205 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9207 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9210 /* Opposite of GT. */
9212 else if (code == NE)
9220 /* Not all of these are actually distinct opcodes, but
9221 we distinguish them for clarity of the resulting assembler. */
9223 ccode = "ne"; break;
9225 ccode = "eq"; break;
9227 ccode = "ge"; break;
9228 case GT: case GTU: case UNGT:
9229 ccode = "gt"; break;
9231 ccode = "le"; break;
9232 case LT: case LTU: case UNLT:
9233 ccode = "lt"; break;
9234 case UNORDERED: ccode = "un"; break;
9235 case ORDERED: ccode = "nu"; break;
9236 case UNGE: ccode = "nl"; break;
9237 case UNLE: ccode = "ng"; break;
9242 /* Maybe we have a guess as to how likely the branch is.
9243 The old mnemonics don't have a way to specify this information. */
9245 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9246 if (note != NULL_RTX)
9248 /* PROB is the difference from 50%. */
9249 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9250 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9252 /* Only hint for highly probable/improbable branches on newer
9253 cpus as static prediction overrides processor dynamic
9254 prediction. For older cpus we may as well always hint, but
9255 assume not taken for branches that are very close to 50% as a
9256 mispredicted taken branch is more expensive than a
9257 mispredicted not-taken branch. */
9259 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9261 if (abs (prob) > REG_BR_PROB_BASE / 20
9262 && ((prob > 0) ^ need_longbranch))
9270 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9272 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9274 /* We need to escape any '%' characters in the reg_names string.
9275 Assume they'd only be the first character... */
9276 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9278 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9282 /* If the branch distance was too far, we may have to use an
9283 unconditional branch to go the distance. */
9284 if (need_longbranch)
9285 s += sprintf (s, ",$+8\n\tb %s", label);
9287 s += sprintf (s, ",%s", label);
9293 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9294 operands of the last comparison is nonzero/true, FALSE_COND if it
9295 is zero/false. Return 0 if the hardware has no such operation. */
9298 rs6000_emit_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9300 enum rtx_code code = GET_CODE (op);
9301 rtx op0 = rs6000_compare_op0;
9302 rtx op1 = rs6000_compare_op1;
9304 enum machine_mode compare_mode = GET_MODE (op0);
9305 enum machine_mode result_mode = GET_MODE (dest);
9308 /* These modes should always match. */
9309 if (GET_MODE (op1) != compare_mode
9310 /* In the isel case however, we can use a compare immediate, so
9311 op1 may be a small constant. */
9312 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9314 if (GET_MODE (true_cond) != result_mode)
9316 if (GET_MODE (false_cond) != result_mode)
9319 /* First, work out if the hardware can do this at all, or
9320 if it's too slow... */
9321 if (! rs6000_compare_fp_p)
9324 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9328 /* Eliminate half of the comparisons by switching operands, this
9329 makes the remaining code simpler. */
9330 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9331 || code == LTGT || code == LT || code == UNLE)
9333 code = reverse_condition_maybe_unordered (code);
9335 true_cond = false_cond;
9339 /* UNEQ and LTGT take four instructions for a comparison with zero,
9340 it'll probably be faster to use a branch here too. */
9341 if (code == UNEQ && HONOR_NANS (compare_mode))
9344 if (GET_CODE (op1) == CONST_DOUBLE)
9345 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9347 /* We're going to try to implement comparisons by performing
9348 a subtract, then comparing against zero. Unfortunately,
9349 Inf - Inf is NaN which is not zero, and so if we don't
9350 know that the operand is finite and the comparison
9351 would treat EQ different to UNORDERED, we can't do it. */
9352 if (HONOR_INFINITIES (compare_mode)
9353 && code != GT && code != UNGE
9354 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9355 /* Constructs of the form (a OP b ? a : b) are safe. */
9356 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9357 || (! rtx_equal_p (op0, true_cond)
9358 && ! rtx_equal_p (op1, true_cond))))
9360 /* At this point we know we can use fsel. */
9362 /* Reduce the comparison to a comparison against zero. */
9363 temp = gen_reg_rtx (compare_mode);
9364 emit_insn (gen_rtx_SET (VOIDmode, temp,
9365 gen_rtx_MINUS (compare_mode, op0, op1)));
9367 op1 = CONST0_RTX (compare_mode);
9369 /* If we don't care about NaNs we can reduce some of the comparisons
9370 down to faster ones. */
9371 if (! HONOR_NANS (compare_mode))
9377 true_cond = false_cond;
9390 /* Now, reduce everything down to a GE. */
9397 temp = gen_reg_rtx (compare_mode);
9398 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9403 temp = gen_reg_rtx (compare_mode);
9404 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9409 temp = gen_reg_rtx (compare_mode);
9410 emit_insn (gen_rtx_SET (VOIDmode, temp,
9411 gen_rtx_NEG (compare_mode,
9412 gen_rtx_ABS (compare_mode, op0))));
9417 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9418 temp = gen_reg_rtx (result_mode);
9419 emit_insn (gen_rtx_SET (VOIDmode, temp,
9420 gen_rtx_IF_THEN_ELSE (result_mode,
9421 gen_rtx_GE (VOIDmode,
9423 true_cond, false_cond)));
9424 false_cond = true_cond;
9427 temp = gen_reg_rtx (compare_mode);
9428 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9433 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9434 temp = gen_reg_rtx (result_mode);
9435 emit_insn (gen_rtx_SET (VOIDmode, temp,
9436 gen_rtx_IF_THEN_ELSE (result_mode,
9437 gen_rtx_GE (VOIDmode,
9439 true_cond, false_cond)));
9440 true_cond = false_cond;
9443 temp = gen_reg_rtx (compare_mode);
9444 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9452 emit_insn (gen_rtx_SET (VOIDmode, dest,
9453 gen_rtx_IF_THEN_ELSE (result_mode,
9454 gen_rtx_GE (VOIDmode,
9456 true_cond, false_cond)));
9460 /* Same as above, but for ints (isel). */
9463 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
9465 rtx condition_rtx, cr;
9467 /* All isel implementations thus far are 32-bits. */
9468 if (GET_MODE (rs6000_compare_op0) != SImode)
9471 /* We still have to do the compare, because isel doesn't do a
9472 compare, it just looks at the CRx bits set by a previous compare
9474 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9475 cr = XEXP (condition_rtx, 0);
9477 if (GET_MODE (cr) == CCmode)
9478 emit_insn (gen_isel_signed (dest, condition_rtx,
9479 true_cond, false_cond, cr));
9481 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9482 true_cond, false_cond, cr));
9488 output_isel (rtx *operands)
9492 code = GET_CODE (operands[1]);
9493 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9495 PUT_CODE (operands[1], reverse_condition (code));
9496 return "isel %0,%3,%2,%j1";
9499 return "isel %0,%2,%3,%j1";
9503 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
9505 enum machine_mode mode = GET_MODE (op0);
9509 if (code == SMAX || code == SMIN)
9514 if (code == SMAX || code == UMAX)
9515 target = emit_conditional_move (dest, c, op0, op1, mode,
9518 target = emit_conditional_move (dest, c, op0, op1, mode,
9520 if (target == NULL_RTX)
9523 emit_move_insn (dest, target);
9526 /* Called by splitter for multireg moves.
9528 operands[0] : Destination of move
9529 operands[1] : Source of move
9532 operands[2-n] : Destination slots
9533 operands[n-m] : Source slots
9534 where n = 2 + HARD_REGNO_NREGS (reg, GET_MODE (operands[0]))
9535 m = 2 + 2 * HARD_REGNO_NREGS (reg, GET_MODE (operands[0])) - 1
9537 Splits the move of operands[1] to operands[0].
9538 This is done, if GPRs are one of the operands. In this case
9539 a sequence of simple move insns has to be issued. The sequence of these
9540 move insns has to be done in correct order to avoid early clobber of the
9541 base register or destructive overlap of registers.
9545 rs6000_split_multireg_move (rtx *operands)
9547 int nregs, reg, i, j, used_update = 0;
9548 enum machine_mode mode;
9549 rtx dst = operands[0];
9550 rtx src = operands[1];
9553 /* Calculate number to move (2/4 for 32/64 bit mode). */
9555 reg = REG_P (operands[0]) ? REGNO (operands[0]) : REGNO (operands[1]);
9556 mode = GET_MODE (operands[0]);
9557 nregs = HARD_REGNO_NREGS (reg, mode);
9559 if (REG_P (operands[1])
9560 && REG_P (operands[0])
9561 && (REGNO (operands[1]) < REGNO (operands[0])))
9563 /* Move register range backwards, if we have destructive overlap. */
9566 for (i = 0; i < nregs; i++)
9569 operands[i+2] = operand_subword (operands[0], j, 0, mode);
9570 operands[i+2+nregs] =
9571 operand_subword (operands[1], j, 0, mode);
9578 if (GET_CODE (operands[1]) == MEM)
9582 if (GET_CODE (XEXP (operands[1], 0)) == PRE_INC
9583 || GET_CODE (XEXP (operands[1], 0)) == PRE_DEC)
9586 breg = XEXP (XEXP (operands[1], 0), 0);
9587 delta_rtx = GET_CODE (XEXP (operands[1], 0)) == PRE_INC
9588 ? GEN_INT (GET_MODE_SIZE (GET_MODE (operands[1])))
9589 : GEN_INT (-GET_MODE_SIZE (GET_MODE (operands[1])));
9590 insn = emit_insn (TARGET_32BIT
9591 ? gen_addsi3 (breg, breg, delta_rtx)
9592 : gen_adddi3 (breg, breg, delta_rtx));
9593 src = gen_rtx_MEM (mode, breg);
9596 /* We have now address involving an base register only.
9597 If we use one of the registers to address memory,
9598 we have change that register last. */
9600 breg = (GET_CODE (XEXP (src, 0)) == PLUS
9601 ? XEXP (XEXP (src, 0), 0)
9607 if (REGNO (breg) >= REGNO (dst)
9608 && REGNO (breg) < REGNO (dst) + nregs)
9609 j = REGNO (breg) - REGNO (dst);
9612 if (GET_CODE (operands[0]) == MEM)
9616 if (GET_CODE (XEXP (operands[0], 0)) == PRE_INC
9617 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
9620 breg = XEXP (XEXP (operands[0], 0), 0);
9621 delta_rtx = GET_CODE (XEXP (operands[0], 0)) == PRE_INC
9622 ? GEN_INT (GET_MODE_SIZE (GET_MODE (operands[0])))
9623 : GEN_INT (-GET_MODE_SIZE (GET_MODE (operands[0])));
9625 /* We have to update the breg before doing the store.
9626 Use store with update, if available. */
9630 insn = emit_insn (TARGET_32BIT
9631 ? gen_movsi_update (breg, breg, delta_rtx,
9632 operand_subword (src, 0, 0, mode))
9633 : gen_movdi_update (breg, breg, delta_rtx,
9634 operand_subword (src, 0, 0, mode)));
9638 insn = emit_insn (TARGET_32BIT
9639 ? gen_addsi3 (breg, breg, delta_rtx)
9640 : gen_adddi3 (breg, breg, delta_rtx));
9641 dst = gen_rtx_MEM (mode, breg);
9645 for (i = 0; i < nregs; i++)
9647 /* Calculate index to next subword. */
9652 operands[i+2] = operand_subword (dst, j, 0, mode);
9653 operands[i+2+nregs] = operand_subword (src, j, 0, mode);
9655 if (j == 0 && used_update)
9657 /* Already emited move of first word by
9658 store with update -> emit dead insn instead (r := r). */
9659 operands[i+2] = operands[i+2+nregs];
9666 /* This page contains routines that are used to determine what the
9667 function prologue and epilogue code will do and write them out. */
9669 /* Return the first fixed-point register that is required to be
9670 saved. 32 if none. */
9673 first_reg_to_save (void)
9677 /* Find lowest numbered live register. */
9678 for (first_reg = 13; first_reg <= 31; first_reg++)
9679 if (regs_ever_live[first_reg]
9680 && (! call_used_regs[first_reg]
9681 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9682 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9683 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9688 && current_function_uses_pic_offset_table
9689 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9690 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9696 /* Similar, for FP regs. */
9699 first_fp_reg_to_save (void)
9703 /* Find lowest numbered live register. */
9704 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9705 if (regs_ever_live[first_reg])
9711 /* Similar, for AltiVec regs. */
9714 first_altivec_reg_to_save (void)
9718 /* Stack frame remains as is unless we are in AltiVec ABI. */
9719 if (! TARGET_ALTIVEC_ABI)
9720 return LAST_ALTIVEC_REGNO + 1;
9722 /* Find lowest numbered live register. */
9723 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9724 if (regs_ever_live[i])
9730 /* Return a 32-bit mask of the AltiVec registers we need to set in
9731 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9732 the 32-bit word is 0. */
9735 compute_vrsave_mask (void)
9737 unsigned int i, mask = 0;
9739 /* First, find out if we use _any_ altivec registers. */
9740 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9741 if (regs_ever_live[i])
9742 mask |= ALTIVEC_REG_BIT (i);
9747 /* Next, remove the argument registers from the set. These must
9748 be in the VRSAVE mask set by the caller, so we don't need to add
9749 them in again. More importantly, the mask we compute here is
9750 used to generate CLOBBERs in the set_vrsave insn, and we do not
9751 wish the argument registers to die. */
9752 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9753 mask &= ~ALTIVEC_REG_BIT (i);
9755 /* Similarly, remove the return value from the set. */
9758 diddle_return_value (is_altivec_return_reg, &yes);
9760 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9767 is_altivec_return_reg (rtx reg, void *xyes)
9769 bool *yes = (bool *) xyes;
9770 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9775 /* Calculate the stack information for the current function. This is
9776 complicated by having two separate calling sequences, the AIX calling
9777 sequence and the V.4 calling sequence.
9779 AIX (and Darwin/Mac OS X) stack frames look like:
9781 SP----> +---------------------------------------+
9782 | back chain to caller | 0 0
9783 +---------------------------------------+
9784 | saved CR | 4 8 (8-11)
9785 +---------------------------------------+
9787 +---------------------------------------+
9788 | reserved for compilers | 12 24
9789 +---------------------------------------+
9790 | reserved for binders | 16 32
9791 +---------------------------------------+
9792 | saved TOC pointer | 20 40
9793 +---------------------------------------+
9794 | Parameter save area (P) | 24 48
9795 +---------------------------------------+
9796 | Alloca space (A) | 24+P etc.
9797 +---------------------------------------+
9798 | Local variable space (L) | 24+P+A
9799 +---------------------------------------+
9800 | Float/int conversion temporary (X) | 24+P+A+L
9801 +---------------------------------------+
9802 | Save area for AltiVec registers (W) | 24+P+A+L+X
9803 +---------------------------------------+
9804 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9805 +---------------------------------------+
9806 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9807 +---------------------------------------+
9808 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9809 +---------------------------------------+
9810 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9811 +---------------------------------------+
9812 old SP->| back chain to caller's caller |
9813 +---------------------------------------+
9815 The required alignment for AIX configurations is two words (i.e., 8
9819 V.4 stack frames look like:
9821 SP----> +---------------------------------------+
9822 | back chain to caller | 0
9823 +---------------------------------------+
9824 | caller's saved LR | 4
9825 +---------------------------------------+
9826 | Parameter save area (P) | 8
9827 +---------------------------------------+
9828 | Alloca space (A) | 8+P
9829 +---------------------------------------+
9830 | Varargs save area (V) | 8+P+A
9831 +---------------------------------------+
9832 | Local variable space (L) | 8+P+A+V
9833 +---------------------------------------+
9834 | Float/int conversion temporary (X) | 8+P+A+V+L
9835 +---------------------------------------+
9836 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9837 +---------------------------------------+
9838 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9839 +---------------------------------------+
9840 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9841 +---------------------------------------+
9842 | SPE: area for 64-bit GP registers |
9843 +---------------------------------------+
9844 | SPE alignment padding |
9845 +---------------------------------------+
9846 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9847 +---------------------------------------+
9848 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9849 +---------------------------------------+
9850 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9851 +---------------------------------------+
9852 old SP->| back chain to caller's caller |
9853 +---------------------------------------+
9855 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9856 given. (But note below and in sysv4.h that we require only 8 and
9857 may round up the size of our stack frame anyways. The historical
9858 reason is early versions of powerpc-linux which didn't properly
9859 align the stack at program startup. A happy side-effect is that
9860 -mno-eabi libraries can be used with -meabi programs.)
9862 The EABI configuration defaults to the V.4 layout. However,
9863 the stack alignment requirements may differ. If -mno-eabi is not
9864 given, the required stack alignment is 8 bytes; if -mno-eabi is
9865 given, the required alignment is 16 bytes. (But see V.4 comment
9868 #ifndef ABI_STACK_BOUNDARY
9869 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9873 rs6000_stack_info (void)
9875 static rs6000_stack_t info, zero_info;
9876 rs6000_stack_t *info_ptr = &info;
9877 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9881 /* Zero all fields portably. */
9886 /* Cache value so we don't rescan instruction chain over and over. */
9887 if (cfun->machine->insn_chain_scanned_p == 0)
9889 cfun->machine->insn_chain_scanned_p = 1;
9890 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9894 /* Select which calling sequence. */
9895 info_ptr->abi = DEFAULT_ABI;
9897 /* Calculate which registers need to be saved & save area size. */
9898 info_ptr->first_gp_reg_save = first_reg_to_save ();
9899 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9900 even if it currently looks like we won't. */
9901 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9902 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9903 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9904 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9905 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9907 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9909 /* For the SPE, we have an additional upper 32-bits on each GPR.
9910 Ideally we should save the entire 64-bits only when the upper
9911 half is used in SIMD instructions. Since we only record
9912 registers live (not the size they are used in), this proves
9913 difficult because we'd have to traverse the instruction chain at
9914 the right time, taking reload into account. This is a real pain,
9915 so we opt to save the GPRs in 64-bits always if but one register
9916 gets used in 64-bits. Otherwise, all the registers in the frame
9917 get saved in 32-bits.
9919 So... since when we save all GPRs (except the SP) in 64-bits, the
9920 traditional GP save area will be empty. */
9921 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9922 info_ptr->gp_size = 0;
9924 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9925 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9927 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9928 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9929 - info_ptr->first_altivec_reg_save);
9931 /* Does this function call anything? */
9932 info_ptr->calls_p = (! current_function_is_leaf
9933 || cfun->machine->ra_needs_full_frame);
9935 /* Determine if we need to save the link register. */
9936 if (rs6000_ra_ever_killed ()
9937 || (DEFAULT_ABI == ABI_AIX
9938 && current_function_profile
9939 && !TARGET_PROFILE_KERNEL)
9940 #ifdef TARGET_RELOCATABLE
9941 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9943 || (info_ptr->first_fp_reg_save != 64
9944 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9945 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9946 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9947 || (DEFAULT_ABI == ABI_DARWIN
9949 && current_function_uses_pic_offset_table)
9950 || info_ptr->calls_p)
9952 info_ptr->lr_save_p = 1;
9953 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9956 /* Determine if we need to save the condition code registers. */
9957 if (regs_ever_live[CR2_REGNO]
9958 || regs_ever_live[CR3_REGNO]
9959 || regs_ever_live[CR4_REGNO])
9961 info_ptr->cr_save_p = 1;
9962 if (DEFAULT_ABI == ABI_V4)
9963 info_ptr->cr_size = reg_size;
9966 /* If the current function calls __builtin_eh_return, then we need
9967 to allocate stack space for registers that will hold data for
9968 the exception handler. */
9969 if (current_function_calls_eh_return)
9972 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9975 /* SPE saves EH registers in 64-bits. */
9976 ehrd_size = i * (TARGET_SPE_ABI
9977 && info_ptr->spe_64bit_regs_used != 0
9978 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9983 /* Determine various sizes. */
9984 info_ptr->reg_size = reg_size;
9985 info_ptr->fixed_size = RS6000_SAVE_AREA;
9986 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9987 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9988 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9991 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9992 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9994 info_ptr->spe_gp_size = 0;
9996 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9998 info_ptr->vrsave_mask = compute_vrsave_mask ();
9999 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
10003 info_ptr->vrsave_mask = 0;
10004 info_ptr->vrsave_size = 0;
10007 /* Calculate the offsets. */
10008 switch (DEFAULT_ABI)
10016 info_ptr->fp_save_offset = - info_ptr->fp_size;
10017 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10019 if (TARGET_ALTIVEC_ABI)
10021 info_ptr->vrsave_save_offset
10022 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
10024 /* Align stack so vector save area is on a quadword boundary. */
10025 if (info_ptr->altivec_size != 0)
10026 info_ptr->altivec_padding_size
10027 = 16 - (-info_ptr->vrsave_save_offset % 16);
10029 info_ptr->altivec_padding_size = 0;
10031 info_ptr->altivec_save_offset
10032 = info_ptr->vrsave_save_offset
10033 - info_ptr->altivec_padding_size
10034 - info_ptr->altivec_size;
10036 /* Adjust for AltiVec case. */
10037 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10040 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10041 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10042 info_ptr->lr_save_offset = 2*reg_size;
10046 info_ptr->fp_save_offset = - info_ptr->fp_size;
10047 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10048 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10050 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10052 /* Align stack so SPE GPR save area is aligned on a
10053 double-word boundary. */
10054 if (info_ptr->spe_gp_size != 0)
10055 info_ptr->spe_padding_size
10056 = 8 - (-info_ptr->cr_save_offset % 8);
10058 info_ptr->spe_padding_size = 0;
10060 info_ptr->spe_gp_save_offset
10061 = info_ptr->cr_save_offset
10062 - info_ptr->spe_padding_size
10063 - info_ptr->spe_gp_size;
10065 /* Adjust for SPE case. */
10066 info_ptr->toc_save_offset
10067 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10069 else if (TARGET_ALTIVEC_ABI)
10071 info_ptr->vrsave_save_offset
10072 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10074 /* Align stack so vector save area is on a quadword boundary. */
10075 if (info_ptr->altivec_size != 0)
10076 info_ptr->altivec_padding_size
10077 = 16 - (-info_ptr->vrsave_save_offset % 16);
10079 info_ptr->altivec_padding_size = 0;
10081 info_ptr->altivec_save_offset
10082 = info_ptr->vrsave_save_offset
10083 - info_ptr->altivec_padding_size
10084 - info_ptr->altivec_size;
10086 /* Adjust for AltiVec case. */
10087 info_ptr->toc_save_offset
10088 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10091 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10092 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10093 info_ptr->lr_save_offset = reg_size;
10097 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10098 + info_ptr->gp_size
10099 + info_ptr->altivec_size
10100 + info_ptr->altivec_padding_size
10101 + info_ptr->spe_gp_size
10102 + info_ptr->spe_padding_size
10104 + info_ptr->cr_size
10105 + info_ptr->lr_size
10106 + info_ptr->vrsave_size
10107 + info_ptr->toc_size,
10108 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10111 total_raw_size = (info_ptr->vars_size
10112 + info_ptr->parm_size
10113 + info_ptr->save_size
10114 + info_ptr->varargs_size
10115 + info_ptr->fixed_size);
10117 info_ptr->total_size =
10118 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10120 /* Determine if we need to allocate any stack frame:
10122 For AIX we need to push the stack if a frame pointer is needed
10123 (because the stack might be dynamically adjusted), if we are
10124 debugging, if we make calls, or if the sum of fp_save, gp_save,
10125 and local variables are more than the space needed to save all
10126 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10127 + 18*8 = 288 (GPR13 reserved).
10129 For V.4 we don't have the stack cushion that AIX uses, but assume
10130 that the debugger can handle stackless frames. */
10132 if (info_ptr->calls_p)
10133 info_ptr->push_p = 1;
10135 else if (DEFAULT_ABI == ABI_V4)
10136 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10138 else if (frame_pointer_needed)
10139 info_ptr->push_p = 1;
10141 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10142 info_ptr->push_p = 1;
10146 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10148 /* Zero offsets if we're not saving those registers. */
10149 if (info_ptr->fp_size == 0)
10150 info_ptr->fp_save_offset = 0;
10152 if (info_ptr->gp_size == 0)
10153 info_ptr->gp_save_offset = 0;
10155 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10156 info_ptr->altivec_save_offset = 0;
10158 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10159 info_ptr->vrsave_save_offset = 0;
10161 if (! TARGET_SPE_ABI
10162 || info_ptr->spe_64bit_regs_used == 0
10163 || info_ptr->spe_gp_size == 0)
10164 info_ptr->spe_gp_save_offset = 0;
10166 if (! info_ptr->lr_save_p)
10167 info_ptr->lr_save_offset = 0;
10169 if (! info_ptr->cr_save_p)
10170 info_ptr->cr_save_offset = 0;
10172 if (! info_ptr->toc_save_p)
10173 info_ptr->toc_save_offset = 0;
10178 /* Return true if the current function uses any GPRs in 64-bit SIMD
10182 spe_func_has_64bit_regs_p (void)
10186 /* Functions that save and restore all the call-saved registers will
10187 need to save/restore the registers in 64-bits. */
10188 if (current_function_calls_eh_return
10189 || current_function_calls_setjmp
10190 || current_function_has_nonlocal_goto)
10193 insns = get_insns ();
10195 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10201 i = PATTERN (insn);
10202 if (GET_CODE (i) == SET
10203 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10212 debug_stack_info (rs6000_stack_t *info)
10214 const char *abi_string;
10217 info = rs6000_stack_info ();
10219 fprintf (stderr, "\nStack information for function %s:\n",
10220 ((current_function_decl && DECL_NAME (current_function_decl))
10221 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10226 default: abi_string = "Unknown"; break;
10227 case ABI_NONE: abi_string = "NONE"; break;
10228 case ABI_AIX: abi_string = "AIX"; break;
10229 case ABI_DARWIN: abi_string = "Darwin"; break;
10230 case ABI_V4: abi_string = "V.4"; break;
10233 fprintf (stderr, "\tABI = %5s\n", abi_string);
10235 if (TARGET_ALTIVEC_ABI)
10236 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10238 if (TARGET_SPE_ABI)
10239 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10241 if (info->first_gp_reg_save != 32)
10242 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10244 if (info->first_fp_reg_save != 64)
10245 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10247 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10248 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10249 info->first_altivec_reg_save);
10251 if (info->lr_save_p)
10252 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10254 if (info->cr_save_p)
10255 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10257 if (info->toc_save_p)
10258 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10260 if (info->vrsave_mask)
10261 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10264 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10267 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10269 if (info->gp_save_offset)
10270 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10272 if (info->fp_save_offset)
10273 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10275 if (info->altivec_save_offset)
10276 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10277 info->altivec_save_offset);
10279 if (info->spe_gp_save_offset)
10280 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10281 info->spe_gp_save_offset);
10283 if (info->vrsave_save_offset)
10284 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10285 info->vrsave_save_offset);
10287 if (info->lr_save_offset)
10288 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10290 if (info->cr_save_offset)
10291 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10293 if (info->toc_save_offset)
10294 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10296 if (info->varargs_save_offset)
10297 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10299 if (info->total_size)
10300 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
10302 if (info->varargs_size)
10303 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10305 if (info->vars_size)
10306 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
10308 if (info->parm_size)
10309 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10311 if (info->fixed_size)
10312 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10315 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10317 if (info->spe_gp_size)
10318 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10321 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10323 if (info->altivec_size)
10324 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10326 if (info->vrsave_size)
10327 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10329 if (info->altivec_padding_size)
10330 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10331 info->altivec_padding_size);
10333 if (info->spe_padding_size)
10334 fprintf (stderr, "\tspe_padding_size = %5d\n",
10335 info->spe_padding_size);
10338 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10341 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10343 if (info->toc_size)
10344 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10346 if (info->save_size)
10347 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10349 if (info->reg_size != 4)
10350 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10352 fprintf (stderr, "\n");
10356 rs6000_return_addr (int count, rtx frame)
10358 /* Currently we don't optimize very well between prolog and body
10359 code and for PIC code the code can be actually quite bad, so
10360 don't try to be too clever here. */
10361 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10363 cfun->machine->ra_needs_full_frame = 1;
10370 plus_constant (copy_to_reg
10371 (gen_rtx_MEM (Pmode,
10372 memory_address (Pmode, frame))),
10373 RETURN_ADDRESS_OFFSET)));
10376 cfun->machine->ra_need_lr = 1;
10377 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10380 /* Say whether a function is a candidate for sibcall handling or not.
10381 We do not allow indirect calls to be optimized into sibling calls.
10382 Also, we can't do it if there are any vector parameters; there's
10383 nowhere to put the VRsave code so it works; note that functions with
10384 vector parameters are required to have a prototype, so the argument
10385 type info must be available here. (The tail recursion case can work
10386 with vector parameters, but there's no way to distinguish here.) */
10388 rs6000_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
10393 if (TARGET_ALTIVEC_VRSAVE)
10395 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10396 type; type = TREE_CHAIN (type))
10398 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10402 if (DEFAULT_ABI == ABI_DARWIN
10403 || (*targetm.binds_local_p) (decl))
10405 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10407 if (!lookup_attribute ("longcall", attr_list)
10408 || lookup_attribute ("shortcall", attr_list))
10416 rs6000_ra_ever_killed (void)
10422 /* Irritatingly, there are two kinds of thunks -- those created with
10423 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10424 through the regular part of the compiler. This is a very hacky
10425 way to tell them apart. */
10426 if (current_function_is_thunk && !no_new_pseudos)
10429 /* regs_ever_live has LR marked as used if any sibcalls are present,
10430 but this should not force saving and restoring in the
10431 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10432 clobbers LR, so that is inappropriate. */
10434 /* Also, the prologue can generate a store into LR that
10435 doesn't really count, like this:
10438 bcl to set PIC register
10442 When we're called from the epilogue, we need to avoid counting
10443 this as a store. */
10445 push_topmost_sequence ();
10446 top = get_insns ();
10447 pop_topmost_sequence ();
10448 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10450 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10454 if (FIND_REG_INC_NOTE (insn, reg))
10456 else if (GET_CODE (insn) == CALL_INSN
10457 && !SIBLING_CALL_P (insn))
10459 else if (set_of (reg, insn) != NULL_RTX
10460 && !prologue_epilogue_contains (insn))
10467 /* Add a REG_MAYBE_DEAD note to the insn. */
10469 rs6000_maybe_dead (rtx insn)
10471 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10476 /* Emit instructions needed to load the TOC register.
10477 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10478 a constant pool; or for SVR4 -fpic. */
10481 rs6000_emit_load_toc_table (int fromprolog)
10484 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10486 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10488 rtx temp = (fromprolog
10489 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10490 : gen_reg_rtx (Pmode));
10491 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10493 rs6000_maybe_dead (insn);
10494 insn = emit_move_insn (dest, temp);
10496 rs6000_maybe_dead (insn);
10498 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10501 rtx tempLR = (fromprolog
10502 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10503 : gen_reg_rtx (Pmode));
10504 rtx temp0 = (fromprolog
10505 ? gen_rtx_REG (Pmode, 0)
10506 : gen_reg_rtx (Pmode));
10509 /* possibly create the toc section */
10510 if (! toc_initialized)
10513 function_section (current_function_decl);
10520 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10521 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10523 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10524 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10526 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10528 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10529 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10536 static int reload_toc_labelno = 0;
10538 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10540 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10541 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10543 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10544 emit_move_insn (dest, tempLR);
10545 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10547 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10549 rs6000_maybe_dead (insn);
10551 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10553 /* This is for AIX code running in non-PIC ELF32. */
10556 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10557 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10559 insn = emit_insn (gen_elf_high (dest, realsym));
10561 rs6000_maybe_dead (insn);
10562 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10564 rs6000_maybe_dead (insn);
10566 else if (DEFAULT_ABI == ABI_AIX)
10569 insn = emit_insn (gen_load_toc_aix_si (dest));
10571 insn = emit_insn (gen_load_toc_aix_di (dest));
10573 rs6000_maybe_dead (insn);
10580 get_TOC_alias_set (void)
10582 static int set = -1;
10584 set = new_alias_set ();
10588 /* This returns nonzero if the current function uses the TOC. This is
10589 determined by the presence of (unspec ... UNSPEC_TOC) or
10590 use (unspec ... UNSPEC_TOC), which are generated by the various
10591 load_toc_* patterns. */
10598 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10601 rtx pat = PATTERN (insn);
10604 if (GET_CODE (pat) == PARALLEL)
10605 for (i = 0; i < XVECLEN (pat, 0); i++)
10607 rtx sub = XVECEXP (pat, 0, i);
10608 if (GET_CODE (sub) == USE)
10610 sub = XEXP (sub, 0);
10611 if (GET_CODE (sub) == UNSPEC
10612 && XINT (sub, 1) == UNSPEC_TOC)
10621 create_TOC_reference (rtx symbol)
10623 return gen_rtx_PLUS (Pmode,
10624 gen_rtx_REG (Pmode, TOC_REGISTER),
10625 gen_rtx_CONST (Pmode,
10626 gen_rtx_MINUS (Pmode, symbol,
10627 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10630 /* If _Unwind_* has been called from within the same module,
10631 toc register is not guaranteed to be saved to 40(1) on function
10632 entry. Save it there in that case. */
10635 rs6000_aix_emit_builtin_unwind_init (void)
10638 rtx stack_top = gen_reg_rtx (Pmode);
10639 rtx opcode_addr = gen_reg_rtx (Pmode);
10640 rtx opcode = gen_reg_rtx (SImode);
10641 rtx tocompare = gen_reg_rtx (SImode);
10642 rtx no_toc_save_needed = gen_label_rtx ();
10644 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10645 emit_move_insn (stack_top, mem);
10647 mem = gen_rtx_MEM (Pmode,
10648 gen_rtx_PLUS (Pmode, stack_top,
10649 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10650 emit_move_insn (opcode_addr, mem);
10651 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10652 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10653 : 0xE8410028, SImode));
10655 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
10656 SImode, NULL_RTX, NULL_RTX,
10657 no_toc_save_needed);
10659 mem = gen_rtx_MEM (Pmode,
10660 gen_rtx_PLUS (Pmode, stack_top,
10661 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10662 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
10663 emit_label (no_toc_save_needed);
10666 /* This ties together stack memory (MEM with an alias set of
10667 rs6000_sr_alias_set) and the change to the stack pointer. */
10670 rs6000_emit_stack_tie (void)
10672 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10674 set_mem_alias_set (mem, rs6000_sr_alias_set);
10675 emit_insn (gen_stack_tie (mem));
10678 /* Emit the correct code for allocating stack space, as insns.
10679 If COPY_R12, make sure a copy of the old frame is left in r12.
10680 The generated code may use hard register 0 as a temporary. */
10683 rs6000_emit_allocate_stack (HOST_WIDE_INT size, int copy_r12)
10686 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10687 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10688 rtx todec = GEN_INT (-size);
10690 if (current_function_limit_stack)
10692 if (REG_P (stack_limit_rtx)
10693 && REGNO (stack_limit_rtx) > 1
10694 && REGNO (stack_limit_rtx) <= 31)
10696 emit_insn (TARGET_32BIT
10697 ? gen_addsi3 (tmp_reg,
10700 : gen_adddi3 (tmp_reg,
10704 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10707 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10709 && DEFAULT_ABI == ABI_V4)
10711 rtx toload = gen_rtx_CONST (VOIDmode,
10712 gen_rtx_PLUS (Pmode,
10716 emit_insn (gen_elf_high (tmp_reg, toload));
10717 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10718 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10722 warning ("stack limit expression is not supported");
10725 if (copy_r12 || ! TARGET_UPDATE)
10726 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10732 /* Need a note here so that try_split doesn't get confused. */
10733 if (get_last_insn() == NULL_RTX)
10734 emit_note (NOTE_INSN_DELETED);
10735 insn = emit_move_insn (tmp_reg, todec);
10736 try_split (PATTERN (insn), insn, 0);
10740 insn = emit_insn (TARGET_32BIT
10741 ? gen_movsi_update (stack_reg, stack_reg,
10743 : gen_movdi_update (stack_reg, stack_reg,
10744 todec, stack_reg));
10748 insn = emit_insn (TARGET_32BIT
10749 ? gen_addsi3 (stack_reg, stack_reg, todec)
10750 : gen_adddi3 (stack_reg, stack_reg, todec));
10751 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10752 gen_rtx_REG (Pmode, 12));
10755 RTX_FRAME_RELATED_P (insn) = 1;
10757 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10758 gen_rtx_SET (VOIDmode, stack_reg,
10759 gen_rtx_PLUS (Pmode, stack_reg,
10764 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10765 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10766 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10767 deduce these equivalences by itself so it wasn't necessary to hold
10768 its hand so much. */
10771 rs6000_frame_related (rtx insn, rtx reg, HOST_WIDE_INT val,
10772 rtx reg2, rtx rreg)
10776 /* copy_rtx will not make unique copies of registers, so we need to
10777 ensure we don't have unwanted sharing here. */
10779 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10782 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10784 real = copy_rtx (PATTERN (insn));
10786 if (reg2 != NULL_RTX)
10787 real = replace_rtx (real, reg2, rreg);
10789 real = replace_rtx (real, reg,
10790 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10791 STACK_POINTER_REGNUM),
10794 /* We expect that 'real' is either a SET or a PARALLEL containing
10795 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10796 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10798 if (GET_CODE (real) == SET)
10802 temp = simplify_rtx (SET_SRC (set));
10804 SET_SRC (set) = temp;
10805 temp = simplify_rtx (SET_DEST (set));
10807 SET_DEST (set) = temp;
10808 if (GET_CODE (SET_DEST (set)) == MEM)
10810 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10812 XEXP (SET_DEST (set), 0) = temp;
10815 else if (GET_CODE (real) == PARALLEL)
10818 for (i = 0; i < XVECLEN (real, 0); i++)
10819 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10821 rtx set = XVECEXP (real, 0, i);
10823 temp = simplify_rtx (SET_SRC (set));
10825 SET_SRC (set) = temp;
10826 temp = simplify_rtx (SET_DEST (set));
10828 SET_DEST (set) = temp;
10829 if (GET_CODE (SET_DEST (set)) == MEM)
10831 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10833 XEXP (SET_DEST (set), 0) = temp;
10835 RTX_FRAME_RELATED_P (set) = 1;
10842 real = spe_synthesize_frame_save (real);
10844 RTX_FRAME_RELATED_P (insn) = 1;
10845 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10850 /* Given an SPE frame note, return a PARALLEL of SETs with the
10851 original note, plus a synthetic register save. */
10854 spe_synthesize_frame_save (rtx real)
10856 rtx synth, offset, reg, real2;
10858 if (GET_CODE (real) != SET
10859 || GET_MODE (SET_SRC (real)) != V2SImode)
10862 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10863 frame related note. The parallel contains a set of the register
10864 being saved, and another set to a synthetic register (n+1200).
10865 This is so we can differentiate between 64-bit and 32-bit saves.
10866 Words cannot describe this nastiness. */
10868 if (GET_CODE (SET_DEST (real)) != MEM
10869 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
10870 || GET_CODE (SET_SRC (real)) != REG)
10874 (set (mem (plus (reg x) (const y)))
10877 (set (mem (plus (reg x) (const y+4)))
10881 real2 = copy_rtx (real);
10882 PUT_MODE (SET_DEST (real2), SImode);
10883 reg = SET_SRC (real2);
10884 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
10885 synth = copy_rtx (real2);
10887 if (BYTES_BIG_ENDIAN)
10889 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
10890 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
10893 reg = SET_SRC (synth);
10895 synth = replace_rtx (synth, reg,
10896 gen_rtx_REG (SImode, REGNO (reg) + 1200));
10898 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
10899 synth = replace_rtx (synth, offset,
10900 GEN_INT (INTVAL (offset)
10901 + (BYTES_BIG_ENDIAN ? 0 : 4)));
10903 RTX_FRAME_RELATED_P (synth) = 1;
10904 RTX_FRAME_RELATED_P (real2) = 1;
10905 if (BYTES_BIG_ENDIAN)
10906 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
10908 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
10913 /* Returns an insn that has a vrsave set operation with the
10914 appropriate CLOBBERs. */
10917 generate_set_vrsave (rtx reg, rs6000_stack_t *info, int epiloguep)
10920 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10921 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10924 = gen_rtx_SET (VOIDmode,
10926 gen_rtx_UNSPEC_VOLATILE (SImode,
10927 gen_rtvec (2, reg, vrsave),
10932 /* We need to clobber the registers in the mask so the scheduler
10933 does not move sets to VRSAVE before sets of AltiVec registers.
10935 However, if the function receives nonlocal gotos, reload will set
10936 all call saved registers live. We will end up with:
10938 (set (reg 999) (mem))
10939 (parallel [ (set (reg vrsave) (unspec blah))
10940 (clobber (reg 999))])
10942 The clobber will cause the store into reg 999 to be dead, and
10943 flow will attempt to delete an epilogue insn. In this case, we
10944 need an unspec use/set of the register. */
10946 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
10947 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
10949 if (!epiloguep || call_used_regs [i])
10950 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
10951 gen_rtx_REG (V4SImode, i));
10954 rtx reg = gen_rtx_REG (V4SImode, i);
10957 = gen_rtx_SET (VOIDmode,
10959 gen_rtx_UNSPEC (V4SImode,
10960 gen_rtvec (1, reg), 27));
10964 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
10966 for (i = 0; i < nclobs; ++i)
10967 XVECEXP (insn, 0, i) = clobs[i];
10972 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
10973 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
10976 emit_frame_save (rtx frame_reg, rtx frame_ptr, enum machine_mode mode,
10977 unsigned int regno, int offset, int total_size)
10979 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
10980 rtx replacea, replaceb;
10982 int_rtx = GEN_INT (offset);
10984 /* Some cases that need register indexed addressing. */
10985 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
10987 && SPE_VECTOR_MODE (mode)
10988 && !SPE_CONST_OFFSET_OK (offset)))
10990 /* Whomever calls us must make sure r11 is available in the
10991 flow path of instructions in the prologue. */
10992 offset_rtx = gen_rtx_REG (Pmode, 11);
10993 emit_move_insn (offset_rtx, int_rtx);
10995 replacea = offset_rtx;
10996 replaceb = int_rtx;
11000 offset_rtx = int_rtx;
11001 replacea = NULL_RTX;
11002 replaceb = NULL_RTX;
11005 reg = gen_rtx_REG (mode, regno);
11006 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11007 mem = gen_rtx_MEM (mode, addr);
11008 set_mem_alias_set (mem, rs6000_sr_alias_set);
11010 insn = emit_move_insn (mem, reg);
11012 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11015 /* Emit an offset memory reference suitable for a frame store, while
11016 converting to a valid addressing mode. */
11019 gen_frame_mem_offset (enum machine_mode mode, rtx reg, int offset)
11021 rtx int_rtx, offset_rtx;
11023 int_rtx = GEN_INT (offset);
11025 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11027 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11028 emit_move_insn (offset_rtx, int_rtx);
11031 offset_rtx = int_rtx;
11033 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11036 /* Emit function prologue as insns. */
11039 rs6000_emit_prologue (void)
11041 rs6000_stack_t *info = rs6000_stack_info ();
11042 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11043 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11044 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11045 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11046 rtx frame_reg_rtx = sp_reg_rtx;
11047 rtx cr_save_rtx = NULL;
11049 int saving_FPRs_inline;
11050 int using_store_multiple;
11051 HOST_WIDE_INT sp_offset = 0;
11053 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11055 reg_mode = V2SImode;
11059 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11060 && (!TARGET_SPE_ABI
11061 || info->spe_64bit_regs_used == 0)
11062 && info->first_gp_reg_save < 31);
11063 saving_FPRs_inline = (info->first_fp_reg_save == 64
11064 || FP_SAVE_INLINE (info->first_fp_reg_save)
11065 || current_function_calls_eh_return
11066 || cfun->machine->ra_need_lr);
11068 /* For V.4, update stack before we do any saving and set back pointer. */
11070 && (DEFAULT_ABI == ABI_V4
11071 || current_function_calls_eh_return))
11073 if (info->total_size < 32767)
11074 sp_offset = info->total_size;
11076 frame_reg_rtx = frame_ptr_rtx;
11077 rs6000_emit_allocate_stack (info->total_size,
11078 (frame_reg_rtx != sp_reg_rtx
11079 && (info->cr_save_p
11081 || info->first_fp_reg_save < 64
11082 || info->first_gp_reg_save < 32
11084 if (frame_reg_rtx != sp_reg_rtx)
11085 rs6000_emit_stack_tie ();
11088 /* Save AltiVec registers if needed. */
11089 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11093 /* There should be a non inline version of this, for when we
11094 are saving lots of vector registers. */
11095 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11096 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11098 rtx areg, savereg, mem;
11101 offset = info->altivec_save_offset + sp_offset
11102 + 16 * (i - info->first_altivec_reg_save);
11104 savereg = gen_rtx_REG (V4SImode, i);
11106 areg = gen_rtx_REG (Pmode, 0);
11107 emit_move_insn (areg, GEN_INT (offset));
11109 /* AltiVec addressing mode is [reg+reg]. */
11110 mem = gen_rtx_MEM (V4SImode,
11111 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11113 set_mem_alias_set (mem, rs6000_sr_alias_set);
11115 insn = emit_move_insn (mem, savereg);
11117 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11118 areg, GEN_INT (offset));
11122 /* VRSAVE is a bit vector representing which AltiVec registers
11123 are used. The OS uses this to determine which vector
11124 registers to save on a context switch. We need to save
11125 VRSAVE on the stack frame, add whatever AltiVec registers we
11126 used in this function, and do the corresponding magic in the
11129 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11131 rtx reg, mem, vrsave;
11134 /* Get VRSAVE onto a GPR. */
11135 reg = gen_rtx_REG (SImode, 12);
11136 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11138 emit_insn (gen_get_vrsave_internal (reg));
11140 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11143 offset = info->vrsave_save_offset + sp_offset;
11145 = gen_rtx_MEM (SImode,
11146 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11147 set_mem_alias_set (mem, rs6000_sr_alias_set);
11148 insn = emit_move_insn (mem, reg);
11150 /* Include the registers in the mask. */
11151 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11153 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11156 /* If we use the link register, get it into r0. */
11157 if (info->lr_save_p)
11158 emit_move_insn (gen_rtx_REG (Pmode, 0),
11159 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11161 /* If we need to save CR, put it into r12. */
11162 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11164 cr_save_rtx = gen_rtx_REG (SImode, 12);
11165 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11168 /* Do any required saving of fpr's. If only one or two to save, do
11169 it ourselves. Otherwise, call function. */
11170 if (saving_FPRs_inline)
11173 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11174 if ((regs_ever_live[info->first_fp_reg_save+i]
11175 && ! call_used_regs[info->first_fp_reg_save+i]))
11176 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11177 info->first_fp_reg_save + i,
11178 info->fp_save_offset + sp_offset + 8 * i,
11181 else if (info->first_fp_reg_save != 64)
11185 const char *alloc_rname;
11187 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11189 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11190 gen_rtx_REG (Pmode,
11191 LINK_REGISTER_REGNUM));
11192 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11193 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11194 alloc_rname = ggc_strdup (rname);
11195 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11196 gen_rtx_SYMBOL_REF (Pmode,
11198 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11200 rtx addr, reg, mem;
11201 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11202 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11203 GEN_INT (info->fp_save_offset
11204 + sp_offset + 8*i));
11205 mem = gen_rtx_MEM (DFmode, addr);
11206 set_mem_alias_set (mem, rs6000_sr_alias_set);
11208 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11210 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11211 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11212 NULL_RTX, NULL_RTX);
11215 /* Save GPRs. This is done as a PARALLEL if we are using
11216 the store-multiple instructions. */
11217 if (using_store_multiple)
11221 p = rtvec_alloc (32 - info->first_gp_reg_save);
11222 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11224 rtx addr, reg, mem;
11225 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11226 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11227 GEN_INT (info->gp_save_offset
11230 mem = gen_rtx_MEM (reg_mode, addr);
11231 set_mem_alias_set (mem, rs6000_sr_alias_set);
11233 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11235 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11236 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11237 NULL_RTX, NULL_RTX);
11242 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11243 if ((regs_ever_live[info->first_gp_reg_save+i]
11244 && ! call_used_regs[info->first_gp_reg_save+i])
11245 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11246 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11247 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11249 rtx addr, reg, mem;
11250 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11252 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11254 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11257 if (!SPE_CONST_OFFSET_OK (offset))
11259 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11260 emit_move_insn (b, GEN_INT (offset));
11263 b = GEN_INT (offset);
11265 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11266 mem = gen_rtx_MEM (V2SImode, addr);
11267 set_mem_alias_set (mem, rs6000_sr_alias_set);
11268 insn = emit_move_insn (mem, reg);
11270 if (GET_CODE (b) == CONST_INT)
11271 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11272 NULL_RTX, NULL_RTX);
11274 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11275 b, GEN_INT (offset));
11279 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11280 GEN_INT (info->gp_save_offset
11283 mem = gen_rtx_MEM (reg_mode, addr);
11284 set_mem_alias_set (mem, rs6000_sr_alias_set);
11286 insn = emit_move_insn (mem, reg);
11287 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11288 NULL_RTX, NULL_RTX);
11293 /* ??? There's no need to emit actual instructions here, but it's the
11294 easiest way to get the frame unwind information emitted. */
11295 if (current_function_calls_eh_return)
11297 unsigned int i, regno;
11299 /* In AIX ABI we need to pretend we save r2 here. */
11302 rtx addr, reg, mem;
11304 reg = gen_rtx_REG (reg_mode, 2);
11305 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11306 GEN_INT (sp_offset + 5 * reg_size));
11307 mem = gen_rtx_MEM (reg_mode, addr);
11308 set_mem_alias_set (mem, rs6000_sr_alias_set);
11310 insn = emit_move_insn (mem, reg);
11311 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11312 NULL_RTX, NULL_RTX);
11313 PATTERN (insn) = gen_blockage ();
11318 regno = EH_RETURN_DATA_REGNO (i);
11319 if (regno == INVALID_REGNUM)
11322 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11323 info->ehrd_offset + sp_offset
11324 + reg_size * (int) i,
11329 /* Save lr if we used it. */
11330 if (info->lr_save_p)
11332 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11333 GEN_INT (info->lr_save_offset + sp_offset));
11334 rtx reg = gen_rtx_REG (Pmode, 0);
11335 rtx mem = gen_rtx_MEM (Pmode, addr);
11336 /* This should not be of rs6000_sr_alias_set, because of
11337 __builtin_return_address. */
11339 insn = emit_move_insn (mem, reg);
11340 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11341 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11344 /* Save CR if we use any that must be preserved. */
11345 if (info->cr_save_p)
11347 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11348 GEN_INT (info->cr_save_offset + sp_offset));
11349 rtx mem = gen_rtx_MEM (SImode, addr);
11351 set_mem_alias_set (mem, rs6000_sr_alias_set);
11353 /* If r12 was used to hold the original sp, copy cr into r0 now
11355 if (REGNO (frame_reg_rtx) == 12)
11357 cr_save_rtx = gen_rtx_REG (SImode, 0);
11358 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11360 insn = emit_move_insn (mem, cr_save_rtx);
11362 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11363 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11364 But that's OK. All we have to do is specify that _one_ condition
11365 code register is saved in this stack slot. The thrower's epilogue
11366 will then restore all the call-saved registers.
11367 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11368 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11369 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11372 /* Update stack and set back pointer unless this is V.4,
11373 for which it was done previously. */
11375 && !(DEFAULT_ABI == ABI_V4 || current_function_calls_eh_return))
11376 rs6000_emit_allocate_stack (info->total_size, FALSE);
11378 /* Set frame pointer, if needed. */
11379 if (frame_pointer_needed)
11381 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11383 RTX_FRAME_RELATED_P (insn) = 1;
11386 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11387 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11388 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11389 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11391 /* If emit_load_toc_table will use the link register, we need to save
11392 it. We use R12 for this purpose because emit_load_toc_table
11393 can use register 0. This allows us to use a plain 'blr' to return
11394 from the procedure more often. */
11395 int save_LR_around_toc_setup = (TARGET_ELF
11396 && DEFAULT_ABI != ABI_AIX
11398 && ! info->lr_save_p
11399 && EXIT_BLOCK_PTR->pred != NULL);
11400 if (save_LR_around_toc_setup)
11402 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11403 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11404 rs6000_emit_load_toc_table (TRUE);
11405 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11408 rs6000_emit_load_toc_table (TRUE);
11412 if (DEFAULT_ABI == ABI_DARWIN
11413 && flag_pic && current_function_uses_pic_offset_table)
11415 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11416 const char *picbase = machopic_function_base_name ();
11417 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11419 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11421 rs6000_maybe_dead (
11422 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11423 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11428 /* Write function prologue. */
11431 rs6000_output_function_prologue (FILE *file,
11432 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
11434 rs6000_stack_t *info = rs6000_stack_info ();
11436 if (TARGET_DEBUG_STACK)
11437 debug_stack_info (info);
11439 /* Write .extern for any function we will call to save and restore
11441 if (info->first_fp_reg_save < 64
11442 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11443 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11444 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11445 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11446 RESTORE_FP_SUFFIX);
11448 /* Write .extern for AIX common mode routines, if needed. */
11449 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11451 fputs ("\t.extern __mulh\n", file);
11452 fputs ("\t.extern __mull\n", file);
11453 fputs ("\t.extern __divss\n", file);
11454 fputs ("\t.extern __divus\n", file);
11455 fputs ("\t.extern __quoss\n", file);
11456 fputs ("\t.extern __quous\n", file);
11457 common_mode_defined = 1;
11460 if (! HAVE_prologue)
11464 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11465 the "toplevel" insn chain. */
11466 emit_note (NOTE_INSN_DELETED);
11467 rs6000_emit_prologue ();
11468 emit_note (NOTE_INSN_DELETED);
11470 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11474 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11476 INSN_ADDRESSES_NEW (insn, addr);
11481 if (TARGET_DEBUG_STACK)
11482 debug_rtx_list (get_insns (), 100);
11483 final (get_insns (), file, FALSE, FALSE);
11487 rs6000_pic_labelno++;
11490 /* Emit function epilogue as insns.
11492 At present, dwarf2out_frame_debug_expr doesn't understand
11493 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11494 anywhere in the epilogue. Most of the insns below would in any case
11495 need special notes to explain where r11 is in relation to the stack. */
11498 rs6000_emit_epilogue (int sibcall)
11500 rs6000_stack_t *info;
11501 int restoring_FPRs_inline;
11502 int using_load_multiple;
11503 int using_mfcr_multiple;
11504 int use_backchain_to_restore_sp;
11506 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11507 rtx frame_reg_rtx = sp_reg_rtx;
11508 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11509 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11512 info = rs6000_stack_info ();
11514 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11516 reg_mode = V2SImode;
11520 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11521 && (!TARGET_SPE_ABI
11522 || info->spe_64bit_regs_used == 0)
11523 && info->first_gp_reg_save < 31);
11524 restoring_FPRs_inline = (sibcall
11525 || current_function_calls_eh_return
11526 || info->first_fp_reg_save == 64
11527 || FP_SAVE_INLINE (info->first_fp_reg_save));
11528 use_backchain_to_restore_sp = (frame_pointer_needed
11529 || current_function_calls_alloca
11530 || info->total_size > 32767);
11531 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11532 || rs6000_cpu == PROCESSOR_PPC603
11533 || rs6000_cpu == PROCESSOR_PPC750
11536 /* If we have a frame pointer, a call to alloca, or a large stack
11537 frame, restore the old stack pointer using the backchain. Otherwise,
11538 we know what size to update it with. */
11539 if (use_backchain_to_restore_sp)
11541 /* Under V.4, don't reset the stack pointer until after we're done
11542 loading the saved registers. */
11543 if (DEFAULT_ABI == ABI_V4)
11544 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11546 emit_move_insn (frame_reg_rtx,
11547 gen_rtx_MEM (Pmode, sp_reg_rtx));
11550 else if (info->push_p)
11552 if (DEFAULT_ABI == ABI_V4
11553 || current_function_calls_eh_return)
11554 sp_offset = info->total_size;
11557 emit_insn (TARGET_32BIT
11558 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11559 GEN_INT (info->total_size))
11560 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11561 GEN_INT (info->total_size)));
11565 /* Restore AltiVec registers if needed. */
11566 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11570 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11571 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11573 rtx addr, areg, mem;
11575 areg = gen_rtx_REG (Pmode, 0);
11577 (areg, GEN_INT (info->altivec_save_offset
11579 + 16 * (i - info->first_altivec_reg_save)));
11581 /* AltiVec addressing mode is [reg+reg]. */
11582 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11583 mem = gen_rtx_MEM (V4SImode, addr);
11584 set_mem_alias_set (mem, rs6000_sr_alias_set);
11586 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11590 /* Restore VRSAVE if needed. */
11591 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11593 rtx addr, mem, reg;
11595 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11596 GEN_INT (info->vrsave_save_offset + sp_offset));
11597 mem = gen_rtx_MEM (SImode, addr);
11598 set_mem_alias_set (mem, rs6000_sr_alias_set);
11599 reg = gen_rtx_REG (SImode, 12);
11600 emit_move_insn (reg, mem);
11602 emit_insn (generate_set_vrsave (reg, info, 1));
11605 /* Get the old lr if we saved it. */
11606 if (info->lr_save_p)
11608 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11609 info->lr_save_offset + sp_offset);
11611 set_mem_alias_set (mem, rs6000_sr_alias_set);
11613 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11616 /* Get the old cr if we saved it. */
11617 if (info->cr_save_p)
11619 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11620 GEN_INT (info->cr_save_offset + sp_offset));
11621 rtx mem = gen_rtx_MEM (SImode, addr);
11623 set_mem_alias_set (mem, rs6000_sr_alias_set);
11625 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11628 /* Set LR here to try to overlap restores below. */
11629 if (info->lr_save_p)
11630 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11631 gen_rtx_REG (Pmode, 0));
11633 /* Load exception handler data registers, if needed. */
11634 if (current_function_calls_eh_return)
11636 unsigned int i, regno;
11640 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11641 GEN_INT (sp_offset + 5 * reg_size));
11642 rtx mem = gen_rtx_MEM (reg_mode, addr);
11644 set_mem_alias_set (mem, rs6000_sr_alias_set);
11646 emit_move_insn (gen_rtx_REG (reg_mode, 2), mem);
11653 regno = EH_RETURN_DATA_REGNO (i);
11654 if (regno == INVALID_REGNUM)
11657 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11658 info->ehrd_offset + sp_offset
11659 + reg_size * (int) i);
11660 set_mem_alias_set (mem, rs6000_sr_alias_set);
11662 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11666 /* Restore GPRs. This is done as a PARALLEL if we are using
11667 the load-multiple instructions. */
11668 if (using_load_multiple)
11671 p = rtvec_alloc (32 - info->first_gp_reg_save);
11672 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11674 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11675 GEN_INT (info->gp_save_offset
11678 rtx mem = gen_rtx_MEM (reg_mode, addr);
11680 set_mem_alias_set (mem, rs6000_sr_alias_set);
11683 gen_rtx_SET (VOIDmode,
11684 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11687 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11690 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11691 if ((regs_ever_live[info->first_gp_reg_save+i]
11692 && ! call_used_regs[info->first_gp_reg_save+i])
11693 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11694 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11695 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11697 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11698 GEN_INT (info->gp_save_offset
11701 rtx mem = gen_rtx_MEM (reg_mode, addr);
11703 /* Restore 64-bit quantities for SPE. */
11704 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11706 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11709 if (!SPE_CONST_OFFSET_OK (offset))
11711 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11712 emit_move_insn (b, GEN_INT (offset));
11715 b = GEN_INT (offset);
11717 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11718 mem = gen_rtx_MEM (V2SImode, addr);
11721 set_mem_alias_set (mem, rs6000_sr_alias_set);
11723 emit_move_insn (gen_rtx_REG (reg_mode,
11724 info->first_gp_reg_save + i), mem);
11727 /* Restore fpr's if we need to do it without calling a function. */
11728 if (restoring_FPRs_inline)
11729 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11730 if ((regs_ever_live[info->first_fp_reg_save+i]
11731 && ! call_used_regs[info->first_fp_reg_save+i]))
11734 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11735 GEN_INT (info->fp_save_offset
11738 mem = gen_rtx_MEM (DFmode, addr);
11739 set_mem_alias_set (mem, rs6000_sr_alias_set);
11741 emit_move_insn (gen_rtx_REG (DFmode,
11742 info->first_fp_reg_save + i),
11746 /* If we saved cr, restore it here. Just those that were used. */
11747 if (info->cr_save_p)
11749 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11752 if (using_mfcr_multiple)
11754 for (i = 0; i < 8; i++)
11755 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11761 if (using_mfcr_multiple && count > 1)
11766 p = rtvec_alloc (count);
11769 for (i = 0; i < 8; i++)
11770 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11772 rtvec r = rtvec_alloc (2);
11773 RTVEC_ELT (r, 0) = r12_rtx;
11774 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11775 RTVEC_ELT (p, ndx) =
11776 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11777 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
11780 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11785 for (i = 0; i < 8; i++)
11786 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11788 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11794 /* If this is V.4, unwind the stack pointer after all of the loads
11795 have been done. We need to emit a block here so that sched
11796 doesn't decide to move the sp change before the register restores
11797 (which may not have any obvious dependency on the stack). This
11798 doesn't hurt performance, because there is no scheduling that can
11799 be done after this point. */
11800 if (DEFAULT_ABI == ABI_V4
11801 || current_function_calls_eh_return)
11803 if (frame_reg_rtx != sp_reg_rtx)
11804 rs6000_emit_stack_tie ();
11806 if (use_backchain_to_restore_sp)
11808 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11810 else if (sp_offset != 0)
11812 emit_insn (TARGET_32BIT
11813 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11814 GEN_INT (sp_offset))
11815 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11816 GEN_INT (sp_offset)));
11820 if (current_function_calls_eh_return)
11822 rtx sa = EH_RETURN_STACKADJ_RTX;
11823 emit_insn (TARGET_32BIT
11824 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11825 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11831 if (! restoring_FPRs_inline)
11832 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11834 p = rtvec_alloc (2);
11836 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11837 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11838 gen_rtx_REG (Pmode,
11839 LINK_REGISTER_REGNUM));
11841 /* If we have to restore more than two FP registers, branch to the
11842 restore function. It will return to our caller. */
11843 if (! restoring_FPRs_inline)
11847 const char *alloc_rname;
11849 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11850 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11851 alloc_rname = ggc_strdup (rname);
11852 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11853 gen_rtx_SYMBOL_REF (Pmode,
11856 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11859 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11860 GEN_INT (info->fp_save_offset + 8*i));
11861 mem = gen_rtx_MEM (DFmode, addr);
11862 set_mem_alias_set (mem, rs6000_sr_alias_set);
11864 RTVEC_ELT (p, i+3) =
11865 gen_rtx_SET (VOIDmode,
11866 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11871 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11875 /* Write function epilogue. */
11878 rs6000_output_function_epilogue (FILE *file,
11879 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
11881 rs6000_stack_t *info = rs6000_stack_info ();
11883 if (! HAVE_epilogue)
11885 rtx insn = get_last_insn ();
11886 /* If the last insn was a BARRIER, we don't have to write anything except
11887 the trace table. */
11888 if (GET_CODE (insn) == NOTE)
11889 insn = prev_nonnote_insn (insn);
11890 if (insn == 0 || GET_CODE (insn) != BARRIER)
11892 /* This is slightly ugly, but at least we don't have two
11893 copies of the epilogue-emitting code. */
11896 /* A NOTE_INSN_DELETED is supposed to be at the start
11897 and end of the "toplevel" insn chain. */
11898 emit_note (NOTE_INSN_DELETED);
11899 rs6000_emit_epilogue (FALSE);
11900 emit_note (NOTE_INSN_DELETED);
11902 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11906 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11908 INSN_ADDRESSES_NEW (insn, addr);
11913 if (TARGET_DEBUG_STACK)
11914 debug_rtx_list (get_insns (), 100);
11915 final (get_insns (), file, FALSE, FALSE);
11920 #if TARGET_OBJECT_FORMAT == OBJECT_MACHO
11921 /* Mach-O doesn't support labels at the end of objects, so if
11922 it looks like we might want one, insert a NOP. */
11924 rtx insn = get_last_insn ();
11927 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED_LABEL)
11928 insn = PREV_INSN (insn);
11932 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED_LABEL)))
11933 fputs ("\tnop\n", file);
11937 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11940 We don't output a traceback table if -finhibit-size-directive was
11941 used. The documentation for -finhibit-size-directive reads
11942 ``don't output a @code{.size} assembler directive, or anything
11943 else that would cause trouble if the function is split in the
11944 middle, and the two halves are placed at locations far apart in
11945 memory.'' The traceback table has this property, since it
11946 includes the offset from the start of the function to the
11947 traceback table itself.
11949 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11950 different traceback table. */
11951 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11952 && rs6000_traceback != traceback_none)
11954 const char *fname = NULL;
11955 const char *language_string = lang_hooks.name;
11956 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11958 int optional_tbtab;
11960 if (rs6000_traceback == traceback_full)
11961 optional_tbtab = 1;
11962 else if (rs6000_traceback == traceback_part)
11963 optional_tbtab = 0;
11965 optional_tbtab = !optimize_size && !TARGET_ELF;
11967 if (optional_tbtab)
11969 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
11970 while (*fname == '.') /* V.4 encodes . in the name */
11973 /* Need label immediately before tbtab, so we can compute
11974 its offset from the function start. */
11975 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
11976 ASM_OUTPUT_LABEL (file, fname);
11979 /* The .tbtab pseudo-op can only be used for the first eight
11980 expressions, since it can't handle the possibly variable
11981 length fields that follow. However, if you omit the optional
11982 fields, the assembler outputs zeros for all optional fields
11983 anyways, giving each variable length field is minimum length
11984 (as defined in sys/debug.h). Thus we can not use the .tbtab
11985 pseudo-op at all. */
11987 /* An all-zero word flags the start of the tbtab, for debuggers
11988 that have to find it by searching forward from the entry
11989 point or from the current pc. */
11990 fputs ("\t.long 0\n", file);
11992 /* Tbtab format type. Use format type 0. */
11993 fputs ("\t.byte 0,", file);
11995 /* Language type. Unfortunately, there doesn't seem to be any
11996 official way to get this info, so we use language_string. C
11997 is 0. C++ is 9. No number defined for Obj-C, so use the
11998 value for C for now. There is no official value for Java,
11999 although IBM appears to be using 13. There is no official value
12000 for Chill, so we've chosen 44 pseudo-randomly. */
12001 if (! strcmp (language_string, "GNU C")
12002 || ! strcmp (language_string, "GNU Objective-C"))
12004 else if (! strcmp (language_string, "GNU F77"))
12006 else if (! strcmp (language_string, "GNU Ada"))
12008 else if (! strcmp (language_string, "GNU Pascal"))
12010 else if (! strcmp (language_string, "GNU C++"))
12012 else if (! strcmp (language_string, "GNU Java"))
12014 else if (! strcmp (language_string, "GNU CHILL"))
12018 fprintf (file, "%d,", i);
12020 /* 8 single bit fields: global linkage (not set for C extern linkage,
12021 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12022 from start of procedure stored in tbtab, internal function, function
12023 has controlled storage, function has no toc, function uses fp,
12024 function logs/aborts fp operations. */
12025 /* Assume that fp operations are used if any fp reg must be saved. */
12026 fprintf (file, "%d,",
12027 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12029 /* 6 bitfields: function is interrupt handler, name present in
12030 proc table, function calls alloca, on condition directives
12031 (controls stack walks, 3 bits), saves condition reg, saves
12033 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12034 set up as a frame pointer, even when there is no alloca call. */
12035 fprintf (file, "%d,",
12036 ((optional_tbtab << 6)
12037 | ((optional_tbtab & frame_pointer_needed) << 5)
12038 | (info->cr_save_p << 1)
12039 | (info->lr_save_p)));
12041 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12043 fprintf (file, "%d,",
12044 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12046 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12047 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12049 if (optional_tbtab)
12051 /* Compute the parameter info from the function decl argument
12054 int next_parm_info_bit = 31;
12056 for (decl = DECL_ARGUMENTS (current_function_decl);
12057 decl; decl = TREE_CHAIN (decl))
12059 rtx parameter = DECL_INCOMING_RTL (decl);
12060 enum machine_mode mode = GET_MODE (parameter);
12062 if (GET_CODE (parameter) == REG)
12064 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12070 if (mode == SFmode)
12072 else if (mode == DFmode || mode == TFmode)
12077 /* If only one bit will fit, don't or in this entry. */
12078 if (next_parm_info_bit > 0)
12079 parm_info |= (bits << (next_parm_info_bit - 1));
12080 next_parm_info_bit -= 2;
12084 fixed_parms += ((GET_MODE_SIZE (mode)
12085 + (UNITS_PER_WORD - 1))
12087 next_parm_info_bit -= 1;
12093 /* Number of fixed point parameters. */
12094 /* This is actually the number of words of fixed point parameters; thus
12095 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12096 fprintf (file, "%d,", fixed_parms);
12098 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12100 /* This is actually the number of fp registers that hold parameters;
12101 and thus the maximum value is 13. */
12102 /* Set parameters on stack bit if parameters are not in their original
12103 registers, regardless of whether they are on the stack? Xlc
12104 seems to set the bit when not optimizing. */
12105 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12107 if (! optional_tbtab)
12110 /* Optional fields follow. Some are variable length. */
12112 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12113 11 double float. */
12114 /* There is an entry for each parameter in a register, in the order that
12115 they occur in the parameter list. Any intervening arguments on the
12116 stack are ignored. If the list overflows a long (max possible length
12117 34 bits) then completely leave off all elements that don't fit. */
12118 /* Only emit this long if there was at least one parameter. */
12119 if (fixed_parms || float_parms)
12120 fprintf (file, "\t.long %d\n", parm_info);
12122 /* Offset from start of code to tb table. */
12123 fputs ("\t.long ", file);
12124 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12126 RS6000_OUTPUT_BASENAME (file, fname);
12128 assemble_name (file, fname);
12130 fputs ("-.", file);
12132 RS6000_OUTPUT_BASENAME (file, fname);
12134 assemble_name (file, fname);
12138 /* Interrupt handler mask. */
12139 /* Omit this long, since we never set the interrupt handler bit
12142 /* Number of CTL (controlled storage) anchors. */
12143 /* Omit this long, since the has_ctl bit is never set above. */
12145 /* Displacement into stack of each CTL anchor. */
12146 /* Omit this list of longs, because there are no CTL anchors. */
12148 /* Length of function name. */
12151 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12153 /* Function name. */
12154 assemble_string (fname, strlen (fname));
12156 /* Register for alloca automatic storage; this is always reg 31.
12157 Only emit this if the alloca bit was set above. */
12158 if (frame_pointer_needed)
12159 fputs ("\t.byte 31\n", file);
12161 fputs ("\t.align 2\n", file);
12165 /* A C compound statement that outputs the assembler code for a thunk
12166 function, used to implement C++ virtual function calls with
12167 multiple inheritance. The thunk acts as a wrapper around a virtual
12168 function, adjusting the implicit object parameter before handing
12169 control off to the real function.
12171 First, emit code to add the integer DELTA to the location that
12172 contains the incoming first argument. Assume that this argument
12173 contains a pointer, and is the one used to pass the `this' pointer
12174 in C++. This is the incoming argument *before* the function
12175 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12176 values of all other incoming arguments.
12178 After the addition, emit code to jump to FUNCTION, which is a
12179 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12180 not touch the return address. Hence returning from FUNCTION will
12181 return to whoever called the current `thunk'.
12183 The effect must be as if FUNCTION had been called directly with the
12184 adjusted first argument. This macro is responsible for emitting
12185 all of the code for a thunk function; output_function_prologue()
12186 and output_function_epilogue() are not invoked.
12188 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12189 been extracted from it.) It might possibly be useful on some
12190 targets, but probably not.
12192 If you do not define this macro, the target-independent code in the
12193 C++ frontend will generate a less efficient heavyweight thunk that
12194 calls FUNCTION instead of jumping to it. The generic approach does
12195 not support varargs. */
12198 rs6000_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
12199 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
12202 rtx this, insn, funexp;
12204 reload_completed = 1;
12205 epilogue_completed = 1;
12206 no_new_pseudos = 1;
12208 /* Mark the end of the (empty) prologue. */
12209 emit_note (NOTE_INSN_PROLOGUE_END);
12211 /* Find the "this" pointer. If the function returns a structure,
12212 the structure return pointer is in r3. */
12213 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
12214 this = gen_rtx_REG (Pmode, 4);
12216 this = gen_rtx_REG (Pmode, 3);
12218 /* Apply the constant offset, if required. */
12221 rtx delta_rtx = GEN_INT (delta);
12222 emit_insn (TARGET_32BIT
12223 ? gen_addsi3 (this, this, delta_rtx)
12224 : gen_adddi3 (this, this, delta_rtx));
12227 /* Apply the offset from the vtable, if required. */
12230 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12231 rtx tmp = gen_rtx_REG (Pmode, 12);
12233 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12234 if (((unsigned HOST_WIDE_INT) vcall_offset) + 0x8000 >= 0x10000)
12236 emit_insn (TARGET_32BIT
12237 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12238 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12239 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12243 rtx loc = gen_rtx_PLUS (Pmode, tmp, vcall_offset_rtx);
12245 emit_move_insn (tmp, gen_rtx_MEM (Pmode, loc));
12247 emit_insn (TARGET_32BIT
12248 ? gen_addsi3 (this, this, tmp)
12249 : gen_adddi3 (this, this, tmp));
12252 /* Generate a tail call to the target function. */
12253 if (!TREE_USED (function))
12255 assemble_external (function);
12256 TREE_USED (function) = 1;
12258 funexp = XEXP (DECL_RTL (function), 0);
12259 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12262 if (MACHOPIC_INDIRECT)
12263 funexp = machopic_indirect_call_target (funexp);
12266 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12267 generate sibcall RTL explicitly to avoid constraint abort. */
12268 insn = emit_call_insn (
12269 gen_rtx_PARALLEL (VOIDmode,
12271 gen_rtx_CALL (VOIDmode,
12272 funexp, const0_rtx),
12273 gen_rtx_USE (VOIDmode, const0_rtx),
12274 gen_rtx_USE (VOIDmode,
12275 gen_rtx_REG (SImode,
12276 LINK_REGISTER_REGNUM)),
12277 gen_rtx_RETURN (VOIDmode))));
12278 SIBLING_CALL_P (insn) = 1;
12281 /* Run just enough of rest_of_compilation to get the insns emitted.
12282 There's not really enough bulk here to make other passes such as
12283 instruction scheduling worth while. Note that use_thunk calls
12284 assemble_start_function and assemble_end_function. */
12285 insn = get_insns ();
12286 insn_locators_initialize ();
12287 shorten_branches (insn);
12288 final_start_function (insn, file, 1);
12289 final (insn, file, 1, 0);
12290 final_end_function ();
12292 reload_completed = 0;
12293 epilogue_completed = 0;
12294 no_new_pseudos = 0;
12297 /* A quick summary of the various types of 'constant-pool tables'
12300 Target Flags Name One table per
12301 AIX (none) AIX TOC object file
12302 AIX -mfull-toc AIX TOC object file
12303 AIX -mminimal-toc AIX minimal TOC translation unit
12304 SVR4/EABI (none) SVR4 SDATA object file
12305 SVR4/EABI -fpic SVR4 pic object file
12306 SVR4/EABI -fPIC SVR4 PIC translation unit
12307 SVR4/EABI -mrelocatable EABI TOC function
12308 SVR4/EABI -maix AIX TOC object file
12309 SVR4/EABI -maix -mminimal-toc
12310 AIX minimal TOC translation unit
12312 Name Reg. Set by entries contains:
12313 made by addrs? fp? sum?
12315 AIX TOC 2 crt0 as Y option option
12316 AIX minimal TOC 30 prolog gcc Y Y option
12317 SVR4 SDATA 13 crt0 gcc N Y N
12318 SVR4 pic 30 prolog ld Y not yet N
12319 SVR4 PIC 30 prolog gcc Y option option
12320 EABI TOC 30 prolog gcc Y option option
12324 /* Hash functions for the hash table. */
12327 rs6000_hash_constant (rtx k)
12329 enum rtx_code code = GET_CODE (k);
12330 enum machine_mode mode = GET_MODE (k);
12331 unsigned result = (code << 3) ^ mode;
12332 const char *format;
12335 format = GET_RTX_FORMAT (code);
12336 flen = strlen (format);
12342 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12345 if (mode != VOIDmode)
12346 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12358 for (; fidx < flen; fidx++)
12359 switch (format[fidx])
12364 const char *str = XSTR (k, fidx);
12365 len = strlen (str);
12366 result = result * 613 + len;
12367 for (i = 0; i < len; i++)
12368 result = result * 613 + (unsigned) str[i];
12373 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12377 result = result * 613 + (unsigned) XINT (k, fidx);
12380 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12381 result = result * 613 + (unsigned) XWINT (k, fidx);
12385 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12386 result = result * 613 + (unsigned) (XWINT (k, fidx)
12400 toc_hash_function (const void *hash_entry)
12402 const struct toc_hash_struct *thc =
12403 (const struct toc_hash_struct *) hash_entry;
12404 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12407 /* Compare H1 and H2 for equivalence. */
12410 toc_hash_eq (const void *h1, const void *h2)
12412 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12413 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12415 if (((const struct toc_hash_struct *) h1)->key_mode
12416 != ((const struct toc_hash_struct *) h2)->key_mode)
12419 return rtx_equal_p (r1, r2);
12422 /* These are the names given by the C++ front-end to vtables, and
12423 vtable-like objects. Ideally, this logic should not be here;
12424 instead, there should be some programmatic way of inquiring as
12425 to whether or not an object is a vtable. */
12427 #define VTABLE_NAME_P(NAME) \
12428 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12429 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12430 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12431 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12434 rs6000_output_symbol_ref (FILE *file, rtx x)
12436 /* Currently C++ toc references to vtables can be emitted before it
12437 is decided whether the vtable is public or private. If this is
12438 the case, then the linker will eventually complain that there is
12439 a reference to an unknown section. Thus, for vtables only,
12440 we emit the TOC reference to reference the symbol and not the
12442 const char *name = XSTR (x, 0);
12444 if (VTABLE_NAME_P (name))
12446 RS6000_OUTPUT_BASENAME (file, name);
12449 assemble_name (file, name);
12452 /* Output a TOC entry. We derive the entry name from what is being
12456 output_toc (FILE *file, rtx x, int labelno, enum machine_mode mode)
12459 const char *name = buf;
12460 const char *real_name;
12467 /* When the linker won't eliminate them, don't output duplicate
12468 TOC entries (this happens on AIX if there is any kind of TOC,
12469 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12471 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
12473 struct toc_hash_struct *h;
12476 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12477 time because GGC is not initialized at that point. */
12478 if (toc_hash_table == NULL)
12479 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
12480 toc_hash_eq, NULL);
12482 h = ggc_alloc (sizeof (*h));
12484 h->key_mode = mode;
12485 h->labelno = labelno;
12487 found = htab_find_slot (toc_hash_table, h, 1);
12488 if (*found == NULL)
12490 else /* This is indeed a duplicate.
12491 Set this label equal to that label. */
12493 fputs ("\t.set ", file);
12494 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12495 fprintf (file, "%d,", labelno);
12496 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12497 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12503 /* If we're going to put a double constant in the TOC, make sure it's
12504 aligned properly when strict alignment is on. */
12505 if (GET_CODE (x) == CONST_DOUBLE
12506 && STRICT_ALIGNMENT
12507 && GET_MODE_BITSIZE (mode) >= 64
12508 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12509 ASM_OUTPUT_ALIGN (file, 3);
12512 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12514 /* Handle FP constants specially. Note that if we have a minimal
12515 TOC, things we put here aren't actually in the TOC, so we can allow
12517 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12519 REAL_VALUE_TYPE rv;
12522 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12523 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12527 if (TARGET_MINIMAL_TOC)
12528 fputs (DOUBLE_INT_ASM_OP, file);
12530 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12531 k[0] & 0xffffffff, k[1] & 0xffffffff,
12532 k[2] & 0xffffffff, k[3] & 0xffffffff);
12533 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12534 k[0] & 0xffffffff, k[1] & 0xffffffff,
12535 k[2] & 0xffffffff, k[3] & 0xffffffff);
12540 if (TARGET_MINIMAL_TOC)
12541 fputs ("\t.long ", file);
12543 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12544 k[0] & 0xffffffff, k[1] & 0xffffffff,
12545 k[2] & 0xffffffff, k[3] & 0xffffffff);
12546 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12547 k[0] & 0xffffffff, k[1] & 0xffffffff,
12548 k[2] & 0xffffffff, k[3] & 0xffffffff);
12552 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12554 REAL_VALUE_TYPE rv;
12557 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12558 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12562 if (TARGET_MINIMAL_TOC)
12563 fputs (DOUBLE_INT_ASM_OP, file);
12565 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12566 k[0] & 0xffffffff, k[1] & 0xffffffff);
12567 fprintf (file, "0x%lx%08lx\n",
12568 k[0] & 0xffffffff, k[1] & 0xffffffff);
12573 if (TARGET_MINIMAL_TOC)
12574 fputs ("\t.long ", file);
12576 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12577 k[0] & 0xffffffff, k[1] & 0xffffffff);
12578 fprintf (file, "0x%lx,0x%lx\n",
12579 k[0] & 0xffffffff, k[1] & 0xffffffff);
12583 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12585 REAL_VALUE_TYPE rv;
12588 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12589 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12593 if (TARGET_MINIMAL_TOC)
12594 fputs (DOUBLE_INT_ASM_OP, file);
12596 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12597 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12602 if (TARGET_MINIMAL_TOC)
12603 fputs ("\t.long ", file);
12605 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12606 fprintf (file, "0x%lx\n", l & 0xffffffff);
12610 else if (GET_MODE (x) == VOIDmode
12611 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12613 unsigned HOST_WIDE_INT low;
12614 HOST_WIDE_INT high;
12616 if (GET_CODE (x) == CONST_DOUBLE)
12618 low = CONST_DOUBLE_LOW (x);
12619 high = CONST_DOUBLE_HIGH (x);
12622 #if HOST_BITS_PER_WIDE_INT == 32
12625 high = (low & 0x80000000) ? ~0 : 0;
12629 low = INTVAL (x) & 0xffffffff;
12630 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
12634 /* TOC entries are always Pmode-sized, but since this
12635 is a bigendian machine then if we're putting smaller
12636 integer constants in the TOC we have to pad them.
12637 (This is still a win over putting the constants in
12638 a separate constant pool, because then we'd have
12639 to have both a TOC entry _and_ the actual constant.)
12641 For a 32-bit target, CONST_INT values are loaded and shifted
12642 entirely within `low' and can be stored in one TOC entry. */
12644 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
12645 abort ();/* It would be easy to make this work, but it doesn't now. */
12647 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
12649 #if HOST_BITS_PER_WIDE_INT == 32
12650 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
12651 POINTER_SIZE, &low, &high, 0);
12654 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
12655 high = (HOST_WIDE_INT) low >> 32;
12662 if (TARGET_MINIMAL_TOC)
12663 fputs (DOUBLE_INT_ASM_OP, file);
12665 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12666 (long) high & 0xffffffff, (long) low & 0xffffffff);
12667 fprintf (file, "0x%lx%08lx\n",
12668 (long) high & 0xffffffff, (long) low & 0xffffffff);
12673 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
12675 if (TARGET_MINIMAL_TOC)
12676 fputs ("\t.long ", file);
12678 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12679 (long) high & 0xffffffff, (long) low & 0xffffffff);
12680 fprintf (file, "0x%lx,0x%lx\n",
12681 (long) high & 0xffffffff, (long) low & 0xffffffff);
12685 if (TARGET_MINIMAL_TOC)
12686 fputs ("\t.long ", file);
12688 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
12689 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
12695 if (GET_CODE (x) == CONST)
12697 if (GET_CODE (XEXP (x, 0)) != PLUS)
12700 base = XEXP (XEXP (x, 0), 0);
12701 offset = INTVAL (XEXP (XEXP (x, 0), 1));
12704 if (GET_CODE (base) == SYMBOL_REF)
12705 name = XSTR (base, 0);
12706 else if (GET_CODE (base) == LABEL_REF)
12707 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
12708 else if (GET_CODE (base) == CODE_LABEL)
12709 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
12713 real_name = (*targetm.strip_name_encoding) (name);
12714 if (TARGET_MINIMAL_TOC)
12715 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
12718 fprintf (file, "\t.tc %s", real_name);
12721 fprintf (file, ".N%d", - offset);
12723 fprintf (file, ".P%d", offset);
12725 fputs ("[TC],", file);
12728 /* Currently C++ toc references to vtables can be emitted before it
12729 is decided whether the vtable is public or private. If this is
12730 the case, then the linker will eventually complain that there is
12731 a TOC reference to an unknown section. Thus, for vtables only,
12732 we emit the TOC reference to reference the symbol and not the
12734 if (VTABLE_NAME_P (name))
12736 RS6000_OUTPUT_BASENAME (file, name);
12738 fprintf (file, "%d", offset);
12739 else if (offset > 0)
12740 fprintf (file, "+%d", offset);
12743 output_addr_const (file, x);
12747 /* Output an assembler pseudo-op to write an ASCII string of N characters
12748 starting at P to FILE.
12750 On the RS/6000, we have to do this using the .byte operation and
12751 write out special characters outside the quoted string.
12752 Also, the assembler is broken; very long strings are truncated,
12753 so we must artificially break them up early. */
12756 output_ascii (FILE *file, const char *p, int n)
12759 int i, count_string;
12760 const char *for_string = "\t.byte \"";
12761 const char *for_decimal = "\t.byte ";
12762 const char *to_close = NULL;
12765 for (i = 0; i < n; i++)
12768 if (c >= ' ' && c < 0177)
12771 fputs (for_string, file);
12774 /* Write two quotes to get one. */
12782 for_decimal = "\"\n\t.byte ";
12786 if (count_string >= 512)
12788 fputs (to_close, file);
12790 for_string = "\t.byte \"";
12791 for_decimal = "\t.byte ";
12799 fputs (for_decimal, file);
12800 fprintf (file, "%d", c);
12802 for_string = "\n\t.byte \"";
12803 for_decimal = ", ";
12809 /* Now close the string if we have written one. Then end the line. */
12811 fputs (to_close, file);
12814 /* Generate a unique section name for FILENAME for a section type
12815 represented by SECTION_DESC. Output goes into BUF.
12817 SECTION_DESC can be any string, as long as it is different for each
12818 possible section type.
12820 We name the section in the same manner as xlc. The name begins with an
12821 underscore followed by the filename (after stripping any leading directory
12822 names) with the last period replaced by the string SECTION_DESC. If
12823 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12827 rs6000_gen_section_name (char **buf, const char *filename,
12828 const char *section_desc)
12830 const char *q, *after_last_slash, *last_period = 0;
12834 after_last_slash = filename;
12835 for (q = filename; *q; q++)
12838 after_last_slash = q + 1;
12839 else if (*q == '.')
12843 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12844 *buf = (char *) xmalloc (len);
12849 for (q = after_last_slash; *q; q++)
12851 if (q == last_period)
12853 strcpy (p, section_desc);
12854 p += strlen (section_desc);
12858 else if (ISALNUM (*q))
12862 if (last_period == 0)
12863 strcpy (p, section_desc);
12868 /* Emit profile function. */
12871 output_profile_hook (int labelno ATTRIBUTE_UNUSED)
12873 if (TARGET_PROFILE_KERNEL)
12876 if (DEFAULT_ABI == ABI_AIX)
12878 #ifndef NO_PROFILE_COUNTERS
12879 # define NO_PROFILE_COUNTERS 0
12881 if (NO_PROFILE_COUNTERS)
12882 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
12886 const char *label_name;
12889 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12890 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
12891 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
12893 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
12897 else if (DEFAULT_ABI == ABI_DARWIN)
12899 const char *mcount_name = RS6000_MCOUNT;
12900 int caller_addr_regno = LINK_REGISTER_REGNUM;
12902 /* Be conservative and always set this, at least for now. */
12903 current_function_uses_pic_offset_table = 1;
12906 /* For PIC code, set up a stub and collect the caller's address
12907 from r0, which is where the prologue puts it. */
12908 if (MACHOPIC_INDIRECT)
12910 mcount_name = machopic_stub_name (mcount_name);
12911 if (current_function_uses_pic_offset_table)
12912 caller_addr_regno = 0;
12915 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12917 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12921 /* Write function profiler code. */
12924 output_function_profiler (FILE *file, int labelno)
12929 switch (DEFAULT_ABI)
12938 warning ("no profiling of 64-bit code for this ABI");
12941 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12942 fprintf (file, "\tmflr %s\n", reg_names[0]);
12945 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12946 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12947 reg_names[0], save_lr, reg_names[1]);
12948 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12949 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12950 assemble_name (file, buf);
12951 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12953 else if (flag_pic > 1)
12955 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12956 reg_names[0], save_lr, reg_names[1]);
12957 /* Now, we need to get the address of the label. */
12958 fputs ("\tbl 1f\n\t.long ", file);
12959 assemble_name (file, buf);
12960 fputs ("-.\n1:", file);
12961 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
12962 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
12963 reg_names[0], reg_names[11]);
12964 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
12965 reg_names[0], reg_names[0], reg_names[11]);
12969 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
12970 assemble_name (file, buf);
12971 fputs ("@ha\n", file);
12972 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12973 reg_names[0], save_lr, reg_names[1]);
12974 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
12975 assemble_name (file, buf);
12976 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
12979 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
12980 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
12985 if (!TARGET_PROFILE_KERNEL)
12987 /* Don't do anything, done in output_profile_hook (). */
12994 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
12995 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
12997 if (current_function_needs_context)
12999 asm_fprintf (file, "\tstd %s,24(%s)\n",
13000 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13001 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13002 asm_fprintf (file, "\tld %s,24(%s)\n",
13003 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13006 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13014 rs6000_use_dfa_pipeline_interface (void)
13019 /* Power4 load update and store update instructions are cracked into a
13020 load or store and an integer insn which are executed in the same cycle.
13021 Branches have their own dispatch slot which does not count against the
13022 GCC issue rate, but it changes the program flow so there are no other
13023 instructions to issue in this cycle. */
13026 rs6000_variable_issue (FILE *stream ATTRIBUTE_UNUSED,
13027 int verbose ATTRIBUTE_UNUSED,
13028 rtx insn, int more)
13030 if (GET_CODE (PATTERN (insn)) == USE
13031 || GET_CODE (PATTERN (insn)) == CLOBBER)
13034 if (rs6000_cpu == PROCESSOR_POWER4)
13036 enum attr_type type = get_attr_type (insn);
13037 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
13038 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX
13039 || type == TYPE_MFCR)
13041 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13042 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13043 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13044 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13045 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13046 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13047 || type == TYPE_IDIV || type == TYPE_LDIV
13048 || type == TYPE_INSERT_WORD)
13049 return more > 2 ? more - 2 : 0;
13055 /* Adjust the cost of a scheduling dependency. Return the new cost of
13056 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13059 rs6000_adjust_cost (rtx insn, rtx link, rtx dep_insn ATTRIBUTE_UNUSED,
13062 if (! recog_memoized (insn))
13065 if (REG_NOTE_KIND (link) != 0)
13068 if (REG_NOTE_KIND (link) == 0)
13070 /* Data dependency; DEP_INSN writes a register that INSN reads
13071 some cycles later. */
13072 switch (get_attr_type (insn))
13075 /* Tell the first scheduling pass about the latency between
13076 a mtctr and bctr (and mtlr and br/blr). The first
13077 scheduling pass will not know about this latency since
13078 the mtctr instruction, which has the latency associated
13079 to it, will be generated by reload. */
13080 return TARGET_POWER ? 5 : 4;
13082 /* Leave some extra cycles between a compare and its
13083 dependent branch, to inhibit expensive mispredicts. */
13084 if ((rs6000_cpu_attr == CPU_PPC603
13085 || rs6000_cpu_attr == CPU_PPC604
13086 || rs6000_cpu_attr == CPU_PPC604E
13087 || rs6000_cpu_attr == CPU_PPC620
13088 || rs6000_cpu_attr == CPU_PPC630
13089 || rs6000_cpu_attr == CPU_PPC750
13090 || rs6000_cpu_attr == CPU_PPC7400
13091 || rs6000_cpu_attr == CPU_PPC7450
13092 || rs6000_cpu_attr == CPU_POWER4)
13093 && recog_memoized (dep_insn)
13094 && (INSN_CODE (dep_insn) >= 0)
13095 && (get_attr_type (dep_insn) == TYPE_CMP
13096 || get_attr_type (dep_insn) == TYPE_COMPARE
13097 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13098 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13099 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13100 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13101 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13102 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13107 /* Fall out to return default cost. */
13113 /* The function returns a non-zero value if INSN can be scheduled only
13114 as the first insn in a dispatch group ("dispatch-slot restricted").
13115 In this case, the returned value indicates how many dispatch slots
13116 the insn occupies (at the beginning of the group).
13117 Return 0 otherwise. */
13120 is_dispatch_slot_restricted (rtx insn)
13122 enum attr_type type;
13124 if (rs6000_cpu != PROCESSOR_POWER4)
13128 || insn == NULL_RTX
13129 || GET_CODE (insn) == NOTE
13130 || GET_CODE (PATTERN (insn)) == USE
13131 || GET_CODE (PATTERN (insn)) == CLOBBER)
13134 type = get_attr_type (insn);
13140 case TYPE_DELAYED_CR:
13141 case TYPE_CR_LOGICAL:
13154 /* A C statement (sans semicolon) to update the integer scheduling
13155 priority INSN_PRIORITY (INSN). Increase the priority to execute the
13156 INSN earlier, reduce the priority to execute INSN later. Do not
13157 define this macro if you do not need to adjust the scheduling
13158 priorities of insns. */
13161 rs6000_adjust_priority (rtx insn ATTRIBUTE_UNUSED, int priority)
13163 /* On machines (like the 750) which have asymmetric integer units,
13164 where one integer unit can do multiply and divides and the other
13165 can't, reduce the priority of multiply/divide so it is scheduled
13166 before other integer operations. */
13169 if (! INSN_P (insn))
13172 if (GET_CODE (PATTERN (insn)) == USE)
13175 switch (rs6000_cpu_attr) {
13177 switch (get_attr_type (insn))
13184 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13185 priority, priority);
13186 if (priority >= 0 && priority < 0x01000000)
13193 if (is_dispatch_slot_restricted (insn)
13194 && reload_completed
13195 && current_sched_info->sched_max_insns_priority
13196 && rs6000_sched_restricted_insns_priority)
13199 /* Prioritize insns that can be dispatched only in the first dispatch slot. */
13200 if (rs6000_sched_restricted_insns_priority == 1)
13201 /* Attach highest priority to insn. This means that in
13202 haifa-sched.c:ready_sort(), dispatch-slot restriction considerations
13203 precede 'priority' (critical path) considerations. */
13204 return current_sched_info->sched_max_insns_priority;
13205 else if (rs6000_sched_restricted_insns_priority == 2)
13206 /* Increase priority of insn by a minimal amount. This means that in
13207 haifa-sched.c:ready_sort(), only 'priority' (critical path) considerations
13208 precede dispatch-slot restriction considerations. */
13209 return (priority + 1);
13215 /* Return how many instructions the machine can issue per cycle. */
13218 rs6000_issue_rate (void)
13220 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13221 if (!reload_completed)
13224 switch (rs6000_cpu_attr) {
13225 case CPU_RIOS1: /* ? */
13227 case CPU_PPC601: /* ? */
13248 /* Return how many instructions to look ahead for better insn
13252 rs6000_use_sched_lookahead (void)
13254 if (rs6000_cpu_attr == CPU_PPC8540)
13260 /* Length in units of the trampoline for entering a nested function. */
13263 rs6000_trampoline_size (void)
13267 switch (DEFAULT_ABI)
13273 ret = (TARGET_32BIT) ? 12 : 24;
13278 ret = (TARGET_32BIT) ? 40 : 48;
13285 /* Emit RTL insns to initialize the variable parts of a trampoline.
13286 FNADDR is an RTX for the address of the function's pure code.
13287 CXT is an RTX for the static chain value for the function. */
13290 rs6000_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt)
13292 enum machine_mode pmode = Pmode;
13293 int regsize = (TARGET_32BIT) ? 4 : 8;
13294 rtx ctx_reg = force_reg (pmode, cxt);
13296 switch (DEFAULT_ABI)
13301 /* Macros to shorten the code expansions below. */
13302 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
13303 #define MEM_PLUS(addr,offset) \
13304 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
13306 /* Under AIX, just build the 3 word function descriptor */
13309 rtx fn_reg = gen_reg_rtx (pmode);
13310 rtx toc_reg = gen_reg_rtx (pmode);
13311 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
13312 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
13313 emit_move_insn (MEM_DEREF (addr), fn_reg);
13314 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
13315 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
13319 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
13322 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
13323 FALSE, VOIDmode, 4,
13325 GEN_INT (rs6000_trampoline_size ()), SImode,
13335 /* Table of valid machine attributes. */
13337 const struct attribute_spec rs6000_attribute_table[] =
13339 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
13340 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13341 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13342 { NULL, 0, 0, false, false, false, NULL }
13345 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13346 struct attribute_spec.handler. */
13349 rs6000_handle_longcall_attribute (tree *node, tree name,
13350 tree args ATTRIBUTE_UNUSED,
13351 int flags ATTRIBUTE_UNUSED,
13352 bool *no_add_attrs)
13354 if (TREE_CODE (*node) != FUNCTION_TYPE
13355 && TREE_CODE (*node) != FIELD_DECL
13356 && TREE_CODE (*node) != TYPE_DECL)
13358 warning ("`%s' attribute only applies to functions",
13359 IDENTIFIER_POINTER (name));
13360 *no_add_attrs = true;
13366 /* Set longcall attributes on all functions declared when
13367 rs6000_default_long_calls is true. */
13369 rs6000_set_default_type_attributes (tree type)
13371 if (rs6000_default_long_calls
13372 && (TREE_CODE (type) == FUNCTION_TYPE
13373 || TREE_CODE (type) == METHOD_TYPE))
13374 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13376 TYPE_ATTRIBUTES (type));
13379 /* Return a reference suitable for calling a function with the
13380 longcall attribute. */
13383 rs6000_longcall_ref (rtx call_ref)
13385 const char *call_name;
13388 if (GET_CODE (call_ref) != SYMBOL_REF)
13391 /* System V adds '.' to the internal name, so skip them. */
13392 call_name = XSTR (call_ref, 0);
13393 if (*call_name == '.')
13395 while (*call_name == '.')
13398 node = get_identifier (call_name);
13399 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13402 return force_reg (Pmode, call_ref);
13405 #ifdef USING_ELFOS_H
13407 /* A C statement or statements to switch to the appropriate section
13408 for output of RTX in mode MODE. You can assume that RTX is some
13409 kind of constant in RTL. The argument MODE is redundant except in
13410 the case of a `const_int' rtx. Select the section by calling
13411 `text_section' or one of the alternatives for other sections.
13413 Do not define this macro if you put all constants in the read-only
13417 rs6000_elf_select_rtx_section (enum machine_mode mode, rtx x,
13418 unsigned HOST_WIDE_INT align)
13420 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13423 default_elf_select_rtx_section (mode, x, align);
13426 /* A C statement or statements to switch to the appropriate
13427 section for output of DECL. DECL is either a `VAR_DECL' node
13428 or a constant of some sort. RELOC indicates whether forming
13429 the initial value of DECL requires link-time relocations. */
13432 rs6000_elf_select_section (tree decl, int reloc,
13433 unsigned HOST_WIDE_INT align)
13435 /* Pretend that we're always building for a shared library when
13436 ABI_AIX, because otherwise we end up with dynamic relocations
13437 in read-only sections. This happens for function pointers,
13438 references to vtables in typeinfo, and probably other cases. */
13439 default_elf_select_section_1 (decl, reloc, align,
13440 flag_pic || DEFAULT_ABI == ABI_AIX);
13443 /* A C statement to build up a unique section name, expressed as a
13444 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13445 RELOC indicates whether the initial value of EXP requires
13446 link-time relocations. If you do not define this macro, GCC will use
13447 the symbol name prefixed by `.' as the section name. Note - this
13448 macro can now be called for uninitialized data items as well as
13449 initialized data and functions. */
13452 rs6000_elf_unique_section (tree decl, int reloc)
13454 /* As above, pretend that we're always building for a shared library
13455 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13456 default_unique_section_1 (decl, reloc,
13457 flag_pic || DEFAULT_ABI == ABI_AIX);
13460 /* For a SYMBOL_REF, set generic flags and then perform some
13461 target-specific processing.
13463 When the AIX ABI is requested on a non-AIX system, replace the
13464 function name with the real name (with a leading .) rather than the
13465 function descriptor name. This saves a lot of overriding code to
13466 read the prefixes. */
13469 rs6000_elf_encode_section_info (tree decl, rtx rtl, int first)
13471 default_encode_section_info (decl, rtl, first);
13474 && TREE_CODE (decl) == FUNCTION_DECL
13476 && DEFAULT_ABI == ABI_AIX)
13478 rtx sym_ref = XEXP (rtl, 0);
13479 size_t len = strlen (XSTR (sym_ref, 0));
13480 char *str = alloca (len + 2);
13482 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13483 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13488 rs6000_elf_in_small_data_p (tree decl)
13490 if (rs6000_sdata == SDATA_NONE)
13493 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13495 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13496 if (strcmp (section, ".sdata") == 0
13497 || strcmp (section, ".sdata2") == 0
13498 || strcmp (section, ".sbss") == 0
13499 || strcmp (section, ".sbss2") == 0
13500 || strcmp (section, ".PPC.EMB.sdata0") == 0
13501 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13506 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13509 && (unsigned HOST_WIDE_INT) size <= g_switch_value
13510 /* If it's not public, and we're not going to reference it there,
13511 there's no need to put it in the small data section. */
13512 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13519 #endif /* USING_ELFOS_H */
13522 /* Return a REG that occurs in ADDR with coefficient 1.
13523 ADDR can be effectively incremented by incrementing REG.
13525 r0 is special and we must not select it as an address
13526 register by this routine since our caller will try to
13527 increment the returned register via an "la" instruction. */
13530 find_addr_reg (rtx addr)
13532 while (GET_CODE (addr) == PLUS)
13534 if (GET_CODE (XEXP (addr, 0)) == REG
13535 && REGNO (XEXP (addr, 0)) != 0)
13536 addr = XEXP (addr, 0);
13537 else if (GET_CODE (XEXP (addr, 1)) == REG
13538 && REGNO (XEXP (addr, 1)) != 0)
13539 addr = XEXP (addr, 1);
13540 else if (CONSTANT_P (XEXP (addr, 0)))
13541 addr = XEXP (addr, 1);
13542 else if (CONSTANT_P (XEXP (addr, 1)))
13543 addr = XEXP (addr, 0);
13547 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13553 rs6000_fatal_bad_address (rtx op)
13555 fatal_insn ("bad address", op);
13561 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13562 reference and a constant. */
13565 symbolic_operand (rtx op)
13567 switch (GET_CODE (op))
13574 return (GET_CODE (op) == SYMBOL_REF ||
13575 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
13576 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
13577 && GET_CODE (XEXP (op, 1)) == CONST_INT);
13584 #ifdef RS6000_LONG_BRANCH
13586 static tree stub_list = 0;
13588 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13589 procedure calls to the linked list. */
13592 add_compiler_stub (tree label_name, tree function_name, int line_number)
13594 tree stub = build_tree_list (function_name, label_name);
13595 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13596 TREE_CHAIN (stub) = stub_list;
13600 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13601 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13602 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13604 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13605 handling procedure calls from the linked list and initializes the
13609 output_compiler_stub (void)
13612 char label_buf[256];
13616 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13618 fprintf (asm_out_file,
13619 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13621 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13622 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13623 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13624 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13626 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13628 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13631 label_buf[0] = '_';
13632 strcpy (label_buf+1,
13633 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13636 strcpy (tmp_buf, "lis r12,hi16(");
13637 strcat (tmp_buf, label_buf);
13638 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13639 strcat (tmp_buf, label_buf);
13640 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13641 output_asm_insn (tmp_buf, 0);
13643 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13644 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13645 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13646 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13652 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13653 already there or not. */
13656 no_previous_def (tree function_name)
13659 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13660 if (function_name == STUB_FUNCTION_NAME (stub))
13665 /* GET_PREV_LABEL gets the label name from the previous definition of
13669 get_prev_label (tree function_name)
13672 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13673 if (function_name == STUB_FUNCTION_NAME (stub))
13674 return STUB_LABEL_NAME (stub);
13678 /* INSN is either a function call or a millicode call. It may have an
13679 unconditional jump in its delay slot.
13681 CALL_DEST is the routine we are calling. */
13684 output_call (rtx insn, rtx call_dest, int operand_number)
13686 static char buf[256];
13687 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13690 tree funname = get_identifier (XSTR (call_dest, 0));
13692 if (no_previous_def (funname))
13694 int line_number = 0;
13695 rtx label_rtx = gen_label_rtx ();
13696 char *label_buf, temp_buf[256];
13697 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13698 CODE_LABEL_NUMBER (label_rtx));
13699 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13700 labelname = get_identifier (label_buf);
13701 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13703 line_number = NOTE_LINE_NUMBER (insn);
13704 add_compiler_stub (labelname, funname, line_number);
13707 labelname = get_prev_label (funname);
13709 sprintf (buf, "jbsr %%z%d,%.246s",
13710 operand_number, IDENTIFIER_POINTER (labelname));
13715 sprintf (buf, "bl %%z%d", operand_number);
13720 #endif /* RS6000_LONG_BRANCH */
13722 /* Generate PIC and indirect symbol stubs. */
13725 machopic_output_stub (FILE *file, const char *symb, const char *stub)
13727 unsigned int length;
13728 char *symbol_name, *lazy_ptr_name;
13729 char *local_label_0;
13730 static int label = 0;
13732 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13733 symb = (*targetm.strip_name_encoding) (symb);
13736 length = strlen (symb);
13737 symbol_name = alloca (length + 32);
13738 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13740 lazy_ptr_name = alloca (length + 32);
13741 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13744 machopic_picsymbol_stub1_section ();
13746 machopic_symbol_stub1_section ();
13747 fprintf (file, "\t.align 2\n");
13749 fprintf (file, "%s:\n", stub);
13750 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13755 local_label_0 = alloca (sizeof("\"L0000000000$spb\""));
13756 sprintf (local_label_0, "\"L%011d$spb\"", label);
13758 fprintf (file, "\tmflr r0\n");
13759 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13760 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13761 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13762 lazy_ptr_name, local_label_0);
13763 fprintf (file, "\tmtlr r0\n");
13764 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13765 lazy_ptr_name, local_label_0);
13766 fprintf (file, "\tmtctr r12\n");
13767 fprintf (file, "\tbctr\n");
13771 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13772 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13773 fprintf (file, "\tmtctr r12\n");
13774 fprintf (file, "\tbctr\n");
13777 machopic_lazy_symbol_ptr_section ();
13778 fprintf (file, "%s:\n", lazy_ptr_name);
13779 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13780 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13783 /* Legitimize PIC addresses. If the address is already
13784 position-independent, we return ORIG. Newly generated
13785 position-independent addresses go into a reg. This is REG if non
13786 zero, otherwise we allocate register(s) as necessary. */
13788 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13791 rs6000_machopic_legitimize_pic_address (rtx orig, enum machine_mode mode,
13796 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13797 reg = gen_reg_rtx (Pmode);
13799 if (GET_CODE (orig) == CONST)
13801 if (GET_CODE (XEXP (orig, 0)) == PLUS
13802 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13805 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13808 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13811 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13817 if (GET_CODE (offset) == CONST_INT)
13819 if (SMALL_INT (offset))
13820 return plus_constant (base, INTVAL (offset));
13821 else if (! reload_in_progress && ! reload_completed)
13822 offset = force_reg (Pmode, offset);
13825 rtx mem = force_const_mem (Pmode, orig);
13826 return machopic_legitimize_pic_address (mem, Pmode, reg);
13829 return gen_rtx (PLUS, Pmode, base, offset);
13832 /* Fall back on generic machopic code. */
13833 return machopic_legitimize_pic_address (orig, mode, reg);
13836 /* This is just a placeholder to make linking work without having to
13837 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13838 ever needed for Darwin (not too likely!) this would have to get a
13839 real definition. */
13846 #endif /* TARGET_MACHO */
13849 static unsigned int
13850 rs6000_elf_section_type_flags (tree decl, const char *name, int reloc)
13853 = default_section_type_flags_1 (decl, name, reloc,
13854 flag_pic || DEFAULT_ABI == ABI_AIX);
13856 if (TARGET_RELOCATABLE)
13857 flags |= SECTION_WRITE;
13862 /* Record an element in the table of global constructors. SYMBOL is
13863 a SYMBOL_REF of the function to be called; PRIORITY is a number
13864 between 0 and MAX_INIT_PRIORITY.
13866 This differs from default_named_section_asm_out_constructor in
13867 that we have special handling for -mrelocatable. */
13870 rs6000_elf_asm_out_constructor (rtx symbol, int priority)
13872 const char *section = ".ctors";
13875 if (priority != DEFAULT_INIT_PRIORITY)
13877 sprintf (buf, ".ctors.%.5u",
13878 /* Invert the numbering so the linker puts us in the proper
13879 order; constructors are run from right to left, and the
13880 linker sorts in increasing order. */
13881 MAX_INIT_PRIORITY - priority);
13885 named_section_flags (section, SECTION_WRITE);
13886 assemble_align (POINTER_SIZE);
13888 if (TARGET_RELOCATABLE)
13890 fputs ("\t.long (", asm_out_file);
13891 output_addr_const (asm_out_file, symbol);
13892 fputs (")@fixup\n", asm_out_file);
13895 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13899 rs6000_elf_asm_out_destructor (rtx symbol, int priority)
13901 const char *section = ".dtors";
13904 if (priority != DEFAULT_INIT_PRIORITY)
13906 sprintf (buf, ".dtors.%.5u",
13907 /* Invert the numbering so the linker puts us in the proper
13908 order; constructors are run from right to left, and the
13909 linker sorts in increasing order. */
13910 MAX_INIT_PRIORITY - priority);
13914 named_section_flags (section, SECTION_WRITE);
13915 assemble_align (POINTER_SIZE);
13917 if (TARGET_RELOCATABLE)
13919 fputs ("\t.long (", asm_out_file);
13920 output_addr_const (asm_out_file, symbol);
13921 fputs (")@fixup\n", asm_out_file);
13924 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13928 rs6000_elf_declare_function_name (FILE *file, const char *name, tree decl)
13932 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
13933 ASM_OUTPUT_LABEL (file, name);
13934 fputs (DOUBLE_INT_ASM_OP, file);
13936 assemble_name (file, name);
13937 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
13938 assemble_name (file, name);
13939 fputs (",24\n\t.type\t.", file);
13940 assemble_name (file, name);
13941 fputs (",@function\n", file);
13942 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
13944 fputs ("\t.globl\t.", file);
13945 assemble_name (file, name);
13948 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
13950 ASM_OUTPUT_LABEL (file, name);
13954 if (TARGET_RELOCATABLE
13955 && (get_pool_size () != 0 || current_function_profile)
13960 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
13962 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
13963 fprintf (file, "\t.long ");
13964 assemble_name (file, buf);
13966 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
13967 assemble_name (file, buf);
13971 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
13972 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
13974 if (DEFAULT_ABI == ABI_AIX)
13976 const char *desc_name, *orig_name;
13978 orig_name = (*targetm.strip_name_encoding) (name);
13979 desc_name = orig_name;
13980 while (*desc_name == '.')
13983 if (TREE_PUBLIC (decl))
13984 fprintf (file, "\t.globl %s\n", desc_name);
13986 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
13987 fprintf (file, "%s:\n", desc_name);
13988 fprintf (file, "\t.long %s\n", orig_name);
13989 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
13990 if (DEFAULT_ABI == ABI_AIX)
13991 fputs ("\t.long 0\n", file);
13992 fprintf (file, "\t.previous\n");
13994 ASM_OUTPUT_LABEL (file, name);
14000 rs6000_xcoff_asm_globalize_label (FILE *stream, const char *name)
14002 fputs (GLOBAL_ASM_OP, stream);
14003 RS6000_OUTPUT_BASENAME (stream, name);
14004 putc ('\n', stream);
14008 rs6000_xcoff_asm_named_section (const char *name, unsigned int flags)
14011 static const char * const suffix[3] = { "PR", "RO", "RW" };
14013 if (flags & SECTION_CODE)
14015 else if (flags & SECTION_WRITE)
14020 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
14021 (flags & SECTION_CODE) ? "." : "",
14022 name, suffix[smclass], flags & SECTION_ENTSIZE);
14026 rs6000_xcoff_select_section (tree decl, int reloc,
14027 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
14029 if (decl_readonly_section_1 (decl, reloc, 1))
14031 if (TREE_PUBLIC (decl))
14032 read_only_data_section ();
14034 read_only_private_data_section ();
14038 if (TREE_PUBLIC (decl))
14041 private_data_section ();
14046 rs6000_xcoff_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
14050 /* Use select_section for private and uninitialized data. */
14051 if (!TREE_PUBLIC (decl)
14052 || DECL_COMMON (decl)
14053 || DECL_INITIAL (decl) == NULL_TREE
14054 || DECL_INITIAL (decl) == error_mark_node
14055 || (flag_zero_initialized_in_bss
14056 && initializer_zerop (DECL_INITIAL (decl))))
14059 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
14060 name = (*targetm.strip_name_encoding) (name);
14061 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
14064 /* Select section for constant in constant pool.
14066 On RS/6000, all constants are in the private read-only data area.
14067 However, if this is being placed in the TOC it must be output as a
14071 rs6000_xcoff_select_rtx_section (enum machine_mode mode, rtx x,
14072 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
14074 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14077 read_only_private_data_section ();
14080 /* Remove any trailing [DS] or the like from the symbol name. */
14082 static const char *
14083 rs6000_xcoff_strip_name_encoding (const char *name)
14088 len = strlen (name);
14089 if (name[len - 1] == ']')
14090 return ggc_alloc_string (name, len - 4);
14095 /* Section attributes. AIX is always PIC. */
14097 static unsigned int
14098 rs6000_xcoff_section_type_flags (tree decl, const char *name, int reloc)
14100 unsigned int align;
14101 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
14103 /* Align to at least UNIT size. */
14104 if (flags & SECTION_CODE)
14105 align = MIN_UNITS_PER_WORD;
14107 /* Increase alignment of large objects if not already stricter. */
14108 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
14109 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
14110 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
14112 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
14115 /* Output at beginning of assembler file.
14117 Initialize the section names for the RS/6000 at this point.
14119 Specify filename, including full path, to assembler.
14121 We want to go into the TOC section so at least one .toc will be emitted.
14122 Also, in order to output proper .bs/.es pairs, we need at least one static
14123 [RW] section emitted.
14125 Finally, declare mcount when profiling to make the assembler happy. */
14128 rs6000_xcoff_file_start (void)
14130 rs6000_gen_section_name (&xcoff_bss_section_name,
14131 main_input_filename, ".bss_");
14132 rs6000_gen_section_name (&xcoff_private_data_section_name,
14133 main_input_filename, ".rw_");
14134 rs6000_gen_section_name (&xcoff_read_only_section_name,
14135 main_input_filename, ".ro_");
14137 fputs ("\t.file\t", asm_out_file);
14138 output_quoted_string (asm_out_file, main_input_filename);
14139 fputc ('\n', asm_out_file);
14141 if (write_symbols != NO_DEBUG)
14142 private_data_section ();
14145 fprintf (asm_out_file, "\t.extern %s\n", RS6000_MCOUNT);
14146 rs6000_file_start ();
14149 /* Output at end of assembler file.
14150 On the RS/6000, referencing data should automatically pull in text. */
14153 rs6000_xcoff_file_end (void)
14156 fputs ("_section_.text:\n", asm_out_file);
14158 fputs (TARGET_32BIT
14159 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
14162 #endif /* TARGET_XCOFF */
14165 /* Cross-module name binding. Darwin does not support overriding
14166 functions at dynamic-link time. */
14169 rs6000_binds_local_p (tree decl)
14171 return default_binds_local_p_1 (decl, 0);
14175 /* Compute a (partial) cost for rtx X. Return true if the complete
14176 cost has been computed, and false if subexpressions should be
14177 scanned. In either case, *TOTAL contains the cost result. */
14180 rs6000_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
14185 /* On the RS/6000, if it is valid in the insn, it is free.
14186 So this always returns 0. */
14197 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14198 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
14199 + 0x8000) >= 0x10000)
14200 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14201 ? COSTS_N_INSNS (2)
14202 : COSTS_N_INSNS (1));
14208 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14209 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
14210 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14211 ? COSTS_N_INSNS (2)
14212 : COSTS_N_INSNS (1));
14218 *total = COSTS_N_INSNS (2);
14221 switch (rs6000_cpu)
14223 case PROCESSOR_RIOS1:
14224 case PROCESSOR_PPC405:
14225 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14226 ? COSTS_N_INSNS (5)
14227 : (INTVAL (XEXP (x, 1)) >= -256
14228 && INTVAL (XEXP (x, 1)) <= 255)
14229 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14232 case PROCESSOR_PPC440:
14233 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14234 ? COSTS_N_INSNS (3)
14235 : COSTS_N_INSNS (2));
14238 case PROCESSOR_RS64A:
14239 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14240 ? GET_MODE (XEXP (x, 1)) != DImode
14241 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14242 : (INTVAL (XEXP (x, 1)) >= -256
14243 && INTVAL (XEXP (x, 1)) <= 255)
14244 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14247 case PROCESSOR_RIOS2:
14248 case PROCESSOR_MPCCORE:
14249 case PROCESSOR_PPC604e:
14250 *total = COSTS_N_INSNS (2);
14253 case PROCESSOR_PPC601:
14254 *total = COSTS_N_INSNS (5);
14257 case PROCESSOR_PPC603:
14258 case PROCESSOR_PPC7400:
14259 case PROCESSOR_PPC750:
14260 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14261 ? COSTS_N_INSNS (5)
14262 : (INTVAL (XEXP (x, 1)) >= -256
14263 && INTVAL (XEXP (x, 1)) <= 255)
14264 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14267 case PROCESSOR_PPC7450:
14268 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14269 ? COSTS_N_INSNS (4)
14270 : COSTS_N_INSNS (3));
14273 case PROCESSOR_PPC403:
14274 case PROCESSOR_PPC604:
14275 case PROCESSOR_PPC8540:
14276 *total = COSTS_N_INSNS (4);
14279 case PROCESSOR_PPC620:
14280 case PROCESSOR_PPC630:
14281 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14282 ? GET_MODE (XEXP (x, 1)) != DImode
14283 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14284 : (INTVAL (XEXP (x, 1)) >= -256
14285 && INTVAL (XEXP (x, 1)) <= 255)
14286 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14289 case PROCESSOR_POWER4:
14290 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14291 ? GET_MODE (XEXP (x, 1)) != DImode
14292 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14293 : COSTS_N_INSNS (2));
14302 if (GET_CODE (XEXP (x, 1)) == CONST_INT
14303 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
14305 *total = COSTS_N_INSNS (2);
14312 switch (rs6000_cpu)
14314 case PROCESSOR_RIOS1:
14315 *total = COSTS_N_INSNS (19);
14318 case PROCESSOR_RIOS2:
14319 *total = COSTS_N_INSNS (13);
14322 case PROCESSOR_RS64A:
14323 *total = (GET_MODE (XEXP (x, 1)) != DImode
14324 ? COSTS_N_INSNS (65)
14325 : COSTS_N_INSNS (67));
14328 case PROCESSOR_MPCCORE:
14329 *total = COSTS_N_INSNS (6);
14332 case PROCESSOR_PPC403:
14333 *total = COSTS_N_INSNS (33);
14336 case PROCESSOR_PPC405:
14337 *total = COSTS_N_INSNS (35);
14340 case PROCESSOR_PPC440:
14341 *total = COSTS_N_INSNS (34);
14344 case PROCESSOR_PPC601:
14345 *total = COSTS_N_INSNS (36);
14348 case PROCESSOR_PPC603:
14349 *total = COSTS_N_INSNS (37);
14352 case PROCESSOR_PPC604:
14353 case PROCESSOR_PPC604e:
14354 *total = COSTS_N_INSNS (20);
14357 case PROCESSOR_PPC620:
14358 case PROCESSOR_PPC630:
14359 *total = (GET_MODE (XEXP (x, 1)) != DImode
14360 ? COSTS_N_INSNS (21)
14361 : COSTS_N_INSNS (37));
14364 case PROCESSOR_PPC750:
14365 case PROCESSOR_PPC8540:
14366 case PROCESSOR_PPC7400:
14367 *total = COSTS_N_INSNS (19);
14370 case PROCESSOR_PPC7450:
14371 *total = COSTS_N_INSNS (23);
14374 case PROCESSOR_POWER4:
14375 *total = (GET_MODE (XEXP (x, 1)) != DImode
14376 ? COSTS_N_INSNS (18)
14377 : COSTS_N_INSNS (34));
14385 *total = COSTS_N_INSNS (4);
14389 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14398 /* A C expression returning the cost of moving data from a register of class
14399 CLASS1 to one of CLASS2. */
14402 rs6000_register_move_cost (enum machine_mode mode,
14403 enum reg_class from, enum reg_class to)
14405 /* Moves from/to GENERAL_REGS. */
14406 if (reg_classes_intersect_p (to, GENERAL_REGS)
14407 || reg_classes_intersect_p (from, GENERAL_REGS))
14409 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14412 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14413 return (rs6000_memory_move_cost (mode, from, 0)
14414 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14416 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14417 else if (from == CR_REGS)
14421 /* A move will cost one instruction per GPR moved. */
14422 return 2 * HARD_REGNO_NREGS (0, mode);
14425 /* Moving between two similar registers is just one instruction. */
14426 else if (reg_classes_intersect_p (to, from))
14427 return mode == TFmode ? 4 : 2;
14429 /* Everything else has to go through GENERAL_REGS. */
14431 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14432 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14435 /* A C expressions returning the cost of moving data of MODE from a register to
14439 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class class,
14440 int in ATTRIBUTE_UNUSED)
14442 if (reg_classes_intersect_p (class, GENERAL_REGS))
14443 return 4 * HARD_REGNO_NREGS (0, mode);
14444 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14445 return 4 * HARD_REGNO_NREGS (32, mode);
14446 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14447 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14449 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14452 /* Return an RTX representing where to find the function value of a
14453 function returning MODE. */
14455 rs6000_complex_function_value (enum machine_mode mode)
14457 unsigned int regno;
14459 enum machine_mode inner = GET_MODE_INNER (mode);
14461 if (FLOAT_MODE_P (mode))
14462 regno = FP_ARG_RETURN;
14465 regno = GP_ARG_RETURN;
14467 /* 32-bit is OK since it'll go in r3/r4. */
14469 && GET_MODE_BITSIZE (inner) >= 32)
14470 return gen_rtx_REG (mode, regno);
14473 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
14475 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
14476 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
14477 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
14480 /* Define how to find the value returned by a function.
14481 VALTYPE is the data type of the value (as a tree).
14482 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14483 otherwise, FUNC is 0.
14485 On the SPE, both FPs and vectors are returned in r3.
14487 On RS/6000 an integer value is in r3 and a floating-point value is in
14488 fp1, unless -msoft-float. */
14491 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14493 enum machine_mode mode;
14494 unsigned int regno;
14496 if ((INTEGRAL_TYPE_P (valtype)
14497 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14498 || POINTER_TYPE_P (valtype))
14501 mode = TYPE_MODE (valtype);
14503 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
14504 regno = FP_ARG_RETURN;
14505 else if (TREE_CODE (valtype) == COMPLEX_TYPE
14506 && TARGET_HARD_FLOAT
14507 && SPLIT_COMPLEX_ARGS)
14508 return rs6000_complex_function_value (mode);
14509 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
14510 regno = ALTIVEC_ARG_RETURN;
14512 regno = GP_ARG_RETURN;
14514 return gen_rtx_REG (mode, regno);
14517 /* Define how to find the value returned by a library function
14518 assuming the value has mode MODE. */
14520 rs6000_libcall_value (enum machine_mode mode)
14522 unsigned int regno;
14524 if (GET_MODE_CLASS (mode) == MODE_FLOAT
14525 && TARGET_HARD_FLOAT && TARGET_FPRS)
14526 regno = FP_ARG_RETURN;
14527 else if (ALTIVEC_VECTOR_MODE (mode))
14528 regno = ALTIVEC_ARG_RETURN;
14529 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
14530 return rs6000_complex_function_value (mode);
14532 regno = GP_ARG_RETURN;
14534 return gen_rtx_REG (mode, regno);
14537 /* Return true if TYPE is of type __ev64_opaque__. */
14540 is_ev64_opaque_type (tree type)
14543 && (type == opaque_V2SI_type_node
14544 || type == opaque_V2SF_type_node
14545 || type == opaque_p_V2SI_type_node));
14549 rs6000_dwarf_register_span (rtx reg)
14553 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14556 regno = REGNO (reg);
14558 /* The duality of the SPE register size wreaks all kinds of havoc.
14559 This is a way of distinguishing r0 in 32-bits from r0 in
14562 gen_rtx_PARALLEL (VOIDmode,
14565 gen_rtx_REG (SImode, regno + 1200),
14566 gen_rtx_REG (SImode, regno))
14568 gen_rtx_REG (SImode, regno),
14569 gen_rtx_REG (SImode, regno + 1200)));
14572 #include "gt-rs6000.h"