1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 2, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the
20 Free Software Foundation, 59 Temple Place - Suite 330, Boston,
21 MA 02111-1307, USA. */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
43 #include "basic-block.h"
44 #include "integrate.h"
50 #include "target-def.h"
51 #include "langhooks.h"
53 #include "cfglayout.h"
55 #ifndef TARGET_NO_PROTOTYPE
56 #define TARGET_NO_PROTOTYPE 0
59 #define EASY_VECTOR_15(n, x, y) ((n) >= -16 && (n) <= 15 \
60 && easy_vector_same (x, y))
62 #define EASY_VECTOR_15_ADD_SELF(n, x, y) ((n) >= 0x10 && (n) <= 0x1e \
64 && easy_vector_same (x, y))
66 #define min(A,B) ((A) < (B) ? (A) : (B))
67 #define max(A,B) ((A) > (B) ? (A) : (B))
71 enum processor_type rs6000_cpu;
72 struct rs6000_cpu_select rs6000_select[3] =
74 /* switch name, tune arch */
75 { (const char *)0, "--with-cpu=", 1, 1 },
76 { (const char *)0, "-mcpu=", 1, 1 },
77 { (const char *)0, "-mtune=", 1, 0 },
80 /* Size of long double */
81 const char *rs6000_long_double_size_string;
82 int rs6000_long_double_type_size;
84 /* Whether -mabi=altivec has appeared */
85 int rs6000_altivec_abi;
87 /* Whether VRSAVE instructions should be generated. */
88 int rs6000_altivec_vrsave;
90 /* String from -mvrsave= option. */
91 const char *rs6000_altivec_vrsave_string;
93 /* Nonzero if we want SPE ABI extensions. */
96 /* Whether isel instructions should be generated. */
99 /* Whether SPE simd instructions should be generated. */
102 /* Nonzero if floating point operations are done in the GPRs. */
103 int rs6000_float_gprs = 0;
105 /* String from -mfloat-gprs=. */
106 const char *rs6000_float_gprs_string;
108 /* String from -misel=. */
109 const char *rs6000_isel_string;
111 /* String from -mspe=. */
112 const char *rs6000_spe_string;
114 /* Set to nonzero once AIX common-mode calls have been defined. */
115 static GTY(()) int common_mode_defined;
117 /* Save information from a "cmpxx" operation until the branch or scc is
119 rtx rs6000_compare_op0, rs6000_compare_op1;
120 int rs6000_compare_fp_p;
122 /* Label number of label created for -mrelocatable, to call to so we can
123 get the address of the GOT section */
124 int rs6000_pic_labelno;
127 /* Which abi to adhere to */
128 const char *rs6000_abi_name;
130 /* Semantics of the small data area */
131 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
133 /* Which small data model to use */
134 const char *rs6000_sdata_name = (char *)0;
136 /* Counter for labels which are to be placed in .fixup. */
137 int fixuplabelno = 0;
140 /* Bit size of immediate TLS offsets and string from which it is decoded. */
141 int rs6000_tls_size = 32;
142 const char *rs6000_tls_size_string;
144 /* ABI enumeration available for subtarget to use. */
145 enum rs6000_abi rs6000_current_abi;
147 /* ABI string from -mabi= option. */
148 const char *rs6000_abi_string;
151 const char *rs6000_debug_name;
152 int rs6000_debug_stack; /* debug stack applications */
153 int rs6000_debug_arg; /* debug argument handling */
156 static GTY(()) tree opaque_V2SI_type_node;
157 static GTY(()) tree opaque_V2SF_type_node;
158 static GTY(()) tree opaque_p_V2SI_type_node;
160 const char *rs6000_traceback_name;
162 traceback_default = 0,
168 /* Flag to say the TOC is initialized */
170 char toc_label_name[10];
172 /* Alias set for saves and restores from the rs6000 stack. */
173 static int rs6000_sr_alias_set;
175 /* Call distance, overridden by -mlongcall and #pragma longcall(1).
176 The only place that looks at this is rs6000_set_default_type_attributes;
177 everywhere else should rely on the presence or absence of a longcall
178 attribute on the function declaration. */
179 int rs6000_default_long_calls;
180 const char *rs6000_longcall_switch;
182 /* Control alignment for fields within structures. */
183 /* String from -malign-XXXXX. */
184 const char *rs6000_alignment_string;
185 int rs6000_alignment_flags;
187 struct builtin_description
189 /* mask is not const because we're going to alter it below. This
190 nonsense will go away when we rewrite the -march infrastructure
191 to give us more target flag bits. */
193 const enum insn_code icode;
194 const char *const name;
195 const enum rs6000_builtins code;
198 static bool rs6000_function_ok_for_sibcall PARAMS ((tree, tree));
199 static int num_insns_constant_wide PARAMS ((HOST_WIDE_INT));
200 static void validate_condition_mode
201 PARAMS ((enum rtx_code, enum machine_mode));
202 static rtx rs6000_generate_compare PARAMS ((enum rtx_code));
203 static void rs6000_maybe_dead PARAMS ((rtx));
204 static void rs6000_emit_stack_tie PARAMS ((void));
205 static void rs6000_frame_related PARAMS ((rtx, rtx, HOST_WIDE_INT, rtx, rtx));
206 static rtx spe_synthesize_frame_save PARAMS ((rtx));
207 static bool spe_func_has_64bit_regs_p PARAMS ((void));
208 static void emit_frame_save PARAMS ((rtx, rtx, enum machine_mode,
209 unsigned int, int, int));
210 static rtx gen_frame_mem_offset PARAMS ((enum machine_mode, rtx, int));
211 static void rs6000_emit_allocate_stack PARAMS ((HOST_WIDE_INT, int));
212 static unsigned rs6000_hash_constant PARAMS ((rtx));
213 static unsigned toc_hash_function PARAMS ((const void *));
214 static int toc_hash_eq PARAMS ((const void *, const void *));
215 static int constant_pool_expr_1 PARAMS ((rtx, int *, int *));
216 static bool constant_pool_expr_p PARAMS ((rtx));
217 static bool toc_relative_expr_p PARAMS ((rtx));
218 static bool legitimate_small_data_p PARAMS ((enum machine_mode, rtx));
219 static bool legitimate_offset_address_p PARAMS ((enum machine_mode, rtx, int));
220 static bool legitimate_indexed_address_p PARAMS ((rtx, int));
221 static bool legitimate_indirect_address_p PARAMS ((rtx, int));
222 static bool legitimate_lo_sum_address_p PARAMS ((enum machine_mode, rtx, int));
223 static struct machine_function * rs6000_init_machine_status PARAMS ((void));
224 static bool rs6000_assemble_integer PARAMS ((rtx, unsigned int, int));
225 #ifdef HAVE_GAS_HIDDEN
226 static void rs6000_assemble_visibility PARAMS ((tree, int));
228 static int rs6000_ra_ever_killed PARAMS ((void));
229 static tree rs6000_handle_longcall_attribute PARAMS ((tree *, tree, tree, int, bool *));
230 extern const struct attribute_spec rs6000_attribute_table[];
231 static void rs6000_set_default_type_attributes PARAMS ((tree));
232 static void rs6000_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
233 static void rs6000_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
234 static void rs6000_output_mi_thunk PARAMS ((FILE *, tree, HOST_WIDE_INT,
235 HOST_WIDE_INT, tree));
236 static rtx rs6000_emit_set_long_const PARAMS ((rtx,
237 HOST_WIDE_INT, HOST_WIDE_INT));
239 static unsigned int rs6000_elf_section_type_flags PARAMS ((tree, const char *,
241 static void rs6000_elf_asm_out_constructor PARAMS ((rtx, int));
242 static void rs6000_elf_asm_out_destructor PARAMS ((rtx, int));
243 static void rs6000_elf_select_section PARAMS ((tree, int,
244 unsigned HOST_WIDE_INT));
245 static void rs6000_elf_unique_section PARAMS ((tree, int));
246 static void rs6000_elf_select_rtx_section PARAMS ((enum machine_mode, rtx,
247 unsigned HOST_WIDE_INT));
248 static void rs6000_elf_encode_section_info PARAMS ((tree, rtx, int))
250 static bool rs6000_elf_in_small_data_p PARAMS ((tree));
253 static void rs6000_xcoff_asm_globalize_label PARAMS ((FILE *, const char *));
254 static void rs6000_xcoff_asm_named_section PARAMS ((const char *, unsigned int));
255 static void rs6000_xcoff_select_section PARAMS ((tree, int,
256 unsigned HOST_WIDE_INT));
257 static void rs6000_xcoff_unique_section PARAMS ((tree, int));
258 static void rs6000_xcoff_select_rtx_section PARAMS ((enum machine_mode, rtx,
259 unsigned HOST_WIDE_INT));
260 static const char * rs6000_xcoff_strip_name_encoding PARAMS ((const char *));
261 static unsigned int rs6000_xcoff_section_type_flags PARAMS ((tree, const char *, int));
262 static void rs6000_xcoff_file_end PARAMS ((void));
265 static bool rs6000_binds_local_p PARAMS ((tree));
267 static int rs6000_use_dfa_pipeline_interface PARAMS ((void));
268 static int rs6000_variable_issue PARAMS ((FILE *, int, rtx, int));
269 static bool rs6000_rtx_costs PARAMS ((rtx, int, int, int *));
270 static int rs6000_adjust_cost PARAMS ((rtx, rtx, rtx, int));
271 static int rs6000_adjust_priority PARAMS ((rtx, int));
272 static int rs6000_issue_rate PARAMS ((void));
273 static int rs6000_use_sched_lookahead PARAMS ((void));
275 static void rs6000_init_builtins PARAMS ((void));
276 static rtx rs6000_expand_unop_builtin PARAMS ((enum insn_code, tree, rtx));
277 static rtx rs6000_expand_binop_builtin PARAMS ((enum insn_code, tree, rtx));
278 static rtx rs6000_expand_ternop_builtin PARAMS ((enum insn_code, tree, rtx));
279 static rtx rs6000_expand_builtin PARAMS ((tree, rtx, rtx, enum machine_mode, int));
280 static void altivec_init_builtins PARAMS ((void));
281 static void rs6000_common_init_builtins PARAMS ((void));
283 static void enable_mask_for_builtins PARAMS ((struct builtin_description *,
284 int, enum rs6000_builtins,
285 enum rs6000_builtins));
286 static void spe_init_builtins PARAMS ((void));
287 static rtx spe_expand_builtin PARAMS ((tree, rtx, bool *));
288 static rtx spe_expand_predicate_builtin PARAMS ((enum insn_code, tree, rtx));
289 static rtx spe_expand_evsel_builtin PARAMS ((enum insn_code, tree, rtx));
290 static int rs6000_emit_int_cmove PARAMS ((rtx, rtx, rtx, rtx));
292 static rtx altivec_expand_builtin PARAMS ((tree, rtx, bool *));
293 static rtx altivec_expand_ld_builtin PARAMS ((tree, rtx, bool *));
294 static rtx altivec_expand_st_builtin PARAMS ((tree, rtx, bool *));
295 static rtx altivec_expand_dst_builtin PARAMS ((tree, rtx, bool *));
296 static rtx altivec_expand_abs_builtin PARAMS ((enum insn_code, tree, rtx));
297 static rtx altivec_expand_predicate_builtin PARAMS ((enum insn_code, const char *, tree, rtx));
298 static rtx altivec_expand_stv_builtin PARAMS ((enum insn_code, tree));
299 static void rs6000_parse_abi_options PARAMS ((void));
300 static void rs6000_parse_alignment_option PARAMS ((void));
301 static void rs6000_parse_tls_size_option PARAMS ((void));
302 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
303 static int first_altivec_reg_to_save PARAMS ((void));
304 static unsigned int compute_vrsave_mask PARAMS ((void));
305 static void is_altivec_return_reg PARAMS ((rtx, void *));
306 static rtx generate_set_vrsave PARAMS ((rtx, rs6000_stack_t *, int));
307 int easy_vector_constant PARAMS ((rtx, enum machine_mode));
308 static int easy_vector_same PARAMS ((rtx, enum machine_mode));
309 static bool is_ev64_opaque_type PARAMS ((tree));
310 static rtx rs6000_dwarf_register_span PARAMS ((rtx));
311 static rtx rs6000_legitimize_tls_address PARAMS ((rtx, enum tls_model));
312 static rtx rs6000_tls_get_addr PARAMS ((void));
313 static rtx rs6000_got_sym PARAMS ((void));
314 static inline int rs6000_tls_symbol_ref_1 PARAMS ((rtx *, void *));
315 static const char *rs6000_get_some_local_dynamic_name PARAMS ((void));
316 static int rs6000_get_some_local_dynamic_name_1 PARAMS ((rtx *, void *));
317 static rtx rs6000_complex_function_value (enum machine_mode);
318 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *, enum machine_mode, tree);
320 /* Hash table stuff for keeping track of TOC entries. */
322 struct toc_hash_struct GTY(())
324 /* `key' will satisfy CONSTANT_P; in fact, it will satisfy
325 ASM_OUTPUT_SPECIAL_POOL_ENTRY_P. */
327 enum machine_mode key_mode;
331 static GTY ((param_is (struct toc_hash_struct))) htab_t toc_hash_table;
333 /* Default register names. */
334 char rs6000_reg_names[][8] =
336 "0", "1", "2", "3", "4", "5", "6", "7",
337 "8", "9", "10", "11", "12", "13", "14", "15",
338 "16", "17", "18", "19", "20", "21", "22", "23",
339 "24", "25", "26", "27", "28", "29", "30", "31",
340 "0", "1", "2", "3", "4", "5", "6", "7",
341 "8", "9", "10", "11", "12", "13", "14", "15",
342 "16", "17", "18", "19", "20", "21", "22", "23",
343 "24", "25", "26", "27", "28", "29", "30", "31",
344 "mq", "lr", "ctr","ap",
345 "0", "1", "2", "3", "4", "5", "6", "7",
347 /* AltiVec registers. */
348 "0", "1", "2", "3", "4", "5", "6", "7",
349 "8", "9", "10", "11", "12", "13", "14", "15",
350 "16", "17", "18", "19", "20", "21", "22", "23",
351 "24", "25", "26", "27", "28", "29", "30", "31",
357 #ifdef TARGET_REGNAMES
358 static const char alt_reg_names[][8] =
360 "%r0", "%r1", "%r2", "%r3", "%r4", "%r5", "%r6", "%r7",
361 "%r8", "%r9", "%r10", "%r11", "%r12", "%r13", "%r14", "%r15",
362 "%r16", "%r17", "%r18", "%r19", "%r20", "%r21", "%r22", "%r23",
363 "%r24", "%r25", "%r26", "%r27", "%r28", "%r29", "%r30", "%r31",
364 "%f0", "%f1", "%f2", "%f3", "%f4", "%f5", "%f6", "%f7",
365 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
366 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
367 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
368 "mq", "lr", "ctr", "ap",
369 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
371 /* AltiVec registers. */
372 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
373 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
374 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
375 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
382 #ifndef MASK_STRICT_ALIGN
383 #define MASK_STRICT_ALIGN 0
385 #ifndef TARGET_PROFILE_KERNEL
386 #define TARGET_PROFILE_KERNEL 0
389 /* The VRSAVE bitmask puts bit %v0 as the most significant bit. */
390 #define ALTIVEC_REG_BIT(REGNO) (0x80000000 >> ((REGNO) - FIRST_ALTIVEC_REGNO))
392 /* Return 1 for a symbol ref for a thread-local storage symbol. */
393 #define RS6000_SYMBOL_REF_TLS_P(RTX) \
394 (GET_CODE (RTX) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (RTX) != 0)
396 /* Initialize the GCC target structure. */
397 #undef TARGET_ATTRIBUTE_TABLE
398 #define TARGET_ATTRIBUTE_TABLE rs6000_attribute_table
399 #undef TARGET_SET_DEFAULT_TYPE_ATTRIBUTES
400 #define TARGET_SET_DEFAULT_TYPE_ATTRIBUTES rs6000_set_default_type_attributes
402 #undef TARGET_ASM_ALIGNED_DI_OP
403 #define TARGET_ASM_ALIGNED_DI_OP DOUBLE_INT_ASM_OP
405 /* Default unaligned ops are only provided for ELF. Find the ops needed
406 for non-ELF systems. */
407 #ifndef OBJECT_FORMAT_ELF
409 /* For XCOFF. rs6000_assemble_integer will handle unaligned DIs on
411 #undef TARGET_ASM_UNALIGNED_HI_OP
412 #define TARGET_ASM_UNALIGNED_HI_OP "\t.vbyte\t2,"
413 #undef TARGET_ASM_UNALIGNED_SI_OP
414 #define TARGET_ASM_UNALIGNED_SI_OP "\t.vbyte\t4,"
415 #undef TARGET_ASM_UNALIGNED_DI_OP
416 #define TARGET_ASM_UNALIGNED_DI_OP "\t.vbyte\t8,"
419 #undef TARGET_ASM_UNALIGNED_HI_OP
420 #define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
421 #undef TARGET_ASM_UNALIGNED_SI_OP
422 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
426 /* This hook deals with fixups for relocatable code and DI-mode objects
428 #undef TARGET_ASM_INTEGER
429 #define TARGET_ASM_INTEGER rs6000_assemble_integer
431 #ifdef HAVE_GAS_HIDDEN
432 #undef TARGET_ASM_ASSEMBLE_VISIBILITY
433 #define TARGET_ASM_ASSEMBLE_VISIBILITY rs6000_assemble_visibility
436 #undef TARGET_HAVE_TLS
437 #define TARGET_HAVE_TLS HAVE_AS_TLS
439 #undef TARGET_CANNOT_FORCE_CONST_MEM
440 #define TARGET_CANNOT_FORCE_CONST_MEM rs6000_tls_referenced_p
442 #undef TARGET_ASM_FUNCTION_PROLOGUE
443 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
444 #undef TARGET_ASM_FUNCTION_EPILOGUE
445 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
447 #undef TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE
448 #define TARGET_SCHED_USE_DFA_PIPELINE_INTERFACE rs6000_use_dfa_pipeline_interface
449 #undef TARGET_SCHED_VARIABLE_ISSUE
450 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
452 #undef TARGET_SCHED_ISSUE_RATE
453 #define TARGET_SCHED_ISSUE_RATE rs6000_issue_rate
454 #undef TARGET_SCHED_ADJUST_COST
455 #define TARGET_SCHED_ADJUST_COST rs6000_adjust_cost
456 #undef TARGET_SCHED_ADJUST_PRIORITY
457 #define TARGET_SCHED_ADJUST_PRIORITY rs6000_adjust_priority
459 #undef TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD
460 #define TARGET_SCHED_FIRST_CYCLE_MULTIPASS_DFA_LOOKAHEAD rs6000_use_sched_lookahead
462 #undef TARGET_INIT_BUILTINS
463 #define TARGET_INIT_BUILTINS rs6000_init_builtins
465 #undef TARGET_EXPAND_BUILTIN
466 #define TARGET_EXPAND_BUILTIN rs6000_expand_builtin
469 #undef TARGET_BINDS_LOCAL_P
470 #define TARGET_BINDS_LOCAL_P rs6000_binds_local_p
473 #undef TARGET_ASM_OUTPUT_MI_THUNK
474 #define TARGET_ASM_OUTPUT_MI_THUNK rs6000_output_mi_thunk
476 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
477 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true
479 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
480 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
482 #undef TARGET_RTX_COSTS
483 #define TARGET_RTX_COSTS rs6000_rtx_costs
484 #undef TARGET_ADDRESS_COST
485 #define TARGET_ADDRESS_COST hook_int_rtx_0
487 #undef TARGET_VECTOR_OPAQUE_P
488 #define TARGET_VECTOR_OPAQUE_P is_ev64_opaque_type
490 #undef TARGET_DWARF_REGISTER_SPAN
491 #define TARGET_DWARF_REGISTER_SPAN rs6000_dwarf_register_span
493 struct gcc_target targetm = TARGET_INITIALIZER;
495 /* Override command line options. Mostly we process the processor
496 type and sometimes adjust other TARGET_ options. */
499 rs6000_override_options (default_cpu)
500 const char *default_cpu;
503 struct rs6000_cpu_select *ptr;
505 /* Simplify the entries below by making a mask for any POWER
506 variant and any PowerPC variant. */
508 #define POWER_MASKS (MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING)
509 #define POWERPC_MASKS (MASK_POWERPC | MASK_PPC_GPOPT \
510 | MASK_PPC_GFXOPT | MASK_POWERPC64)
511 #define POWERPC_OPT_MASKS (MASK_PPC_GPOPT | MASK_PPC_GFXOPT)
515 const char *const name; /* Canonical processor name. */
516 const enum processor_type processor; /* Processor type enum value. */
517 const int target_enable; /* Target flags to enable. */
518 const int target_disable; /* Target flags to disable. */
519 } const processor_target_table[]
520 = {{"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS,
521 POWER_MASKS | POWERPC_MASKS},
522 {"power", PROCESSOR_POWER,
523 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
524 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
525 {"power2", PROCESSOR_POWER,
526 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
527 POWERPC_MASKS | MASK_NEW_MNEMONICS},
528 {"power3", PROCESSOR_PPC630,
529 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
531 {"power4", PROCESSOR_POWER4,
532 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
534 {"powerpc", PROCESSOR_POWERPC,
535 MASK_POWERPC | MASK_NEW_MNEMONICS,
536 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
537 {"powerpc64", PROCESSOR_POWERPC64,
538 MASK_POWERPC | MASK_POWERPC64 | MASK_NEW_MNEMONICS,
539 POWER_MASKS | POWERPC_OPT_MASKS},
540 {"rios", PROCESSOR_RIOS1,
541 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
542 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
543 {"rios1", PROCESSOR_RIOS1,
544 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
545 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
546 {"rsc", PROCESSOR_PPC601,
547 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
548 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
549 {"rsc1", PROCESSOR_PPC601,
550 MASK_POWER | MASK_MULTIPLE | MASK_STRING,
551 MASK_POWER2 | POWERPC_MASKS | MASK_NEW_MNEMONICS},
552 {"rios2", PROCESSOR_RIOS2,
553 MASK_POWER | MASK_MULTIPLE | MASK_STRING | MASK_POWER2,
554 POWERPC_MASKS | MASK_NEW_MNEMONICS},
555 {"rs64a", PROCESSOR_RS64A,
556 MASK_POWERPC | MASK_NEW_MNEMONICS,
557 POWER_MASKS | POWERPC_OPT_MASKS},
558 {"401", PROCESSOR_PPC403,
559 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
560 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
561 {"403", PROCESSOR_PPC403,
562 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS | MASK_STRICT_ALIGN,
563 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
564 {"405", PROCESSOR_PPC405,
565 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
566 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
567 {"405fp", PROCESSOR_PPC405,
568 MASK_POWERPC | MASK_NEW_MNEMONICS,
569 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
570 {"440", PROCESSOR_PPC440,
571 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
572 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
573 {"440fp", PROCESSOR_PPC440,
574 MASK_POWERPC | MASK_NEW_MNEMONICS,
575 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
576 {"505", PROCESSOR_MPCCORE,
577 MASK_POWERPC | MASK_NEW_MNEMONICS,
578 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
579 {"601", PROCESSOR_PPC601,
580 MASK_POWER | MASK_POWERPC | MASK_NEW_MNEMONICS | MASK_MULTIPLE | MASK_STRING,
581 MASK_POWER2 | POWERPC_OPT_MASKS | MASK_POWERPC64},
582 {"602", PROCESSOR_PPC603,
583 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
584 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
585 {"603", PROCESSOR_PPC603,
586 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
587 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
588 {"603e", PROCESSOR_PPC603,
589 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
590 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
591 {"ec603e", PROCESSOR_PPC603,
592 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
593 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
594 {"604", PROCESSOR_PPC604,
595 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
596 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
597 {"604e", PROCESSOR_PPC604e,
598 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
599 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
600 {"620", PROCESSOR_PPC620,
601 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
603 {"630", PROCESSOR_PPC630,
604 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
606 {"740", PROCESSOR_PPC750,
607 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
608 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
609 {"750", PROCESSOR_PPC750,
610 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
611 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
612 {"7400", PROCESSOR_PPC7400,
613 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
614 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
615 {"7450", PROCESSOR_PPC7450,
616 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
617 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
618 {"8540", PROCESSOR_PPC8540,
619 MASK_POWERPC | MASK_PPC_GFXOPT | MASK_NEW_MNEMONICS,
620 POWER_MASKS | MASK_PPC_GPOPT | MASK_POWERPC64},
621 {"801", PROCESSOR_MPCCORE,
622 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
623 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
624 {"821", PROCESSOR_MPCCORE,
625 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
626 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
627 {"823", PROCESSOR_MPCCORE,
628 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
629 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64},
630 {"860", PROCESSOR_MPCCORE,
631 MASK_POWERPC | MASK_SOFT_FLOAT | MASK_NEW_MNEMONICS,
632 POWER_MASKS | POWERPC_OPT_MASKS | MASK_POWERPC64}};
634 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
636 /* Save current -mmultiple/-mno-multiple status. */
637 int multiple = TARGET_MULTIPLE;
638 /* Save current -mstring/-mno-string status. */
639 int string = TARGET_STRING;
641 /* Identify the processor type. */
642 rs6000_select[0].string = default_cpu;
643 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT;
645 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
647 ptr = &rs6000_select[i];
648 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
650 for (j = 0; j < ptt_size; j++)
651 if (! strcmp (ptr->string, processor_target_table[j].name))
654 rs6000_cpu = processor_target_table[j].processor;
658 target_flags |= processor_target_table[j].target_enable;
659 target_flags &= ~processor_target_table[j].target_disable;
665 error ("bad value (%s) for %s switch", ptr->string, ptr->name);
672 /* If we are optimizing big endian systems for space, use the load/store
673 multiple and string instructions. */
674 if (BYTES_BIG_ENDIAN && optimize_size)
675 target_flags |= MASK_MULTIPLE | MASK_STRING;
677 /* If -mmultiple or -mno-multiple was explicitly used, don't
678 override with the processor default */
679 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
680 target_flags = (target_flags & ~MASK_MULTIPLE) | multiple;
682 /* If -mstring or -mno-string was explicitly used, don't override
683 with the processor default. */
684 if ((target_flags_explicit & MASK_STRING) != 0)
685 target_flags = (target_flags & ~MASK_STRING) | string;
687 /* Don't allow -mmultiple or -mstring on little endian systems
688 unless the cpu is a 750, because the hardware doesn't support the
689 instructions used in little endian mode, and causes an alignment
690 trap. The 750 does not cause an alignment trap (except when the
691 target is unaligned). */
693 if (!BYTES_BIG_ENDIAN && rs6000_cpu != PROCESSOR_PPC750)
697 target_flags &= ~MASK_MULTIPLE;
698 if ((target_flags_explicit & MASK_MULTIPLE) != 0)
699 warning ("-mmultiple is not supported on little endian systems");
704 target_flags &= ~MASK_STRING;
705 if ((target_flags_explicit & MASK_STRING) != 0)
706 warning ("-mstring is not supported on little endian systems");
710 /* Set debug flags */
711 if (rs6000_debug_name)
713 if (! strcmp (rs6000_debug_name, "all"))
714 rs6000_debug_stack = rs6000_debug_arg = 1;
715 else if (! strcmp (rs6000_debug_name, "stack"))
716 rs6000_debug_stack = 1;
717 else if (! strcmp (rs6000_debug_name, "arg"))
718 rs6000_debug_arg = 1;
720 error ("unknown -mdebug-%s switch", rs6000_debug_name);
723 if (rs6000_traceback_name)
725 if (! strncmp (rs6000_traceback_name, "full", 4))
726 rs6000_traceback = traceback_full;
727 else if (! strncmp (rs6000_traceback_name, "part", 4))
728 rs6000_traceback = traceback_part;
729 else if (! strncmp (rs6000_traceback_name, "no", 2))
730 rs6000_traceback = traceback_none;
732 error ("unknown -mtraceback arg `%s'; expecting `full', `partial' or `none'",
733 rs6000_traceback_name);
736 /* Set size of long double */
737 rs6000_long_double_type_size = 64;
738 if (rs6000_long_double_size_string)
741 int size = strtol (rs6000_long_double_size_string, &tail, 10);
742 if (*tail != '\0' || (size != 64 && size != 128))
743 error ("Unknown switch -mlong-double-%s",
744 rs6000_long_double_size_string);
746 rs6000_long_double_type_size = size;
749 /* Handle -mabi= options. */
750 rs6000_parse_abi_options ();
752 /* Handle -malign-XXXXX option. */
753 rs6000_parse_alignment_option ();
755 /* Handle generic -mFOO=YES/NO options. */
756 rs6000_parse_yes_no_option ("vrsave", rs6000_altivec_vrsave_string,
757 &rs6000_altivec_vrsave);
758 rs6000_parse_yes_no_option ("isel", rs6000_isel_string,
760 rs6000_parse_yes_no_option ("spe", rs6000_spe_string, &rs6000_spe);
761 rs6000_parse_yes_no_option ("float-gprs", rs6000_float_gprs_string,
764 /* Handle -mtls-size option. */
765 rs6000_parse_tls_size_option ();
767 #ifdef SUBTARGET_OVERRIDE_OPTIONS
768 SUBTARGET_OVERRIDE_OPTIONS;
770 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
771 SUBSUBTARGET_OVERRIDE_OPTIONS;
776 /* The e500 does not have string instructions, and we set
777 MASK_STRING above when optimizing for size. */
778 if ((target_flags & MASK_STRING) != 0)
779 target_flags = target_flags & ~MASK_STRING;
781 /* No SPE means 64-bit long doubles, even if an E500. */
782 if (rs6000_spe_string != 0
783 && !strcmp (rs6000_spe_string, "no"))
784 rs6000_long_double_type_size = 64;
786 else if (rs6000_select[1].string != NULL)
788 /* For the powerpc-eabispe configuration, we set all these by
789 default, so let's unset them if we manually set another
790 CPU that is not the E500. */
791 if (rs6000_abi_string == 0)
793 if (rs6000_spe_string == 0)
795 if (rs6000_float_gprs_string == 0)
796 rs6000_float_gprs = 0;
797 if (rs6000_isel_string == 0)
799 if (rs6000_long_double_size_string == 0)
800 rs6000_long_double_type_size = 64;
803 /* Handle -m(no-)longcall option. This is a bit of a cheap hack,
804 using TARGET_OPTIONS to handle a toggle switch, but we're out of
805 bits in target_flags so TARGET_SWITCHES cannot be used.
806 Assumption here is that rs6000_longcall_switch points into the
807 text of the complete option, rather than being a copy, so we can
808 scan back for the presence or absence of the no- modifier. */
809 if (rs6000_longcall_switch)
811 const char *base = rs6000_longcall_switch;
812 while (base[-1] != 'm') base--;
814 if (*rs6000_longcall_switch != '\0')
815 error ("invalid option `%s'", base);
816 rs6000_default_long_calls = (base[0] != 'n');
819 #ifdef TARGET_REGNAMES
820 /* If the user desires alternate register names, copy in the
821 alternate names now. */
823 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
826 /* Set TARGET_AIX_STRUCT_RET last, after the ABI is determined.
827 If -maix-struct-return or -msvr4-struct-return was explicitly
828 used, don't override with the ABI default. */
829 if ((target_flags_explicit & MASK_AIX_STRUCT_RET) == 0)
831 if (DEFAULT_ABI == ABI_V4 && !DRAFT_V4_STRUCT_RET)
832 target_flags = (target_flags & ~MASK_AIX_STRUCT_RET);
834 target_flags |= MASK_AIX_STRUCT_RET;
837 if (TARGET_LONG_DOUBLE_128
838 && (DEFAULT_ABI == ABI_AIX || DEFAULT_ABI == ABI_DARWIN))
839 real_format_for_mode[TFmode - QFmode] = &ibm_extended_format;
841 /* Allocate an alias set for register saves & restores from stack. */
842 rs6000_sr_alias_set = new_alias_set ();
845 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
847 /* We can only guarantee the availability of DI pseudo-ops when
848 assembling for 64-bit targets. */
851 targetm.asm_out.aligned_op.di = NULL;
852 targetm.asm_out.unaligned_op.di = NULL;
855 /* Set maximum branch target alignment at two instructions, eight bytes. */
856 align_jumps_max_skip = 8;
857 align_loops_max_skip = 8;
859 /* Arrange to save and restore machine status around nested functions. */
860 init_machine_status = rs6000_init_machine_status;
863 /* Handle generic options of the form -mfoo=yes/no.
864 NAME is the option name.
865 VALUE is the option value.
866 FLAG is the pointer to the flag where to store a 1 or 0, depending on
867 whether the option value is 'yes' or 'no' respectively. */
869 rs6000_parse_yes_no_option (const char *name, const char *value, int *flag)
873 else if (!strcmp (value, "yes"))
875 else if (!strcmp (value, "no"))
878 error ("unknown -m%s= option specified: '%s'", name, value);
881 /* Handle -mabi= options. */
883 rs6000_parse_abi_options ()
885 if (rs6000_abi_string == 0)
887 else if (! strcmp (rs6000_abi_string, "altivec"))
888 rs6000_altivec_abi = 1;
889 else if (! strcmp (rs6000_abi_string, "no-altivec"))
890 rs6000_altivec_abi = 0;
891 else if (! strcmp (rs6000_abi_string, "spe"))
895 error ("not configured for ABI: '%s'", rs6000_abi_string);
898 else if (! strcmp (rs6000_abi_string, "no-spe"))
901 error ("unknown ABI specified: '%s'", rs6000_abi_string);
904 /* Handle -malign-XXXXXX options. */
906 rs6000_parse_alignment_option ()
908 if (rs6000_alignment_string == 0
909 || ! strcmp (rs6000_alignment_string, "power"))
910 rs6000_alignment_flags = MASK_ALIGN_POWER;
911 else if (! strcmp (rs6000_alignment_string, "natural"))
912 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
914 error ("unknown -malign-XXXXX option specified: '%s'",
915 rs6000_alignment_string);
918 /* Validate and record the size specified with the -mtls-size option. */
921 rs6000_parse_tls_size_option ()
923 if (rs6000_tls_size_string == 0)
925 else if (strcmp (rs6000_tls_size_string, "16") == 0)
926 rs6000_tls_size = 16;
927 else if (strcmp (rs6000_tls_size_string, "32") == 0)
928 rs6000_tls_size = 32;
929 else if (strcmp (rs6000_tls_size_string, "64") == 0)
930 rs6000_tls_size = 64;
932 error ("bad value `%s' for -mtls-size switch", rs6000_tls_size_string);
936 optimization_options (level, size)
937 int level ATTRIBUTE_UNUSED;
938 int size ATTRIBUTE_UNUSED;
942 /* Do anything needed at the start of the asm file. */
945 rs6000_file_start (file, default_cpu)
947 const char *default_cpu;
951 const char *start = buffer;
952 struct rs6000_cpu_select *ptr;
954 if (flag_verbose_asm)
956 sprintf (buffer, "\n%s rs6000/powerpc options:", ASM_COMMENT_START);
957 rs6000_select[0].string = default_cpu;
959 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++)
961 ptr = &rs6000_select[i];
962 if (ptr->string != (char *)0 && ptr->string[0] != '\0')
964 fprintf (file, "%s %s%s", start, ptr->name, ptr->string);
970 switch (rs6000_sdata)
972 case SDATA_NONE: fprintf (file, "%s -msdata=none", start); start = ""; break;
973 case SDATA_DATA: fprintf (file, "%s -msdata=data", start); start = ""; break;
974 case SDATA_SYSV: fprintf (file, "%s -msdata=sysv", start); start = ""; break;
975 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
978 if (rs6000_sdata && g_switch_value)
980 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start,
991 /* Return nonzero if this function is known to have a null epilogue. */
996 if (reload_completed)
998 rs6000_stack_t *info = rs6000_stack_info ();
1000 if (info->first_gp_reg_save == 32
1001 && info->first_fp_reg_save == 64
1002 && info->first_altivec_reg_save == LAST_ALTIVEC_REGNO + 1
1003 && ! info->lr_save_p
1004 && ! info->cr_save_p
1005 && info->vrsave_mask == 0
1013 /* Returns 1 always. */
1016 any_operand (op, mode)
1017 rtx op ATTRIBUTE_UNUSED;
1018 enum machine_mode mode ATTRIBUTE_UNUSED;
1023 /* Returns 1 if op is the count register. */
1025 count_register_operand (op, mode)
1027 enum machine_mode mode ATTRIBUTE_UNUSED;
1029 if (GET_CODE (op) != REG)
1032 if (REGNO (op) == COUNT_REGISTER_REGNUM)
1035 if (REGNO (op) > FIRST_PSEUDO_REGISTER)
1041 /* Returns 1 if op is an altivec register. */
1043 altivec_register_operand (op, mode)
1045 enum machine_mode mode ATTRIBUTE_UNUSED;
1048 return (register_operand (op, mode)
1049 && (GET_CODE (op) != REG
1050 || REGNO (op) > FIRST_PSEUDO_REGISTER
1051 || ALTIVEC_REGNO_P (REGNO (op))));
1055 xer_operand (op, mode)
1057 enum machine_mode mode ATTRIBUTE_UNUSED;
1059 if (GET_CODE (op) != REG)
1062 if (XER_REGNO_P (REGNO (op)))
1068 /* Return 1 if OP is a signed 8-bit constant. Int multiplication
1069 by such constants completes more quickly. */
1072 s8bit_cint_operand (op, mode)
1074 enum machine_mode mode ATTRIBUTE_UNUSED;
1076 return ( GET_CODE (op) == CONST_INT
1077 && (INTVAL (op) >= -128 && INTVAL (op) <= 127));
1080 /* Return 1 if OP is a constant that can fit in a D field. */
1083 short_cint_operand (op, mode)
1085 enum machine_mode mode ATTRIBUTE_UNUSED;
1087 return (GET_CODE (op) == CONST_INT
1088 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'));
1091 /* Similar for an unsigned D field. */
1094 u_short_cint_operand (op, mode)
1096 enum machine_mode mode ATTRIBUTE_UNUSED;
1098 return (GET_CODE (op) == CONST_INT
1099 && CONST_OK_FOR_LETTER_P (INTVAL (op) & GET_MODE_MASK (mode), 'K'));
1102 /* Return 1 if OP is a CONST_INT that cannot fit in a signed D field. */
1105 non_short_cint_operand (op, mode)
1107 enum machine_mode mode ATTRIBUTE_UNUSED;
1109 return (GET_CODE (op) == CONST_INT
1110 && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x8000) >= 0x10000);
1113 /* Returns 1 if OP is a CONST_INT that is a positive value
1114 and an exact power of 2. */
1117 exact_log2_cint_operand (op, mode)
1119 enum machine_mode mode ATTRIBUTE_UNUSED;
1121 return (GET_CODE (op) == CONST_INT
1123 && exact_log2 (INTVAL (op)) >= 0);
1126 /* Returns 1 if OP is a register that is not special (i.e., not MQ,
1130 gpc_reg_operand (op, mode)
1132 enum machine_mode mode;
1134 return (register_operand (op, mode)
1135 && (GET_CODE (op) != REG
1136 || (REGNO (op) >= ARG_POINTER_REGNUM
1137 && !XER_REGNO_P (REGNO (op)))
1138 || REGNO (op) < MQ_REGNO));
1141 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1145 cc_reg_operand (op, mode)
1147 enum machine_mode mode;
1149 return (register_operand (op, mode)
1150 && (GET_CODE (op) != REG
1151 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1152 || CR_REGNO_P (REGNO (op))));
1155 /* Returns 1 if OP is either a pseudo-register or a register denoting a
1156 CR field that isn't CR0. */
1159 cc_reg_not_cr0_operand (op, mode)
1161 enum machine_mode mode;
1163 return (register_operand (op, mode)
1164 && (GET_CODE (op) != REG
1165 || REGNO (op) >= FIRST_PSEUDO_REGISTER
1166 || CR_REGNO_NOT_CR0_P (REGNO (op))));
1169 /* Returns 1 if OP is either a constant integer valid for a D-field or
1170 a non-special register. If a register, it must be in the proper
1171 mode unless MODE is VOIDmode. */
1174 reg_or_short_operand (op, mode)
1176 enum machine_mode mode;
1178 return short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1181 /* Similar, except check if the negation of the constant would be
1182 valid for a D-field. */
1185 reg_or_neg_short_operand (op, mode)
1187 enum machine_mode mode;
1189 if (GET_CODE (op) == CONST_INT)
1190 return CONST_OK_FOR_LETTER_P (INTVAL (op), 'P');
1192 return gpc_reg_operand (op, mode);
1195 /* Returns 1 if OP is either a constant integer valid for a DS-field or
1196 a non-special register. If a register, it must be in the proper
1197 mode unless MODE is VOIDmode. */
1200 reg_or_aligned_short_operand (op, mode)
1202 enum machine_mode mode;
1204 if (gpc_reg_operand (op, mode))
1206 else if (short_cint_operand (op, mode) && !(INTVAL (op) & 3))
1213 /* Return 1 if the operand is either a register or an integer whose
1214 high-order 16 bits are zero. */
1217 reg_or_u_short_operand (op, mode)
1219 enum machine_mode mode;
1221 return u_short_cint_operand (op, mode) || gpc_reg_operand (op, mode);
1224 /* Return 1 is the operand is either a non-special register or ANY
1225 constant integer. */
1228 reg_or_cint_operand (op, mode)
1230 enum machine_mode mode;
1232 return (GET_CODE (op) == CONST_INT || gpc_reg_operand (op, mode));
1235 /* Return 1 is the operand is either a non-special register or ANY
1236 32-bit signed constant integer. */
1239 reg_or_arith_cint_operand (op, mode)
1241 enum machine_mode mode;
1243 return (gpc_reg_operand (op, mode)
1244 || (GET_CODE (op) == CONST_INT
1245 #if HOST_BITS_PER_WIDE_INT != 32
1246 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80000000)
1247 < (unsigned HOST_WIDE_INT) 0x100000000ll)
1252 /* Return 1 is the operand is either a non-special register or a 32-bit
1253 signed constant integer valid for 64-bit addition. */
1256 reg_or_add_cint64_operand (op, mode)
1258 enum machine_mode mode;
1260 return (gpc_reg_operand (op, mode)
1261 || (GET_CODE (op) == CONST_INT
1262 #if HOST_BITS_PER_WIDE_INT == 32
1263 && INTVAL (op) < 0x7fff8000
1265 && ((unsigned HOST_WIDE_INT) (INTVAL (op) + 0x80008000)
1271 /* Return 1 is the operand is either a non-special register or a 32-bit
1272 signed constant integer valid for 64-bit subtraction. */
1275 reg_or_sub_cint64_operand (op, mode)
1277 enum machine_mode mode;
1279 return (gpc_reg_operand (op, mode)
1280 || (GET_CODE (op) == CONST_INT
1281 #if HOST_BITS_PER_WIDE_INT == 32
1282 && (- INTVAL (op)) < 0x7fff8000
1284 && ((unsigned HOST_WIDE_INT) ((- INTVAL (op)) + 0x80008000)
1290 /* Return 1 is the operand is either a non-special register or ANY
1291 32-bit unsigned constant integer. */
1294 reg_or_logical_cint_operand (op, mode)
1296 enum machine_mode mode;
1298 if (GET_CODE (op) == CONST_INT)
1300 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
1302 if (GET_MODE_BITSIZE (mode) <= 32)
1305 if (INTVAL (op) < 0)
1309 return ((INTVAL (op) & GET_MODE_MASK (mode)
1310 & (~ (unsigned HOST_WIDE_INT) 0xffffffff)) == 0);
1312 else if (GET_CODE (op) == CONST_DOUBLE)
1314 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
1318 return CONST_DOUBLE_HIGH (op) == 0;
1321 return gpc_reg_operand (op, mode);
1324 /* Return 1 if the operand is an operand that can be loaded via the GOT. */
1327 got_operand (op, mode)
1329 enum machine_mode mode ATTRIBUTE_UNUSED;
1331 return (GET_CODE (op) == SYMBOL_REF
1332 || GET_CODE (op) == CONST
1333 || GET_CODE (op) == LABEL_REF);
1336 /* Return 1 if the operand is a simple references that can be loaded via
1337 the GOT (labels involving addition aren't allowed). */
1340 got_no_const_operand (op, mode)
1342 enum machine_mode mode ATTRIBUTE_UNUSED;
1344 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF);
1347 /* Return the number of instructions it takes to form a constant in an
1348 integer register. */
1351 num_insns_constant_wide (value)
1352 HOST_WIDE_INT value;
1354 /* signed constant loadable with {cal|addi} */
1355 if (CONST_OK_FOR_LETTER_P (value, 'I'))
1358 /* constant loadable with {cau|addis} */
1359 else if (CONST_OK_FOR_LETTER_P (value, 'L'))
1362 #if HOST_BITS_PER_WIDE_INT == 64
1363 else if (TARGET_POWERPC64)
1365 HOST_WIDE_INT low = ((value & 0xffffffff) ^ 0x80000000) - 0x80000000;
1366 HOST_WIDE_INT high = value >> 31;
1368 if (high == 0 || high == -1)
1374 return num_insns_constant_wide (high) + 1;
1376 return (num_insns_constant_wide (high)
1377 + num_insns_constant_wide (low) + 1);
1386 num_insns_constant (op, mode)
1388 enum machine_mode mode;
1390 if (GET_CODE (op) == CONST_INT)
1392 #if HOST_BITS_PER_WIDE_INT == 64
1393 if ((INTVAL (op) >> 31) != 0 && (INTVAL (op) >> 31) != -1
1394 && mask64_operand (op, mode))
1398 return num_insns_constant_wide (INTVAL (op));
1401 else if (GET_CODE (op) == CONST_DOUBLE && mode == SFmode)
1406 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1407 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1408 return num_insns_constant_wide ((HOST_WIDE_INT) l);
1411 else if (GET_CODE (op) == CONST_DOUBLE)
1417 int endian = (WORDS_BIG_ENDIAN == 0);
1419 if (mode == VOIDmode || mode == DImode)
1421 high = CONST_DOUBLE_HIGH (op);
1422 low = CONST_DOUBLE_LOW (op);
1426 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1427 REAL_VALUE_TO_TARGET_DOUBLE (rv, l);
1429 low = l[1 - endian];
1433 return (num_insns_constant_wide (low)
1434 + num_insns_constant_wide (high));
1438 if (high == 0 && low >= 0)
1439 return num_insns_constant_wide (low);
1441 else if (high == -1 && low < 0)
1442 return num_insns_constant_wide (low);
1444 else if (mask64_operand (op, mode))
1448 return num_insns_constant_wide (high) + 1;
1451 return (num_insns_constant_wide (high)
1452 + num_insns_constant_wide (low) + 1);
1460 /* Return 1 if the operand is a CONST_DOUBLE and it can be put into a
1461 register with one instruction per word. We only do this if we can
1462 safely read CONST_DOUBLE_{LOW,HIGH}. */
1465 easy_fp_constant (op, mode)
1467 enum machine_mode mode;
1469 if (GET_CODE (op) != CONST_DOUBLE
1470 || GET_MODE (op) != mode
1471 || (GET_MODE_CLASS (mode) != MODE_FLOAT && mode != DImode))
1474 /* Consider all constants with -msoft-float to be easy. */
1475 if ((TARGET_SOFT_FLOAT || !TARGET_FPRS)
1479 /* If we are using V.4 style PIC, consider all constants to be hard. */
1480 if (flag_pic && DEFAULT_ABI == ABI_V4)
1483 #ifdef TARGET_RELOCATABLE
1484 /* Similarly if we are using -mrelocatable, consider all constants
1486 if (TARGET_RELOCATABLE)
1495 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1496 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1498 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1499 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1
1500 && num_insns_constant_wide ((HOST_WIDE_INT) k[2]) == 1
1501 && num_insns_constant_wide ((HOST_WIDE_INT) k[3]) == 1);
1504 else if (mode == DFmode)
1509 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1510 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1512 return (num_insns_constant_wide ((HOST_WIDE_INT) k[0]) == 1
1513 && num_insns_constant_wide ((HOST_WIDE_INT) k[1]) == 1);
1516 else if (mode == SFmode)
1521 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1522 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1524 return num_insns_constant_wide (l) == 1;
1527 else if (mode == DImode)
1528 return ((TARGET_POWERPC64
1529 && GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_LOW (op) == 0)
1530 || (num_insns_constant (op, DImode) <= 2));
1532 else if (mode == SImode)
1538 /* Return nonzero if all elements of a vector have the same value. */
1541 easy_vector_same (op, mode)
1543 enum machine_mode mode ATTRIBUTE_UNUSED;
1547 units = CONST_VECTOR_NUNITS (op);
1549 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1550 for (i = 1; i < units; ++i)
1551 if (INTVAL (CONST_VECTOR_ELT (op, i)) != cst)
1558 /* Return 1 if the operand is a CONST_INT and can be put into a
1559 register without using memory. */
1562 easy_vector_constant (op, mode)
1564 enum machine_mode mode;
1568 if (GET_CODE (op) != CONST_VECTOR
1573 if (zero_constant (op, mode)
1574 && ((TARGET_ALTIVEC && ALTIVEC_VECTOR_MODE (mode))
1575 || (TARGET_SPE && SPE_VECTOR_MODE (mode))))
1578 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
1581 if (TARGET_SPE && mode == V1DImode)
1584 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1585 cst2 = INTVAL (CONST_VECTOR_ELT (op, 1));
1587 /* Limit SPE vectors to 15 bits signed. These we can generate with:
1589 evmergelo r0, r0, r0
1592 I don't know how efficient it would be to allow bigger constants,
1593 considering we'll have an extra 'ori' for every 'li'. I doubt 5
1594 instructions is better than a 64-bit memory load, but I don't
1595 have the e500 timing specs. */
1596 if (TARGET_SPE && mode == V2SImode
1597 && cst >= -0x7fff && cst <= 0x7fff
1598 && cst2 >= -0x7fff && cst2 <= 0x7fff)
1601 if (TARGET_ALTIVEC && EASY_VECTOR_15 (cst, op, mode))
1604 if (TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode))
1610 /* Same as easy_vector_constant but only for EASY_VECTOR_15_ADD_SELF. */
1613 easy_vector_constant_add_self (op, mode)
1615 enum machine_mode mode;
1619 if (!easy_vector_constant (op, mode))
1622 cst = INTVAL (CONST_VECTOR_ELT (op, 0));
1624 return TARGET_ALTIVEC && EASY_VECTOR_15_ADD_SELF (cst, op, mode);
1628 output_vec_const_move (operands)
1632 enum machine_mode mode;
1638 cst = INTVAL (CONST_VECTOR_ELT (vec, 0));
1639 cst2 = INTVAL (CONST_VECTOR_ELT (vec, 1));
1640 mode = GET_MODE (dest);
1644 if (zero_constant (vec, mode))
1645 return "vxor %0,%0,%0";
1646 else if (EASY_VECTOR_15 (cst, vec, mode))
1648 operands[1] = GEN_INT (cst);
1652 return "vspltisw %0,%1";
1654 return "vspltish %0,%1";
1656 return "vspltisb %0,%1";
1661 else if (EASY_VECTOR_15_ADD_SELF (cst, vec, mode))
1669 /* Vector constant 0 is handled as a splitter of V2SI, and in the
1670 pattern of V1DI, V4HI, and V2SF.
1672 FIXME: We should probabl return # and add post reload
1673 splitters for these, but this way is so easy ;-).
1675 operands[1] = GEN_INT (cst);
1676 operands[2] = GEN_INT (cst2);
1678 return "li %0,%1\n\tevmergelo %0,%0,%0";
1680 return "li %0,%1\n\tevmergelo %0,%0,%0\n\tli %0,%2";
1686 /* Return 1 if the operand is the constant 0. This works for scalars
1687 as well as vectors. */
1689 zero_constant (op, mode)
1691 enum machine_mode mode;
1693 return op == CONST0_RTX (mode);
1696 /* Return 1 if the operand is 0.0. */
1698 zero_fp_constant (op, mode)
1700 enum machine_mode mode;
1702 return GET_MODE_CLASS (mode) == MODE_FLOAT && op == CONST0_RTX (mode);
1705 /* Return 1 if the operand is in volatile memory. Note that during
1706 the RTL generation phase, memory_operand does not return TRUE for
1707 volatile memory references. So this function allows us to
1708 recognize volatile references where its safe. */
1711 volatile_mem_operand (op, mode)
1713 enum machine_mode mode;
1715 if (GET_CODE (op) != MEM)
1718 if (!MEM_VOLATILE_P (op))
1721 if (mode != GET_MODE (op))
1724 if (reload_completed)
1725 return memory_operand (op, mode);
1727 if (reload_in_progress)
1728 return strict_memory_address_p (mode, XEXP (op, 0));
1730 return memory_address_p (mode, XEXP (op, 0));
1733 /* Return 1 if the operand is an offsettable memory operand. */
1736 offsettable_mem_operand (op, mode)
1738 enum machine_mode mode;
1740 return ((GET_CODE (op) == MEM)
1741 && offsettable_address_p (reload_completed || reload_in_progress,
1742 mode, XEXP (op, 0)));
1745 /* Return 1 if the operand is either an easy FP constant (see above) or
1749 mem_or_easy_const_operand (op, mode)
1751 enum machine_mode mode;
1753 return memory_operand (op, mode) || easy_fp_constant (op, mode);
1756 /* Return 1 if the operand is either a non-special register or an item
1757 that can be used as the operand of a `mode' add insn. */
1760 add_operand (op, mode)
1762 enum machine_mode mode;
1764 if (GET_CODE (op) == CONST_INT)
1765 return (CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1766 || CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1768 return gpc_reg_operand (op, mode);
1771 /* Return 1 if OP is a constant but not a valid add_operand. */
1774 non_add_cint_operand (op, mode)
1776 enum machine_mode mode ATTRIBUTE_UNUSED;
1778 return (GET_CODE (op) == CONST_INT
1779 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'I')
1780 && !CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'));
1783 /* Return 1 if the operand is a non-special register or a constant that
1784 can be used as the operand of an OR or XOR insn on the RS/6000. */
1787 logical_operand (op, mode)
1789 enum machine_mode mode;
1791 HOST_WIDE_INT opl, oph;
1793 if (gpc_reg_operand (op, mode))
1796 if (GET_CODE (op) == CONST_INT)
1798 opl = INTVAL (op) & GET_MODE_MASK (mode);
1800 #if HOST_BITS_PER_WIDE_INT <= 32
1801 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT && opl < 0)
1805 else if (GET_CODE (op) == CONST_DOUBLE)
1807 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1810 opl = CONST_DOUBLE_LOW (op);
1811 oph = CONST_DOUBLE_HIGH (op);
1818 return ((opl & ~ (unsigned HOST_WIDE_INT) 0xffff) == 0
1819 || (opl & ~ (unsigned HOST_WIDE_INT) 0xffff0000) == 0);
1822 /* Return 1 if C is a constant that is not a logical operand (as
1823 above), but could be split into one. */
1826 non_logical_cint_operand (op, mode)
1828 enum machine_mode mode;
1830 return ((GET_CODE (op) == CONST_INT || GET_CODE (op) == CONST_DOUBLE)
1831 && ! logical_operand (op, mode)
1832 && reg_or_logical_cint_operand (op, mode));
1835 /* Return 1 if C is a constant that can be encoded in a 32-bit mask on the
1836 RS/6000. It is if there are no more than two 1->0 or 0->1 transitions.
1837 Reject all ones and all zeros, since these should have been optimized
1838 away and confuse the making of MB and ME. */
1841 mask_operand (op, mode)
1843 enum machine_mode mode ATTRIBUTE_UNUSED;
1845 HOST_WIDE_INT c, lsb;
1847 if (GET_CODE (op) != CONST_INT)
1852 /* Fail in 64-bit mode if the mask wraps around because the upper
1853 32-bits of the mask will all be 1s, contrary to GCC's internal view. */
1854 if (TARGET_POWERPC64 && (c & 0x80000001) == 0x80000001)
1857 /* We don't change the number of transitions by inverting,
1858 so make sure we start with the LS bit zero. */
1862 /* Reject all zeros or all ones. */
1866 /* Find the first transition. */
1869 /* Invert to look for a second transition. */
1872 /* Erase first transition. */
1875 /* Find the second transition (if any). */
1878 /* Match if all the bits above are 1's (or c is zero). */
1882 /* Return 1 for the PowerPC64 rlwinm corner case. */
1885 mask_operand_wrap (op, mode)
1887 enum machine_mode mode ATTRIBUTE_UNUSED;
1889 HOST_WIDE_INT c, lsb;
1891 if (GET_CODE (op) != CONST_INT)
1896 if ((c & 0x80000001) != 0x80000001)
1910 /* Return 1 if the operand is a constant that is a PowerPC64 mask.
1911 It is if there are no more than one 1->0 or 0->1 transitions.
1912 Reject all zeros, since zero should have been optimized away and
1913 confuses the making of MB and ME. */
1916 mask64_operand (op, mode)
1918 enum machine_mode mode ATTRIBUTE_UNUSED;
1920 if (GET_CODE (op) == CONST_INT)
1922 HOST_WIDE_INT c, lsb;
1926 /* Reject all zeros. */
1930 /* We don't change the number of transitions by inverting,
1931 so make sure we start with the LS bit zero. */
1935 /* Find the transition, and check that all bits above are 1's. */
1938 /* Match if all the bits above are 1's (or c is zero). */
1944 /* Like mask64_operand, but allow up to three transitions. This
1945 predicate is used by insn patterns that generate two rldicl or
1946 rldicr machine insns. */
1949 mask64_2_operand (op, mode)
1951 enum machine_mode mode ATTRIBUTE_UNUSED;
1953 if (GET_CODE (op) == CONST_INT)
1955 HOST_WIDE_INT c, lsb;
1959 /* Disallow all zeros. */
1963 /* We don't change the number of transitions by inverting,
1964 so make sure we start with the LS bit zero. */
1968 /* Find the first transition. */
1971 /* Invert to look for a second transition. */
1974 /* Erase first transition. */
1977 /* Find the second transition. */
1980 /* Invert to look for a third transition. */
1983 /* Erase second transition. */
1986 /* Find the third transition (if any). */
1989 /* Match if all the bits above are 1's (or c is zero). */
1995 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
1996 implement ANDing by the mask IN. */
1998 build_mask64_2_operands (in, out)
2002 #if HOST_BITS_PER_WIDE_INT >= 64
2003 unsigned HOST_WIDE_INT c, lsb, m1, m2;
2006 if (GET_CODE (in) != CONST_INT)
2012 /* Assume c initially something like 0x00fff000000fffff. The idea
2013 is to rotate the word so that the middle ^^^^^^ group of zeros
2014 is at the MS end and can be cleared with an rldicl mask. We then
2015 rotate back and clear off the MS ^^ group of zeros with a
2017 c = ~c; /* c == 0xff000ffffff00000 */
2018 lsb = c & -c; /* lsb == 0x0000000000100000 */
2019 m1 = -lsb; /* m1 == 0xfffffffffff00000 */
2020 c = ~c; /* c == 0x00fff000000fffff */
2021 c &= -lsb; /* c == 0x00fff00000000000 */
2022 lsb = c & -c; /* lsb == 0x0000100000000000 */
2023 c = ~c; /* c == 0xff000fffffffffff */
2024 c &= -lsb; /* c == 0xff00000000000000 */
2026 while ((lsb >>= 1) != 0)
2027 shift++; /* shift == 44 on exit from loop */
2028 m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
2029 m1 = ~m1; /* m1 == 0x000000ffffffffff */
2030 m2 = ~c; /* m2 == 0x00ffffffffffffff */
2034 /* Assume c initially something like 0xff000f0000000000. The idea
2035 is to rotate the word so that the ^^^ middle group of zeros
2036 is at the LS end and can be cleared with an rldicr mask. We then
2037 rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
2039 lsb = c & -c; /* lsb == 0x0000010000000000 */
2040 m2 = -lsb; /* m2 == 0xffffff0000000000 */
2041 c = ~c; /* c == 0x00fff0ffffffffff */
2042 c &= -lsb; /* c == 0x00fff00000000000 */
2043 lsb = c & -c; /* lsb == 0x0000100000000000 */
2044 c = ~c; /* c == 0xff000fffffffffff */
2045 c &= -lsb; /* c == 0xff00000000000000 */
2047 while ((lsb >>= 1) != 0)
2048 shift++; /* shift == 44 on exit from loop */
2049 m1 = ~c; /* m1 == 0x00ffffffffffffff */
2050 m1 >>= shift; /* m1 == 0x0000000000000fff */
2051 m1 = ~m1; /* m1 == 0xfffffffffffff000 */
2054 /* Note that when we only have two 0->1 and 1->0 transitions, one of the
2055 masks will be all 1's. We are guaranteed more than one transition. */
2056 out[0] = GEN_INT (64 - shift);
2057 out[1] = GEN_INT (m1);
2058 out[2] = GEN_INT (shift);
2059 out[3] = GEN_INT (m2);
2067 /* Return 1 if the operand is either a non-special register or a constant
2068 that can be used as the operand of a PowerPC64 logical AND insn. */
2071 and64_operand (op, mode)
2073 enum machine_mode mode;
2075 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2076 return (gpc_reg_operand (op, mode) || mask64_operand (op, mode));
2078 return (logical_operand (op, mode) || mask64_operand (op, mode));
2081 /* Like the above, but also match constants that can be implemented
2082 with two rldicl or rldicr insns. */
2085 and64_2_operand (op, mode)
2087 enum machine_mode mode;
2089 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2090 return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
2092 return logical_operand (op, mode) || mask64_2_operand (op, mode);
2095 /* Return 1 if the operand is either a non-special register or a
2096 constant that can be used as the operand of an RS/6000 logical AND insn. */
2099 and_operand (op, mode)
2101 enum machine_mode mode;
2103 if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
2104 return (gpc_reg_operand (op, mode) || mask_operand (op, mode));
2106 return (logical_operand (op, mode) || mask_operand (op, mode));
2109 /* Return 1 if the operand is a general register or memory operand. */
2112 reg_or_mem_operand (op, mode)
2114 enum machine_mode mode;
2116 return (gpc_reg_operand (op, mode)
2117 || memory_operand (op, mode)
2118 || volatile_mem_operand (op, mode));
2121 /* Return 1 if the operand is a general register or memory operand without
2122 pre_inc or pre_dec which produces invalid form of PowerPC lwa
2126 lwa_operand (op, mode)
2128 enum machine_mode mode;
2132 if (reload_completed && GET_CODE (inner) == SUBREG)
2133 inner = SUBREG_REG (inner);
2135 return gpc_reg_operand (inner, mode)
2136 || (memory_operand (inner, mode)
2137 && GET_CODE (XEXP (inner, 0)) != PRE_INC
2138 && GET_CODE (XEXP (inner, 0)) != PRE_DEC
2139 && (GET_CODE (XEXP (inner, 0)) != PLUS
2140 || GET_CODE (XEXP (XEXP (inner, 0), 1)) != CONST_INT
2141 || INTVAL (XEXP (XEXP (inner, 0), 1)) % 4 == 0));
2144 /* Return 1 if the operand, used inside a MEM, is a SYMBOL_REF. */
2147 symbol_ref_operand (op, mode)
2149 enum machine_mode mode;
2151 if (mode != VOIDmode && GET_MODE (op) != mode)
2154 return (GET_CODE (op) == SYMBOL_REF
2155 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op)));
2158 /* Return 1 if the operand, used inside a MEM, is a valid first argument
2159 to CALL. This is a SYMBOL_REF, a pseudo-register, LR or CTR. */
2162 call_operand (op, mode)
2164 enum machine_mode mode;
2166 if (mode != VOIDmode && GET_MODE (op) != mode)
2169 return (GET_CODE (op) == SYMBOL_REF
2170 || (GET_CODE (op) == REG
2171 && (REGNO (op) == LINK_REGISTER_REGNUM
2172 || REGNO (op) == COUNT_REGISTER_REGNUM
2173 || REGNO (op) >= FIRST_PSEUDO_REGISTER)));
2176 /* Return 1 if the operand is a SYMBOL_REF for a function known to be in
2180 current_file_function_operand (op, mode)
2182 enum machine_mode mode ATTRIBUTE_UNUSED;
2184 return (GET_CODE (op) == SYMBOL_REF
2185 && (DEFAULT_ABI != ABI_AIX || SYMBOL_REF_FUNCTION_P (op))
2186 && (SYMBOL_REF_LOCAL_P (op)
2187 || (op == XEXP (DECL_RTL (current_function_decl), 0))));
2190 /* Return 1 if this operand is a valid input for a move insn. */
2193 input_operand (op, mode)
2195 enum machine_mode mode;
2197 /* Memory is always valid. */
2198 if (memory_operand (op, mode))
2201 /* Only a tiny bit of handling for CONSTANT_P_RTX is necessary. */
2202 if (GET_CODE (op) == CONSTANT_P_RTX)
2205 /* For floating-point, easy constants are valid. */
2206 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2208 && easy_fp_constant (op, mode))
2211 /* Allow any integer constant. */
2212 if (GET_MODE_CLASS (mode) == MODE_INT
2213 && (GET_CODE (op) == CONST_INT
2214 || GET_CODE (op) == CONST_DOUBLE))
2217 /* Allow easy vector constants. */
2218 if (GET_CODE (op) == CONST_VECTOR
2219 && easy_vector_constant (op, mode))
2222 /* For floating-point or multi-word mode, the only remaining valid type
2224 if (GET_MODE_CLASS (mode) == MODE_FLOAT
2225 || GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2226 return register_operand (op, mode);
2228 /* The only cases left are integral modes one word or smaller (we
2229 do not get called for MODE_CC values). These can be in any
2231 if (register_operand (op, mode))
2234 /* A SYMBOL_REF referring to the TOC is valid. */
2235 if (legitimate_constant_pool_address_p (op))
2238 /* A constant pool expression (relative to the TOC) is valid */
2239 if (toc_relative_expr_p (op))
2242 /* V.4 allows SYMBOL_REFs and CONSTs that are in the small data region
2244 if (DEFAULT_ABI == ABI_V4
2245 && (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST)
2246 && small_data_operand (op, Pmode))
2252 /* Return 1 for an operand in small memory on V.4/eabi. */
2255 small_data_operand (op, mode)
2256 rtx op ATTRIBUTE_UNUSED;
2257 enum machine_mode mode ATTRIBUTE_UNUSED;
2262 if (rs6000_sdata == SDATA_NONE || rs6000_sdata == SDATA_DATA)
2265 if (DEFAULT_ABI != ABI_V4)
2268 if (GET_CODE (op) == SYMBOL_REF)
2271 else if (GET_CODE (op) != CONST
2272 || GET_CODE (XEXP (op, 0)) != PLUS
2273 || GET_CODE (XEXP (XEXP (op, 0), 0)) != SYMBOL_REF
2274 || GET_CODE (XEXP (XEXP (op, 0), 1)) != CONST_INT)
2279 rtx sum = XEXP (op, 0);
2280 HOST_WIDE_INT summand;
2282 /* We have to be careful here, because it is the referenced address
2283 that must be 32k from _SDA_BASE_, not just the symbol. */
2284 summand = INTVAL (XEXP (sum, 1));
2285 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value)
2288 sym_ref = XEXP (sum, 0);
2291 return SYMBOL_REF_SMALL_P (sym_ref);
2297 /* Subroutines of rs6000_legitimize_address and rs6000_legitimate_address. */
2300 constant_pool_expr_1 (op, have_sym, have_toc)
2305 switch (GET_CODE(op))
2308 if (RS6000_SYMBOL_REF_TLS_P (op))
2310 else if (CONSTANT_POOL_ADDRESS_P (op))
2312 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (op), Pmode))
2320 else if (! strcmp (XSTR (op, 0), toc_label_name))
2329 return (constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc)
2330 && constant_pool_expr_1 (XEXP (op, 1), have_sym, have_toc));
2332 return constant_pool_expr_1 (XEXP (op, 0), have_sym, have_toc);
2341 constant_pool_expr_p (op)
2346 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_sym;
2350 toc_relative_expr_p (op)
2355 return constant_pool_expr_1 (op, &have_sym, &have_toc) && have_toc;
2358 /* SPE offset addressing is limited to 5-bits worth of double words. */
2359 #define SPE_CONST_OFFSET_OK(x) (((x) & ~0xf8) == 0)
2362 legitimate_constant_pool_address_p (x)
2366 && GET_CODE (x) == PLUS
2367 && GET_CODE (XEXP (x, 0)) == REG
2368 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER)
2369 && constant_pool_expr_p (XEXP (x, 1)));
2373 legitimate_small_data_p (mode, x)
2374 enum machine_mode mode;
2377 return (DEFAULT_ABI == ABI_V4
2378 && !flag_pic && !TARGET_TOC
2379 && (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST)
2380 && small_data_operand (x, mode));
2384 legitimate_offset_address_p (mode, x, strict)
2385 enum machine_mode mode;
2389 unsigned HOST_WIDE_INT offset, extra;
2391 if (GET_CODE (x) != PLUS)
2393 if (GET_CODE (XEXP (x, 0)) != REG)
2395 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2397 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
2400 offset = INTVAL (XEXP (x, 1));
2408 /* AltiVec vector modes. Only reg+reg addressing is valid here,
2409 which leaves the only valid constant offset of zero, which by
2410 canonicalization rules is also invalid. */
2417 /* SPE vector modes. */
2418 return SPE_CONST_OFFSET_OK (offset);
2424 else if (offset & 3)
2432 else if (offset & 3)
2442 return (offset + extra >= offset) && (offset + extra + 0x8000 < 0x10000);
2446 legitimate_indexed_address_p (x, strict)
2452 if (GET_CODE (x) != PLUS)
2457 if (!REG_P (op0) || !REG_P (op1))
2460 return ((INT_REG_OK_FOR_BASE_P (op0, strict)
2461 && INT_REG_OK_FOR_INDEX_P (op1, strict))
2462 || (INT_REG_OK_FOR_BASE_P (op1, strict)
2463 && INT_REG_OK_FOR_INDEX_P (op0, strict)));
2467 legitimate_indirect_address_p (x, strict)
2471 return GET_CODE (x) == REG && INT_REG_OK_FOR_BASE_P (x, strict);
2475 legitimate_lo_sum_address_p (mode, x, strict)
2476 enum machine_mode mode;
2480 if (GET_CODE (x) != LO_SUM)
2482 if (GET_CODE (XEXP (x, 0)) != REG)
2484 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
2490 if (DEFAULT_ABI != ABI_AIX && flag_pic)
2494 if (GET_MODE_NUNITS (mode) != 1)
2496 if (GET_MODE_BITSIZE (mode) > 32
2497 && !(TARGET_HARD_FLOAT && TARGET_FPRS && mode == DFmode))
2500 return CONSTANT_P (x);
2507 /* Try machine-dependent ways of modifying an illegitimate address
2508 to be legitimate. If we find one, return the new, valid address.
2509 This is used from only one place: `memory_address' in explow.c.
2511 OLDX is the address as it was before break_out_memory_refs was
2512 called. In some cases it is useful to look at this to decide what
2515 MODE is passed so that this function can use GO_IF_LEGITIMATE_ADDRESS.
2517 It is always safe for this function to do nothing. It exists to
2518 recognize opportunities to optimize the output.
2520 On RS/6000, first check for the sum of a register with a constant
2521 integer that is out of range. If so, generate code to add the
2522 constant with the low-order 16 bits masked to the register and force
2523 this result into another register (this can be done with `cau').
2524 Then generate an address of REG+(CONST&0xffff), allowing for the
2525 possibility of bit 16 being a one.
2527 Then check for the sum of a register and something not constant, try to
2528 load the other things into a register and return the sum. */
2531 rs6000_legitimize_address (x, oldx, mode)
2533 rtx oldx ATTRIBUTE_UNUSED;
2534 enum machine_mode mode;
2536 if (GET_CODE (x) == SYMBOL_REF)
2538 enum tls_model model = SYMBOL_REF_TLS_MODEL (x);
2540 return rs6000_legitimize_tls_address (x, model);
2543 if (GET_CODE (x) == PLUS
2544 && GET_CODE (XEXP (x, 0)) == REG
2545 && GET_CODE (XEXP (x, 1)) == CONST_INT
2546 && (unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1)) + 0x8000) >= 0x10000)
2548 HOST_WIDE_INT high_int, low_int;
2550 low_int = ((INTVAL (XEXP (x, 1)) & 0xffff) ^ 0x8000) - 0x8000;
2551 high_int = INTVAL (XEXP (x, 1)) - low_int;
2552 sum = force_operand (gen_rtx_PLUS (Pmode, XEXP (x, 0),
2553 GEN_INT (high_int)), 0);
2554 return gen_rtx_PLUS (Pmode, sum, GEN_INT (low_int));
2556 else if (GET_CODE (x) == PLUS
2557 && GET_CODE (XEXP (x, 0)) == REG
2558 && GET_CODE (XEXP (x, 1)) != CONST_INT
2559 && GET_MODE_NUNITS (mode) == 1
2560 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
2562 || (mode != DFmode && mode != TFmode))
2563 && (TARGET_POWERPC64 || mode != DImode)
2566 return gen_rtx_PLUS (Pmode, XEXP (x, 0),
2567 force_reg (Pmode, force_operand (XEXP (x, 1), 0)));
2569 else if (ALTIVEC_VECTOR_MODE (mode))
2573 /* Make sure both operands are registers. */
2574 if (GET_CODE (x) == PLUS)
2575 return gen_rtx_PLUS (Pmode, force_reg (Pmode, XEXP (x, 0)),
2576 force_reg (Pmode, XEXP (x, 1)));
2578 reg = force_reg (Pmode, x);
2581 else if (SPE_VECTOR_MODE (mode))
2583 /* We accept [reg + reg] and [reg + OFFSET]. */
2585 if (GET_CODE (x) == PLUS)
2587 rtx op1 = XEXP (x, 0);
2588 rtx op2 = XEXP (x, 1);
2590 op1 = force_reg (Pmode, op1);
2592 if (GET_CODE (op2) != REG
2593 && (GET_CODE (op2) != CONST_INT
2594 || !SPE_CONST_OFFSET_OK (INTVAL (op2))))
2595 op2 = force_reg (Pmode, op2);
2597 return gen_rtx_PLUS (Pmode, op1, op2);
2600 return force_reg (Pmode, x);
2606 && GET_CODE (x) != CONST_INT
2607 && GET_CODE (x) != CONST_DOUBLE
2609 && GET_MODE_NUNITS (mode) == 1
2610 && (GET_MODE_BITSIZE (mode) <= 32
2611 || ((TARGET_HARD_FLOAT && TARGET_FPRS) && mode == DFmode)))
2613 rtx reg = gen_reg_rtx (Pmode);
2614 emit_insn (gen_elf_high (reg, (x)));
2615 return gen_rtx_LO_SUM (Pmode, reg, (x));
2617 else if (TARGET_MACHO && TARGET_32BIT && TARGET_NO_TOC
2620 && ! MACHO_DYNAMIC_NO_PIC_P
2622 && GET_CODE (x) != CONST_INT
2623 && GET_CODE (x) != CONST_DOUBLE
2625 && ((TARGET_HARD_FLOAT && TARGET_FPRS) || mode != DFmode)
2629 rtx reg = gen_reg_rtx (Pmode);
2630 emit_insn (gen_macho_high (reg, (x)));
2631 return gen_rtx_LO_SUM (Pmode, reg, (x));
2634 && constant_pool_expr_p (x)
2635 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
2637 return create_TOC_reference (x);
2643 /* Construct the SYMBOL_REF for the tls_get_addr function. */
2645 static GTY(()) rtx rs6000_tls_symbol;
2647 rs6000_tls_get_addr ()
2649 if (!rs6000_tls_symbol)
2650 rs6000_tls_symbol = init_one_libfunc ("__tls_get_addr");
2652 return rs6000_tls_symbol;
2655 /* Construct the SYMBOL_REF for TLS GOT references. */
2657 static GTY(()) rtx rs6000_got_symbol;
2661 if (!rs6000_got_symbol)
2663 rs6000_got_symbol = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
2664 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_LOCAL;
2665 SYMBOL_REF_FLAGS (rs6000_got_symbol) |= SYMBOL_FLAG_EXTERNAL;
2668 return rs6000_got_symbol;
2671 /* ADDR contains a thread-local SYMBOL_REF. Generate code to compute
2672 this (thread-local) address. */
2675 rs6000_legitimize_tls_address (addr, model)
2677 enum tls_model model;
2681 dest = gen_reg_rtx (Pmode);
2682 if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 16)
2688 tlsreg = gen_rtx_REG (Pmode, 13);
2689 insn = gen_tls_tprel_64 (dest, tlsreg, addr);
2693 tlsreg = gen_rtx_REG (Pmode, 2);
2694 insn = gen_tls_tprel_32 (dest, tlsreg, addr);
2698 else if (model == TLS_MODEL_LOCAL_EXEC && rs6000_tls_size == 32)
2702 tmp = gen_reg_rtx (Pmode);
2705 tlsreg = gen_rtx_REG (Pmode, 13);
2706 insn = gen_tls_tprel_ha_64 (tmp, tlsreg, addr);
2710 tlsreg = gen_rtx_REG (Pmode, 2);
2711 insn = gen_tls_tprel_ha_32 (tmp, tlsreg, addr);
2715 insn = gen_tls_tprel_lo_64 (dest, tmp, addr);
2717 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
2722 rtx r3, got, tga, tmp1, tmp2, eqv;
2725 got = gen_rtx_REG (Pmode, TOC_REGISTER);
2729 got = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
2732 rtx gsym = rs6000_got_sym ();
2733 got = gen_reg_rtx (Pmode);
2735 rs6000_emit_move (got, gsym, Pmode);
2739 static int tls_got_labelno = 0;
2740 rtx tempLR, lab, tmp3, mem;
2743 ASM_GENERATE_INTERNAL_LABEL (buf, "LTLS", tls_got_labelno++);
2744 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
2745 tempLR = gen_reg_rtx (Pmode);
2746 tmp1 = gen_reg_rtx (Pmode);
2747 tmp2 = gen_reg_rtx (Pmode);
2748 tmp3 = gen_reg_rtx (Pmode);
2749 mem = gen_rtx_MEM (Pmode, tmp1);
2750 RTX_UNCHANGING_P (mem) = 1;
2752 first = emit_insn (gen_load_toc_v4_PIC_1b (tempLR, lab,
2754 emit_move_insn (tmp1, tempLR);
2755 emit_move_insn (tmp2, mem);
2756 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2));
2757 last = emit_move_insn (got, tmp3);
2758 REG_NOTES (last) = gen_rtx_EXPR_LIST (REG_EQUAL, gsym,
2760 REG_NOTES (first) = gen_rtx_INSN_LIST (REG_LIBCALL, last,
2762 REG_NOTES (last) = gen_rtx_INSN_LIST (REG_RETVAL, first,
2768 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
2770 r3 = gen_rtx_REG (Pmode, 3);
2772 insn = gen_tls_gd_64 (r3, got, addr);
2774 insn = gen_tls_gd_32 (r3, got, addr);
2777 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2778 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2779 insn = emit_call_insn (insn);
2780 CONST_OR_PURE_CALL_P (insn) = 1;
2781 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2782 insn = get_insns ();
2784 emit_libcall_block (insn, dest, r3, addr);
2786 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
2788 r3 = gen_rtx_REG (Pmode, 3);
2790 insn = gen_tls_ld_64 (r3, got);
2792 insn = gen_tls_ld_32 (r3, got);
2795 tga = gen_rtx_MEM (Pmode, rs6000_tls_get_addr ());
2796 insn = gen_call_value (r3, tga, const0_rtx, const0_rtx);
2797 insn = emit_call_insn (insn);
2798 CONST_OR_PURE_CALL_P (insn) = 1;
2799 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
2800 insn = get_insns ();
2802 tmp1 = gen_reg_rtx (Pmode);
2803 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
2805 emit_libcall_block (insn, tmp1, r3, eqv);
2806 if (rs6000_tls_size == 16)
2809 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
2811 insn = gen_tls_dtprel_32 (dest, tmp1, addr);
2813 else if (rs6000_tls_size == 32)
2815 tmp2 = gen_reg_rtx (Pmode);
2817 insn = gen_tls_dtprel_ha_64 (tmp2, tmp1, addr);
2819 insn = gen_tls_dtprel_ha_32 (tmp2, tmp1, addr);
2822 insn = gen_tls_dtprel_lo_64 (dest, tmp2, addr);
2824 insn = gen_tls_dtprel_lo_32 (dest, tmp2, addr);
2828 tmp2 = gen_reg_rtx (Pmode);
2830 insn = gen_tls_got_dtprel_64 (tmp2, got, addr);
2832 insn = gen_tls_got_dtprel_32 (tmp2, got, addr);
2834 insn = gen_rtx_SET (Pmode, dest,
2835 gen_rtx_PLUS (Pmode, tmp2, tmp1));
2841 /* IE, or 64 bit offset LE. */
2842 tmp2 = gen_reg_rtx (Pmode);
2844 insn = gen_tls_got_tprel_64 (tmp2, got, addr);
2846 insn = gen_tls_got_tprel_32 (tmp2, got, addr);
2849 insn = gen_tls_tls_64 (dest, tmp2, addr);
2851 insn = gen_tls_tls_32 (dest, tmp2, addr);
2859 /* Return 1 if X is a SYMBOL_REF for a TLS symbol. This is used in
2860 instruction definitions. */
2863 rs6000_tls_symbol_ref (x, mode)
2865 enum machine_mode mode ATTRIBUTE_UNUSED;
2867 return RS6000_SYMBOL_REF_TLS_P (x);
2870 /* Return 1 if X contains a thread-local symbol. */
2873 rs6000_tls_referenced_p (x)
2876 return for_each_rtx (&x, &rs6000_tls_symbol_ref_1, 0);
2879 /* Return 1 if *X is a thread-local symbol. This is the same as
2880 rs6000_tls_symbol_ref except for the type of the unused argument. */
2883 rs6000_tls_symbol_ref_1 (x, data)
2885 void *data ATTRIBUTE_UNUSED;
2887 return RS6000_SYMBOL_REF_TLS_P (*x);
2890 /* The convention appears to be to define this wherever it is used.
2891 With legitimize_reload_address now defined here, REG_MODE_OK_FOR_BASE_P
2892 is now used here. */
2893 #ifndef REG_MODE_OK_FOR_BASE_P
2894 #define REG_MODE_OK_FOR_BASE_P(REGNO, MODE) REG_OK_FOR_BASE_P (REGNO)
2897 /* Our implementation of LEGITIMIZE_RELOAD_ADDRESS. Returns a value to
2898 replace the input X, or the original X if no replacement is called for.
2899 The output parameter *WIN is 1 if the calling macro should goto WIN,
2902 For RS/6000, we wish to handle large displacements off a base
2903 register by splitting the addend across an addiu/addis and the mem insn.
2904 This cuts number of extra insns needed from 3 to 1.
2906 On Darwin, we use this to generate code for floating point constants.
2907 A movsf_low is generated so we wind up with 2 instructions rather than 3.
2908 The Darwin code is inside #if TARGET_MACHO because only then is
2909 machopic_function_base_name() defined. */
2911 rs6000_legitimize_reload_address (x, mode, opnum, type, ind_levels, win)
2913 enum machine_mode mode;
2916 int ind_levels ATTRIBUTE_UNUSED;
2919 /* We must recognize output that we have already generated ourselves. */
2920 if (GET_CODE (x) == PLUS
2921 && GET_CODE (XEXP (x, 0)) == PLUS
2922 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
2923 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2924 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2926 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2927 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2928 opnum, (enum reload_type)type);
2934 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
2935 && GET_CODE (x) == LO_SUM
2936 && GET_CODE (XEXP (x, 0)) == PLUS
2937 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
2938 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
2939 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 0)) == CONST
2940 && XEXP (XEXP (XEXP (x, 0), 1), 0) == XEXP (x, 1)
2941 && GET_CODE (XEXP (XEXP (x, 1), 0)) == MINUS
2942 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 0)) == SYMBOL_REF
2943 && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == SYMBOL_REF)
2945 /* Result of previous invocation of this function on Darwin
2946 floating point constant. */
2947 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2948 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
2949 opnum, (enum reload_type)type);
2954 if (GET_CODE (x) == PLUS
2955 && GET_CODE (XEXP (x, 0)) == REG
2956 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2957 && REG_MODE_OK_FOR_BASE_P (XEXP (x, 0), mode)
2958 && GET_CODE (XEXP (x, 1)) == CONST_INT
2959 && !SPE_VECTOR_MODE (mode)
2960 && !ALTIVEC_VECTOR_MODE (mode))
2962 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
2963 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
2965 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
2967 /* Check for 32-bit overflow. */
2968 if (high + low != val)
2974 /* Reload the high part into a base reg; leave the low part
2975 in the mem directly. */
2977 x = gen_rtx_PLUS (GET_MODE (x),
2978 gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
2982 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
2983 BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
2984 opnum, (enum reload_type)type);
2989 if (GET_CODE (x) == SYMBOL_REF
2990 && DEFAULT_ABI == ABI_DARWIN
2991 && !ALTIVEC_VECTOR_MODE (mode)
2994 /* Darwin load of floating point constant. */
2995 rtx offset = gen_rtx (CONST, Pmode,
2996 gen_rtx (MINUS, Pmode, x,
2997 gen_rtx (SYMBOL_REF, Pmode,
2998 machopic_function_base_name ())));
2999 x = gen_rtx (LO_SUM, GET_MODE (x),
3000 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
3001 gen_rtx (HIGH, Pmode, offset)), offset);
3002 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3003 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3004 opnum, (enum reload_type)type);
3008 if (GET_CODE (x) == SYMBOL_REF
3009 && DEFAULT_ABI == ABI_DARWIN
3010 && !ALTIVEC_VECTOR_MODE (mode)
3011 && MACHO_DYNAMIC_NO_PIC_P)
3013 /* Darwin load of floating point constant. */
3014 x = gen_rtx (LO_SUM, GET_MODE (x),
3015 gen_rtx (HIGH, Pmode, x), x);
3016 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
3017 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
3018 opnum, (enum reload_type)type);
3024 && constant_pool_expr_p (x)
3025 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
3027 (x) = create_TOC_reference (x);
3035 /* GO_IF_LEGITIMATE_ADDRESS recognizes an RTL expression
3036 that is a valid memory address for an instruction.
3037 The MODE argument is the machine mode for the MEM expression
3038 that wants to use this address.
3040 On the RS/6000, there are four valid address: a SYMBOL_REF that
3041 refers to a constant pool entry of an address (or the sum of it
3042 plus a constant), a short (16-bit signed) constant plus a register,
3043 the sum of two registers, or a register indirect, possibly with an
3044 auto-increment. For DFmode and DImode with a constant plus register,
3045 we must ensure that both words are addressable or PowerPC64 with offset
3048 For modes spanning multiple registers (DFmode in 32-bit GPRs,
3049 32-bit DImode, TImode), indexed addressing cannot be used because
3050 adjacent memory cells are accessed by adding word-sized offsets
3051 during assembly output. */
3053 rs6000_legitimate_address (mode, x, reg_ok_strict)
3054 enum machine_mode mode;
3058 if (RS6000_SYMBOL_REF_TLS_P (x))
3060 if (legitimate_indirect_address_p (x, reg_ok_strict))
3062 if ((GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
3063 && !ALTIVEC_VECTOR_MODE (mode)
3064 && !SPE_VECTOR_MODE (mode)
3066 && legitimate_indirect_address_p (XEXP (x, 0), reg_ok_strict))
3068 if (legitimate_small_data_p (mode, x))
3070 if (legitimate_constant_pool_address_p (x))
3072 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
3074 && GET_CODE (x) == PLUS
3075 && GET_CODE (XEXP (x, 0)) == REG
3076 && XEXP (x, 0) == virtual_stack_vars_rtx
3077 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3079 if (legitimate_offset_address_p (mode, x, reg_ok_strict))
3082 && ((TARGET_HARD_FLOAT && TARGET_FPRS)
3084 || (mode != DFmode && mode != TFmode))
3085 && (TARGET_POWERPC64 || mode != DImode)
3086 && legitimate_indexed_address_p (x, reg_ok_strict))
3088 if (legitimate_lo_sum_address_p (mode, x, reg_ok_strict))
3093 /* Go to LABEL if ADDR (a legitimate address expression)
3094 has an effect that depends on the machine mode it is used for.
3096 On the RS/6000 this is true of all integral offsets (since AltiVec
3097 modes don't allow them) or is a pre-increment or decrement.
3099 ??? Except that due to conceptual problems in offsettable_address_p
3100 we can't really report the problems of integral offsets. So leave
3101 this assuming that the adjustable offset must be valid for the
3102 sub-words of a TFmode operand, which is what we had before. */
3105 rs6000_mode_dependent_address (addr)
3108 switch (GET_CODE (addr))
3111 if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
3113 unsigned HOST_WIDE_INT val = INTVAL (XEXP (addr, 1));
3114 return val + 12 + 0x8000 >= 0x10000;
3123 return TARGET_UPDATE;
3132 /* Try to output insns to set TARGET equal to the constant C if it can
3133 be done in less than N insns. Do all computations in MODE.
3134 Returns the place where the output has been placed if it can be
3135 done and the insns have been emitted. If it would take more than N
3136 insns, zero is returned and no insns and emitted. */
3139 rs6000_emit_set_const (dest, mode, source, n)
3141 enum machine_mode mode;
3142 int n ATTRIBUTE_UNUSED;
3144 rtx result, insn, set;
3145 HOST_WIDE_INT c0, c1;
3147 if (mode == QImode || mode == HImode)
3150 dest = gen_reg_rtx (mode);
3151 emit_insn (gen_rtx_SET (VOIDmode, dest, source));
3154 else if (mode == SImode)
3156 result = no_new_pseudos ? dest : gen_reg_rtx (SImode);
3158 emit_insn (gen_rtx_SET (VOIDmode, result,
3159 GEN_INT (INTVAL (source)
3160 & (~ (HOST_WIDE_INT) 0xffff))));
3161 emit_insn (gen_rtx_SET (VOIDmode, dest,
3162 gen_rtx_IOR (SImode, result,
3163 GEN_INT (INTVAL (source) & 0xffff))));
3166 else if (mode == DImode)
3168 if (GET_CODE (source) == CONST_INT)
3170 c0 = INTVAL (source);
3173 else if (GET_CODE (source) == CONST_DOUBLE)
3175 #if HOST_BITS_PER_WIDE_INT >= 64
3176 c0 = CONST_DOUBLE_LOW (source);
3179 c0 = CONST_DOUBLE_LOW (source);
3180 c1 = CONST_DOUBLE_HIGH (source);
3186 result = rs6000_emit_set_long_const (dest, c0, c1);
3191 insn = get_last_insn ();
3192 set = single_set (insn);
3193 if (! CONSTANT_P (SET_SRC (set)))
3194 set_unique_reg_note (insn, REG_EQUAL, source);
3199 /* Having failed to find a 3 insn sequence in rs6000_emit_set_const,
3200 fall back to a straight forward decomposition. We do this to avoid
3201 exponential run times encountered when looking for longer sequences
3202 with rs6000_emit_set_const. */
3204 rs6000_emit_set_long_const (dest, c1, c2)
3206 HOST_WIDE_INT c1, c2;
3208 if (!TARGET_POWERPC64)
3210 rtx operand1, operand2;
3212 operand1 = operand_subword_force (dest, WORDS_BIG_ENDIAN == 0,
3214 operand2 = operand_subword_force (dest, WORDS_BIG_ENDIAN != 0,
3216 emit_move_insn (operand1, GEN_INT (c1));
3217 emit_move_insn (operand2, GEN_INT (c2));
3221 HOST_WIDE_INT ud1, ud2, ud3, ud4;
3224 ud2 = (c1 & 0xffff0000) >> 16;
3225 #if HOST_BITS_PER_WIDE_INT >= 64
3229 ud4 = (c2 & 0xffff0000) >> 16;
3231 if ((ud4 == 0xffff && ud3 == 0xffff && ud2 == 0xffff && (ud1 & 0x8000))
3232 || (ud4 == 0 && ud3 == 0 && ud2 == 0 && ! (ud1 & 0x8000)))
3235 emit_move_insn (dest, GEN_INT (((ud1 ^ 0x8000) - 0x8000)));
3237 emit_move_insn (dest, GEN_INT (ud1));
3240 else if ((ud4 == 0xffff && ud3 == 0xffff && (ud2 & 0x8000))
3241 || (ud4 == 0 && ud3 == 0 && ! (ud2 & 0x8000)))
3244 emit_move_insn (dest, GEN_INT (((ud2 << 16) ^ 0x80000000)
3247 emit_move_insn (dest, GEN_INT (ud2 << 16));
3249 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3251 else if ((ud4 == 0xffff && (ud3 & 0x8000))
3252 || (ud4 == 0 && ! (ud3 & 0x8000)))
3255 emit_move_insn (dest, GEN_INT (((ud3 << 16) ^ 0x80000000)
3258 emit_move_insn (dest, GEN_INT (ud3 << 16));
3261 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud2)));
3262 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (16)));
3264 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3269 emit_move_insn (dest, GEN_INT (((ud4 << 16) ^ 0x80000000)
3272 emit_move_insn (dest, GEN_INT (ud4 << 16));
3275 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud3)));
3277 emit_move_insn (dest, gen_rtx_ASHIFT (DImode, dest, GEN_INT (32)));
3279 emit_move_insn (dest, gen_rtx_IOR (DImode, dest,
3280 GEN_INT (ud2 << 16)));
3282 emit_move_insn (dest, gen_rtx_IOR (DImode, dest, GEN_INT (ud1)));
3288 /* Emit a move from SOURCE to DEST in mode MODE. */
3290 rs6000_emit_move (dest, source, mode)
3293 enum machine_mode mode;
3297 operands[1] = source;
3299 /* Sanity checks. Check that we get CONST_DOUBLE only when we should. */
3300 if (GET_CODE (operands[1]) == CONST_DOUBLE
3301 && ! FLOAT_MODE_P (mode)
3302 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3304 /* FIXME. This should never happen. */
3305 /* Since it seems that it does, do the safe thing and convert
3307 operands[1] = gen_int_mode (CONST_DOUBLE_LOW (operands[1]), mode);
3309 if (GET_CODE (operands[1]) == CONST_DOUBLE
3310 && ! FLOAT_MODE_P (mode)
3311 && ((CONST_DOUBLE_HIGH (operands[1]) == 0
3312 && CONST_DOUBLE_LOW (operands[1]) >= 0)
3313 || (CONST_DOUBLE_HIGH (operands[1]) == -1
3314 && CONST_DOUBLE_LOW (operands[1]) < 0)))
3317 /* Check if GCC is setting up a block move that will end up using FP
3318 registers as temporaries. We must make sure this is acceptable. */
3319 if (GET_CODE (operands[0]) == MEM
3320 && GET_CODE (operands[1]) == MEM
3322 && (SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[0]))
3323 || SLOW_UNALIGNED_ACCESS (DImode, MEM_ALIGN (operands[1])))
3324 && ! (SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[0]) > 32
3325 ? 32 : MEM_ALIGN (operands[0])))
3326 || SLOW_UNALIGNED_ACCESS (SImode, (MEM_ALIGN (operands[1]) > 32
3328 : MEM_ALIGN (operands[1]))))
3329 && ! MEM_VOLATILE_P (operands [0])
3330 && ! MEM_VOLATILE_P (operands [1]))
3332 emit_move_insn (adjust_address (operands[0], SImode, 0),
3333 adjust_address (operands[1], SImode, 0));
3334 emit_move_insn (adjust_address (operands[0], SImode, 4),
3335 adjust_address (operands[1], SImode, 4));
3339 if (!no_new_pseudos)
3341 if (GET_CODE (operands[1]) == MEM && optimize > 0
3342 && (mode == QImode || mode == HImode || mode == SImode)
3343 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
3345 rtx reg = gen_reg_rtx (word_mode);
3347 emit_insn (gen_rtx_SET (word_mode, reg,
3348 gen_rtx_ZERO_EXTEND (word_mode,
3350 operands[1] = gen_lowpart (mode, reg);
3352 if (GET_CODE (operands[0]) != REG)
3353 operands[1] = force_reg (mode, operands[1]);
3356 if (mode == SFmode && ! TARGET_POWERPC
3357 && TARGET_HARD_FLOAT && TARGET_FPRS
3358 && GET_CODE (operands[0]) == MEM)
3362 if (reload_in_progress || reload_completed)
3363 regnum = true_regnum (operands[1]);
3364 else if (GET_CODE (operands[1]) == REG)
3365 regnum = REGNO (operands[1]);
3369 /* If operands[1] is a register, on POWER it may have
3370 double-precision data in it, so truncate it to single
3372 if (FP_REGNO_P (regnum) || regnum >= FIRST_PSEUDO_REGISTER)
3375 newreg = (no_new_pseudos ? operands[1] : gen_reg_rtx (mode));
3376 emit_insn (gen_aux_truncdfsf2 (newreg, operands[1]));
3377 operands[1] = newreg;
3381 /* Recognize the case where operand[1] is a reference to thread-local
3382 data and load its address to a register. */
3383 if (GET_CODE (operands[1]) == SYMBOL_REF)
3385 enum tls_model model = SYMBOL_REF_TLS_MODEL (operands[1]);
3387 operands[1] = rs6000_legitimize_tls_address (operands[1], model);
3390 /* Handle the case where reload calls us with an invalid address. */
3391 if (reload_in_progress && mode == Pmode
3392 && (! general_operand (operands[1], mode)
3393 || ! nonimmediate_operand (operands[0], mode)))
3396 /* Handle the case of CONSTANT_P_RTX. */
3397 if (GET_CODE (operands[1]) == CONSTANT_P_RTX)
3400 /* FIXME: In the long term, this switch statement should go away
3401 and be replaced by a sequence of tests based on things like
3407 if (CONSTANT_P (operands[1])
3408 && GET_CODE (operands[1]) != CONST_INT)
3409 operands[1] = force_const_mem (mode, operands[1]);
3415 if (CONSTANT_P (operands[1])
3416 && ! easy_fp_constant (operands[1], mode))
3417 operands[1] = force_const_mem (mode, operands[1]);
3428 if (CONSTANT_P (operands[1])
3429 && !easy_vector_constant (operands[1], mode))
3430 operands[1] = force_const_mem (mode, operands[1]);
3435 /* Use default pattern for address of ELF small data */
3438 && DEFAULT_ABI == ABI_V4
3439 && (GET_CODE (operands[1]) == SYMBOL_REF
3440 || GET_CODE (operands[1]) == CONST)
3441 && small_data_operand (operands[1], mode))
3443 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3447 if (DEFAULT_ABI == ABI_V4
3448 && mode == Pmode && mode == SImode
3449 && flag_pic == 1 && got_operand (operands[1], mode))
3451 emit_insn (gen_movsi_got (operands[0], operands[1]));
3455 if ((TARGET_ELF || DEFAULT_ABI == ABI_DARWIN)
3459 && CONSTANT_P (operands[1])
3460 && GET_CODE (operands[1]) != HIGH
3461 && GET_CODE (operands[1]) != CONST_INT)
3463 rtx target = (no_new_pseudos ? operands[0] : gen_reg_rtx (mode));
3465 /* If this is a function address on -mcall-aixdesc,
3466 convert it to the address of the descriptor. */
3467 if (DEFAULT_ABI == ABI_AIX
3468 && GET_CODE (operands[1]) == SYMBOL_REF
3469 && XSTR (operands[1], 0)[0] == '.')
3471 const char *name = XSTR (operands[1], 0);
3473 while (*name == '.')
3475 new_ref = gen_rtx_SYMBOL_REF (Pmode, name);
3476 CONSTANT_POOL_ADDRESS_P (new_ref)
3477 = CONSTANT_POOL_ADDRESS_P (operands[1]);
3478 SYMBOL_REF_FLAGS (new_ref) = SYMBOL_REF_FLAGS (operands[1]);
3479 SYMBOL_REF_USED (new_ref) = SYMBOL_REF_USED (operands[1]);
3480 SYMBOL_REF_DECL (new_ref) = SYMBOL_REF_DECL (operands[1]);
3481 operands[1] = new_ref;
3484 if (DEFAULT_ABI == ABI_DARWIN)
3487 if (MACHO_DYNAMIC_NO_PIC_P)
3489 /* Take care of any required data indirection. */
3490 operands[1] = rs6000_machopic_legitimize_pic_address (
3491 operands[1], mode, operands[0]);
3492 if (operands[0] != operands[1])
3493 emit_insn (gen_rtx_SET (VOIDmode,
3494 operands[0], operands[1]));
3498 emit_insn (gen_macho_high (target, operands[1]));
3499 emit_insn (gen_macho_low (operands[0], target, operands[1]));
3503 emit_insn (gen_elf_high (target, operands[1]));
3504 emit_insn (gen_elf_low (operands[0], target, operands[1]));
3508 /* If this is a SYMBOL_REF that refers to a constant pool entry,
3509 and we have put it in the TOC, we just need to make a TOC-relative
3512 && GET_CODE (operands[1]) == SYMBOL_REF
3513 && constant_pool_expr_p (operands[1])
3514 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
3515 get_pool_mode (operands[1])))
3517 operands[1] = create_TOC_reference (operands[1]);
3519 else if (mode == Pmode
3520 && CONSTANT_P (operands[1])
3521 && ((GET_CODE (operands[1]) != CONST_INT
3522 && ! easy_fp_constant (operands[1], mode))
3523 || (GET_CODE (operands[1]) == CONST_INT
3524 && num_insns_constant (operands[1], mode) > 2)
3525 || (GET_CODE (operands[0]) == REG
3526 && FP_REGNO_P (REGNO (operands[0]))))
3527 && GET_CODE (operands[1]) != HIGH
3528 && ! legitimate_constant_pool_address_p (operands[1])
3529 && ! toc_relative_expr_p (operands[1]))
3531 /* Emit a USE operation so that the constant isn't deleted if
3532 expensive optimizations are turned on because nobody
3533 references it. This should only be done for operands that
3534 contain SYMBOL_REFs with CONSTANT_POOL_ADDRESS_P set.
3535 This should not be done for operands that contain LABEL_REFs.
3536 For now, we just handle the obvious case. */
3537 if (GET_CODE (operands[1]) != LABEL_REF)
3538 emit_insn (gen_rtx_USE (VOIDmode, operands[1]));
3541 /* Darwin uses a special PIC legitimizer. */
3542 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
3545 rs6000_machopic_legitimize_pic_address (operands[1], mode,
3547 if (operands[0] != operands[1])
3548 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3553 /* If we are to limit the number of things we put in the TOC and
3554 this is a symbol plus a constant we can add in one insn,
3555 just put the symbol in the TOC and add the constant. Don't do
3556 this if reload is in progress. */
3557 if (GET_CODE (operands[1]) == CONST
3558 && TARGET_NO_SUM_IN_TOC && ! reload_in_progress
3559 && GET_CODE (XEXP (operands[1], 0)) == PLUS
3560 && add_operand (XEXP (XEXP (operands[1], 0), 1), mode)
3561 && (GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == LABEL_REF
3562 || GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == SYMBOL_REF)
3563 && ! side_effects_p (operands[0]))
3566 force_const_mem (mode, XEXP (XEXP (operands[1], 0), 0));
3567 rtx other = XEXP (XEXP (operands[1], 0), 1);
3569 sym = force_reg (mode, sym);
3571 emit_insn (gen_addsi3 (operands[0], sym, other));
3573 emit_insn (gen_adddi3 (operands[0], sym, other));
3577 operands[1] = force_const_mem (mode, operands[1]);
3580 && constant_pool_expr_p (XEXP (operands[1], 0))
3581 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
3582 get_pool_constant (XEXP (operands[1], 0)),
3583 get_pool_mode (XEXP (operands[1], 0))))
3586 = gen_rtx_MEM (mode,
3587 create_TOC_reference (XEXP (operands[1], 0)));
3588 set_mem_alias_set (operands[1], get_TOC_alias_set ());
3589 RTX_UNCHANGING_P (operands[1]) = 1;
3595 if (GET_CODE (operands[0]) == MEM
3596 && GET_CODE (XEXP (operands[0], 0)) != REG
3597 && ! reload_in_progress)
3599 = replace_equiv_address (operands[0],
3600 copy_addr_to_reg (XEXP (operands[0], 0)));
3602 if (GET_CODE (operands[1]) == MEM
3603 && GET_CODE (XEXP (operands[1], 0)) != REG
3604 && ! reload_in_progress)
3606 = replace_equiv_address (operands[1],
3607 copy_addr_to_reg (XEXP (operands[1], 0)));
3610 emit_insn (gen_rtx_PARALLEL (VOIDmode,
3612 gen_rtx_SET (VOIDmode,
3613 operands[0], operands[1]),
3614 gen_rtx_CLOBBER (VOIDmode,
3615 gen_rtx_SCRATCH (SImode)))));
3624 /* Above, we may have called force_const_mem which may have returned
3625 an invalid address. If we can, fix this up; otherwise, reload will
3626 have to deal with it. */
3627 if (GET_CODE (operands[1]) == MEM && ! reload_in_progress)
3628 operands[1] = validize_mem (operands[1]);
3631 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
3634 /* Initialize a variable CUM of type CUMULATIVE_ARGS
3635 for a call to a function whose data type is FNTYPE.
3636 For a library call, FNTYPE is 0.
3638 For incoming args we set the number of arguments in the prototype large
3639 so we never return a PARALLEL. */
3642 init_cumulative_args (cum, fntype, libname, incoming, libcall)
3643 CUMULATIVE_ARGS *cum;
3645 rtx libname ATTRIBUTE_UNUSED;
3649 static CUMULATIVE_ARGS zero_cumulative;
3651 *cum = zero_cumulative;
3653 cum->fregno = FP_ARG_MIN_REG;
3654 cum->vregno = ALTIVEC_ARG_MIN_REG;
3655 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype));
3656 cum->call_cookie = libcall ? CALL_LIBCALL : CALL_NORMAL;
3657 cum->sysv_gregno = GP_ARG_MIN_REG;
3658 cum->stdarg = fntype
3659 && (TYPE_ARG_TYPES (fntype) != 0
3660 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3661 != void_type_node));
3664 cum->nargs_prototype = 1000; /* don't return a PARALLEL */
3666 else if (cum->prototype)
3667 cum->nargs_prototype = (list_length (TYPE_ARG_TYPES (fntype)) - 1
3668 + (TYPE_MODE (TREE_TYPE (fntype)) == BLKmode
3669 || RETURN_IN_MEMORY (TREE_TYPE (fntype))));
3672 cum->nargs_prototype = 0;
3674 cum->orig_nargs = cum->nargs_prototype;
3676 /* Check for a longcall attribute. */
3678 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype))
3679 && !lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
3680 cum->call_cookie = CALL_LONG;
3682 if (TARGET_DEBUG_ARG)
3684 fprintf (stderr, "\ninit_cumulative_args:");
3687 tree ret_type = TREE_TYPE (fntype);
3688 fprintf (stderr, " ret code = %s,",
3689 tree_code_name[ (int)TREE_CODE (ret_type) ]);
3692 if (cum->call_cookie & CALL_LONG)
3693 fprintf (stderr, " longcall,");
3695 fprintf (stderr, " proto = %d, nargs = %d\n",
3696 cum->prototype, cum->nargs_prototype);
3700 /* If defined, a C expression which determines whether, and in which
3701 direction, to pad out an argument with extra space. The value
3702 should be of type `enum direction': either `upward' to pad above
3703 the argument, `downward' to pad below, or `none' to inhibit
3706 For the AIX ABI structs are always stored left shifted in their
3710 function_arg_padding (mode, type)
3711 enum machine_mode mode;
3714 if (type != 0 && AGGREGATE_TYPE_P (type))
3717 /* This is the default definition. */
3718 return (! BYTES_BIG_ENDIAN
3721 ? (type && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
3722 && int_size_in_bytes (type) < (PARM_BOUNDARY / BITS_PER_UNIT))
3723 : GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
3724 ? downward : upward));
3727 /* If defined, a C expression that gives the alignment boundary, in bits,
3728 of an argument with the specified mode and type. If it is not defined,
3729 PARM_BOUNDARY is used for all arguments.
3731 V.4 wants long longs to be double word aligned. */
3734 function_arg_boundary (mode, type)
3735 enum machine_mode mode;
3736 tree type ATTRIBUTE_UNUSED;
3738 if (DEFAULT_ABI == ABI_V4 && (mode == DImode || mode == DFmode))
3740 else if (SPE_VECTOR_MODE (mode))
3742 else if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3745 return PARM_BOUNDARY;
3748 /* Update the data in CUM to advance over an argument
3749 of mode MODE and data type TYPE.
3750 (TYPE is null for libcalls where that information may not be available.) */
3753 function_arg_advance (cum, mode, type, named)
3754 CUMULATIVE_ARGS *cum;
3755 enum machine_mode mode;
3759 cum->nargs_prototype--;
3761 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3763 if (cum->vregno <= ALTIVEC_ARG_MAX_REG && cum->nargs_prototype >= 0)
3766 cum->words += RS6000_ARG_SIZE (mode, type);
3768 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
3770 && cum->sysv_gregno <= GP_ARG_MAX_REG)
3772 else if (DEFAULT_ABI == ABI_V4)
3774 if (TARGET_HARD_FLOAT && TARGET_FPRS
3775 && (mode == SFmode || mode == DFmode))
3777 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3782 cum->words += cum->words & 1;
3783 cum->words += RS6000_ARG_SIZE (mode, type);
3789 int gregno = cum->sysv_gregno;
3791 /* Aggregates and IEEE quad get passed by reference. */
3792 if ((type && AGGREGATE_TYPE_P (type))
3796 n_words = RS6000_ARG_SIZE (mode, type);
3798 /* Long long and SPE vectors are put in odd registers. */
3799 if (n_words == 2 && (gregno & 1) == 0)
3802 /* Long long and SPE vectors are not split between registers
3804 if (gregno + n_words - 1 > GP_ARG_MAX_REG)
3806 /* Long long is aligned on the stack. */
3808 cum->words += cum->words & 1;
3809 cum->words += n_words;
3812 /* Note: continuing to accumulate gregno past when we've started
3813 spilling to the stack indicates the fact that we've started
3814 spilling to the stack to expand_builtin_saveregs. */
3815 cum->sysv_gregno = gregno + n_words;
3818 if (TARGET_DEBUG_ARG)
3820 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3821 cum->words, cum->fregno);
3822 fprintf (stderr, "gregno = %2d, nargs = %4d, proto = %d, ",
3823 cum->sysv_gregno, cum->nargs_prototype, cum->prototype);
3824 fprintf (stderr, "mode = %4s, named = %d\n",
3825 GET_MODE_NAME (mode), named);
3830 int align = (TARGET_32BIT && (cum->words & 1) != 0
3831 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3833 cum->words += align + RS6000_ARG_SIZE (mode, type);
3835 if (GET_MODE_CLASS (mode) == MODE_FLOAT
3836 && TARGET_HARD_FLOAT && TARGET_FPRS)
3837 cum->fregno += (mode == TFmode ? 2 : 1);
3839 if (TARGET_DEBUG_ARG)
3841 fprintf (stderr, "function_adv: words = %2d, fregno = %2d, ",
3842 cum->words, cum->fregno);
3843 fprintf (stderr, "nargs = %4d, proto = %d, mode = %4s, ",
3844 cum->nargs_prototype, cum->prototype, GET_MODE_NAME (mode));
3845 fprintf (stderr, "named = %d, align = %d\n", named, align);
3850 /* Determine where to put a SIMD argument on the SPE. */
3852 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type)
3856 int gregno = cum->sysv_gregno;
3857 int n_words = RS6000_ARG_SIZE (mode, type);
3859 /* SPE vectors are put in odd registers. */
3860 if (n_words == 2 && (gregno & 1) == 0)
3863 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3866 enum machine_mode m = SImode;
3868 r1 = gen_rtx_REG (m, gregno);
3869 r1 = gen_rtx_EXPR_LIST (m, r1, const0_rtx);
3870 r2 = gen_rtx_REG (m, gregno + 1);
3871 r2 = gen_rtx_EXPR_LIST (m, r2, GEN_INT (4));
3872 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
3879 if (cum->sysv_gregno <= GP_ARG_MAX_REG)
3880 return gen_rtx_REG (mode, cum->sysv_gregno);
3886 /* Determine where to put an argument to a function.
3887 Value is zero to push the argument on the stack,
3888 or a hard register in which to store the argument.
3890 MODE is the argument's machine mode.
3891 TYPE is the data type of the argument (as a tree).
3892 This is null for libcalls where that information may
3894 CUM is a variable of type CUMULATIVE_ARGS which gives info about
3895 the preceding args and about the function being called.
3896 NAMED is nonzero if this argument is a named parameter
3897 (otherwise it is an extra parameter matching an ellipsis).
3899 On RS/6000 the first eight words of non-FP are normally in registers
3900 and the rest are pushed. Under AIX, the first 13 FP args are in registers.
3901 Under V.4, the first 8 FP args are in registers.
3903 If this is floating-point and no prototype is specified, we use
3904 both an FP and integer register (or possibly FP reg and stack). Library
3905 functions (when CALL_LIBCALL is set) always have the proper types for args,
3906 so we can pass the FP value just in one register. emit_library_function
3907 doesn't support PARALLEL anyway. */
3910 function_arg (cum, mode, type, named)
3911 CUMULATIVE_ARGS *cum;
3912 enum machine_mode mode;
3916 enum rs6000_abi abi = DEFAULT_ABI;
3918 /* Return a marker to indicate whether CR1 needs to set or clear the
3919 bit that V.4 uses to say fp args were passed in registers.
3920 Assume that we don't need the marker for software floating point,
3921 or compiler generated library calls. */
3922 if (mode == VOIDmode)
3925 && cum->nargs_prototype < 0
3926 && (cum->call_cookie & CALL_LIBCALL) == 0
3927 && (cum->prototype || TARGET_NO_PROTOTYPE))
3929 /* For the SPE, we need to crxor CR6 always. */
3931 return GEN_INT (cum->call_cookie | CALL_V4_SET_FP_ARGS);
3932 else if (TARGET_HARD_FLOAT && TARGET_FPRS)
3933 return GEN_INT (cum->call_cookie
3934 | ((cum->fregno == FP_ARG_MIN_REG)
3935 ? CALL_V4_SET_FP_ARGS
3936 : CALL_V4_CLEAR_FP_ARGS));
3939 return GEN_INT (cum->call_cookie);
3942 if (TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
3944 if (named && cum->vregno <= ALTIVEC_ARG_MAX_REG)
3945 return gen_rtx_REG (mode, cum->vregno);
3949 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode))
3950 return rs6000_spe_function_arg (cum, mode, type);
3951 else if (abi == ABI_V4)
3953 if (TARGET_HARD_FLOAT && TARGET_FPRS
3954 && (mode == SFmode || mode == DFmode))
3956 if (cum->fregno <= FP_ARG_V4_MAX_REG)
3957 return gen_rtx_REG (mode, cum->fregno);
3964 int gregno = cum->sysv_gregno;
3966 /* Aggregates and IEEE quad get passed by reference. */
3967 if ((type && AGGREGATE_TYPE_P (type))
3971 n_words = RS6000_ARG_SIZE (mode, type);
3973 /* Long long and SPE vectors are put in odd registers. */
3974 if (n_words == 2 && (gregno & 1) == 0)
3977 /* Long long do not split between registers and stack. */
3978 if (gregno + n_words - 1 <= GP_ARG_MAX_REG)
3979 return gen_rtx_REG (mode, gregno);
3986 int align = (TARGET_32BIT && (cum->words & 1) != 0
3987 && function_arg_boundary (mode, type) == 64) ? 1 : 0;
3988 int align_words = cum->words + align;
3990 if (type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3993 if (USE_FP_FOR_ARG_P (*cum, mode, type))
3996 || ((cum->nargs_prototype > 0)
3997 /* IBM AIX extended its linkage convention definition always
3998 to require FP args after register save area hole on the
4000 && (DEFAULT_ABI != ABI_AIX
4002 || (align_words < GP_ARG_NUM_REG))))
4003 return gen_rtx_REG (mode, cum->fregno);
4005 return gen_rtx_PARALLEL (mode,
4007 gen_rtx_EXPR_LIST (VOIDmode,
4008 ((align_words >= GP_ARG_NUM_REG)
4011 + RS6000_ARG_SIZE (mode, type)
4013 /* If this is partially on the stack, then
4014 we only include the portion actually
4015 in registers here. */
4016 ? gen_rtx_REG (SImode,
4017 GP_ARG_MIN_REG + align_words)
4018 : gen_rtx_REG (mode,
4019 GP_ARG_MIN_REG + align_words))),
4021 gen_rtx_EXPR_LIST (VOIDmode,
4022 gen_rtx_REG (mode, cum->fregno),
4025 else if (align_words < GP_ARG_NUM_REG)
4026 return gen_rtx_REG (mode, GP_ARG_MIN_REG + align_words);
4032 /* For an arg passed partly in registers and partly in memory,
4033 this is the number of registers used.
4034 For args passed entirely in registers or entirely in memory, zero. */
4037 function_arg_partial_nregs (cum, mode, type, named)
4038 CUMULATIVE_ARGS *cum;
4039 enum machine_mode mode;
4041 int named ATTRIBUTE_UNUSED;
4043 if (DEFAULT_ABI == ABI_V4)
4046 if (USE_FP_FOR_ARG_P (*cum, mode, type)
4047 || USE_ALTIVEC_FOR_ARG_P (*cum, mode, type))
4049 if (cum->nargs_prototype >= 0)
4053 if (cum->words < GP_ARG_NUM_REG
4054 && GP_ARG_NUM_REG < (cum->words + RS6000_ARG_SIZE (mode, type)))
4056 int ret = GP_ARG_NUM_REG - cum->words;
4057 if (ret && TARGET_DEBUG_ARG)
4058 fprintf (stderr, "function_arg_partial_nregs: %d\n", ret);
4066 /* A C expression that indicates when an argument must be passed by
4067 reference. If nonzero for an argument, a copy of that argument is
4068 made in memory and a pointer to the argument is passed instead of
4069 the argument itself. The pointer is passed in whatever way is
4070 appropriate for passing a pointer to that type.
4072 Under V.4, structures and unions are passed by reference.
4074 As an extension to all ABIs, variable sized types are passed by
4078 function_arg_pass_by_reference (cum, mode, type, named)
4079 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED;
4080 enum machine_mode mode ATTRIBUTE_UNUSED;
4082 int named ATTRIBUTE_UNUSED;
4084 if (DEFAULT_ABI == ABI_V4
4085 && ((type && AGGREGATE_TYPE_P (type))
4088 if (TARGET_DEBUG_ARG)
4089 fprintf (stderr, "function_arg_pass_by_reference: aggregate\n");
4093 return type && int_size_in_bytes (type) <= 0;
4096 /* Perform any needed actions needed for a function that is receiving a
4097 variable number of arguments.
4101 MODE and TYPE are the mode and type of the current parameter.
4103 PRETEND_SIZE is a variable that should be set to the amount of stack
4104 that must be pushed by the prolog to pretend that our caller pushed
4107 Normally, this macro will push all remaining incoming registers on the
4108 stack and set PRETEND_SIZE to the length of the registers pushed. */
4111 setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
4112 CUMULATIVE_ARGS *cum;
4113 enum machine_mode mode;
4115 int *pretend_size ATTRIBUTE_UNUSED;
4119 CUMULATIVE_ARGS next_cum;
4120 int reg_size = TARGET_32BIT ? 4 : 8;
4121 rtx save_area = NULL_RTX, mem;
4122 int first_reg_offset, set;
4126 fntype = TREE_TYPE (current_function_decl);
4127 stdarg_p = (TYPE_ARG_TYPES (fntype) != 0
4128 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4129 != void_type_node));
4131 /* For varargs, we do not want to skip the dummy va_dcl argument.
4132 For stdargs, we do want to skip the last named argument. */
4135 function_arg_advance (&next_cum, mode, type, 1);
4137 if (DEFAULT_ABI == ABI_V4)
4139 /* Indicate to allocate space on the stack for varargs save area. */
4140 cfun->machine->sysv_varargs_p = 1;
4142 save_area = plus_constant (virtual_stack_vars_rtx,
4143 - RS6000_VARARGS_SIZE);
4145 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
4149 first_reg_offset = next_cum.words;
4150 save_area = virtual_incoming_args_rtx;
4151 cfun->machine->sysv_varargs_p = 0;
4153 if (MUST_PASS_IN_STACK (mode, type))
4154 first_reg_offset += RS6000_ARG_SIZE (TYPE_MODE (type), type);
4157 set = get_varargs_alias_set ();
4158 if (! no_rtl && first_reg_offset < GP_ARG_NUM_REG)
4160 mem = gen_rtx_MEM (BLKmode,
4161 plus_constant (save_area,
4162 first_reg_offset * reg_size)),
4163 set_mem_alias_set (mem, set);
4164 set_mem_align (mem, BITS_PER_WORD);
4166 move_block_from_reg (GP_ARG_MIN_REG + first_reg_offset, mem,
4167 GP_ARG_NUM_REG - first_reg_offset);
4170 /* Save FP registers if needed. */
4171 if (DEFAULT_ABI == ABI_V4
4172 && TARGET_HARD_FLOAT && TARGET_FPRS
4174 && next_cum.fregno <= FP_ARG_V4_MAX_REG)
4176 int fregno = next_cum.fregno;
4177 rtx cr1 = gen_rtx_REG (CCmode, CR1_REGNO);
4178 rtx lab = gen_label_rtx ();
4179 int off = (GP_ARG_NUM_REG * reg_size) + ((fregno - FP_ARG_MIN_REG) * 8);
4181 emit_jump_insn (gen_rtx_SET (VOIDmode,
4183 gen_rtx_IF_THEN_ELSE (VOIDmode,
4184 gen_rtx_NE (VOIDmode, cr1,
4186 gen_rtx_LABEL_REF (VOIDmode, lab),
4189 while (fregno <= FP_ARG_V4_MAX_REG)
4191 mem = gen_rtx_MEM (DFmode, plus_constant (save_area, off));
4192 set_mem_alias_set (mem, set);
4193 emit_move_insn (mem, gen_rtx_REG (DFmode, fregno));
4202 /* Create the va_list data type. */
4205 rs6000_build_va_list ()
4207 tree f_gpr, f_fpr, f_res, f_ovf, f_sav, record, type_decl;
4209 /* For AIX, prefer 'char *' because that's what the system
4210 header files like. */
4211 if (DEFAULT_ABI != ABI_V4)
4212 return build_pointer_type (char_type_node);
4214 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
4215 type_decl = build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record);
4217 f_gpr = build_decl (FIELD_DECL, get_identifier ("gpr"),
4218 unsigned_char_type_node);
4219 f_fpr = build_decl (FIELD_DECL, get_identifier ("fpr"),
4220 unsigned_char_type_node);
4221 /* Give the two bytes of padding a name, so that -Wpadded won't warn on
4223 f_res = build_decl (FIELD_DECL, get_identifier ("reserved"),
4224 short_unsigned_type_node);
4225 f_ovf = build_decl (FIELD_DECL, get_identifier ("overflow_arg_area"),
4227 f_sav = build_decl (FIELD_DECL, get_identifier ("reg_save_area"),
4230 DECL_FIELD_CONTEXT (f_gpr) = record;
4231 DECL_FIELD_CONTEXT (f_fpr) = record;
4232 DECL_FIELD_CONTEXT (f_res) = record;
4233 DECL_FIELD_CONTEXT (f_ovf) = record;
4234 DECL_FIELD_CONTEXT (f_sav) = record;
4236 TREE_CHAIN (record) = type_decl;
4237 TYPE_NAME (record) = type_decl;
4238 TYPE_FIELDS (record) = f_gpr;
4239 TREE_CHAIN (f_gpr) = f_fpr;
4240 TREE_CHAIN (f_fpr) = f_res;
4241 TREE_CHAIN (f_res) = f_ovf;
4242 TREE_CHAIN (f_ovf) = f_sav;
4244 layout_type (record);
4246 /* The correct type is an array type of one element. */
4247 return build_array_type (record, build_index_type (size_zero_node));
4250 /* Implement va_start. */
4253 rs6000_va_start (valist, nextarg)
4257 HOST_WIDE_INT words, n_gpr, n_fpr;
4258 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4259 tree gpr, fpr, ovf, sav, t;
4261 /* Only SVR4 needs something special. */
4262 if (DEFAULT_ABI != ABI_V4)
4264 std_expand_builtin_va_start (valist, nextarg);
4268 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4269 f_fpr = TREE_CHAIN (f_gpr);
4270 f_res = TREE_CHAIN (f_fpr);
4271 f_ovf = TREE_CHAIN (f_res);
4272 f_sav = TREE_CHAIN (f_ovf);
4274 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4275 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4276 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4277 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4278 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4280 /* Count number of gp and fp argument registers used. */
4281 words = current_function_args_info.words;
4282 n_gpr = current_function_args_info.sysv_gregno - GP_ARG_MIN_REG;
4283 n_fpr = current_function_args_info.fregno - FP_ARG_MIN_REG;
4285 if (TARGET_DEBUG_ARG)
4286 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
4287 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
4288 words, n_gpr, n_fpr);
4290 t = build (MODIFY_EXPR, TREE_TYPE (gpr), gpr, build_int_2 (n_gpr, 0));
4291 TREE_SIDE_EFFECTS (t) = 1;
4292 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4294 t = build (MODIFY_EXPR, TREE_TYPE (fpr), fpr, build_int_2 (n_fpr, 0));
4295 TREE_SIDE_EFFECTS (t) = 1;
4296 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4298 /* Find the overflow area. */
4299 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
4301 t = build (PLUS_EXPR, TREE_TYPE (ovf), t,
4302 build_int_2 (words * UNITS_PER_WORD, 0));
4303 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4304 TREE_SIDE_EFFECTS (t) = 1;
4305 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4307 /* Find the register save area. */
4308 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
4309 t = build (PLUS_EXPR, TREE_TYPE (sav), t,
4310 build_int_2 (-RS6000_VARARGS_SIZE, -1));
4311 t = build (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
4312 TREE_SIDE_EFFECTS (t) = 1;
4313 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4316 /* Implement va_arg. */
4319 rs6000_va_arg (valist, type)
4322 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
4323 tree gpr, fpr, ovf, sav, reg, t, u;
4324 int indirect_p, size, rsize, n_reg, sav_ofs, sav_scale;
4325 rtx lab_false, lab_over, addr_rtx, r;
4327 if (DEFAULT_ABI != ABI_V4)
4329 /* Variable sized types are passed by reference. */
4330 if (int_size_in_bytes (type) <= 0)
4332 u = build_pointer_type (type);
4334 /* Args grow upward. */
4335 t = build (POSTINCREMENT_EXPR, TREE_TYPE (valist), valist,
4336 build_int_2 (POINTER_SIZE / BITS_PER_UNIT, 0));
4337 TREE_SIDE_EFFECTS (t) = 1;
4339 t = build1 (NOP_EXPR, build_pointer_type (u), t);
4340 TREE_SIDE_EFFECTS (t) = 1;
4342 t = build1 (INDIRECT_REF, u, t);
4343 TREE_SIDE_EFFECTS (t) = 1;
4345 return expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4348 return std_expand_builtin_va_arg (valist, type);
4351 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
4352 f_fpr = TREE_CHAIN (f_gpr);
4353 f_res = TREE_CHAIN (f_fpr);
4354 f_ovf = TREE_CHAIN (f_res);
4355 f_sav = TREE_CHAIN (f_ovf);
4357 valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
4358 gpr = build (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr);
4359 fpr = build (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr);
4360 ovf = build (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf);
4361 sav = build (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav);
4363 size = int_size_in_bytes (type);
4364 rsize = (size + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
4366 if (AGGREGATE_TYPE_P (type) || TYPE_MODE (type) == TFmode)
4368 /* Aggregates and long doubles are passed by reference. */
4374 size = UNITS_PER_WORD;
4377 else if (FLOAT_TYPE_P (type) && TARGET_HARD_FLOAT && TARGET_FPRS)
4379 /* FP args go in FP registers, if present. */
4388 /* Otherwise into GP registers. */
4396 /* Pull the value out of the saved registers ... */
4398 lab_false = gen_label_rtx ();
4399 lab_over = gen_label_rtx ();
4400 addr_rtx = gen_reg_rtx (Pmode);
4402 /* AltiVec vectors never go in registers. */
4403 if (!TARGET_ALTIVEC || TREE_CODE (type) != VECTOR_TYPE)
4405 TREE_THIS_VOLATILE (reg) = 1;
4406 emit_cmp_and_jump_insns
4407 (expand_expr (reg, NULL_RTX, QImode, EXPAND_NORMAL),
4408 GEN_INT (8 - n_reg + 1), GE, const1_rtx, QImode, 1,
4411 /* Long long is aligned in the registers. */
4414 u = build (BIT_AND_EXPR, TREE_TYPE (reg), reg,
4415 build_int_2 (n_reg - 1, 0));
4416 u = build (PLUS_EXPR, TREE_TYPE (reg), reg, u);
4417 u = build (MODIFY_EXPR, TREE_TYPE (reg), reg, u);
4418 TREE_SIDE_EFFECTS (u) = 1;
4419 expand_expr (u, const0_rtx, VOIDmode, EXPAND_NORMAL);
4423 t = build (PLUS_EXPR, ptr_type_node, sav, build_int_2 (sav_ofs, 0));
4427 u = build (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg,
4428 build_int_2 (n_reg, 0));
4429 TREE_SIDE_EFFECTS (u) = 1;
4431 u = build1 (CONVERT_EXPR, integer_type_node, u);
4432 TREE_SIDE_EFFECTS (u) = 1;
4434 u = build (MULT_EXPR, integer_type_node, u, build_int_2 (sav_scale, 0));
4435 TREE_SIDE_EFFECTS (u) = 1;
4437 t = build (PLUS_EXPR, ptr_type_node, t, u);
4438 TREE_SIDE_EFFECTS (t) = 1;
4440 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4442 emit_move_insn (addr_rtx, r);
4444 emit_jump_insn (gen_jump (lab_over));
4448 emit_label (lab_false);
4450 /* ... otherwise out of the overflow area. */
4452 /* Make sure we don't find reg 7 for the next int arg.
4454 All AltiVec vectors go in the overflow area. So in the AltiVec
4455 case we need to get the vectors from the overflow area, but
4456 remember where the GPRs and FPRs are. */
4457 if (n_reg > 1 && (TREE_CODE (type) != VECTOR_TYPE
4458 || !TARGET_ALTIVEC))
4460 t = build (MODIFY_EXPR, TREE_TYPE (reg), reg, build_int_2 (8, 0));
4461 TREE_SIDE_EFFECTS (t) = 1;
4462 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4465 /* Care for on-stack alignment if needed. */
4472 /* AltiVec vectors are 16 byte aligned. */
4473 if (TARGET_ALTIVEC && TREE_CODE (type) == VECTOR_TYPE)
4478 t = build (PLUS_EXPR, TREE_TYPE (ovf), ovf, build_int_2 (align, 0));
4479 t = build (BIT_AND_EXPR, TREE_TYPE (t), t, build_int_2 (-align-1, -1));
4483 r = expand_expr (t, addr_rtx, Pmode, EXPAND_NORMAL);
4485 emit_move_insn (addr_rtx, r);
4487 t = build (PLUS_EXPR, TREE_TYPE (t), t, build_int_2 (size, 0));
4488 t = build (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
4489 TREE_SIDE_EFFECTS (t) = 1;
4490 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4492 emit_label (lab_over);
4496 r = gen_rtx_MEM (Pmode, addr_rtx);
4497 set_mem_alias_set (r, get_varargs_alias_set ());
4498 emit_move_insn (addr_rtx, r);
4506 #define def_builtin(MASK, NAME, TYPE, CODE) \
4508 if ((MASK) & target_flags) \
4509 builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
4513 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
4515 static const struct builtin_description bdesc_3arg[] =
4517 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
4518 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
4519 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
4520 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
4521 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
4522 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
4523 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
4524 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
4525 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
4526 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
4527 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
4528 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
4529 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
4530 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
4531 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_16qi, "__builtin_altivec_vperm_16qi", ALTIVEC_BUILTIN_VPERM_16QI },
4532 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4sf, "__builtin_altivec_vsel_4sf", ALTIVEC_BUILTIN_VSEL_4SF },
4533 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_4si, "__builtin_altivec_vsel_4si", ALTIVEC_BUILTIN_VSEL_4SI },
4534 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_8hi, "__builtin_altivec_vsel_8hi", ALTIVEC_BUILTIN_VSEL_8HI },
4535 { MASK_ALTIVEC, CODE_FOR_altivec_vsel_16qi, "__builtin_altivec_vsel_16qi", ALTIVEC_BUILTIN_VSEL_16QI },
4536 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_16qi, "__builtin_altivec_vsldoi_16qi", ALTIVEC_BUILTIN_VSLDOI_16QI },
4537 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_8hi, "__builtin_altivec_vsldoi_8hi", ALTIVEC_BUILTIN_VSLDOI_8HI },
4538 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4si, "__builtin_altivec_vsldoi_4si", ALTIVEC_BUILTIN_VSLDOI_4SI },
4539 { MASK_ALTIVEC, CODE_FOR_altivec_vsldoi_4sf, "__builtin_altivec_vsldoi_4sf", ALTIVEC_BUILTIN_VSLDOI_4SF },
4542 /* DST operations: void foo (void *, const int, const char). */
4544 static const struct builtin_description bdesc_dst[] =
4546 { MASK_ALTIVEC, CODE_FOR_altivec_dst, "__builtin_altivec_dst", ALTIVEC_BUILTIN_DST },
4547 { MASK_ALTIVEC, CODE_FOR_altivec_dstt, "__builtin_altivec_dstt", ALTIVEC_BUILTIN_DSTT },
4548 { MASK_ALTIVEC, CODE_FOR_altivec_dstst, "__builtin_altivec_dstst", ALTIVEC_BUILTIN_DSTST },
4549 { MASK_ALTIVEC, CODE_FOR_altivec_dststt, "__builtin_altivec_dststt", ALTIVEC_BUILTIN_DSTSTT }
4552 /* Simple binary operations: VECc = foo (VECa, VECb). */
4554 static struct builtin_description bdesc_2arg[] =
4556 { MASK_ALTIVEC, CODE_FOR_addv16qi3, "__builtin_altivec_vaddubm", ALTIVEC_BUILTIN_VADDUBM },
4557 { MASK_ALTIVEC, CODE_FOR_addv8hi3, "__builtin_altivec_vadduhm", ALTIVEC_BUILTIN_VADDUHM },
4558 { MASK_ALTIVEC, CODE_FOR_addv4si3, "__builtin_altivec_vadduwm", ALTIVEC_BUILTIN_VADDUWM },
4559 { MASK_ALTIVEC, CODE_FOR_addv4sf3, "__builtin_altivec_vaddfp", ALTIVEC_BUILTIN_VADDFP },
4560 { MASK_ALTIVEC, CODE_FOR_altivec_vaddcuw, "__builtin_altivec_vaddcuw", ALTIVEC_BUILTIN_VADDCUW },
4561 { MASK_ALTIVEC, CODE_FOR_altivec_vaddubs, "__builtin_altivec_vaddubs", ALTIVEC_BUILTIN_VADDUBS },
4562 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsbs, "__builtin_altivec_vaddsbs", ALTIVEC_BUILTIN_VADDSBS },
4563 { MASK_ALTIVEC, CODE_FOR_altivec_vadduhs, "__builtin_altivec_vadduhs", ALTIVEC_BUILTIN_VADDUHS },
4564 { MASK_ALTIVEC, CODE_FOR_altivec_vaddshs, "__builtin_altivec_vaddshs", ALTIVEC_BUILTIN_VADDSHS },
4565 { MASK_ALTIVEC, CODE_FOR_altivec_vadduws, "__builtin_altivec_vadduws", ALTIVEC_BUILTIN_VADDUWS },
4566 { MASK_ALTIVEC, CODE_FOR_altivec_vaddsws, "__builtin_altivec_vaddsws", ALTIVEC_BUILTIN_VADDSWS },
4567 { MASK_ALTIVEC, CODE_FOR_andv4si3, "__builtin_altivec_vand", ALTIVEC_BUILTIN_VAND },
4568 { MASK_ALTIVEC, CODE_FOR_altivec_vandc, "__builtin_altivec_vandc", ALTIVEC_BUILTIN_VANDC },
4569 { MASK_ALTIVEC, CODE_FOR_altivec_vavgub, "__builtin_altivec_vavgub", ALTIVEC_BUILTIN_VAVGUB },
4570 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsb, "__builtin_altivec_vavgsb", ALTIVEC_BUILTIN_VAVGSB },
4571 { MASK_ALTIVEC, CODE_FOR_altivec_vavguh, "__builtin_altivec_vavguh", ALTIVEC_BUILTIN_VAVGUH },
4572 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsh, "__builtin_altivec_vavgsh", ALTIVEC_BUILTIN_VAVGSH },
4573 { MASK_ALTIVEC, CODE_FOR_altivec_vavguw, "__builtin_altivec_vavguw", ALTIVEC_BUILTIN_VAVGUW },
4574 { MASK_ALTIVEC, CODE_FOR_altivec_vavgsw, "__builtin_altivec_vavgsw", ALTIVEC_BUILTIN_VAVGSW },
4575 { MASK_ALTIVEC, CODE_FOR_altivec_vcfux, "__builtin_altivec_vcfux", ALTIVEC_BUILTIN_VCFUX },
4576 { MASK_ALTIVEC, CODE_FOR_altivec_vcfsx, "__builtin_altivec_vcfsx", ALTIVEC_BUILTIN_VCFSX },
4577 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpbfp, "__builtin_altivec_vcmpbfp", ALTIVEC_BUILTIN_VCMPBFP },
4578 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequb, "__builtin_altivec_vcmpequb", ALTIVEC_BUILTIN_VCMPEQUB },
4579 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequh, "__builtin_altivec_vcmpequh", ALTIVEC_BUILTIN_VCMPEQUH },
4580 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpequw, "__builtin_altivec_vcmpequw", ALTIVEC_BUILTIN_VCMPEQUW },
4581 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpeqfp, "__builtin_altivec_vcmpeqfp", ALTIVEC_BUILTIN_VCMPEQFP },
4582 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgefp, "__builtin_altivec_vcmpgefp", ALTIVEC_BUILTIN_VCMPGEFP },
4583 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtub, "__builtin_altivec_vcmpgtub", ALTIVEC_BUILTIN_VCMPGTUB },
4584 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsb, "__builtin_altivec_vcmpgtsb", ALTIVEC_BUILTIN_VCMPGTSB },
4585 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuh, "__builtin_altivec_vcmpgtuh", ALTIVEC_BUILTIN_VCMPGTUH },
4586 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsh, "__builtin_altivec_vcmpgtsh", ALTIVEC_BUILTIN_VCMPGTSH },
4587 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtuw, "__builtin_altivec_vcmpgtuw", ALTIVEC_BUILTIN_VCMPGTUW },
4588 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtsw, "__builtin_altivec_vcmpgtsw", ALTIVEC_BUILTIN_VCMPGTSW },
4589 { MASK_ALTIVEC, CODE_FOR_altivec_vcmpgtfp, "__builtin_altivec_vcmpgtfp", ALTIVEC_BUILTIN_VCMPGTFP },
4590 { MASK_ALTIVEC, CODE_FOR_altivec_vctsxs, "__builtin_altivec_vctsxs", ALTIVEC_BUILTIN_VCTSXS },
4591 { MASK_ALTIVEC, CODE_FOR_altivec_vctuxs, "__builtin_altivec_vctuxs", ALTIVEC_BUILTIN_VCTUXS },
4592 { MASK_ALTIVEC, CODE_FOR_umaxv16qi3, "__builtin_altivec_vmaxub", ALTIVEC_BUILTIN_VMAXUB },
4593 { MASK_ALTIVEC, CODE_FOR_smaxv16qi3, "__builtin_altivec_vmaxsb", ALTIVEC_BUILTIN_VMAXSB },
4594 { MASK_ALTIVEC, CODE_FOR_umaxv8hi3, "__builtin_altivec_vmaxuh", ALTIVEC_BUILTIN_VMAXUH },
4595 { MASK_ALTIVEC, CODE_FOR_smaxv8hi3, "__builtin_altivec_vmaxsh", ALTIVEC_BUILTIN_VMAXSH },
4596 { MASK_ALTIVEC, CODE_FOR_umaxv4si3, "__builtin_altivec_vmaxuw", ALTIVEC_BUILTIN_VMAXUW },
4597 { MASK_ALTIVEC, CODE_FOR_smaxv4si3, "__builtin_altivec_vmaxsw", ALTIVEC_BUILTIN_VMAXSW },
4598 { MASK_ALTIVEC, CODE_FOR_smaxv4sf3, "__builtin_altivec_vmaxfp", ALTIVEC_BUILTIN_VMAXFP },
4599 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghb, "__builtin_altivec_vmrghb", ALTIVEC_BUILTIN_VMRGHB },
4600 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghh, "__builtin_altivec_vmrghh", ALTIVEC_BUILTIN_VMRGHH },
4601 { MASK_ALTIVEC, CODE_FOR_altivec_vmrghw, "__builtin_altivec_vmrghw", ALTIVEC_BUILTIN_VMRGHW },
4602 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglb, "__builtin_altivec_vmrglb", ALTIVEC_BUILTIN_VMRGLB },
4603 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglh, "__builtin_altivec_vmrglh", ALTIVEC_BUILTIN_VMRGLH },
4604 { MASK_ALTIVEC, CODE_FOR_altivec_vmrglw, "__builtin_altivec_vmrglw", ALTIVEC_BUILTIN_VMRGLW },
4605 { MASK_ALTIVEC, CODE_FOR_uminv16qi3, "__builtin_altivec_vminub", ALTIVEC_BUILTIN_VMINUB },
4606 { MASK_ALTIVEC, CODE_FOR_sminv16qi3, "__builtin_altivec_vminsb", ALTIVEC_BUILTIN_VMINSB },
4607 { MASK_ALTIVEC, CODE_FOR_uminv8hi3, "__builtin_altivec_vminuh", ALTIVEC_BUILTIN_VMINUH },
4608 { MASK_ALTIVEC, CODE_FOR_sminv8hi3, "__builtin_altivec_vminsh", ALTIVEC_BUILTIN_VMINSH },
4609 { MASK_ALTIVEC, CODE_FOR_uminv4si3, "__builtin_altivec_vminuw", ALTIVEC_BUILTIN_VMINUW },
4610 { MASK_ALTIVEC, CODE_FOR_sminv4si3, "__builtin_altivec_vminsw", ALTIVEC_BUILTIN_VMINSW },
4611 { MASK_ALTIVEC, CODE_FOR_sminv4sf3, "__builtin_altivec_vminfp", ALTIVEC_BUILTIN_VMINFP },
4612 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleub, "__builtin_altivec_vmuleub", ALTIVEC_BUILTIN_VMULEUB },
4613 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesb, "__builtin_altivec_vmulesb", ALTIVEC_BUILTIN_VMULESB },
4614 { MASK_ALTIVEC, CODE_FOR_altivec_vmuleuh, "__builtin_altivec_vmuleuh", ALTIVEC_BUILTIN_VMULEUH },
4615 { MASK_ALTIVEC, CODE_FOR_altivec_vmulesh, "__builtin_altivec_vmulesh", ALTIVEC_BUILTIN_VMULESH },
4616 { MASK_ALTIVEC, CODE_FOR_altivec_vmuloub, "__builtin_altivec_vmuloub", ALTIVEC_BUILTIN_VMULOUB },
4617 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosb, "__builtin_altivec_vmulosb", ALTIVEC_BUILTIN_VMULOSB },
4618 { MASK_ALTIVEC, CODE_FOR_altivec_vmulouh, "__builtin_altivec_vmulouh", ALTIVEC_BUILTIN_VMULOUH },
4619 { MASK_ALTIVEC, CODE_FOR_altivec_vmulosh, "__builtin_altivec_vmulosh", ALTIVEC_BUILTIN_VMULOSH },
4620 { MASK_ALTIVEC, CODE_FOR_altivec_vnor, "__builtin_altivec_vnor", ALTIVEC_BUILTIN_VNOR },
4621 { MASK_ALTIVEC, CODE_FOR_iorv4si3, "__builtin_altivec_vor", ALTIVEC_BUILTIN_VOR },
4622 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhum, "__builtin_altivec_vpkuhum", ALTIVEC_BUILTIN_VPKUHUM },
4623 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwum, "__builtin_altivec_vpkuwum", ALTIVEC_BUILTIN_VPKUWUM },
4624 { MASK_ALTIVEC, CODE_FOR_altivec_vpkpx, "__builtin_altivec_vpkpx", ALTIVEC_BUILTIN_VPKPX },
4625 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhss, "__builtin_altivec_vpkuhss", ALTIVEC_BUILTIN_VPKUHSS },
4626 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshss, "__builtin_altivec_vpkshss", ALTIVEC_BUILTIN_VPKSHSS },
4627 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwss, "__builtin_altivec_vpkuwss", ALTIVEC_BUILTIN_VPKUWSS },
4628 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
4629 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
4630 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
4631 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
4632 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
4633 { MASK_ALTIVEC, CODE_FOR_altivec_vrlb, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
4634 { MASK_ALTIVEC, CODE_FOR_altivec_vrlh, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
4635 { MASK_ALTIVEC, CODE_FOR_altivec_vrlw, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
4636 { MASK_ALTIVEC, CODE_FOR_altivec_vslb, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
4637 { MASK_ALTIVEC, CODE_FOR_altivec_vslh, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
4638 { MASK_ALTIVEC, CODE_FOR_altivec_vslw, "__builtin_altivec_vslw", ALTIVEC_BUILTIN_VSLW },
4639 { MASK_ALTIVEC, CODE_FOR_altivec_vsl, "__builtin_altivec_vsl", ALTIVEC_BUILTIN_VSL },
4640 { MASK_ALTIVEC, CODE_FOR_altivec_vslo, "__builtin_altivec_vslo", ALTIVEC_BUILTIN_VSLO },
4641 { MASK_ALTIVEC, CODE_FOR_altivec_vspltb, "__builtin_altivec_vspltb", ALTIVEC_BUILTIN_VSPLTB },
4642 { MASK_ALTIVEC, CODE_FOR_altivec_vsplth, "__builtin_altivec_vsplth", ALTIVEC_BUILTIN_VSPLTH },
4643 { MASK_ALTIVEC, CODE_FOR_altivec_vspltw, "__builtin_altivec_vspltw", ALTIVEC_BUILTIN_VSPLTW },
4644 { MASK_ALTIVEC, CODE_FOR_altivec_vsrb, "__builtin_altivec_vsrb", ALTIVEC_BUILTIN_VSRB },
4645 { MASK_ALTIVEC, CODE_FOR_altivec_vsrh, "__builtin_altivec_vsrh", ALTIVEC_BUILTIN_VSRH },
4646 { MASK_ALTIVEC, CODE_FOR_altivec_vsrw, "__builtin_altivec_vsrw", ALTIVEC_BUILTIN_VSRW },
4647 { MASK_ALTIVEC, CODE_FOR_altivec_vsrab, "__builtin_altivec_vsrab", ALTIVEC_BUILTIN_VSRAB },
4648 { MASK_ALTIVEC, CODE_FOR_altivec_vsrah, "__builtin_altivec_vsrah", ALTIVEC_BUILTIN_VSRAH },
4649 { MASK_ALTIVEC, CODE_FOR_altivec_vsraw, "__builtin_altivec_vsraw", ALTIVEC_BUILTIN_VSRAW },
4650 { MASK_ALTIVEC, CODE_FOR_altivec_vsr, "__builtin_altivec_vsr", ALTIVEC_BUILTIN_VSR },
4651 { MASK_ALTIVEC, CODE_FOR_altivec_vsro, "__builtin_altivec_vsro", ALTIVEC_BUILTIN_VSRO },
4652 { MASK_ALTIVEC, CODE_FOR_subv16qi3, "__builtin_altivec_vsububm", ALTIVEC_BUILTIN_VSUBUBM },
4653 { MASK_ALTIVEC, CODE_FOR_subv8hi3, "__builtin_altivec_vsubuhm", ALTIVEC_BUILTIN_VSUBUHM },
4654 { MASK_ALTIVEC, CODE_FOR_subv4si3, "__builtin_altivec_vsubuwm", ALTIVEC_BUILTIN_VSUBUWM },
4655 { MASK_ALTIVEC, CODE_FOR_subv4sf3, "__builtin_altivec_vsubfp", ALTIVEC_BUILTIN_VSUBFP },
4656 { MASK_ALTIVEC, CODE_FOR_altivec_vsubcuw, "__builtin_altivec_vsubcuw", ALTIVEC_BUILTIN_VSUBCUW },
4657 { MASK_ALTIVEC, CODE_FOR_altivec_vsububs, "__builtin_altivec_vsububs", ALTIVEC_BUILTIN_VSUBUBS },
4658 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsbs, "__builtin_altivec_vsubsbs", ALTIVEC_BUILTIN_VSUBSBS },
4659 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuhs, "__builtin_altivec_vsubuhs", ALTIVEC_BUILTIN_VSUBUHS },
4660 { MASK_ALTIVEC, CODE_FOR_altivec_vsubshs, "__builtin_altivec_vsubshs", ALTIVEC_BUILTIN_VSUBSHS },
4661 { MASK_ALTIVEC, CODE_FOR_altivec_vsubuws, "__builtin_altivec_vsubuws", ALTIVEC_BUILTIN_VSUBUWS },
4662 { MASK_ALTIVEC, CODE_FOR_altivec_vsubsws, "__builtin_altivec_vsubsws", ALTIVEC_BUILTIN_VSUBSWS },
4663 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4ubs, "__builtin_altivec_vsum4ubs", ALTIVEC_BUILTIN_VSUM4UBS },
4664 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4sbs, "__builtin_altivec_vsum4sbs", ALTIVEC_BUILTIN_VSUM4SBS },
4665 { MASK_ALTIVEC, CODE_FOR_altivec_vsum4shs, "__builtin_altivec_vsum4shs", ALTIVEC_BUILTIN_VSUM4SHS },
4666 { MASK_ALTIVEC, CODE_FOR_altivec_vsum2sws, "__builtin_altivec_vsum2sws", ALTIVEC_BUILTIN_VSUM2SWS },
4667 { MASK_ALTIVEC, CODE_FOR_altivec_vsumsws, "__builtin_altivec_vsumsws", ALTIVEC_BUILTIN_VSUMSWS },
4668 { MASK_ALTIVEC, CODE_FOR_xorv4si3, "__builtin_altivec_vxor", ALTIVEC_BUILTIN_VXOR },
4670 /* Place holder, leave as first spe builtin. */
4671 { 0, CODE_FOR_spe_evaddw, "__builtin_spe_evaddw", SPE_BUILTIN_EVADDW },
4672 { 0, CODE_FOR_spe_evand, "__builtin_spe_evand", SPE_BUILTIN_EVAND },
4673 { 0, CODE_FOR_spe_evandc, "__builtin_spe_evandc", SPE_BUILTIN_EVANDC },
4674 { 0, CODE_FOR_spe_evdivws, "__builtin_spe_evdivws", SPE_BUILTIN_EVDIVWS },
4675 { 0, CODE_FOR_spe_evdivwu, "__builtin_spe_evdivwu", SPE_BUILTIN_EVDIVWU },
4676 { 0, CODE_FOR_spe_eveqv, "__builtin_spe_eveqv", SPE_BUILTIN_EVEQV },
4677 { 0, CODE_FOR_spe_evfsadd, "__builtin_spe_evfsadd", SPE_BUILTIN_EVFSADD },
4678 { 0, CODE_FOR_spe_evfsdiv, "__builtin_spe_evfsdiv", SPE_BUILTIN_EVFSDIV },
4679 { 0, CODE_FOR_spe_evfsmul, "__builtin_spe_evfsmul", SPE_BUILTIN_EVFSMUL },
4680 { 0, CODE_FOR_spe_evfssub, "__builtin_spe_evfssub", SPE_BUILTIN_EVFSSUB },
4681 { 0, CODE_FOR_spe_evmergehi, "__builtin_spe_evmergehi", SPE_BUILTIN_EVMERGEHI },
4682 { 0, CODE_FOR_spe_evmergehilo, "__builtin_spe_evmergehilo", SPE_BUILTIN_EVMERGEHILO },
4683 { 0, CODE_FOR_spe_evmergelo, "__builtin_spe_evmergelo", SPE_BUILTIN_EVMERGELO },
4684 { 0, CODE_FOR_spe_evmergelohi, "__builtin_spe_evmergelohi", SPE_BUILTIN_EVMERGELOHI },
4685 { 0, CODE_FOR_spe_evmhegsmfaa, "__builtin_spe_evmhegsmfaa", SPE_BUILTIN_EVMHEGSMFAA },
4686 { 0, CODE_FOR_spe_evmhegsmfan, "__builtin_spe_evmhegsmfan", SPE_BUILTIN_EVMHEGSMFAN },
4687 { 0, CODE_FOR_spe_evmhegsmiaa, "__builtin_spe_evmhegsmiaa", SPE_BUILTIN_EVMHEGSMIAA },
4688 { 0, CODE_FOR_spe_evmhegsmian, "__builtin_spe_evmhegsmian", SPE_BUILTIN_EVMHEGSMIAN },
4689 { 0, CODE_FOR_spe_evmhegumiaa, "__builtin_spe_evmhegumiaa", SPE_BUILTIN_EVMHEGUMIAA },
4690 { 0, CODE_FOR_spe_evmhegumian, "__builtin_spe_evmhegumian", SPE_BUILTIN_EVMHEGUMIAN },
4691 { 0, CODE_FOR_spe_evmhesmf, "__builtin_spe_evmhesmf", SPE_BUILTIN_EVMHESMF },
4692 { 0, CODE_FOR_spe_evmhesmfa, "__builtin_spe_evmhesmfa", SPE_BUILTIN_EVMHESMFA },
4693 { 0, CODE_FOR_spe_evmhesmfaaw, "__builtin_spe_evmhesmfaaw", SPE_BUILTIN_EVMHESMFAAW },
4694 { 0, CODE_FOR_spe_evmhesmfanw, "__builtin_spe_evmhesmfanw", SPE_BUILTIN_EVMHESMFANW },
4695 { 0, CODE_FOR_spe_evmhesmi, "__builtin_spe_evmhesmi", SPE_BUILTIN_EVMHESMI },
4696 { 0, CODE_FOR_spe_evmhesmia, "__builtin_spe_evmhesmia", SPE_BUILTIN_EVMHESMIA },
4697 { 0, CODE_FOR_spe_evmhesmiaaw, "__builtin_spe_evmhesmiaaw", SPE_BUILTIN_EVMHESMIAAW },
4698 { 0, CODE_FOR_spe_evmhesmianw, "__builtin_spe_evmhesmianw", SPE_BUILTIN_EVMHESMIANW },
4699 { 0, CODE_FOR_spe_evmhessf, "__builtin_spe_evmhessf", SPE_BUILTIN_EVMHESSF },
4700 { 0, CODE_FOR_spe_evmhessfa, "__builtin_spe_evmhessfa", SPE_BUILTIN_EVMHESSFA },
4701 { 0, CODE_FOR_spe_evmhessfaaw, "__builtin_spe_evmhessfaaw", SPE_BUILTIN_EVMHESSFAAW },
4702 { 0, CODE_FOR_spe_evmhessfanw, "__builtin_spe_evmhessfanw", SPE_BUILTIN_EVMHESSFANW },
4703 { 0, CODE_FOR_spe_evmhessiaaw, "__builtin_spe_evmhessiaaw", SPE_BUILTIN_EVMHESSIAAW },
4704 { 0, CODE_FOR_spe_evmhessianw, "__builtin_spe_evmhessianw", SPE_BUILTIN_EVMHESSIANW },
4705 { 0, CODE_FOR_spe_evmheumi, "__builtin_spe_evmheumi", SPE_BUILTIN_EVMHEUMI },
4706 { 0, CODE_FOR_spe_evmheumia, "__builtin_spe_evmheumia", SPE_BUILTIN_EVMHEUMIA },
4707 { 0, CODE_FOR_spe_evmheumiaaw, "__builtin_spe_evmheumiaaw", SPE_BUILTIN_EVMHEUMIAAW },
4708 { 0, CODE_FOR_spe_evmheumianw, "__builtin_spe_evmheumianw", SPE_BUILTIN_EVMHEUMIANW },
4709 { 0, CODE_FOR_spe_evmheusiaaw, "__builtin_spe_evmheusiaaw", SPE_BUILTIN_EVMHEUSIAAW },
4710 { 0, CODE_FOR_spe_evmheusianw, "__builtin_spe_evmheusianw", SPE_BUILTIN_EVMHEUSIANW },
4711 { 0, CODE_FOR_spe_evmhogsmfaa, "__builtin_spe_evmhogsmfaa", SPE_BUILTIN_EVMHOGSMFAA },
4712 { 0, CODE_FOR_spe_evmhogsmfan, "__builtin_spe_evmhogsmfan", SPE_BUILTIN_EVMHOGSMFAN },
4713 { 0, CODE_FOR_spe_evmhogsmiaa, "__builtin_spe_evmhogsmiaa", SPE_BUILTIN_EVMHOGSMIAA },
4714 { 0, CODE_FOR_spe_evmhogsmian, "__builtin_spe_evmhogsmian", SPE_BUILTIN_EVMHOGSMIAN },
4715 { 0, CODE_FOR_spe_evmhogumiaa, "__builtin_spe_evmhogumiaa", SPE_BUILTIN_EVMHOGUMIAA },
4716 { 0, CODE_FOR_spe_evmhogumian, "__builtin_spe_evmhogumian", SPE_BUILTIN_EVMHOGUMIAN },
4717 { 0, CODE_FOR_spe_evmhosmf, "__builtin_spe_evmhosmf", SPE_BUILTIN_EVMHOSMF },
4718 { 0, CODE_FOR_spe_evmhosmfa, "__builtin_spe_evmhosmfa", SPE_BUILTIN_EVMHOSMFA },
4719 { 0, CODE_FOR_spe_evmhosmfaaw, "__builtin_spe_evmhosmfaaw", SPE_BUILTIN_EVMHOSMFAAW },
4720 { 0, CODE_FOR_spe_evmhosmfanw, "__builtin_spe_evmhosmfanw", SPE_BUILTIN_EVMHOSMFANW },
4721 { 0, CODE_FOR_spe_evmhosmi, "__builtin_spe_evmhosmi", SPE_BUILTIN_EVMHOSMI },
4722 { 0, CODE_FOR_spe_evmhosmia, "__builtin_spe_evmhosmia", SPE_BUILTIN_EVMHOSMIA },
4723 { 0, CODE_FOR_spe_evmhosmiaaw, "__builtin_spe_evmhosmiaaw", SPE_BUILTIN_EVMHOSMIAAW },
4724 { 0, CODE_FOR_spe_evmhosmianw, "__builtin_spe_evmhosmianw", SPE_BUILTIN_EVMHOSMIANW },
4725 { 0, CODE_FOR_spe_evmhossf, "__builtin_spe_evmhossf", SPE_BUILTIN_EVMHOSSF },
4726 { 0, CODE_FOR_spe_evmhossfa, "__builtin_spe_evmhossfa", SPE_BUILTIN_EVMHOSSFA },
4727 { 0, CODE_FOR_spe_evmhossfaaw, "__builtin_spe_evmhossfaaw", SPE_BUILTIN_EVMHOSSFAAW },
4728 { 0, CODE_FOR_spe_evmhossfanw, "__builtin_spe_evmhossfanw", SPE_BUILTIN_EVMHOSSFANW },
4729 { 0, CODE_FOR_spe_evmhossiaaw, "__builtin_spe_evmhossiaaw", SPE_BUILTIN_EVMHOSSIAAW },
4730 { 0, CODE_FOR_spe_evmhossianw, "__builtin_spe_evmhossianw", SPE_BUILTIN_EVMHOSSIANW },
4731 { 0, CODE_FOR_spe_evmhoumi, "__builtin_spe_evmhoumi", SPE_BUILTIN_EVMHOUMI },
4732 { 0, CODE_FOR_spe_evmhoumia, "__builtin_spe_evmhoumia", SPE_BUILTIN_EVMHOUMIA },
4733 { 0, CODE_FOR_spe_evmhoumiaaw, "__builtin_spe_evmhoumiaaw", SPE_BUILTIN_EVMHOUMIAAW },
4734 { 0, CODE_FOR_spe_evmhoumianw, "__builtin_spe_evmhoumianw", SPE_BUILTIN_EVMHOUMIANW },
4735 { 0, CODE_FOR_spe_evmhousiaaw, "__builtin_spe_evmhousiaaw", SPE_BUILTIN_EVMHOUSIAAW },
4736 { 0, CODE_FOR_spe_evmhousianw, "__builtin_spe_evmhousianw", SPE_BUILTIN_EVMHOUSIANW },
4737 { 0, CODE_FOR_spe_evmwhsmf, "__builtin_spe_evmwhsmf", SPE_BUILTIN_EVMWHSMF },
4738 { 0, CODE_FOR_spe_evmwhsmfa, "__builtin_spe_evmwhsmfa", SPE_BUILTIN_EVMWHSMFA },
4739 { 0, CODE_FOR_spe_evmwhsmi, "__builtin_spe_evmwhsmi", SPE_BUILTIN_EVMWHSMI },
4740 { 0, CODE_FOR_spe_evmwhsmia, "__builtin_spe_evmwhsmia", SPE_BUILTIN_EVMWHSMIA },
4741 { 0, CODE_FOR_spe_evmwhssf, "__builtin_spe_evmwhssf", SPE_BUILTIN_EVMWHSSF },
4742 { 0, CODE_FOR_spe_evmwhssfa, "__builtin_spe_evmwhssfa", SPE_BUILTIN_EVMWHSSFA },
4743 { 0, CODE_FOR_spe_evmwhumi, "__builtin_spe_evmwhumi", SPE_BUILTIN_EVMWHUMI },
4744 { 0, CODE_FOR_spe_evmwhumia, "__builtin_spe_evmwhumia", SPE_BUILTIN_EVMWHUMIA },
4745 { 0, CODE_FOR_spe_evmwlsmiaaw, "__builtin_spe_evmwlsmiaaw", SPE_BUILTIN_EVMWLSMIAAW },
4746 { 0, CODE_FOR_spe_evmwlsmianw, "__builtin_spe_evmwlsmianw", SPE_BUILTIN_EVMWLSMIANW },
4747 { 0, CODE_FOR_spe_evmwlssiaaw, "__builtin_spe_evmwlssiaaw", SPE_BUILTIN_EVMWLSSIAAW },
4748 { 0, CODE_FOR_spe_evmwlssianw, "__builtin_spe_evmwlssianw", SPE_BUILTIN_EVMWLSSIANW },
4749 { 0, CODE_FOR_spe_evmwlumi, "__builtin_spe_evmwlumi", SPE_BUILTIN_EVMWLUMI },
4750 { 0, CODE_FOR_spe_evmwlumia, "__builtin_spe_evmwlumia", SPE_BUILTIN_EVMWLUMIA },
4751 { 0, CODE_FOR_spe_evmwlumiaaw, "__builtin_spe_evmwlumiaaw", SPE_BUILTIN_EVMWLUMIAAW },
4752 { 0, CODE_FOR_spe_evmwlumianw, "__builtin_spe_evmwlumianw", SPE_BUILTIN_EVMWLUMIANW },
4753 { 0, CODE_FOR_spe_evmwlusiaaw, "__builtin_spe_evmwlusiaaw", SPE_BUILTIN_EVMWLUSIAAW },
4754 { 0, CODE_FOR_spe_evmwlusianw, "__builtin_spe_evmwlusianw", SPE_BUILTIN_EVMWLUSIANW },
4755 { 0, CODE_FOR_spe_evmwsmf, "__builtin_spe_evmwsmf", SPE_BUILTIN_EVMWSMF },
4756 { 0, CODE_FOR_spe_evmwsmfa, "__builtin_spe_evmwsmfa", SPE_BUILTIN_EVMWSMFA },
4757 { 0, CODE_FOR_spe_evmwsmfaa, "__builtin_spe_evmwsmfaa", SPE_BUILTIN_EVMWSMFAA },
4758 { 0, CODE_FOR_spe_evmwsmfan, "__builtin_spe_evmwsmfan", SPE_BUILTIN_EVMWSMFAN },
4759 { 0, CODE_FOR_spe_evmwsmi, "__builtin_spe_evmwsmi", SPE_BUILTIN_EVMWSMI },
4760 { 0, CODE_FOR_spe_evmwsmia, "__builtin_spe_evmwsmia", SPE_BUILTIN_EVMWSMIA },
4761 { 0, CODE_FOR_spe_evmwsmiaa, "__builtin_spe_evmwsmiaa", SPE_BUILTIN_EVMWSMIAA },
4762 { 0, CODE_FOR_spe_evmwsmian, "__builtin_spe_evmwsmian", SPE_BUILTIN_EVMWSMIAN },
4763 { 0, CODE_FOR_spe_evmwssf, "__builtin_spe_evmwssf", SPE_BUILTIN_EVMWSSF },
4764 { 0, CODE_FOR_spe_evmwssfa, "__builtin_spe_evmwssfa", SPE_BUILTIN_EVMWSSFA },
4765 { 0, CODE_FOR_spe_evmwssfaa, "__builtin_spe_evmwssfaa", SPE_BUILTIN_EVMWSSFAA },
4766 { 0, CODE_FOR_spe_evmwssfan, "__builtin_spe_evmwssfan", SPE_BUILTIN_EVMWSSFAN },
4767 { 0, CODE_FOR_spe_evmwumi, "__builtin_spe_evmwumi", SPE_BUILTIN_EVMWUMI },
4768 { 0, CODE_FOR_spe_evmwumia, "__builtin_spe_evmwumia", SPE_BUILTIN_EVMWUMIA },
4769 { 0, CODE_FOR_spe_evmwumiaa, "__builtin_spe_evmwumiaa", SPE_BUILTIN_EVMWUMIAA },
4770 { 0, CODE_FOR_spe_evmwumian, "__builtin_spe_evmwumian", SPE_BUILTIN_EVMWUMIAN },
4771 { 0, CODE_FOR_spe_evnand, "__builtin_spe_evnand", SPE_BUILTIN_EVNAND },
4772 { 0, CODE_FOR_spe_evnor, "__builtin_spe_evnor", SPE_BUILTIN_EVNOR },
4773 { 0, CODE_FOR_spe_evor, "__builtin_spe_evor", SPE_BUILTIN_EVOR },
4774 { 0, CODE_FOR_spe_evorc, "__builtin_spe_evorc", SPE_BUILTIN_EVORC },
4775 { 0, CODE_FOR_spe_evrlw, "__builtin_spe_evrlw", SPE_BUILTIN_EVRLW },
4776 { 0, CODE_FOR_spe_evslw, "__builtin_spe_evslw", SPE_BUILTIN_EVSLW },
4777 { 0, CODE_FOR_spe_evsrws, "__builtin_spe_evsrws", SPE_BUILTIN_EVSRWS },
4778 { 0, CODE_FOR_spe_evsrwu, "__builtin_spe_evsrwu", SPE_BUILTIN_EVSRWU },
4779 { 0, CODE_FOR_spe_evsubfw, "__builtin_spe_evsubfw", SPE_BUILTIN_EVSUBFW },
4781 /* SPE binary operations expecting a 5-bit unsigned literal. */
4782 { 0, CODE_FOR_spe_evaddiw, "__builtin_spe_evaddiw", SPE_BUILTIN_EVADDIW },
4784 { 0, CODE_FOR_spe_evrlwi, "__builtin_spe_evrlwi", SPE_BUILTIN_EVRLWI },
4785 { 0, CODE_FOR_spe_evslwi, "__builtin_spe_evslwi", SPE_BUILTIN_EVSLWI },
4786 { 0, CODE_FOR_spe_evsrwis, "__builtin_spe_evsrwis", SPE_BUILTIN_EVSRWIS },
4787 { 0, CODE_FOR_spe_evsrwiu, "__builtin_spe_evsrwiu", SPE_BUILTIN_EVSRWIU },
4788 { 0, CODE_FOR_spe_evsubifw, "__builtin_spe_evsubifw", SPE_BUILTIN_EVSUBIFW },
4789 { 0, CODE_FOR_spe_evmwhssfaa, "__builtin_spe_evmwhssfaa", SPE_BUILTIN_EVMWHSSFAA },
4790 { 0, CODE_FOR_spe_evmwhssmaa, "__builtin_spe_evmwhssmaa", SPE_BUILTIN_EVMWHSSMAA },
4791 { 0, CODE_FOR_spe_evmwhsmfaa, "__builtin_spe_evmwhsmfaa", SPE_BUILTIN_EVMWHSMFAA },
4792 { 0, CODE_FOR_spe_evmwhsmiaa, "__builtin_spe_evmwhsmiaa", SPE_BUILTIN_EVMWHSMIAA },
4793 { 0, CODE_FOR_spe_evmwhusiaa, "__builtin_spe_evmwhusiaa", SPE_BUILTIN_EVMWHUSIAA },
4794 { 0, CODE_FOR_spe_evmwhumiaa, "__builtin_spe_evmwhumiaa", SPE_BUILTIN_EVMWHUMIAA },
4795 { 0, CODE_FOR_spe_evmwhssfan, "__builtin_spe_evmwhssfan", SPE_BUILTIN_EVMWHSSFAN },
4796 { 0, CODE_FOR_spe_evmwhssian, "__builtin_spe_evmwhssian", SPE_BUILTIN_EVMWHSSIAN },
4797 { 0, CODE_FOR_spe_evmwhsmfan, "__builtin_spe_evmwhsmfan", SPE_BUILTIN_EVMWHSMFAN },
4798 { 0, CODE_FOR_spe_evmwhsmian, "__builtin_spe_evmwhsmian", SPE_BUILTIN_EVMWHSMIAN },
4799 { 0, CODE_FOR_spe_evmwhusian, "__builtin_spe_evmwhusian", SPE_BUILTIN_EVMWHUSIAN },
4800 { 0, CODE_FOR_spe_evmwhumian, "__builtin_spe_evmwhumian", SPE_BUILTIN_EVMWHUMIAN },
4801 { 0, CODE_FOR_spe_evmwhgssfaa, "__builtin_spe_evmwhgssfaa", SPE_BUILTIN_EVMWHGSSFAA },
4802 { 0, CODE_FOR_spe_evmwhgsmfaa, "__builtin_spe_evmwhgsmfaa", SPE_BUILTIN_EVMWHGSMFAA },
4803 { 0, CODE_FOR_spe_evmwhgsmiaa, "__builtin_spe_evmwhgsmiaa", SPE_BUILTIN_EVMWHGSMIAA },
4804 { 0, CODE_FOR_spe_evmwhgumiaa, "__builtin_spe_evmwhgumiaa", SPE_BUILTIN_EVMWHGUMIAA },
4805 { 0, CODE_FOR_spe_evmwhgssfan, "__builtin_spe_evmwhgssfan", SPE_BUILTIN_EVMWHGSSFAN },
4806 { 0, CODE_FOR_spe_evmwhgsmfan, "__builtin_spe_evmwhgsmfan", SPE_BUILTIN_EVMWHGSMFAN },
4807 { 0, CODE_FOR_spe_evmwhgsmian, "__builtin_spe_evmwhgsmian", SPE_BUILTIN_EVMWHGSMIAN },
4808 { 0, CODE_FOR_spe_evmwhgumian, "__builtin_spe_evmwhgumian", SPE_BUILTIN_EVMWHGUMIAN },
4809 { 0, CODE_FOR_spe_brinc, "__builtin_spe_brinc", SPE_BUILTIN_BRINC },
4811 /* Place-holder. Leave as last binary SPE builtin. */
4812 { 0, CODE_FOR_xorv2si3, "__builtin_spe_evxor", SPE_BUILTIN_EVXOR },
4815 /* AltiVec predicates. */
4817 struct builtin_description_predicates
4819 const unsigned int mask;
4820 const enum insn_code icode;
4822 const char *const name;
4823 const enum rs6000_builtins code;
4826 static const struct builtin_description_predicates bdesc_altivec_preds[] =
4828 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpbfp.", "__builtin_altivec_vcmpbfp_p", ALTIVEC_BUILTIN_VCMPBFP_P },
4829 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpeqfp.", "__builtin_altivec_vcmpeqfp_p", ALTIVEC_BUILTIN_VCMPEQFP_P },
4830 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgefp.", "__builtin_altivec_vcmpgefp_p", ALTIVEC_BUILTIN_VCMPGEFP_P },
4831 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4sf, "*vcmpgtfp.", "__builtin_altivec_vcmpgtfp_p", ALTIVEC_BUILTIN_VCMPGTFP_P },
4832 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpequw.", "__builtin_altivec_vcmpequw_p", ALTIVEC_BUILTIN_VCMPEQUW_P },
4833 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtsw.", "__builtin_altivec_vcmpgtsw_p", ALTIVEC_BUILTIN_VCMPGTSW_P },
4834 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v4si, "*vcmpgtuw.", "__builtin_altivec_vcmpgtuw_p", ALTIVEC_BUILTIN_VCMPGTUW_P },
4835 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtuh.", "__builtin_altivec_vcmpgtuh_p", ALTIVEC_BUILTIN_VCMPGTUH_P },
4836 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpgtsh.", "__builtin_altivec_vcmpgtsh_p", ALTIVEC_BUILTIN_VCMPGTSH_P },
4837 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v8hi, "*vcmpequh.", "__builtin_altivec_vcmpequh_p", ALTIVEC_BUILTIN_VCMPEQUH_P },
4838 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpequb.", "__builtin_altivec_vcmpequb_p", ALTIVEC_BUILTIN_VCMPEQUB_P },
4839 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtsb.", "__builtin_altivec_vcmpgtsb_p", ALTIVEC_BUILTIN_VCMPGTSB_P },
4840 { MASK_ALTIVEC, CODE_FOR_altivec_predicate_v16qi, "*vcmpgtub.", "__builtin_altivec_vcmpgtub_p", ALTIVEC_BUILTIN_VCMPGTUB_P }
4843 /* SPE predicates. */
4844 static struct builtin_description bdesc_spe_predicates[] =
4846 /* Place-holder. Leave as first. */
4847 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evcmpeq", SPE_BUILTIN_EVCMPEQ },
4848 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evcmpgts", SPE_BUILTIN_EVCMPGTS },
4849 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evcmpgtu", SPE_BUILTIN_EVCMPGTU },
4850 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evcmplts", SPE_BUILTIN_EVCMPLTS },
4851 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evcmpltu", SPE_BUILTIN_EVCMPLTU },
4852 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evfscmpeq", SPE_BUILTIN_EVFSCMPEQ },
4853 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evfscmpgt", SPE_BUILTIN_EVFSCMPGT },
4854 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evfscmplt", SPE_BUILTIN_EVFSCMPLT },
4855 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evfststeq", SPE_BUILTIN_EVFSTSTEQ },
4856 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evfststgt", SPE_BUILTIN_EVFSTSTGT },
4857 /* Place-holder. Leave as last. */
4858 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evfststlt", SPE_BUILTIN_EVFSTSTLT },
4861 /* SPE evsel predicates. */
4862 static struct builtin_description bdesc_spe_evsel[] =
4864 /* Place-holder. Leave as first. */
4865 { 0, CODE_FOR_spe_evcmpgts, "__builtin_spe_evsel_gts", SPE_BUILTIN_EVSEL_CMPGTS },
4866 { 0, CODE_FOR_spe_evcmpgtu, "__builtin_spe_evsel_gtu", SPE_BUILTIN_EVSEL_CMPGTU },
4867 { 0, CODE_FOR_spe_evcmplts, "__builtin_spe_evsel_lts", SPE_BUILTIN_EVSEL_CMPLTS },
4868 { 0, CODE_FOR_spe_evcmpltu, "__builtin_spe_evsel_ltu", SPE_BUILTIN_EVSEL_CMPLTU },
4869 { 0, CODE_FOR_spe_evcmpeq, "__builtin_spe_evsel_eq", SPE_BUILTIN_EVSEL_CMPEQ },
4870 { 0, CODE_FOR_spe_evfscmpgt, "__builtin_spe_evsel_fsgt", SPE_BUILTIN_EVSEL_FSCMPGT },
4871 { 0, CODE_FOR_spe_evfscmplt, "__builtin_spe_evsel_fslt", SPE_BUILTIN_EVSEL_FSCMPLT },
4872 { 0, CODE_FOR_spe_evfscmpeq, "__builtin_spe_evsel_fseq", SPE_BUILTIN_EVSEL_FSCMPEQ },
4873 { 0, CODE_FOR_spe_evfststgt, "__builtin_spe_evsel_fststgt", SPE_BUILTIN_EVSEL_FSTSTGT },
4874 { 0, CODE_FOR_spe_evfststlt, "__builtin_spe_evsel_fststlt", SPE_BUILTIN_EVSEL_FSTSTLT },
4875 /* Place-holder. Leave as last. */
4876 { 0, CODE_FOR_spe_evfststeq, "__builtin_spe_evsel_fststeq", SPE_BUILTIN_EVSEL_FSTSTEQ },
4879 /* ABS* operations. */
4881 static const struct builtin_description bdesc_abs[] =
4883 { MASK_ALTIVEC, CODE_FOR_absv4si2, "__builtin_altivec_abs_v4si", ALTIVEC_BUILTIN_ABS_V4SI },
4884 { MASK_ALTIVEC, CODE_FOR_absv8hi2, "__builtin_altivec_abs_v8hi", ALTIVEC_BUILTIN_ABS_V8HI },
4885 { MASK_ALTIVEC, CODE_FOR_absv4sf2, "__builtin_altivec_abs_v4sf", ALTIVEC_BUILTIN_ABS_V4SF },
4886 { MASK_ALTIVEC, CODE_FOR_absv16qi2, "__builtin_altivec_abs_v16qi", ALTIVEC_BUILTIN_ABS_V16QI },
4887 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v4si, "__builtin_altivec_abss_v4si", ALTIVEC_BUILTIN_ABSS_V4SI },
4888 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v8hi, "__builtin_altivec_abss_v8hi", ALTIVEC_BUILTIN_ABSS_V8HI },
4889 { MASK_ALTIVEC, CODE_FOR_altivec_abss_v16qi, "__builtin_altivec_abss_v16qi", ALTIVEC_BUILTIN_ABSS_V16QI }
4892 /* Simple unary operations: VECb = foo (unsigned literal) or VECb =
4895 static struct builtin_description bdesc_1arg[] =
4897 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
4898 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
4899 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
4900 { MASK_ALTIVEC, CODE_FOR_altivec_vrfim, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
4901 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
4902 { MASK_ALTIVEC, CODE_FOR_altivec_vrfip, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
4903 { MASK_ALTIVEC, CODE_FOR_ftruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
4904 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
4905 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
4906 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
4907 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
4908 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
4909 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
4910 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsh, "__builtin_altivec_vupkhsh", ALTIVEC_BUILTIN_VUPKHSH },
4911 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsb, "__builtin_altivec_vupklsb", ALTIVEC_BUILTIN_VUPKLSB },
4912 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
4913 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
4915 /* The SPE unary builtins must start with SPE_BUILTIN_EVABS and
4916 end with SPE_BUILTIN_EVSUBFUSIAAW. */
4917 { 0, CODE_FOR_spe_evabs, "__builtin_spe_evabs", SPE_BUILTIN_EVABS },
4918 { 0, CODE_FOR_spe_evaddsmiaaw, "__builtin_spe_evaddsmiaaw", SPE_BUILTIN_EVADDSMIAAW },
4919 { 0, CODE_FOR_spe_evaddssiaaw, "__builtin_spe_evaddssiaaw", SPE_BUILTIN_EVADDSSIAAW },
4920 { 0, CODE_FOR_spe_evaddumiaaw, "__builtin_spe_evaddumiaaw", SPE_BUILTIN_EVADDUMIAAW },
4921 { 0, CODE_FOR_spe_evaddusiaaw, "__builtin_spe_evaddusiaaw", SPE_BUILTIN_EVADDUSIAAW },
4922 { 0, CODE_FOR_spe_evcntlsw, "__builtin_spe_evcntlsw", SPE_BUILTIN_EVCNTLSW },
4923 { 0, CODE_FOR_spe_evcntlzw, "__builtin_spe_evcntlzw", SPE_BUILTIN_EVCNTLZW },
4924 { 0, CODE_FOR_spe_evextsb, "__builtin_spe_evextsb", SPE_BUILTIN_EVEXTSB },
4925 { 0, CODE_FOR_spe_evextsh, "__builtin_spe_evextsh", SPE_BUILTIN_EVEXTSH },
4926 { 0, CODE_FOR_spe_evfsabs, "__builtin_spe_evfsabs", SPE_BUILTIN_EVFSABS },
4927 { 0, CODE_FOR_spe_evfscfsf, "__builtin_spe_evfscfsf", SPE_BUILTIN_EVFSCFSF },
4928 { 0, CODE_FOR_spe_evfscfsi, "__builtin_spe_evfscfsi", SPE_BUILTIN_EVFSCFSI },
4929 { 0, CODE_FOR_spe_evfscfuf, "__builtin_spe_evfscfuf", SPE_BUILTIN_EVFSCFUF },
4930 { 0, CODE_FOR_spe_evfscfui, "__builtin_spe_evfscfui", SPE_BUILTIN_EVFSCFUI },
4931 { 0, CODE_FOR_spe_evfsctsf, "__builtin_spe_evfsctsf", SPE_BUILTIN_EVFSCTSF },
4932 { 0, CODE_FOR_spe_evfsctsi, "__builtin_spe_evfsctsi", SPE_BUILTIN_EVFSCTSI },
4933 { 0, CODE_FOR_spe_evfsctsiz, "__builtin_spe_evfsctsiz", SPE_BUILTIN_EVFSCTSIZ },
4934 { 0, CODE_FOR_spe_evfsctuf, "__builtin_spe_evfsctuf", SPE_BUILTIN_EVFSCTUF },
4935 { 0, CODE_FOR_spe_evfsctui, "__builtin_spe_evfsctui", SPE_BUILTIN_EVFSCTUI },
4936 { 0, CODE_FOR_spe_evfsctuiz, "__builtin_spe_evfsctuiz", SPE_BUILTIN_EVFSCTUIZ },
4937 { 0, CODE_FOR_spe_evfsnabs, "__builtin_spe_evfsnabs", SPE_BUILTIN_EVFSNABS },
4938 { 0, CODE_FOR_spe_evfsneg, "__builtin_spe_evfsneg", SPE_BUILTIN_EVFSNEG },
4939 { 0, CODE_FOR_spe_evmra, "__builtin_spe_evmra", SPE_BUILTIN_EVMRA },
4940 { 0, CODE_FOR_spe_evneg, "__builtin_spe_evneg", SPE_BUILTIN_EVNEG },
4941 { 0, CODE_FOR_spe_evrndw, "__builtin_spe_evrndw", SPE_BUILTIN_EVRNDW },
4942 { 0, CODE_FOR_spe_evsubfsmiaaw, "__builtin_spe_evsubfsmiaaw", SPE_BUILTIN_EVSUBFSMIAAW },
4943 { 0, CODE_FOR_spe_evsubfssiaaw, "__builtin_spe_evsubfssiaaw", SPE_BUILTIN_EVSUBFSSIAAW },
4944 { 0, CODE_FOR_spe_evsubfumiaaw, "__builtin_spe_evsubfumiaaw", SPE_BUILTIN_EVSUBFUMIAAW },
4945 { 0, CODE_FOR_spe_evsplatfi, "__builtin_spe_evsplatfi", SPE_BUILTIN_EVSPLATFI },
4946 { 0, CODE_FOR_spe_evsplati, "__builtin_spe_evsplati", SPE_BUILTIN_EVSPLATI },
4948 /* Place-holder. Leave as last unary SPE builtin. */
4949 { 0, CODE_FOR_spe_evsubfusiaaw, "__builtin_spe_evsubfusiaaw", SPE_BUILTIN_EVSUBFUSIAAW },
4953 rs6000_expand_unop_builtin (icode, arglist, target)
4954 enum insn_code icode;
4959 tree arg0 = TREE_VALUE (arglist);
4960 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
4961 enum machine_mode tmode = insn_data[icode].operand[0].mode;
4962 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
4964 if (icode == CODE_FOR_nothing)
4965 /* Builtin not supported on this processor. */
4968 /* If we got invalid arguments bail out before generating bad rtl. */
4969 if (arg0 == error_mark_node)
4972 if (icode == CODE_FOR_altivec_vspltisb
4973 || icode == CODE_FOR_altivec_vspltish
4974 || icode == CODE_FOR_altivec_vspltisw
4975 || icode == CODE_FOR_spe_evsplatfi
4976 || icode == CODE_FOR_spe_evsplati)
4978 /* Only allow 5-bit *signed* literals. */
4979 if (GET_CODE (op0) != CONST_INT
4980 || INTVAL (op0) > 0x1f
4981 || INTVAL (op0) < -0x1f)
4983 error ("argument 1 must be a 5-bit signed literal");
4989 || GET_MODE (target) != tmode
4990 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
4991 target = gen_reg_rtx (tmode);
4993 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
4994 op0 = copy_to_mode_reg (mode0, op0);
4996 pat = GEN_FCN (icode) (target, op0);
5005 altivec_expand_abs_builtin (icode, arglist, target)
5006 enum insn_code icode;
5010 rtx pat, scratch1, scratch2;
5011 tree arg0 = TREE_VALUE (arglist);
5012 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5013 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5014 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5016 /* If we have invalid arguments, bail out before generating bad rtl. */
5017 if (arg0 == error_mark_node)
5021 || GET_MODE (target) != tmode
5022 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5023 target = gen_reg_rtx (tmode);
5025 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5026 op0 = copy_to_mode_reg (mode0, op0);
5028 scratch1 = gen_reg_rtx (mode0);
5029 scratch2 = gen_reg_rtx (mode0);
5031 pat = GEN_FCN (icode) (target, op0, scratch1, scratch2);
5040 rs6000_expand_binop_builtin (icode, arglist, target)
5041 enum insn_code icode;
5046 tree arg0 = TREE_VALUE (arglist);
5047 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5048 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5049 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5050 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5051 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5052 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5054 if (icode == CODE_FOR_nothing)
5055 /* Builtin not supported on this processor. */
5058 /* If we got invalid arguments bail out before generating bad rtl. */
5059 if (arg0 == error_mark_node || arg1 == error_mark_node)
5062 if (icode == CODE_FOR_altivec_vcfux
5063 || icode == CODE_FOR_altivec_vcfsx
5064 || icode == CODE_FOR_altivec_vctsxs
5065 || icode == CODE_FOR_altivec_vctuxs
5066 || icode == CODE_FOR_altivec_vspltb
5067 || icode == CODE_FOR_altivec_vsplth
5068 || icode == CODE_FOR_altivec_vspltw
5069 || icode == CODE_FOR_spe_evaddiw
5070 || icode == CODE_FOR_spe_evldd
5071 || icode == CODE_FOR_spe_evldh
5072 || icode == CODE_FOR_spe_evldw
5073 || icode == CODE_FOR_spe_evlhhesplat
5074 || icode == CODE_FOR_spe_evlhhossplat
5075 || icode == CODE_FOR_spe_evlhhousplat
5076 || icode == CODE_FOR_spe_evlwhe
5077 || icode == CODE_FOR_spe_evlwhos
5078 || icode == CODE_FOR_spe_evlwhou
5079 || icode == CODE_FOR_spe_evlwhsplat
5080 || icode == CODE_FOR_spe_evlwwsplat
5081 || icode == CODE_FOR_spe_evrlwi
5082 || icode == CODE_FOR_spe_evslwi
5083 || icode == CODE_FOR_spe_evsrwis
5084 || icode == CODE_FOR_spe_evsubifw
5085 || icode == CODE_FOR_spe_evsrwiu)
5087 /* Only allow 5-bit unsigned literals. */
5088 if (TREE_CODE (arg1) != INTEGER_CST
5089 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5091 error ("argument 2 must be a 5-bit unsigned literal");
5097 || GET_MODE (target) != tmode
5098 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5099 target = gen_reg_rtx (tmode);
5101 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5102 op0 = copy_to_mode_reg (mode0, op0);
5103 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5104 op1 = copy_to_mode_reg (mode1, op1);
5106 pat = GEN_FCN (icode) (target, op0, op1);
5115 altivec_expand_predicate_builtin (icode, opcode, arglist, target)
5116 enum insn_code icode;
5122 tree cr6_form = TREE_VALUE (arglist);
5123 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5124 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5125 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5126 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5127 enum machine_mode tmode = SImode;
5128 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5129 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5132 if (TREE_CODE (cr6_form) != INTEGER_CST)
5134 error ("argument 1 of __builtin_altivec_predicate must be a constant");
5138 cr6_form_int = TREE_INT_CST_LOW (cr6_form);
5143 /* If we have invalid arguments, bail out before generating bad rtl. */
5144 if (arg0 == error_mark_node || arg1 == error_mark_node)
5148 || GET_MODE (target) != tmode
5149 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5150 target = gen_reg_rtx (tmode);
5152 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5153 op0 = copy_to_mode_reg (mode0, op0);
5154 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5155 op1 = copy_to_mode_reg (mode1, op1);
5157 scratch = gen_reg_rtx (mode0);
5159 pat = GEN_FCN (icode) (scratch, op0, op1,
5160 gen_rtx (SYMBOL_REF, Pmode, opcode));
5165 /* The vec_any* and vec_all* predicates use the same opcodes for two
5166 different operations, but the bits in CR6 will be different
5167 depending on what information we want. So we have to play tricks
5168 with CR6 to get the right bits out.
5170 If you think this is disgusting, look at the specs for the
5171 AltiVec predicates. */
5173 switch (cr6_form_int)
5176 emit_insn (gen_cr6_test_for_zero (target));
5179 emit_insn (gen_cr6_test_for_zero_reverse (target));
5182 emit_insn (gen_cr6_test_for_lt (target));
5185 emit_insn (gen_cr6_test_for_lt_reverse (target));
5188 error ("argument 1 of __builtin_altivec_predicate is out of range");
5196 altivec_expand_stv_builtin (icode, arglist)
5197 enum insn_code icode;
5200 tree arg0 = TREE_VALUE (arglist);
5201 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5202 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5203 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5204 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5205 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5207 enum machine_mode mode0 = insn_data[icode].operand[0].mode;
5208 enum machine_mode mode1 = insn_data[icode].operand[1].mode;
5209 enum machine_mode mode2 = insn_data[icode].operand[2].mode;
5211 /* Invalid arguments. Bail before doing anything stoopid! */
5212 if (arg0 == error_mark_node
5213 || arg1 == error_mark_node
5214 || arg2 == error_mark_node)
5217 if (! (*insn_data[icode].operand[2].predicate) (op0, mode2))
5218 op0 = copy_to_mode_reg (mode2, op0);
5219 if (! (*insn_data[icode].operand[0].predicate) (op1, mode0))
5220 op1 = copy_to_mode_reg (mode0, op1);
5221 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5222 op2 = copy_to_mode_reg (mode1, op2);
5224 pat = GEN_FCN (icode) (op1, op2, op0);
5231 rs6000_expand_ternop_builtin (icode, arglist, target)
5232 enum insn_code icode;
5237 tree arg0 = TREE_VALUE (arglist);
5238 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5239 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5240 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5241 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5242 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5243 enum machine_mode tmode = insn_data[icode].operand[0].mode;
5244 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5245 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5246 enum machine_mode mode2 = insn_data[icode].operand[3].mode;
5248 if (icode == CODE_FOR_nothing)
5249 /* Builtin not supported on this processor. */
5252 /* If we got invalid arguments bail out before generating bad rtl. */
5253 if (arg0 == error_mark_node
5254 || arg1 == error_mark_node
5255 || arg2 == error_mark_node)
5258 if (icode == CODE_FOR_altivec_vsldoi_4sf
5259 || icode == CODE_FOR_altivec_vsldoi_4si
5260 || icode == CODE_FOR_altivec_vsldoi_8hi
5261 || icode == CODE_FOR_altivec_vsldoi_16qi)
5263 /* Only allow 4-bit unsigned literals. */
5264 if (TREE_CODE (arg2) != INTEGER_CST
5265 || TREE_INT_CST_LOW (arg2) & ~0xf)
5267 error ("argument 3 must be a 4-bit unsigned literal");
5273 || GET_MODE (target) != tmode
5274 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5275 target = gen_reg_rtx (tmode);
5277 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5278 op0 = copy_to_mode_reg (mode0, op0);
5279 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5280 op1 = copy_to_mode_reg (mode1, op1);
5281 if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
5282 op2 = copy_to_mode_reg (mode2, op2);
5284 pat = GEN_FCN (icode) (target, op0, op1, op2);
5292 /* Expand the lvx builtins. */
5294 altivec_expand_ld_builtin (exp, target, expandedp)
5299 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5300 tree arglist = TREE_OPERAND (exp, 1);
5301 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5303 enum machine_mode tmode, mode0;
5305 enum insn_code icode;
5309 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
5310 icode = CODE_FOR_altivec_lvx_16qi;
5312 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
5313 icode = CODE_FOR_altivec_lvx_8hi;
5315 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
5316 icode = CODE_FOR_altivec_lvx_4si;
5318 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
5319 icode = CODE_FOR_altivec_lvx_4sf;
5328 arg0 = TREE_VALUE (arglist);
5329 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5330 tmode = insn_data[icode].operand[0].mode;
5331 mode0 = insn_data[icode].operand[1].mode;
5334 || GET_MODE (target) != tmode
5335 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5336 target = gen_reg_rtx (tmode);
5338 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5339 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5341 pat = GEN_FCN (icode) (target, op0);
5348 /* Expand the stvx builtins. */
5350 altivec_expand_st_builtin (exp, target, expandedp)
5352 rtx target ATTRIBUTE_UNUSED;
5355 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5356 tree arglist = TREE_OPERAND (exp, 1);
5357 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5359 enum machine_mode mode0, mode1;
5361 enum insn_code icode;
5365 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
5366 icode = CODE_FOR_altivec_stvx_16qi;
5368 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
5369 icode = CODE_FOR_altivec_stvx_8hi;
5371 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
5372 icode = CODE_FOR_altivec_stvx_4si;
5374 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
5375 icode = CODE_FOR_altivec_stvx_4sf;
5382 arg0 = TREE_VALUE (arglist);
5383 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5384 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5385 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5386 mode0 = insn_data[icode].operand[0].mode;
5387 mode1 = insn_data[icode].operand[1].mode;
5389 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5390 op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
5391 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5392 op1 = copy_to_mode_reg (mode1, op1);
5394 pat = GEN_FCN (icode) (op0, op1);
5402 /* Expand the dst builtins. */
5404 altivec_expand_dst_builtin (exp, target, expandedp)
5406 rtx target ATTRIBUTE_UNUSED;
5409 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5410 tree arglist = TREE_OPERAND (exp, 1);
5411 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5412 tree arg0, arg1, arg2;
5413 enum machine_mode mode0, mode1, mode2;
5414 rtx pat, op0, op1, op2;
5415 struct builtin_description *d;
5420 /* Handle DST variants. */
5421 d = (struct builtin_description *) bdesc_dst;
5422 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
5423 if (d->code == fcode)
5425 arg0 = TREE_VALUE (arglist);
5426 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5427 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5428 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5429 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5430 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5431 mode0 = insn_data[d->icode].operand[0].mode;
5432 mode1 = insn_data[d->icode].operand[1].mode;
5433 mode2 = insn_data[d->icode].operand[2].mode;
5435 /* Invalid arguments, bail out before generating bad rtl. */
5436 if (arg0 == error_mark_node
5437 || arg1 == error_mark_node
5438 || arg2 == error_mark_node)
5441 if (TREE_CODE (arg2) != INTEGER_CST
5442 || TREE_INT_CST_LOW (arg2) & ~0x3)
5444 error ("argument to `%s' must be a 2-bit unsigned literal", d->name);
5448 if (! (*insn_data[d->icode].operand[0].predicate) (op0, mode0))
5449 op0 = copy_to_mode_reg (mode0, op0);
5450 if (! (*insn_data[d->icode].operand[1].predicate) (op1, mode1))
5451 op1 = copy_to_mode_reg (mode1, op1);
5453 pat = GEN_FCN (d->icode) (op0, op1, op2);
5464 /* Expand the builtin in EXP and store the result in TARGET. Store
5465 true in *EXPANDEDP if we found a builtin to expand. */
5467 altivec_expand_builtin (exp, target, expandedp)
5472 struct builtin_description *d;
5473 struct builtin_description_predicates *dp;
5475 enum insn_code icode;
5476 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5477 tree arglist = TREE_OPERAND (exp, 1);
5480 enum machine_mode tmode, mode0;
5481 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5483 target = altivec_expand_ld_builtin (exp, target, expandedp);
5487 target = altivec_expand_st_builtin (exp, target, expandedp);
5491 target = altivec_expand_dst_builtin (exp, target, expandedp);
5499 case ALTIVEC_BUILTIN_STVX:
5500 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, arglist);
5501 case ALTIVEC_BUILTIN_STVEBX:
5502 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, arglist);
5503 case ALTIVEC_BUILTIN_STVEHX:
5504 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, arglist);
5505 case ALTIVEC_BUILTIN_STVEWX:
5506 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvewx, arglist);
5507 case ALTIVEC_BUILTIN_STVXL:
5508 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvxl, arglist);
5510 case ALTIVEC_BUILTIN_MFVSCR:
5511 icode = CODE_FOR_altivec_mfvscr;
5512 tmode = insn_data[icode].operand[0].mode;
5515 || GET_MODE (target) != tmode
5516 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5517 target = gen_reg_rtx (tmode);
5519 pat = GEN_FCN (icode) (target);
5525 case ALTIVEC_BUILTIN_MTVSCR:
5526 icode = CODE_FOR_altivec_mtvscr;
5527 arg0 = TREE_VALUE (arglist);
5528 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5529 mode0 = insn_data[icode].operand[0].mode;
5531 /* If we got invalid arguments bail out before generating bad rtl. */
5532 if (arg0 == error_mark_node)
5535 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5536 op0 = copy_to_mode_reg (mode0, op0);
5538 pat = GEN_FCN (icode) (op0);
5543 case ALTIVEC_BUILTIN_DSSALL:
5544 emit_insn (gen_altivec_dssall ());
5547 case ALTIVEC_BUILTIN_DSS:
5548 icode = CODE_FOR_altivec_dss;
5549 arg0 = TREE_VALUE (arglist);
5550 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5551 mode0 = insn_data[icode].operand[0].mode;
5553 /* If we got invalid arguments bail out before generating bad rtl. */
5554 if (arg0 == error_mark_node)
5557 if (TREE_CODE (arg0) != INTEGER_CST
5558 || TREE_INT_CST_LOW (arg0) & ~0x3)
5560 error ("argument to dss must be a 2-bit unsigned literal");
5564 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5565 op0 = copy_to_mode_reg (mode0, op0);
5567 emit_insn (gen_altivec_dss (op0));
5571 /* Expand abs* operations. */
5572 d = (struct builtin_description *) bdesc_abs;
5573 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
5574 if (d->code == fcode)
5575 return altivec_expand_abs_builtin (d->icode, arglist, target);
5577 /* Expand the AltiVec predicates. */
5578 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
5579 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
5580 if (dp->code == fcode)
5581 return altivec_expand_predicate_builtin (dp->icode, dp->opcode, arglist, target);
5583 /* LV* are funky. We initialized them differently. */
5586 case ALTIVEC_BUILTIN_LVSL:
5587 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsl,
5589 case ALTIVEC_BUILTIN_LVSR:
5590 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvsr,
5592 case ALTIVEC_BUILTIN_LVEBX:
5593 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvebx,
5595 case ALTIVEC_BUILTIN_LVEHX:
5596 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvehx,
5598 case ALTIVEC_BUILTIN_LVEWX:
5599 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvewx,
5601 case ALTIVEC_BUILTIN_LVXL:
5602 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvxl,
5604 case ALTIVEC_BUILTIN_LVX:
5605 return rs6000_expand_binop_builtin (CODE_FOR_altivec_lvx,
5616 /* Binops that need to be initialized manually, but can be expanded
5617 automagically by rs6000_expand_binop_builtin. */
5618 static struct builtin_description bdesc_2arg_spe[] =
5620 { 0, CODE_FOR_spe_evlddx, "__builtin_spe_evlddx", SPE_BUILTIN_EVLDDX },
5621 { 0, CODE_FOR_spe_evldwx, "__builtin_spe_evldwx", SPE_BUILTIN_EVLDWX },
5622 { 0, CODE_FOR_spe_evldhx, "__builtin_spe_evldhx", SPE_BUILTIN_EVLDHX },
5623 { 0, CODE_FOR_spe_evlwhex, "__builtin_spe_evlwhex", SPE_BUILTIN_EVLWHEX },
5624 { 0, CODE_FOR_spe_evlwhoux, "__builtin_spe_evlwhoux", SPE_BUILTIN_EVLWHOUX },
5625 { 0, CODE_FOR_spe_evlwhosx, "__builtin_spe_evlwhosx", SPE_BUILTIN_EVLWHOSX },
5626 { 0, CODE_FOR_spe_evlwwsplatx, "__builtin_spe_evlwwsplatx", SPE_BUILTIN_EVLWWSPLATX },
5627 { 0, CODE_FOR_spe_evlwhsplatx, "__builtin_spe_evlwhsplatx", SPE_BUILTIN_EVLWHSPLATX },
5628 { 0, CODE_FOR_spe_evlhhesplatx, "__builtin_spe_evlhhesplatx", SPE_BUILTIN_EVLHHESPLATX },
5629 { 0, CODE_FOR_spe_evlhhousplatx, "__builtin_spe_evlhhousplatx", SPE_BUILTIN_EVLHHOUSPLATX },
5630 { 0, CODE_FOR_spe_evlhhossplatx, "__builtin_spe_evlhhossplatx", SPE_BUILTIN_EVLHHOSSPLATX },
5631 { 0, CODE_FOR_spe_evldd, "__builtin_spe_evldd", SPE_BUILTIN_EVLDD },
5632 { 0, CODE_FOR_spe_evldw, "__builtin_spe_evldw", SPE_BUILTIN_EVLDW },
5633 { 0, CODE_FOR_spe_evldh, "__builtin_spe_evldh", SPE_BUILTIN_EVLDH },
5634 { 0, CODE_FOR_spe_evlwhe, "__builtin_spe_evlwhe", SPE_BUILTIN_EVLWHE },
5635 { 0, CODE_FOR_spe_evlwhou, "__builtin_spe_evlwhou", SPE_BUILTIN_EVLWHOU },
5636 { 0, CODE_FOR_spe_evlwhos, "__builtin_spe_evlwhos", SPE_BUILTIN_EVLWHOS },
5637 { 0, CODE_FOR_spe_evlwwsplat, "__builtin_spe_evlwwsplat", SPE_BUILTIN_EVLWWSPLAT },
5638 { 0, CODE_FOR_spe_evlwhsplat, "__builtin_spe_evlwhsplat", SPE_BUILTIN_EVLWHSPLAT },
5639 { 0, CODE_FOR_spe_evlhhesplat, "__builtin_spe_evlhhesplat", SPE_BUILTIN_EVLHHESPLAT },
5640 { 0, CODE_FOR_spe_evlhhousplat, "__builtin_spe_evlhhousplat", SPE_BUILTIN_EVLHHOUSPLAT },
5641 { 0, CODE_FOR_spe_evlhhossplat, "__builtin_spe_evlhhossplat", SPE_BUILTIN_EVLHHOSSPLAT }
5644 /* Expand the builtin in EXP and store the result in TARGET. Store
5645 true in *EXPANDEDP if we found a builtin to expand.
5647 This expands the SPE builtins that are not simple unary and binary
5650 spe_expand_builtin (exp, target, expandedp)
5655 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5656 tree arglist = TREE_OPERAND (exp, 1);
5658 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5659 enum insn_code icode;
5660 enum machine_mode tmode, mode0;
5662 struct builtin_description *d;
5667 /* Syntax check for a 5-bit unsigned immediate. */
5670 case SPE_BUILTIN_EVSTDD:
5671 case SPE_BUILTIN_EVSTDH:
5672 case SPE_BUILTIN_EVSTDW:
5673 case SPE_BUILTIN_EVSTWHE:
5674 case SPE_BUILTIN_EVSTWHO:
5675 case SPE_BUILTIN_EVSTWWE:
5676 case SPE_BUILTIN_EVSTWWO:
5677 arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5678 if (TREE_CODE (arg1) != INTEGER_CST
5679 || TREE_INT_CST_LOW (arg1) & ~0x1f)
5681 error ("argument 2 must be a 5-bit unsigned literal");
5689 d = (struct builtin_description *) bdesc_2arg_spe;
5690 for (i = 0; i < ARRAY_SIZE (bdesc_2arg_spe); ++i, ++d)
5691 if (d->code == fcode)
5692 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5694 d = (struct builtin_description *) bdesc_spe_predicates;
5695 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, ++d)
5696 if (d->code == fcode)
5697 return spe_expand_predicate_builtin (d->icode, arglist, target);
5699 d = (struct builtin_description *) bdesc_spe_evsel;
5700 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, ++d)
5701 if (d->code == fcode)
5702 return spe_expand_evsel_builtin (d->icode, arglist, target);
5706 case SPE_BUILTIN_EVSTDDX:
5707 return altivec_expand_stv_builtin (CODE_FOR_spe_evstddx, arglist);
5708 case SPE_BUILTIN_EVSTDHX:
5709 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdhx, arglist);
5710 case SPE_BUILTIN_EVSTDWX:
5711 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdwx, arglist);
5712 case SPE_BUILTIN_EVSTWHEX:
5713 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhex, arglist);
5714 case SPE_BUILTIN_EVSTWHOX:
5715 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhox, arglist);
5716 case SPE_BUILTIN_EVSTWWEX:
5717 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwex, arglist);
5718 case SPE_BUILTIN_EVSTWWOX:
5719 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwox, arglist);
5720 case SPE_BUILTIN_EVSTDD:
5721 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdd, arglist);
5722 case SPE_BUILTIN_EVSTDH:
5723 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdh, arglist);
5724 case SPE_BUILTIN_EVSTDW:
5725 return altivec_expand_stv_builtin (CODE_FOR_spe_evstdw, arglist);
5726 case SPE_BUILTIN_EVSTWHE:
5727 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwhe, arglist);
5728 case SPE_BUILTIN_EVSTWHO:
5729 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwho, arglist);
5730 case SPE_BUILTIN_EVSTWWE:
5731 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwe, arglist);
5732 case SPE_BUILTIN_EVSTWWO:
5733 return altivec_expand_stv_builtin (CODE_FOR_spe_evstwwo, arglist);
5734 case SPE_BUILTIN_MFSPEFSCR:
5735 icode = CODE_FOR_spe_mfspefscr;
5736 tmode = insn_data[icode].operand[0].mode;
5739 || GET_MODE (target) != tmode
5740 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
5741 target = gen_reg_rtx (tmode);
5743 pat = GEN_FCN (icode) (target);
5748 case SPE_BUILTIN_MTSPEFSCR:
5749 icode = CODE_FOR_spe_mtspefscr;
5750 arg0 = TREE_VALUE (arglist);
5751 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5752 mode0 = insn_data[icode].operand[0].mode;
5754 if (arg0 == error_mark_node)
5757 if (! (*insn_data[icode].operand[0].predicate) (op0, mode0))
5758 op0 = copy_to_mode_reg (mode0, op0);
5760 pat = GEN_FCN (icode) (op0);
5773 spe_expand_predicate_builtin (icode, arglist, target)
5774 enum insn_code icode;
5778 rtx pat, scratch, tmp;
5779 tree form = TREE_VALUE (arglist);
5780 tree arg0 = TREE_VALUE (TREE_CHAIN (arglist));
5781 tree arg1 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5782 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5783 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5784 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5785 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5789 if (TREE_CODE (form) != INTEGER_CST)
5791 error ("argument 1 of __builtin_spe_predicate must be a constant");
5795 form_int = TREE_INT_CST_LOW (form);
5800 if (arg0 == error_mark_node || arg1 == error_mark_node)
5804 || GET_MODE (target) != SImode
5805 || ! (*insn_data[icode].operand[0].predicate) (target, SImode))
5806 target = gen_reg_rtx (SImode);
5808 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5809 op0 = copy_to_mode_reg (mode0, op0);
5810 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
5811 op1 = copy_to_mode_reg (mode1, op1);
5813 scratch = gen_reg_rtx (CCmode);
5815 pat = GEN_FCN (icode) (scratch, op0, op1);
5820 /* There are 4 variants for each predicate: _any_, _all_, _upper_,
5821 _lower_. We use one compare, but look in different bits of the
5822 CR for each variant.
5824 There are 2 elements in each SPE simd type (upper/lower). The CR
5825 bits are set as follows:
5827 BIT0 | BIT 1 | BIT 2 | BIT 3
5828 U | L | (U | L) | (U & L)
5830 So, for an "all" relationship, BIT 3 would be set.
5831 For an "any" relationship, BIT 2 would be set. Etc.
5833 Following traditional nomenclature, these bits map to:
5835 BIT0 | BIT 1 | BIT 2 | BIT 3
5838 Later, we will generate rtl to look in the LT/EQ/EQ/OV bits.
5843 /* All variant. OV bit. */
5845 /* We need to get to the OV bit, which is the ORDERED bit. We
5846 could generate (ordered:SI (reg:CC xx) (const_int 0)), but
5847 that's ugly and will trigger a validate_condition_mode abort.
5848 So let's just use another pattern. */
5849 emit_insn (gen_move_from_CR_ov_bit (target, scratch));
5851 /* Any variant. EQ bit. */
5855 /* Upper variant. LT bit. */
5859 /* Lower variant. GT bit. */
5864 error ("argument 1 of __builtin_spe_predicate is out of range");
5868 tmp = gen_rtx_fmt_ee (code, SImode, scratch, const0_rtx);
5869 emit_move_insn (target, tmp);
5874 /* The evsel builtins look like this:
5876 e = __builtin_spe_evsel_OP (a, b, c, d);
5880 e[upper] = a[upper] *OP* b[upper] ? c[upper] : d[upper];
5881 e[lower] = a[lower] *OP* b[lower] ? c[lower] : d[lower];
5885 spe_expand_evsel_builtin (icode, arglist, target)
5886 enum insn_code icode;
5891 tree arg0 = TREE_VALUE (arglist);
5892 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
5893 tree arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
5894 tree arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arglist))));
5895 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
5896 rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
5897 rtx op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
5898 rtx op3 = expand_expr (arg3, NULL_RTX, VOIDmode, 0);
5899 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
5900 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
5905 if (arg0 == error_mark_node || arg1 == error_mark_node
5906 || arg2 == error_mark_node || arg3 == error_mark_node)
5910 || GET_MODE (target) != mode0
5911 || ! (*insn_data[icode].operand[0].predicate) (target, mode0))
5912 target = gen_reg_rtx (mode0);
5914 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
5915 op0 = copy_to_mode_reg (mode0, op0);
5916 if (! (*insn_data[icode].operand[1].predicate) (op1, mode1))
5917 op1 = copy_to_mode_reg (mode0, op1);
5918 if (! (*insn_data[icode].operand[1].predicate) (op2, mode1))
5919 op2 = copy_to_mode_reg (mode0, op2);
5920 if (! (*insn_data[icode].operand[1].predicate) (op3, mode1))
5921 op3 = copy_to_mode_reg (mode0, op3);
5923 /* Generate the compare. */
5924 scratch = gen_reg_rtx (CCmode);
5925 pat = GEN_FCN (icode) (scratch, op0, op1);
5930 if (mode0 == V2SImode)
5931 emit_insn (gen_spe_evsel (target, op2, op3, scratch));
5933 emit_insn (gen_spe_evsel_fs (target, op2, op3, scratch));
5938 /* Expand an expression EXP that calls a built-in function,
5939 with result going to TARGET if that's convenient
5940 (and in mode MODE if that's convenient).
5941 SUBTARGET may be used as the target for computing one of EXP's operands.
5942 IGNORE is nonzero if the value is to be ignored. */
5945 rs6000_expand_builtin (exp, target, subtarget, mode, ignore)
5948 rtx subtarget ATTRIBUTE_UNUSED;
5949 enum machine_mode mode ATTRIBUTE_UNUSED;
5950 int ignore ATTRIBUTE_UNUSED;
5952 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5953 tree arglist = TREE_OPERAND (exp, 1);
5954 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
5955 struct builtin_description *d;
5962 ret = altivec_expand_builtin (exp, target, &success);
5969 ret = spe_expand_builtin (exp, target, &success);
5975 if (TARGET_ALTIVEC || TARGET_SPE)
5977 /* Handle simple unary operations. */
5978 d = (struct builtin_description *) bdesc_1arg;
5979 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
5980 if (d->code == fcode)
5981 return rs6000_expand_unop_builtin (d->icode, arglist, target);
5983 /* Handle simple binary operations. */
5984 d = (struct builtin_description *) bdesc_2arg;
5985 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
5986 if (d->code == fcode)
5987 return rs6000_expand_binop_builtin (d->icode, arglist, target);
5989 /* Handle simple ternary operations. */
5990 d = (struct builtin_description *) bdesc_3arg;
5991 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
5992 if (d->code == fcode)
5993 return rs6000_expand_ternop_builtin (d->icode, arglist, target);
6001 rs6000_init_builtins ()
6003 opaque_V2SI_type_node = copy_node (V2SI_type_node);
6004 opaque_V2SF_type_node = copy_node (V2SF_type_node);
6005 opaque_p_V2SI_type_node = build_pointer_type (opaque_V2SI_type_node);
6008 spe_init_builtins ();
6010 altivec_init_builtins ();
6011 if (TARGET_ALTIVEC || TARGET_SPE)
6012 rs6000_common_init_builtins ();
6015 /* Search through a set of builtins and enable the mask bits.
6016 DESC is an array of builtins.
6017 SIZE is the total number of builtins.
6018 START is the builtin enum at which to start.
6019 END is the builtin enum at which to end. */
6021 enable_mask_for_builtins (desc, size, start, end)
6022 struct builtin_description *desc;
6024 enum rs6000_builtins start, end;
6028 for (i = 0; i < size; ++i)
6029 if (desc[i].code == start)
6035 for (; i < size; ++i)
6037 /* Flip all the bits on. */
6038 desc[i].mask = target_flags;
6039 if (desc[i].code == end)
6045 spe_init_builtins ()
6047 tree endlink = void_list_node;
6048 tree puint_type_node = build_pointer_type (unsigned_type_node);
6049 tree pushort_type_node = build_pointer_type (short_unsigned_type_node);
6050 struct builtin_description *d;
6053 tree v2si_ftype_4_v2si
6054 = build_function_type
6055 (opaque_V2SI_type_node,
6056 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6057 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6058 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6059 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6062 tree v2sf_ftype_4_v2sf
6063 = build_function_type
6064 (opaque_V2SF_type_node,
6065 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6066 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6067 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6068 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6071 tree int_ftype_int_v2si_v2si
6072 = build_function_type
6074 tree_cons (NULL_TREE, integer_type_node,
6075 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6076 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6079 tree int_ftype_int_v2sf_v2sf
6080 = build_function_type
6082 tree_cons (NULL_TREE, integer_type_node,
6083 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6084 tree_cons (NULL_TREE, opaque_V2SF_type_node,
6087 tree void_ftype_v2si_puint_int
6088 = build_function_type (void_type_node,
6089 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6090 tree_cons (NULL_TREE, puint_type_node,
6091 tree_cons (NULL_TREE,
6095 tree void_ftype_v2si_puint_char
6096 = build_function_type (void_type_node,
6097 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6098 tree_cons (NULL_TREE, puint_type_node,
6099 tree_cons (NULL_TREE,
6103 tree void_ftype_v2si_pv2si_int
6104 = build_function_type (void_type_node,
6105 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6106 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6107 tree_cons (NULL_TREE,
6111 tree void_ftype_v2si_pv2si_char
6112 = build_function_type (void_type_node,
6113 tree_cons (NULL_TREE, opaque_V2SI_type_node,
6114 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6115 tree_cons (NULL_TREE,
6120 = build_function_type (void_type_node,
6121 tree_cons (NULL_TREE, integer_type_node, endlink));
6124 = build_function_type (integer_type_node, endlink);
6126 tree v2si_ftype_pv2si_int
6127 = build_function_type (opaque_V2SI_type_node,
6128 tree_cons (NULL_TREE, opaque_p_V2SI_type_node,
6129 tree_cons (NULL_TREE, integer_type_node,
6132 tree v2si_ftype_puint_int
6133 = build_function_type (opaque_V2SI_type_node,
6134 tree_cons (NULL_TREE, puint_type_node,
6135 tree_cons (NULL_TREE, integer_type_node,
6138 tree v2si_ftype_pushort_int
6139 = build_function_type (opaque_V2SI_type_node,
6140 tree_cons (NULL_TREE, pushort_type_node,
6141 tree_cons (NULL_TREE, integer_type_node,
6144 /* The initialization of the simple binary and unary builtins is
6145 done in rs6000_common_init_builtins, but we have to enable the
6146 mask bits here manually because we have run out of `target_flags'
6147 bits. We really need to redesign this mask business. */
6149 enable_mask_for_builtins ((struct builtin_description *) bdesc_2arg,
6150 ARRAY_SIZE (bdesc_2arg),
6153 enable_mask_for_builtins ((struct builtin_description *) bdesc_1arg,
6154 ARRAY_SIZE (bdesc_1arg),
6156 SPE_BUILTIN_EVSUBFUSIAAW);
6157 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_predicates,
6158 ARRAY_SIZE (bdesc_spe_predicates),
6159 SPE_BUILTIN_EVCMPEQ,
6160 SPE_BUILTIN_EVFSTSTLT);
6161 enable_mask_for_builtins ((struct builtin_description *) bdesc_spe_evsel,
6162 ARRAY_SIZE (bdesc_spe_evsel),
6163 SPE_BUILTIN_EVSEL_CMPGTS,
6164 SPE_BUILTIN_EVSEL_FSTSTEQ);
6166 /* Initialize irregular SPE builtins. */
6168 def_builtin (target_flags, "__builtin_spe_mtspefscr", void_ftype_int, SPE_BUILTIN_MTSPEFSCR);
6169 def_builtin (target_flags, "__builtin_spe_mfspefscr", int_ftype_void, SPE_BUILTIN_MFSPEFSCR);
6170 def_builtin (target_flags, "__builtin_spe_evstddx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDDX);
6171 def_builtin (target_flags, "__builtin_spe_evstdhx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDHX);
6172 def_builtin (target_flags, "__builtin_spe_evstdwx", void_ftype_v2si_pv2si_int, SPE_BUILTIN_EVSTDWX);
6173 def_builtin (target_flags, "__builtin_spe_evstwhex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHEX);
6174 def_builtin (target_flags, "__builtin_spe_evstwhox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWHOX);
6175 def_builtin (target_flags, "__builtin_spe_evstwwex", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWEX);
6176 def_builtin (target_flags, "__builtin_spe_evstwwox", void_ftype_v2si_puint_int, SPE_BUILTIN_EVSTWWOX);
6177 def_builtin (target_flags, "__builtin_spe_evstdd", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDD);
6178 def_builtin (target_flags, "__builtin_spe_evstdh", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDH);
6179 def_builtin (target_flags, "__builtin_spe_evstdw", void_ftype_v2si_pv2si_char, SPE_BUILTIN_EVSTDW);
6180 def_builtin (target_flags, "__builtin_spe_evstwhe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHE);
6181 def_builtin (target_flags, "__builtin_spe_evstwho", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWHO);
6182 def_builtin (target_flags, "__builtin_spe_evstwwe", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWE);
6183 def_builtin (target_flags, "__builtin_spe_evstwwo", void_ftype_v2si_puint_char, SPE_BUILTIN_EVSTWWO);
6186 def_builtin (target_flags, "__builtin_spe_evlddx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDDX);
6187 def_builtin (target_flags, "__builtin_spe_evldwx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDWX);
6188 def_builtin (target_flags, "__builtin_spe_evldhx", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDHX);
6189 def_builtin (target_flags, "__builtin_spe_evlwhex", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHEX);
6190 def_builtin (target_flags, "__builtin_spe_evlwhoux", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOUX);
6191 def_builtin (target_flags, "__builtin_spe_evlwhosx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOSX);
6192 def_builtin (target_flags, "__builtin_spe_evlwwsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLATX);
6193 def_builtin (target_flags, "__builtin_spe_evlwhsplatx", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLATX);
6194 def_builtin (target_flags, "__builtin_spe_evlhhesplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLATX);
6195 def_builtin (target_flags, "__builtin_spe_evlhhousplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLATX);
6196 def_builtin (target_flags, "__builtin_spe_evlhhossplatx", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLATX);
6197 def_builtin (target_flags, "__builtin_spe_evldd", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDD);
6198 def_builtin (target_flags, "__builtin_spe_evldw", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDW);
6199 def_builtin (target_flags, "__builtin_spe_evldh", v2si_ftype_pv2si_int, SPE_BUILTIN_EVLDH);
6200 def_builtin (target_flags, "__builtin_spe_evlhhesplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHESPLAT);
6201 def_builtin (target_flags, "__builtin_spe_evlhhossplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOSSPLAT);
6202 def_builtin (target_flags, "__builtin_spe_evlhhousplat", v2si_ftype_pushort_int, SPE_BUILTIN_EVLHHOUSPLAT);
6203 def_builtin (target_flags, "__builtin_spe_evlwhe", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHE);
6204 def_builtin (target_flags, "__builtin_spe_evlwhos", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOS);
6205 def_builtin (target_flags, "__builtin_spe_evlwhou", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHOU);
6206 def_builtin (target_flags, "__builtin_spe_evlwhsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWHSPLAT);
6207 def_builtin (target_flags, "__builtin_spe_evlwwsplat", v2si_ftype_puint_int, SPE_BUILTIN_EVLWWSPLAT);
6210 d = (struct builtin_description *) bdesc_spe_predicates;
6211 for (i = 0; i < ARRAY_SIZE (bdesc_spe_predicates); ++i, d++)
6215 switch (insn_data[d->icode].operand[1].mode)
6218 type = int_ftype_int_v2si_v2si;
6221 type = int_ftype_int_v2sf_v2sf;
6227 def_builtin (d->mask, d->name, type, d->code);
6230 /* Evsel predicates. */
6231 d = (struct builtin_description *) bdesc_spe_evsel;
6232 for (i = 0; i < ARRAY_SIZE (bdesc_spe_evsel); ++i, d++)
6236 switch (insn_data[d->icode].operand[1].mode)
6239 type = v2si_ftype_4_v2si;
6242 type = v2sf_ftype_4_v2sf;
6248 def_builtin (d->mask, d->name, type, d->code);
6253 altivec_init_builtins ()
6255 struct builtin_description *d;
6256 struct builtin_description_predicates *dp;
6258 tree pfloat_type_node = build_pointer_type (float_type_node);
6259 tree pint_type_node = build_pointer_type (integer_type_node);
6260 tree pshort_type_node = build_pointer_type (short_integer_type_node);
6261 tree pchar_type_node = build_pointer_type (char_type_node);
6263 tree pvoid_type_node = build_pointer_type (void_type_node);
6265 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST));
6266 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST));
6267 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST));
6268 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
6270 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
6272 tree int_ftype_int_v4si_v4si
6273 = build_function_type_list (integer_type_node,
6274 integer_type_node, V4SI_type_node,
6275 V4SI_type_node, NULL_TREE);
6276 tree v4sf_ftype_pcfloat
6277 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
6278 tree void_ftype_pfloat_v4sf
6279 = build_function_type_list (void_type_node,
6280 pfloat_type_node, V4SF_type_node, NULL_TREE);
6281 tree v4si_ftype_pcint
6282 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
6283 tree void_ftype_pint_v4si
6284 = build_function_type_list (void_type_node,
6285 pint_type_node, V4SI_type_node, NULL_TREE);
6286 tree v8hi_ftype_pcshort
6287 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
6288 tree void_ftype_pshort_v8hi
6289 = build_function_type_list (void_type_node,
6290 pshort_type_node, V8HI_type_node, NULL_TREE);
6291 tree v16qi_ftype_pcchar
6292 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
6293 tree void_ftype_pchar_v16qi
6294 = build_function_type_list (void_type_node,
6295 pchar_type_node, V16QI_type_node, NULL_TREE);
6296 tree void_ftype_v4si
6297 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
6298 tree v8hi_ftype_void
6299 = build_function_type (V8HI_type_node, void_list_node);
6300 tree void_ftype_void
6301 = build_function_type (void_type_node, void_list_node);
6303 = build_function_type_list (void_type_node, char_type_node, NULL_TREE);
6305 tree v16qi_ftype_int_pcvoid
6306 = build_function_type_list (V16QI_type_node,
6307 integer_type_node, pcvoid_type_node, NULL_TREE);
6308 tree v8hi_ftype_int_pcvoid
6309 = build_function_type_list (V8HI_type_node,
6310 integer_type_node, pcvoid_type_node, NULL_TREE);
6311 tree v4si_ftype_int_pcvoid
6312 = build_function_type_list (V4SI_type_node,
6313 integer_type_node, pcvoid_type_node, NULL_TREE);
6315 tree void_ftype_v4si_int_pvoid
6316 = build_function_type_list (void_type_node,
6317 V4SI_type_node, integer_type_node,
6318 pvoid_type_node, NULL_TREE);
6319 tree void_ftype_v16qi_int_pvoid
6320 = build_function_type_list (void_type_node,
6321 V16QI_type_node, integer_type_node,
6322 pvoid_type_node, NULL_TREE);
6323 tree void_ftype_v8hi_int_pvoid
6324 = build_function_type_list (void_type_node,
6325 V8HI_type_node, integer_type_node,
6326 pvoid_type_node, NULL_TREE);
6327 tree int_ftype_int_v8hi_v8hi
6328 = build_function_type_list (integer_type_node,
6329 integer_type_node, V8HI_type_node,
6330 V8HI_type_node, NULL_TREE);
6331 tree int_ftype_int_v16qi_v16qi
6332 = build_function_type_list (integer_type_node,
6333 integer_type_node, V16QI_type_node,
6334 V16QI_type_node, NULL_TREE);
6335 tree int_ftype_int_v4sf_v4sf
6336 = build_function_type_list (integer_type_node,
6337 integer_type_node, V4SF_type_node,
6338 V4SF_type_node, NULL_TREE);
6339 tree v4si_ftype_v4si
6340 = build_function_type_list (V4SI_type_node, V4SI_type_node, NULL_TREE);
6341 tree v8hi_ftype_v8hi
6342 = build_function_type_list (V8HI_type_node, V8HI_type_node, NULL_TREE);
6343 tree v16qi_ftype_v16qi
6344 = build_function_type_list (V16QI_type_node, V16QI_type_node, NULL_TREE);
6345 tree v4sf_ftype_v4sf
6346 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6347 tree void_ftype_pcvoid_int_char
6348 = build_function_type_list (void_type_node,
6349 pcvoid_type_node, integer_type_node,
6350 char_type_node, NULL_TREE);
6352 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
6353 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
6354 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
6355 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
6356 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
6357 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
6358 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
6359 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
6360 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
6361 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
6362 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
6363 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
6364 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
6365 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
6366 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
6367 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
6368 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
6369 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
6370 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
6371 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_qi, ALTIVEC_BUILTIN_DSS);
6372 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSL);
6373 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsr", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVSR);
6374 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvebx", v16qi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEBX);
6375 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvehx", v8hi_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEHX);
6376 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvewx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVEWX);
6377 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvxl", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVXL);
6378 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvx", v4si_ftype_int_pcvoid, ALTIVEC_BUILTIN_LVX);
6379 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVX);
6380 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvewx", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVEWX);
6381 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvxl", void_ftype_v4si_int_pvoid, ALTIVEC_BUILTIN_STVXL);
6382 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvebx", void_ftype_v16qi_int_pvoid, ALTIVEC_BUILTIN_STVEBX);
6383 def_builtin (MASK_ALTIVEC, "__builtin_altivec_stvehx", void_ftype_v8hi_int_pvoid, ALTIVEC_BUILTIN_STVEHX);
6385 /* Add the DST variants. */
6386 d = (struct builtin_description *) bdesc_dst;
6387 for (i = 0; i < ARRAY_SIZE (bdesc_dst); i++, d++)
6388 def_builtin (d->mask, d->name, void_ftype_pcvoid_int_char, d->code);
6390 /* Initialize the predicates. */
6391 dp = (struct builtin_description_predicates *) bdesc_altivec_preds;
6392 for (i = 0; i < ARRAY_SIZE (bdesc_altivec_preds); i++, dp++)
6394 enum machine_mode mode1;
6397 mode1 = insn_data[dp->icode].operand[1].mode;
6402 type = int_ftype_int_v4si_v4si;
6405 type = int_ftype_int_v8hi_v8hi;
6408 type = int_ftype_int_v16qi_v16qi;
6411 type = int_ftype_int_v4sf_v4sf;
6417 def_builtin (dp->mask, dp->name, type, dp->code);
6420 /* Initialize the abs* operators. */
6421 d = (struct builtin_description *) bdesc_abs;
6422 for (i = 0; i < ARRAY_SIZE (bdesc_abs); i++, d++)
6424 enum machine_mode mode0;
6427 mode0 = insn_data[d->icode].operand[0].mode;
6432 type = v4si_ftype_v4si;
6435 type = v8hi_ftype_v8hi;
6438 type = v16qi_ftype_v16qi;
6441 type = v4sf_ftype_v4sf;
6447 def_builtin (d->mask, d->name, type, d->code);
6452 rs6000_common_init_builtins ()
6454 struct builtin_description *d;
6457 tree v4sf_ftype_v4sf_v4sf_v16qi
6458 = build_function_type_list (V4SF_type_node,
6459 V4SF_type_node, V4SF_type_node,
6460 V16QI_type_node, NULL_TREE);
6461 tree v4si_ftype_v4si_v4si_v16qi
6462 = build_function_type_list (V4SI_type_node,
6463 V4SI_type_node, V4SI_type_node,
6464 V16QI_type_node, NULL_TREE);
6465 tree v8hi_ftype_v8hi_v8hi_v16qi
6466 = build_function_type_list (V8HI_type_node,
6467 V8HI_type_node, V8HI_type_node,
6468 V16QI_type_node, NULL_TREE);
6469 tree v16qi_ftype_v16qi_v16qi_v16qi
6470 = build_function_type_list (V16QI_type_node,
6471 V16QI_type_node, V16QI_type_node,
6472 V16QI_type_node, NULL_TREE);
6473 tree v4si_ftype_char
6474 = build_function_type_list (V4SI_type_node, char_type_node, NULL_TREE);
6475 tree v8hi_ftype_char
6476 = build_function_type_list (V8HI_type_node, char_type_node, NULL_TREE);
6477 tree v16qi_ftype_char
6478 = build_function_type_list (V16QI_type_node, char_type_node, NULL_TREE);
6479 tree v8hi_ftype_v16qi
6480 = build_function_type_list (V8HI_type_node, V16QI_type_node, NULL_TREE);
6481 tree v4sf_ftype_v4sf
6482 = build_function_type_list (V4SF_type_node, V4SF_type_node, NULL_TREE);
6484 tree v2si_ftype_v2si_v2si
6485 = build_function_type_list (opaque_V2SI_type_node,
6486 opaque_V2SI_type_node,
6487 opaque_V2SI_type_node, NULL_TREE);
6489 tree v2sf_ftype_v2sf_v2sf
6490 = build_function_type_list (opaque_V2SF_type_node,
6491 opaque_V2SF_type_node,
6492 opaque_V2SF_type_node, NULL_TREE);
6494 tree v2si_ftype_int_int
6495 = build_function_type_list (opaque_V2SI_type_node,
6496 integer_type_node, integer_type_node,
6499 tree v2si_ftype_v2si
6500 = build_function_type_list (opaque_V2SI_type_node,
6501 opaque_V2SI_type_node, NULL_TREE);
6503 tree v2sf_ftype_v2sf
6504 = build_function_type_list (opaque_V2SF_type_node,
6505 opaque_V2SF_type_node, NULL_TREE);
6507 tree v2sf_ftype_v2si
6508 = build_function_type_list (opaque_V2SF_type_node,
6509 opaque_V2SI_type_node, NULL_TREE);
6511 tree v2si_ftype_v2sf
6512 = build_function_type_list (opaque_V2SI_type_node,
6513 opaque_V2SF_type_node, NULL_TREE);
6515 tree v2si_ftype_v2si_char
6516 = build_function_type_list (opaque_V2SI_type_node,
6517 opaque_V2SI_type_node,
6518 char_type_node, NULL_TREE);
6520 tree v2si_ftype_int_char
6521 = build_function_type_list (opaque_V2SI_type_node,
6522 integer_type_node, char_type_node, NULL_TREE);
6524 tree v2si_ftype_char
6525 = build_function_type_list (opaque_V2SI_type_node,
6526 char_type_node, NULL_TREE);
6528 tree int_ftype_int_int
6529 = build_function_type_list (integer_type_node,
6530 integer_type_node, integer_type_node,
6533 tree v4si_ftype_v4si_v4si
6534 = build_function_type_list (V4SI_type_node,
6535 V4SI_type_node, V4SI_type_node, NULL_TREE);
6536 tree v4sf_ftype_v4si_char
6537 = build_function_type_list (V4SF_type_node,
6538 V4SI_type_node, char_type_node, NULL_TREE);
6539 tree v4si_ftype_v4sf_char
6540 = build_function_type_list (V4SI_type_node,
6541 V4SF_type_node, char_type_node, NULL_TREE);
6542 tree v4si_ftype_v4si_char
6543 = build_function_type_list (V4SI_type_node,
6544 V4SI_type_node, char_type_node, NULL_TREE);
6545 tree v8hi_ftype_v8hi_char
6546 = build_function_type_list (V8HI_type_node,
6547 V8HI_type_node, char_type_node, NULL_TREE);
6548 tree v16qi_ftype_v16qi_char
6549 = build_function_type_list (V16QI_type_node,
6550 V16QI_type_node, char_type_node, NULL_TREE);
6551 tree v16qi_ftype_v16qi_v16qi_char
6552 = build_function_type_list (V16QI_type_node,
6553 V16QI_type_node, V16QI_type_node,
6554 char_type_node, NULL_TREE);
6555 tree v8hi_ftype_v8hi_v8hi_char
6556 = build_function_type_list (V8HI_type_node,
6557 V8HI_type_node, V8HI_type_node,
6558 char_type_node, NULL_TREE);
6559 tree v4si_ftype_v4si_v4si_char
6560 = build_function_type_list (V4SI_type_node,
6561 V4SI_type_node, V4SI_type_node,
6562 char_type_node, NULL_TREE);
6563 tree v4sf_ftype_v4sf_v4sf_char
6564 = build_function_type_list (V4SF_type_node,
6565 V4SF_type_node, V4SF_type_node,
6566 char_type_node, NULL_TREE);
6567 tree v4sf_ftype_v4sf_v4sf
6568 = build_function_type_list (V4SF_type_node,
6569 V4SF_type_node, V4SF_type_node, NULL_TREE);
6570 tree v4sf_ftype_v4sf_v4sf_v4si
6571 = build_function_type_list (V4SF_type_node,
6572 V4SF_type_node, V4SF_type_node,
6573 V4SI_type_node, NULL_TREE);
6574 tree v4sf_ftype_v4sf_v4sf_v4sf
6575 = build_function_type_list (V4SF_type_node,
6576 V4SF_type_node, V4SF_type_node,
6577 V4SF_type_node, NULL_TREE);
6578 tree v4si_ftype_v4si_v4si_v4si
6579 = build_function_type_list (V4SI_type_node,
6580 V4SI_type_node, V4SI_type_node,
6581 V4SI_type_node, NULL_TREE);
6582 tree v8hi_ftype_v8hi_v8hi
6583 = build_function_type_list (V8HI_type_node,
6584 V8HI_type_node, V8HI_type_node, NULL_TREE);
6585 tree v8hi_ftype_v8hi_v8hi_v8hi
6586 = build_function_type_list (V8HI_type_node,
6587 V8HI_type_node, V8HI_type_node,
6588 V8HI_type_node, NULL_TREE);
6589 tree v4si_ftype_v8hi_v8hi_v4si
6590 = build_function_type_list (V4SI_type_node,
6591 V8HI_type_node, V8HI_type_node,
6592 V4SI_type_node, NULL_TREE);
6593 tree v4si_ftype_v16qi_v16qi_v4si
6594 = build_function_type_list (V4SI_type_node,
6595 V16QI_type_node, V16QI_type_node,
6596 V4SI_type_node, NULL_TREE);
6597 tree v16qi_ftype_v16qi_v16qi
6598 = build_function_type_list (V16QI_type_node,
6599 V16QI_type_node, V16QI_type_node, NULL_TREE);
6600 tree v4si_ftype_v4sf_v4sf
6601 = build_function_type_list (V4SI_type_node,
6602 V4SF_type_node, V4SF_type_node, NULL_TREE);
6603 tree v8hi_ftype_v16qi_v16qi
6604 = build_function_type_list (V8HI_type_node,
6605 V16QI_type_node, V16QI_type_node, NULL_TREE);
6606 tree v4si_ftype_v8hi_v8hi
6607 = build_function_type_list (V4SI_type_node,
6608 V8HI_type_node, V8HI_type_node, NULL_TREE);
6609 tree v8hi_ftype_v4si_v4si
6610 = build_function_type_list (V8HI_type_node,
6611 V4SI_type_node, V4SI_type_node, NULL_TREE);
6612 tree v16qi_ftype_v8hi_v8hi
6613 = build_function_type_list (V16QI_type_node,
6614 V8HI_type_node, V8HI_type_node, NULL_TREE);
6615 tree v4si_ftype_v16qi_v4si
6616 = build_function_type_list (V4SI_type_node,
6617 V16QI_type_node, V4SI_type_node, NULL_TREE);
6618 tree v4si_ftype_v16qi_v16qi
6619 = build_function_type_list (V4SI_type_node,
6620 V16QI_type_node, V16QI_type_node, NULL_TREE);
6621 tree v4si_ftype_v8hi_v4si
6622 = build_function_type_list (V4SI_type_node,
6623 V8HI_type_node, V4SI_type_node, NULL_TREE);
6624 tree v4si_ftype_v8hi
6625 = build_function_type_list (V4SI_type_node, V8HI_type_node, NULL_TREE);
6626 tree int_ftype_v4si_v4si
6627 = build_function_type_list (integer_type_node,
6628 V4SI_type_node, V4SI_type_node, NULL_TREE);
6629 tree int_ftype_v4sf_v4sf
6630 = build_function_type_list (integer_type_node,
6631 V4SF_type_node, V4SF_type_node, NULL_TREE);
6632 tree int_ftype_v16qi_v16qi
6633 = build_function_type_list (integer_type_node,
6634 V16QI_type_node, V16QI_type_node, NULL_TREE);
6635 tree int_ftype_v8hi_v8hi
6636 = build_function_type_list (integer_type_node,
6637 V8HI_type_node, V8HI_type_node, NULL_TREE);
6639 /* Add the simple ternary operators. */
6640 d = (struct builtin_description *) bdesc_3arg;
6641 for (i = 0; i < ARRAY_SIZE (bdesc_3arg); i++, d++)
6644 enum machine_mode mode0, mode1, mode2, mode3;
6647 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6650 mode0 = insn_data[d->icode].operand[0].mode;
6651 mode1 = insn_data[d->icode].operand[1].mode;
6652 mode2 = insn_data[d->icode].operand[2].mode;
6653 mode3 = insn_data[d->icode].operand[3].mode;
6655 /* When all four are of the same mode. */
6656 if (mode0 == mode1 && mode1 == mode2 && mode2 == mode3)
6661 type = v4si_ftype_v4si_v4si_v4si;
6664 type = v4sf_ftype_v4sf_v4sf_v4sf;
6667 type = v8hi_ftype_v8hi_v8hi_v8hi;
6670 type = v16qi_ftype_v16qi_v16qi_v16qi;
6676 else if (mode0 == mode1 && mode1 == mode2 && mode3 == V16QImode)
6681 type = v4si_ftype_v4si_v4si_v16qi;
6684 type = v4sf_ftype_v4sf_v4sf_v16qi;
6687 type = v8hi_ftype_v8hi_v8hi_v16qi;
6690 type = v16qi_ftype_v16qi_v16qi_v16qi;
6696 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode
6697 && mode3 == V4SImode)
6698 type = v4si_ftype_v16qi_v16qi_v4si;
6699 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode
6700 && mode3 == V4SImode)
6701 type = v4si_ftype_v8hi_v8hi_v4si;
6702 else if (mode0 == V4SFmode && mode1 == V4SFmode && mode2 == V4SFmode
6703 && mode3 == V4SImode)
6704 type = v4sf_ftype_v4sf_v4sf_v4si;
6706 /* vchar, vchar, vchar, 4 bit literal. */
6707 else if (mode0 == V16QImode && mode1 == mode0 && mode2 == mode0
6709 type = v16qi_ftype_v16qi_v16qi_char;
6711 /* vshort, vshort, vshort, 4 bit literal. */
6712 else if (mode0 == V8HImode && mode1 == mode0 && mode2 == mode0
6714 type = v8hi_ftype_v8hi_v8hi_char;
6716 /* vint, vint, vint, 4 bit literal. */
6717 else if (mode0 == V4SImode && mode1 == mode0 && mode2 == mode0
6719 type = v4si_ftype_v4si_v4si_char;
6721 /* vfloat, vfloat, vfloat, 4 bit literal. */
6722 else if (mode0 == V4SFmode && mode1 == mode0 && mode2 == mode0
6724 type = v4sf_ftype_v4sf_v4sf_char;
6729 def_builtin (d->mask, d->name, type, d->code);
6732 /* Add the simple binary operators. */
6733 d = (struct builtin_description *) bdesc_2arg;
6734 for (i = 0; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6736 enum machine_mode mode0, mode1, mode2;
6739 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6742 mode0 = insn_data[d->icode].operand[0].mode;
6743 mode1 = insn_data[d->icode].operand[1].mode;
6744 mode2 = insn_data[d->icode].operand[2].mode;
6746 /* When all three operands are of the same mode. */
6747 if (mode0 == mode1 && mode1 == mode2)
6752 type = v4sf_ftype_v4sf_v4sf;
6755 type = v4si_ftype_v4si_v4si;
6758 type = v16qi_ftype_v16qi_v16qi;
6761 type = v8hi_ftype_v8hi_v8hi;
6764 type = v2si_ftype_v2si_v2si;
6767 type = v2sf_ftype_v2sf_v2sf;
6770 type = int_ftype_int_int;
6777 /* A few other combos we really don't want to do manually. */
6779 /* vint, vfloat, vfloat. */
6780 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == V4SFmode)
6781 type = v4si_ftype_v4sf_v4sf;
6783 /* vshort, vchar, vchar. */
6784 else if (mode0 == V8HImode && mode1 == V16QImode && mode2 == V16QImode)
6785 type = v8hi_ftype_v16qi_v16qi;
6787 /* vint, vshort, vshort. */
6788 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V8HImode)
6789 type = v4si_ftype_v8hi_v8hi;
6791 /* vshort, vint, vint. */
6792 else if (mode0 == V8HImode && mode1 == V4SImode && mode2 == V4SImode)
6793 type = v8hi_ftype_v4si_v4si;
6795 /* vchar, vshort, vshort. */
6796 else if (mode0 == V16QImode && mode1 == V8HImode && mode2 == V8HImode)
6797 type = v16qi_ftype_v8hi_v8hi;
6799 /* vint, vchar, vint. */
6800 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V4SImode)
6801 type = v4si_ftype_v16qi_v4si;
6803 /* vint, vchar, vchar. */
6804 else if (mode0 == V4SImode && mode1 == V16QImode && mode2 == V16QImode)
6805 type = v4si_ftype_v16qi_v16qi;
6807 /* vint, vshort, vint. */
6808 else if (mode0 == V4SImode && mode1 == V8HImode && mode2 == V4SImode)
6809 type = v4si_ftype_v8hi_v4si;
6811 /* vint, vint, 5 bit literal. */
6812 else if (mode0 == V4SImode && mode1 == V4SImode && mode2 == QImode)
6813 type = v4si_ftype_v4si_char;
6815 /* vshort, vshort, 5 bit literal. */
6816 else if (mode0 == V8HImode && mode1 == V8HImode && mode2 == QImode)
6817 type = v8hi_ftype_v8hi_char;
6819 /* vchar, vchar, 5 bit literal. */
6820 else if (mode0 == V16QImode && mode1 == V16QImode && mode2 == QImode)
6821 type = v16qi_ftype_v16qi_char;
6823 /* vfloat, vint, 5 bit literal. */
6824 else if (mode0 == V4SFmode && mode1 == V4SImode && mode2 == QImode)
6825 type = v4sf_ftype_v4si_char;
6827 /* vint, vfloat, 5 bit literal. */
6828 else if (mode0 == V4SImode && mode1 == V4SFmode && mode2 == QImode)
6829 type = v4si_ftype_v4sf_char;
6831 else if (mode0 == V2SImode && mode1 == SImode && mode2 == SImode)
6832 type = v2si_ftype_int_int;
6834 else if (mode0 == V2SImode && mode1 == V2SImode && mode2 == QImode)
6835 type = v2si_ftype_v2si_char;
6837 else if (mode0 == V2SImode && mode1 == SImode && mode2 == QImode)
6838 type = v2si_ftype_int_char;
6841 else if (mode0 == SImode)
6846 type = int_ftype_v4si_v4si;
6849 type = int_ftype_v4sf_v4sf;
6852 type = int_ftype_v16qi_v16qi;
6855 type = int_ftype_v8hi_v8hi;
6865 def_builtin (d->mask, d->name, type, d->code);
6868 /* Add the simple unary operators. */
6869 d = (struct builtin_description *) bdesc_1arg;
6870 for (i = 0; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6872 enum machine_mode mode0, mode1;
6875 if (d->name == 0 || d->icode == CODE_FOR_nothing)
6878 mode0 = insn_data[d->icode].operand[0].mode;
6879 mode1 = insn_data[d->icode].operand[1].mode;
6881 if (mode0 == V4SImode && mode1 == QImode)
6882 type = v4si_ftype_char;
6883 else if (mode0 == V8HImode && mode1 == QImode)
6884 type = v8hi_ftype_char;
6885 else if (mode0 == V16QImode && mode1 == QImode)
6886 type = v16qi_ftype_char;
6887 else if (mode0 == V4SFmode && mode1 == V4SFmode)
6888 type = v4sf_ftype_v4sf;
6889 else if (mode0 == V8HImode && mode1 == V16QImode)
6890 type = v8hi_ftype_v16qi;
6891 else if (mode0 == V4SImode && mode1 == V8HImode)
6892 type = v4si_ftype_v8hi;
6893 else if (mode0 == V2SImode && mode1 == V2SImode)
6894 type = v2si_ftype_v2si;
6895 else if (mode0 == V2SFmode && mode1 == V2SFmode)
6896 type = v2sf_ftype_v2sf;
6897 else if (mode0 == V2SFmode && mode1 == V2SImode)
6898 type = v2sf_ftype_v2si;
6899 else if (mode0 == V2SImode && mode1 == V2SFmode)
6900 type = v2si_ftype_v2sf;
6901 else if (mode0 == V2SImode && mode1 == QImode)
6902 type = v2si_ftype_char;
6906 def_builtin (d->mask, d->name, type, d->code);
6911 /* Expand a block move operation, and return 1 if successful. Return 0
6912 if we should let the compiler generate normal code.
6914 operands[0] is the destination
6915 operands[1] is the source
6916 operands[2] is the length
6917 operands[3] is the alignment */
6919 #define MAX_MOVE_REG 4
6922 expand_block_move (operands)
6925 rtx orig_dest = operands[0];
6926 rtx orig_src = operands[1];
6927 rtx bytes_rtx = operands[2];
6928 rtx align_rtx = operands[3];
6929 int constp = (GET_CODE (bytes_rtx) == CONST_INT);
6934 rtx stores[MAX_MOVE_REG];
6937 /* If this is not a fixed size move, just call memcpy */
6941 /* If this is not a fixed size alignment, abort */
6942 if (GET_CODE (align_rtx) != CONST_INT)
6944 align = INTVAL (align_rtx);
6946 /* Anything to move? */
6947 bytes = INTVAL (bytes_rtx);
6951 /* store_one_arg depends on expand_block_move to handle at least the size of
6952 reg_parm_stack_space. */
6953 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
6956 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
6959 rtx (*movstrsi) PARAMS ((rtx, rtx, rtx, rtx));
6960 rtx (*mov) PARAMS ((rtx, rtx));
6962 enum machine_mode mode = BLKmode;
6966 && bytes > 24 /* move up to 32 bytes at a time */
6974 && ! fixed_regs[12])
6976 move_bytes = (bytes > 32) ? 32 : bytes;
6977 gen_func.movstrsi = gen_movstrsi_8reg;
6979 else if (TARGET_STRING
6980 && bytes > 16 /* move up to 24 bytes at a time */
6986 && ! fixed_regs[10])
6988 move_bytes = (bytes > 24) ? 24 : bytes;
6989 gen_func.movstrsi = gen_movstrsi_6reg;
6991 else if (TARGET_STRING
6992 && bytes > 8 /* move up to 16 bytes at a time */
6998 move_bytes = (bytes > 16) ? 16 : bytes;
6999 gen_func.movstrsi = gen_movstrsi_4reg;
7001 else if (bytes >= 8 && TARGET_POWERPC64
7002 /* 64-bit loads and stores require word-aligned
7004 && (align >= 8 || (! STRICT_ALIGNMENT && align >= 4)))
7008 gen_func.mov = gen_movdi;
7010 else if (TARGET_STRING && bytes > 4 && !TARGET_POWERPC64)
7011 { /* move up to 8 bytes at a time */
7012 move_bytes = (bytes > 8) ? 8 : bytes;
7013 gen_func.movstrsi = gen_movstrsi_2reg;
7015 else if (bytes >= 4 && (align >= 4 || ! STRICT_ALIGNMENT))
7016 { /* move 4 bytes */
7019 gen_func.mov = gen_movsi;
7021 else if (bytes == 2 && (align >= 2 || ! STRICT_ALIGNMENT))
7022 { /* move 2 bytes */
7025 gen_func.mov = gen_movhi;
7027 else if (TARGET_STRING && bytes > 1)
7028 { /* move up to 4 bytes at a time */
7029 move_bytes = (bytes > 4) ? 4 : bytes;
7030 gen_func.movstrsi = gen_movstrsi_1reg;
7032 else /* move 1 byte at a time */
7036 gen_func.mov = gen_movqi;
7039 src = adjust_address (orig_src, mode, offset);
7040 dest = adjust_address (orig_dest, mode, offset);
7042 if (mode != BLKmode)
7044 rtx tmp_reg = gen_reg_rtx (mode);
7046 emit_insn ((*gen_func.mov) (tmp_reg, src));
7047 stores[num_reg++] = (*gen_func.mov) (dest, tmp_reg);
7050 if (mode == BLKmode || num_reg >= MAX_MOVE_REG || bytes == move_bytes)
7053 for (i = 0; i < num_reg; i++)
7054 emit_insn (stores[i]);
7058 if (mode == BLKmode)
7060 /* Move the address into scratch registers. The movstrsi
7061 patterns require zero offset. */
7062 if (!REG_P (XEXP (src, 0)))
7064 rtx src_reg = copy_addr_to_reg (XEXP (src, 0));
7065 src = replace_equiv_address (src, src_reg);
7067 set_mem_size (src, GEN_INT (move_bytes));
7069 if (!REG_P (XEXP (dest, 0)))
7071 rtx dest_reg = copy_addr_to_reg (XEXP (dest, 0));
7072 dest = replace_equiv_address (dest, dest_reg);
7074 set_mem_size (dest, GEN_INT (move_bytes));
7076 emit_insn ((*gen_func.movstrsi) (dest, src,
7077 GEN_INT (move_bytes & 31),
7086 /* Return 1 if OP is a load multiple operation. It is known to be a
7087 PARALLEL and the first section will be tested. */
7090 load_multiple_operation (op, mode)
7092 enum machine_mode mode ATTRIBUTE_UNUSED;
7094 int count = XVECLEN (op, 0);
7095 unsigned int dest_regno;
7099 /* Perform a quick check so we don't blow up below. */
7101 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7102 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7103 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7106 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7107 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7109 for (i = 1; i < count; i++)
7111 rtx elt = XVECEXP (op, 0, i);
7113 if (GET_CODE (elt) != SET
7114 || GET_CODE (SET_DEST (elt)) != REG
7115 || GET_MODE (SET_DEST (elt)) != SImode
7116 || REGNO (SET_DEST (elt)) != dest_regno + i
7117 || GET_CODE (SET_SRC (elt)) != MEM
7118 || GET_MODE (SET_SRC (elt)) != SImode
7119 || GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
7120 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
7121 || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
7122 || INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != i * 4)
7129 /* Similar, but tests for store multiple. Here, the second vector element
7130 is a CLOBBER. It will be tested later. */
7133 store_multiple_operation (op, mode)
7135 enum machine_mode mode ATTRIBUTE_UNUSED;
7137 int count = XVECLEN (op, 0) - 1;
7138 unsigned int src_regno;
7142 /* Perform a quick check so we don't blow up below. */
7144 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7145 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7146 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7149 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7150 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7152 for (i = 1; i < count; i++)
7154 rtx elt = XVECEXP (op, 0, i + 1);
7156 if (GET_CODE (elt) != SET
7157 || GET_CODE (SET_SRC (elt)) != REG
7158 || GET_MODE (SET_SRC (elt)) != SImode
7159 || REGNO (SET_SRC (elt)) != src_regno + i
7160 || GET_CODE (SET_DEST (elt)) != MEM
7161 || GET_MODE (SET_DEST (elt)) != SImode
7162 || GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
7163 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
7164 || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
7165 || INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != i * 4)
7172 /* Return a string to perform a load_multiple operation.
7173 operands[0] is the vector.
7174 operands[1] is the source address.
7175 operands[2] is the first destination register. */
7178 rs6000_output_load_multiple (operands)
7181 /* We have to handle the case where the pseudo used to contain the address
7182 is assigned to one of the output registers. */
7184 int words = XVECLEN (operands[0], 0);
7187 if (XVECLEN (operands[0], 0) == 1)
7188 return "{l|lwz} %2,0(%1)";
7190 for (i = 0; i < words; i++)
7191 if (refers_to_regno_p (REGNO (operands[2]) + i,
7192 REGNO (operands[2]) + i + 1, operands[1], 0))
7196 xop[0] = GEN_INT (4 * (words-1));
7197 xop[1] = operands[1];
7198 xop[2] = operands[2];
7199 output_asm_insn ("{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,%0(%1)", xop);
7204 xop[0] = GEN_INT (4 * (words-1));
7205 xop[1] = operands[1];
7206 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
7207 output_asm_insn ("{cal %1,4(%1)|addi %1,%1,4}\n\t{lsi|lswi} %2,%1,%0\n\t{l|lwz} %1,-4(%1)", xop);
7212 for (j = 0; j < words; j++)
7215 xop[0] = GEN_INT (j * 4);
7216 xop[1] = operands[1];
7217 xop[2] = gen_rtx_REG (SImode, REGNO (operands[2]) + j);
7218 output_asm_insn ("{l|lwz} %2,%0(%1)", xop);
7220 xop[0] = GEN_INT (i * 4);
7221 xop[1] = operands[1];
7222 output_asm_insn ("{l|lwz} %1,%0(%1)", xop);
7227 return "{lsi|lswi} %2,%1,%N0";
7230 /* Return 1 for a parallel vrsave operation. */
7233 vrsave_operation (op, mode)
7235 enum machine_mode mode ATTRIBUTE_UNUSED;
7237 int count = XVECLEN (op, 0);
7238 unsigned int dest_regno, src_regno;
7242 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7243 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7244 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC_VOLATILE)
7247 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7248 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7250 if (dest_regno != VRSAVE_REGNO
7251 && src_regno != VRSAVE_REGNO)
7254 for (i = 1; i < count; i++)
7256 rtx elt = XVECEXP (op, 0, i);
7258 if (GET_CODE (elt) != CLOBBER
7259 && GET_CODE (elt) != SET)
7266 /* Return 1 for an PARALLEL suitable for mtcrf. */
7269 mtcrf_operation (op, mode)
7271 enum machine_mode mode ATTRIBUTE_UNUSED;
7273 int count = XVECLEN (op, 0);
7277 /* Perform a quick check so we don't blow up below. */
7279 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7280 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != UNSPEC
7281 || XVECLEN (SET_SRC (XVECEXP (op, 0, 0)), 0) != 2)
7283 src_reg = XVECEXP (SET_SRC (XVECEXP (op, 0, 0)), 0, 0);
7285 if (GET_CODE (src_reg) != REG
7286 || GET_MODE (src_reg) != SImode
7287 || ! INT_REGNO_P (REGNO (src_reg)))
7290 for (i = 0; i < count; i++)
7292 rtx exp = XVECEXP (op, 0, i);
7296 if (GET_CODE (exp) != SET
7297 || GET_CODE (SET_DEST (exp)) != REG
7298 || GET_MODE (SET_DEST (exp)) != CCmode
7299 || ! CR_REGNO_P (REGNO (SET_DEST (exp))))
7301 unspec = SET_SRC (exp);
7302 maskval = 1 << (MAX_CR_REGNO - REGNO (SET_DEST (exp)));
7304 if (GET_CODE (unspec) != UNSPEC
7305 || XINT (unspec, 1) != UNSPEC_MOVESI_TO_CR
7306 || XVECLEN (unspec, 0) != 2
7307 || XVECEXP (unspec, 0, 0) != src_reg
7308 || GET_CODE (XVECEXP (unspec, 0, 1)) != CONST_INT
7309 || INTVAL (XVECEXP (unspec, 0, 1)) != maskval)
7315 /* Return 1 for an PARALLEL suitable for lmw. */
7318 lmw_operation (op, mode)
7320 enum machine_mode mode ATTRIBUTE_UNUSED;
7322 int count = XVECLEN (op, 0);
7323 unsigned int dest_regno;
7325 unsigned int base_regno;
7326 HOST_WIDE_INT offset;
7329 /* Perform a quick check so we don't blow up below. */
7331 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7332 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != REG
7333 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != MEM)
7336 dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, 0)));
7337 src_addr = XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0);
7340 || count != 32 - (int) dest_regno)
7343 if (legitimate_indirect_address_p (src_addr, 0))
7346 base_regno = REGNO (src_addr);
7347 if (base_regno == 0)
7350 else if (legitimate_offset_address_p (SImode, src_addr, 0))
7352 offset = INTVAL (XEXP (src_addr, 1));
7353 base_regno = REGNO (XEXP (src_addr, 0));
7358 for (i = 0; i < count; i++)
7360 rtx elt = XVECEXP (op, 0, i);
7363 HOST_WIDE_INT newoffset;
7365 if (GET_CODE (elt) != SET
7366 || GET_CODE (SET_DEST (elt)) != REG
7367 || GET_MODE (SET_DEST (elt)) != SImode
7368 || REGNO (SET_DEST (elt)) != dest_regno + i
7369 || GET_CODE (SET_SRC (elt)) != MEM
7370 || GET_MODE (SET_SRC (elt)) != SImode)
7372 newaddr = XEXP (SET_SRC (elt), 0);
7373 if (legitimate_indirect_address_p (newaddr, 0))
7378 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7380 addr_reg = XEXP (newaddr, 0);
7381 newoffset = INTVAL (XEXP (newaddr, 1));
7385 if (REGNO (addr_reg) != base_regno
7386 || newoffset != offset + 4 * i)
7393 /* Return 1 for an PARALLEL suitable for stmw. */
7396 stmw_operation (op, mode)
7398 enum machine_mode mode ATTRIBUTE_UNUSED;
7400 int count = XVECLEN (op, 0);
7401 unsigned int src_regno;
7403 unsigned int base_regno;
7404 HOST_WIDE_INT offset;
7407 /* Perform a quick check so we don't blow up below. */
7409 || GET_CODE (XVECEXP (op, 0, 0)) != SET
7410 || GET_CODE (SET_DEST (XVECEXP (op, 0, 0))) != MEM
7411 || GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) != REG)
7414 src_regno = REGNO (SET_SRC (XVECEXP (op, 0, 0)));
7415 dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, 0)), 0);
7418 || count != 32 - (int) src_regno)
7421 if (legitimate_indirect_address_p (dest_addr, 0))
7424 base_regno = REGNO (dest_addr);
7425 if (base_regno == 0)
7428 else if (legitimate_offset_address_p (SImode, dest_addr, 0))
7430 offset = INTVAL (XEXP (dest_addr, 1));
7431 base_regno = REGNO (XEXP (dest_addr, 0));
7436 for (i = 0; i < count; i++)
7438 rtx elt = XVECEXP (op, 0, i);
7441 HOST_WIDE_INT newoffset;
7443 if (GET_CODE (elt) != SET
7444 || GET_CODE (SET_SRC (elt)) != REG
7445 || GET_MODE (SET_SRC (elt)) != SImode
7446 || REGNO (SET_SRC (elt)) != src_regno + i
7447 || GET_CODE (SET_DEST (elt)) != MEM
7448 || GET_MODE (SET_DEST (elt)) != SImode)
7450 newaddr = XEXP (SET_DEST (elt), 0);
7451 if (legitimate_indirect_address_p (newaddr, 0))
7456 else if (legitimate_offset_address_p (SImode, newaddr, 0))
7458 addr_reg = XEXP (newaddr, 0);
7459 newoffset = INTVAL (XEXP (newaddr, 1));
7463 if (REGNO (addr_reg) != base_regno
7464 || newoffset != offset + 4 * i)
7471 /* A validation routine: say whether CODE, a condition code, and MODE
7472 match. The other alternatives either don't make sense or should
7473 never be generated. */
7476 validate_condition_mode (code, mode)
7478 enum machine_mode mode;
7480 if (GET_RTX_CLASS (code) != '<'
7481 || GET_MODE_CLASS (mode) != MODE_CC)
7484 /* These don't make sense. */
7485 if ((code == GT || code == LT || code == GE || code == LE)
7486 && mode == CCUNSmode)
7489 if ((code == GTU || code == LTU || code == GEU || code == LEU)
7490 && mode != CCUNSmode)
7493 if (mode != CCFPmode
7494 && (code == ORDERED || code == UNORDERED
7495 || code == UNEQ || code == LTGT
7496 || code == UNGT || code == UNLT
7497 || code == UNGE || code == UNLE))
7500 /* These should never be generated except for
7501 flag_finite_math_only. */
7502 if (mode == CCFPmode
7503 && ! flag_finite_math_only
7504 && (code == LE || code == GE
7505 || code == UNEQ || code == LTGT
7506 || code == UNGT || code == UNLT))
7509 /* These are invalid; the information is not there. */
7510 if (mode == CCEQmode
7511 && code != EQ && code != NE)
7515 /* Return 1 if OP is a comparison operation that is valid for a branch insn.
7516 We only check the opcode against the mode of the CC value here. */
7519 branch_comparison_operator (op, mode)
7521 enum machine_mode mode ATTRIBUTE_UNUSED;
7523 enum rtx_code code = GET_CODE (op);
7524 enum machine_mode cc_mode;
7526 if (GET_RTX_CLASS (code) != '<')
7529 cc_mode = GET_MODE (XEXP (op, 0));
7530 if (GET_MODE_CLASS (cc_mode) != MODE_CC)
7533 validate_condition_mode (code, cc_mode);
7538 /* Return 1 if OP is a comparison operation that is valid for a branch
7539 insn and which is true if the corresponding bit in the CC register
7543 branch_positive_comparison_operator (op, mode)
7545 enum machine_mode mode;
7549 if (! branch_comparison_operator (op, mode))
7552 code = GET_CODE (op);
7553 return (code == EQ || code == LT || code == GT
7554 || (TARGET_E500 && TARGET_HARD_FLOAT && !TARGET_FPRS && code == NE)
7555 || code == LTU || code == GTU
7556 || code == UNORDERED);
7559 /* Return 1 if OP is a comparison operation that is valid for an scc
7560 insn: it must be a positive comparison. */
7563 scc_comparison_operator (op, mode)
7565 enum machine_mode mode;
7567 return branch_positive_comparison_operator (op, mode);
7571 trap_comparison_operator (op, mode)
7573 enum machine_mode mode;
7575 if (mode != VOIDmode && mode != GET_MODE (op))
7577 return GET_RTX_CLASS (GET_CODE (op)) == '<';
7581 boolean_operator (op, mode)
7583 enum machine_mode mode ATTRIBUTE_UNUSED;
7585 enum rtx_code code = GET_CODE (op);
7586 return (code == AND || code == IOR || code == XOR);
7590 boolean_or_operator (op, mode)
7592 enum machine_mode mode ATTRIBUTE_UNUSED;
7594 enum rtx_code code = GET_CODE (op);
7595 return (code == IOR || code == XOR);
7599 min_max_operator (op, mode)
7601 enum machine_mode mode ATTRIBUTE_UNUSED;
7603 enum rtx_code code = GET_CODE (op);
7604 return (code == SMIN || code == SMAX || code == UMIN || code == UMAX);
7607 /* Return 1 if ANDOP is a mask that has no bits on that are not in the
7608 mask required to convert the result of a rotate insn into a shift
7609 left insn of SHIFTOP bits. Both are known to be SImode CONST_INT. */
7612 includes_lshift_p (shiftop, andop)
7616 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7618 shift_mask <<= INTVAL (shiftop);
7620 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7623 /* Similar, but for right shift. */
7626 includes_rshift_p (shiftop, andop)
7630 unsigned HOST_WIDE_INT shift_mask = ~(unsigned HOST_WIDE_INT) 0;
7632 shift_mask >>= INTVAL (shiftop);
7634 return (INTVAL (andop) & 0xffffffff & ~shift_mask) == 0;
7637 /* Return 1 if ANDOP is a mask suitable for use with an rldic insn
7638 to perform a left shift. It must have exactly SHIFTOP least
7639 significant 0's, then one or more 1's, then zero or more 0's. */
7642 includes_rldic_lshift_p (shiftop, andop)
7646 if (GET_CODE (andop) == CONST_INT)
7648 HOST_WIDE_INT c, lsb, shift_mask;
7651 if (c == 0 || c == ~0)
7655 shift_mask <<= INTVAL (shiftop);
7657 /* Find the least significant one bit. */
7660 /* It must coincide with the LSB of the shift mask. */
7661 if (-lsb != shift_mask)
7664 /* Invert to look for the next transition (if any). */
7667 /* Remove the low group of ones (originally low group of zeros). */
7670 /* Again find the lsb, and check we have all 1's above. */
7674 else if (GET_CODE (andop) == CONST_DOUBLE
7675 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7677 HOST_WIDE_INT low, high, lsb;
7678 HOST_WIDE_INT shift_mask_low, shift_mask_high;
7680 low = CONST_DOUBLE_LOW (andop);
7681 if (HOST_BITS_PER_WIDE_INT < 64)
7682 high = CONST_DOUBLE_HIGH (andop);
7684 if ((low == 0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == 0))
7685 || (low == ~0 && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0)))
7688 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7690 shift_mask_high = ~0;
7691 if (INTVAL (shiftop) > 32)
7692 shift_mask_high <<= INTVAL (shiftop) - 32;
7696 if (-lsb != shift_mask_high || INTVAL (shiftop) < 32)
7703 return high == -lsb;
7706 shift_mask_low = ~0;
7707 shift_mask_low <<= INTVAL (shiftop);
7711 if (-lsb != shift_mask_low)
7714 if (HOST_BITS_PER_WIDE_INT < 64)
7719 if (HOST_BITS_PER_WIDE_INT < 64 && low == 0)
7722 return high == -lsb;
7726 return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
7732 /* Return 1 if ANDOP is a mask suitable for use with an rldicr insn
7733 to perform a left shift. It must have SHIFTOP or more least
7734 signifigant 0's, with the remainder of the word 1's. */
7737 includes_rldicr_lshift_p (shiftop, andop)
7741 if (GET_CODE (andop) == CONST_INT)
7743 HOST_WIDE_INT c, lsb, shift_mask;
7746 shift_mask <<= INTVAL (shiftop);
7749 /* Find the least signifigant one bit. */
7752 /* It must be covered by the shift mask.
7753 This test also rejects c == 0. */
7754 if ((lsb & shift_mask) == 0)
7757 /* Check we have all 1's above the transition, and reject all 1's. */
7758 return c == -lsb && lsb != 1;
7760 else if (GET_CODE (andop) == CONST_DOUBLE
7761 && (GET_MODE (andop) == VOIDmode || GET_MODE (andop) == DImode))
7763 HOST_WIDE_INT low, lsb, shift_mask_low;
7765 low = CONST_DOUBLE_LOW (andop);
7767 if (HOST_BITS_PER_WIDE_INT < 64)
7769 HOST_WIDE_INT high, shift_mask_high;
7771 high = CONST_DOUBLE_HIGH (andop);
7775 shift_mask_high = ~0;
7776 if (INTVAL (shiftop) > 32)
7777 shift_mask_high <<= INTVAL (shiftop) - 32;
7781 if ((lsb & shift_mask_high) == 0)
7784 return high == -lsb;
7790 shift_mask_low = ~0;
7791 shift_mask_low <<= INTVAL (shiftop);
7795 if ((lsb & shift_mask_low) == 0)
7798 return low == -lsb && lsb != 1;
7804 /* Return 1 if REGNO (reg1) == REGNO (reg2) - 1 making them candidates
7805 for lfq and stfq insns.
7807 Note reg1 and reg2 *must* be hard registers. To be sure we will
7808 abort if we are passed pseudo registers. */
7811 registers_ok_for_quad_peep (reg1, reg2)
7814 /* We might have been passed a SUBREG. */
7815 if (GET_CODE (reg1) != REG || GET_CODE (reg2) != REG)
7818 return (REGNO (reg1) == REGNO (reg2) - 1);
7821 /* Return 1 if addr1 and addr2 are suitable for lfq or stfq insn.
7822 addr1 and addr2 must be in consecutive memory locations
7823 (addr2 == addr1 + 8). */
7826 addrs_ok_for_quad_peep (addr1, addr2)
7833 /* Extract an offset (if used) from the first addr. */
7834 if (GET_CODE (addr1) == PLUS)
7836 /* If not a REG, return zero. */
7837 if (GET_CODE (XEXP (addr1, 0)) != REG)
7841 reg1 = REGNO (XEXP (addr1, 0));
7842 /* The offset must be constant! */
7843 if (GET_CODE (XEXP (addr1, 1)) != CONST_INT)
7845 offset1 = INTVAL (XEXP (addr1, 1));
7848 else if (GET_CODE (addr1) != REG)
7852 reg1 = REGNO (addr1);
7853 /* This was a simple (mem (reg)) expression. Offset is 0. */
7857 /* Make sure the second address is a (mem (plus (reg) (const_int)))
7858 or if it is (mem (reg)) then make sure that offset1 is -8 and the same
7859 register as addr1. */
7860 if (offset1 == -8 && GET_CODE (addr2) == REG && reg1 == REGNO (addr2))
7862 if (GET_CODE (addr2) != PLUS)
7865 if (GET_CODE (XEXP (addr2, 0)) != REG
7866 || GET_CODE (XEXP (addr2, 1)) != CONST_INT)
7869 if (reg1 != REGNO (XEXP (addr2, 0)))
7872 /* The offset for the second addr must be 8 more than the first addr. */
7873 if (INTVAL (XEXP (addr2, 1)) != offset1 + 8)
7876 /* All the tests passed. addr1 and addr2 are valid for lfq or stfq
7881 /* Return the register class of a scratch register needed to copy IN into
7882 or out of a register in CLASS in MODE. If it can be done directly,
7883 NO_REGS is returned. */
7886 secondary_reload_class (class, mode, in)
7887 enum reg_class class;
7888 enum machine_mode mode ATTRIBUTE_UNUSED;
7893 if (TARGET_ELF || (DEFAULT_ABI == ABI_DARWIN
7895 && MACHOPIC_INDIRECT
7899 /* We cannot copy a symbolic operand directly into anything
7900 other than BASE_REGS for TARGET_ELF. So indicate that a
7901 register from BASE_REGS is needed as an intermediate
7904 On Darwin, pic addresses require a load from memory, which
7905 needs a base register. */
7906 if (class != BASE_REGS
7907 && (GET_CODE (in) == SYMBOL_REF
7908 || GET_CODE (in) == HIGH
7909 || GET_CODE (in) == LABEL_REF
7910 || GET_CODE (in) == CONST))
7914 if (GET_CODE (in) == REG)
7917 if (regno >= FIRST_PSEUDO_REGISTER)
7919 regno = true_regnum (in);
7920 if (regno >= FIRST_PSEUDO_REGISTER)
7924 else if (GET_CODE (in) == SUBREG)
7926 regno = true_regnum (in);
7927 if (regno >= FIRST_PSEUDO_REGISTER)
7933 /* We can place anything into GENERAL_REGS and can put GENERAL_REGS
7935 if (class == GENERAL_REGS || class == BASE_REGS
7936 || (regno >= 0 && INT_REGNO_P (regno)))
7939 /* Constants, memory, and FP registers can go into FP registers. */
7940 if ((regno == -1 || FP_REGNO_P (regno))
7941 && (class == FLOAT_REGS || class == NON_SPECIAL_REGS))
7944 /* Memory, and AltiVec registers can go into AltiVec registers. */
7945 if ((regno == -1 || ALTIVEC_REGNO_P (regno))
7946 && class == ALTIVEC_REGS)
7949 /* We can copy among the CR registers. */
7950 if ((class == CR_REGS || class == CR0_REGS)
7951 && regno >= 0 && CR_REGNO_P (regno))
7954 /* Otherwise, we need GENERAL_REGS. */
7955 return GENERAL_REGS;
7958 /* Given a comparison operation, return the bit number in CCR to test. We
7959 know this is a valid comparison.
7961 SCC_P is 1 if this is for an scc. That means that %D will have been
7962 used instead of %C, so the bits will be in different places.
7964 Return -1 if OP isn't a valid comparison for some reason. */
7971 enum rtx_code code = GET_CODE (op);
7972 enum machine_mode cc_mode;
7977 if (GET_RTX_CLASS (code) != '<')
7982 if (GET_CODE (reg) != REG
7983 || ! CR_REGNO_P (REGNO (reg)))
7986 cc_mode = GET_MODE (reg);
7987 cc_regnum = REGNO (reg);
7988 base_bit = 4 * (cc_regnum - CR0_REGNO);
7990 validate_condition_mode (code, cc_mode);
7992 /* When generating a sCOND operation, only positive conditions are
7994 if (scc_p && code != EQ && code != GT && code != LT && code != UNORDERED
7995 && code != GTU && code != LTU)
8001 if (TARGET_E500 && !TARGET_FPRS
8002 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8003 return base_bit + 1;
8004 return scc_p ? base_bit + 3 : base_bit + 2;
8006 if (TARGET_E500 && !TARGET_FPRS
8007 && TARGET_HARD_FLOAT && cc_mode == CCFPmode)
8008 return base_bit + 1;
8009 return base_bit + 2;
8010 case GT: case GTU: case UNLE:
8011 return base_bit + 1;
8012 case LT: case LTU: case UNGE:
8014 case ORDERED: case UNORDERED:
8015 return base_bit + 3;
8018 /* If scc, we will have done a cror to put the bit in the
8019 unordered position. So test that bit. For integer, this is ! LT
8020 unless this is an scc insn. */
8021 return scc_p ? base_bit + 3 : base_bit;
8024 return scc_p ? base_bit + 3 : base_bit + 1;
8031 /* Return the GOT register. */
8034 rs6000_got_register (value)
8035 rtx value ATTRIBUTE_UNUSED;
8037 /* The second flow pass currently (June 1999) can't update
8038 regs_ever_live without disturbing other parts of the compiler, so
8039 update it here to make the prolog/epilogue code happy. */
8040 if (no_new_pseudos && ! regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM])
8041 regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM] = 1;
8043 current_function_uses_pic_offset_table = 1;
8045 return pic_offset_table_rtx;
8048 /* Function to init struct machine_function.
8049 This will be called, via a pointer variable,
8050 from push_function_context. */
8052 static struct machine_function *
8053 rs6000_init_machine_status ()
8055 return ggc_alloc_cleared (sizeof (machine_function));
8058 /* These macros test for integers and extract the low-order bits. */
8060 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
8061 && GET_MODE (X) == VOIDmode)
8063 #define INT_LOWPART(X) \
8064 (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
8071 unsigned long val = INT_LOWPART (op);
8073 /* If the high bit is zero, the value is the first 1 bit we find
8075 if ((val & 0x80000000) == 0)
8077 if ((val & 0xffffffff) == 0)
8081 while (((val <<= 1) & 0x80000000) == 0)
8086 /* If the high bit is set and the low bit is not, or the mask is all
8087 1's, the value is zero. */
8088 if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
8091 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8094 while (((val >>= 1) & 1) != 0)
8105 unsigned long val = INT_LOWPART (op);
8107 /* If the low bit is zero, the value is the first 1 bit we find from
8111 if ((val & 0xffffffff) == 0)
8115 while (((val >>= 1) & 1) == 0)
8121 /* If the low bit is set and the high bit is not, or the mask is all
8122 1's, the value is 31. */
8123 if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
8126 /* Otherwise we have a wrap-around mask. Look for the first 0 bit
8129 while (((val <<= 1) & 0x80000000) != 0)
8135 /* Locate some local-dynamic symbol still in use by this function
8136 so that we can print its name in some tls_ld pattern. */
8139 rs6000_get_some_local_dynamic_name ()
8143 if (cfun->machine->some_ld_name)
8144 return cfun->machine->some_ld_name;
8146 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
8148 && for_each_rtx (&PATTERN (insn),
8149 rs6000_get_some_local_dynamic_name_1, 0))
8150 return cfun->machine->some_ld_name;
8155 /* Helper function for rs6000_get_some_local_dynamic_name. */
8158 rs6000_get_some_local_dynamic_name_1 (px, data)
8160 void *data ATTRIBUTE_UNUSED;
8164 if (GET_CODE (x) == SYMBOL_REF)
8166 const char *str = XSTR (x, 0);
8167 if (SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC)
8169 cfun->machine->some_ld_name = str;
8177 /* Print an operand. Recognize special options, documented below. */
8180 #define SMALL_DATA_RELOC ((rs6000_sdata == SDATA_EABI) ? "sda21" : "sdarel")
8181 #define SMALL_DATA_REG ((rs6000_sdata == SDATA_EABI) ? 0 : 13)
8183 #define SMALL_DATA_RELOC "sda21"
8184 #define SMALL_DATA_REG 0
8188 print_operand (file, x, code)
8195 unsigned HOST_WIDE_INT uval;
8200 /* Write out an instruction after the call which may be replaced
8201 with glue code by the loader. This depends on the AIX version. */
8202 asm_fprintf (file, RS6000_CALL_GLUE);
8205 /* %a is output_address. */
8208 /* If X is a constant integer whose low-order 5 bits are zero,
8209 write 'l'. Otherwise, write 'r'. This is a kludge to fix a bug
8210 in the AIX assembler where "sri" with a zero shift count
8211 writes a trash instruction. */
8212 if (GET_CODE (x) == CONST_INT && (INTVAL (x) & 31) == 0)
8219 /* If constant, low-order 16 bits of constant, unsigned.
8220 Otherwise, write normally. */
8222 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 0xffff);
8224 print_operand (file, x, 0);
8228 /* If the low-order bit is zero, write 'r'; otherwise, write 'l'
8229 for 64-bit mask direction. */
8230 putc (((INT_LOWPART(x) & 1) == 0 ? 'r' : 'l'), file);
8233 /* %c is output_addr_const if a CONSTANT_ADDRESS_P, otherwise
8237 /* X is a CR register. Print the number of the EQ bit of the CR */
8238 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8239 output_operand_lossage ("invalid %%E value");
8241 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO) + 2);
8245 /* X is a CR register. Print the shift count needed to move it
8246 to the high-order four bits. */
8247 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8248 output_operand_lossage ("invalid %%f value");
8250 fprintf (file, "%d", 4 * (REGNO (x) - CR0_REGNO));
8254 /* Similar, but print the count for the rotate in the opposite
8256 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8257 output_operand_lossage ("invalid %%F value");
8259 fprintf (file, "%d", 32 - 4 * (REGNO (x) - CR0_REGNO));
8263 /* X is a constant integer. If it is negative, print "m",
8264 otherwise print "z". This is to make an aze or ame insn. */
8265 if (GET_CODE (x) != CONST_INT)
8266 output_operand_lossage ("invalid %%G value");
8267 else if (INTVAL (x) >= 0)
8274 /* If constant, output low-order five bits. Otherwise, write
8277 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 31);
8279 print_operand (file, x, 0);
8283 /* If constant, output low-order six bits. Otherwise, write
8286 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INT_LOWPART (x) & 63);
8288 print_operand (file, x, 0);
8292 /* Print `i' if this is a constant, else nothing. */
8298 /* Write the bit number in CCR for jump. */
8301 output_operand_lossage ("invalid %%j code");
8303 fprintf (file, "%d", i);
8307 /* Similar, but add one for shift count in rlinm for scc and pass
8308 scc flag to `ccr_bit'. */
8311 output_operand_lossage ("invalid %%J code");
8313 /* If we want bit 31, write a shift count of zero, not 32. */
8314 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8318 /* X must be a constant. Write the 1's complement of the
8321 output_operand_lossage ("invalid %%k value");
8323 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~ INT_LOWPART (x));
8327 /* X must be a symbolic constant on ELF. Write an
8328 expression suitable for an 'addi' that adds in the low 16
8330 if (GET_CODE (x) != CONST)
8332 print_operand_address (file, x);
8337 if (GET_CODE (XEXP (x, 0)) != PLUS
8338 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
8339 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
8340 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
8341 output_operand_lossage ("invalid %%K value");
8342 print_operand_address (file, XEXP (XEXP (x, 0), 0));
8344 /* For GNU as, there must be a non-alphanumeric character
8345 between 'l' and the number. The '-' is added by
8346 print_operand() already. */
8347 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
8349 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
8353 /* %l is output_asm_label. */
8356 /* Write second word of DImode or DFmode reference. Works on register
8357 or non-indexed memory only. */
8358 if (GET_CODE (x) == REG)
8359 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
8360 else if (GET_CODE (x) == MEM)
8362 /* Handle possible auto-increment. Since it is pre-increment and
8363 we have already done it, we can just use an offset of word. */
8364 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8365 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8366 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
8369 output_address (XEXP (adjust_address_nv (x, SImode,
8373 if (small_data_operand (x, GET_MODE (x)))
8374 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8375 reg_names[SMALL_DATA_REG]);
8380 /* MB value for a mask operand. */
8381 if (! mask_operand (x, SImode))
8382 output_operand_lossage ("invalid %%m value");
8384 fprintf (file, "%d", extract_MB (x));
8388 /* ME value for a mask operand. */
8389 if (! mask_operand (x, SImode))
8390 output_operand_lossage ("invalid %%M value");
8392 fprintf (file, "%d", extract_ME (x));
8395 /* %n outputs the negative of its operand. */
8398 /* Write the number of elements in the vector times 4. */
8399 if (GET_CODE (x) != PARALLEL)
8400 output_operand_lossage ("invalid %%N value");
8402 fprintf (file, "%d", XVECLEN (x, 0) * 4);
8406 /* Similar, but subtract 1 first. */
8407 if (GET_CODE (x) != PARALLEL)
8408 output_operand_lossage ("invalid %%O value");
8410 fprintf (file, "%d", (XVECLEN (x, 0) - 1) * 4);
8414 /* X is a CONST_INT that is a power of two. Output the logarithm. */
8416 || INT_LOWPART (x) < 0
8417 || (i = exact_log2 (INT_LOWPART (x))) < 0)
8418 output_operand_lossage ("invalid %%p value");
8420 fprintf (file, "%d", i);
8424 /* The operand must be an indirect memory reference. The result
8425 is the register number. */
8426 if (GET_CODE (x) != MEM || GET_CODE (XEXP (x, 0)) != REG
8427 || REGNO (XEXP (x, 0)) >= 32)
8428 output_operand_lossage ("invalid %%P value");
8430 fprintf (file, "%d", REGNO (XEXP (x, 0)));
8434 /* This outputs the logical code corresponding to a boolean
8435 expression. The expression may have one or both operands
8436 negated (if one, only the first one). For condition register
8437 logical operations, it will also treat the negated
8438 CR codes as NOTs, but not handle NOTs of them. */
8440 const char *const *t = 0;
8442 enum rtx_code code = GET_CODE (x);
8443 static const char * const tbl[3][3] = {
8444 { "and", "andc", "nor" },
8445 { "or", "orc", "nand" },
8446 { "xor", "eqv", "xor" } };
8450 else if (code == IOR)
8452 else if (code == XOR)
8455 output_operand_lossage ("invalid %%q value");
8457 if (GET_CODE (XEXP (x, 0)) != NOT)
8461 if (GET_CODE (XEXP (x, 1)) == NOT)
8472 /* X is a CR register. Print the mask for `mtcrf'. */
8473 if (GET_CODE (x) != REG || ! CR_REGNO_P (REGNO (x)))
8474 output_operand_lossage ("invalid %%R value");
8476 fprintf (file, "%d", 128 >> (REGNO (x) - CR0_REGNO));
8480 /* Low 5 bits of 32 - value */
8482 output_operand_lossage ("invalid %%s value");
8484 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INT_LOWPART (x)) & 31);
8488 /* PowerPC64 mask position. All 0's is excluded.
8489 CONST_INT 32-bit mask is considered sign-extended so any
8490 transition must occur within the CONST_INT, not on the boundary. */
8491 if (! mask64_operand (x, DImode))
8492 output_operand_lossage ("invalid %%S value");
8494 uval = INT_LOWPART (x);
8496 if (uval & 1) /* Clear Left */
8498 #if HOST_BITS_PER_WIDE_INT > 64
8499 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8503 else /* Clear Right */
8506 #if HOST_BITS_PER_WIDE_INT > 64
8507 uval &= ((unsigned HOST_WIDE_INT) 1 << 64) - 1;
8515 fprintf (file, "%d", i);
8519 /* Like 'J' but get to the OVERFLOW/UNORDERED bit. */
8520 if (GET_CODE (x) != REG || GET_MODE (x) != CCmode)
8523 /* Bit 3 is OV bit. */
8524 i = 4 * (REGNO (x) - CR0_REGNO) + 3;
8526 /* If we want bit 31, write a shift count of zero, not 32. */
8527 fprintf (file, "%d", i == 31 ? 0 : i + 1);
8531 /* Print the symbolic name of a branch target register. */
8532 if (GET_CODE (x) != REG || (REGNO (x) != LINK_REGISTER_REGNUM
8533 && REGNO (x) != COUNT_REGISTER_REGNUM))
8534 output_operand_lossage ("invalid %%T value");
8535 else if (REGNO (x) == LINK_REGISTER_REGNUM)
8536 fputs (TARGET_NEW_MNEMONICS ? "lr" : "r", file);
8538 fputs ("ctr", file);
8542 /* High-order 16 bits of constant for use in unsigned operand. */
8544 output_operand_lossage ("invalid %%u value");
8546 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8547 (INT_LOWPART (x) >> 16) & 0xffff);
8551 /* High-order 16 bits of constant for use in signed operand. */
8553 output_operand_lossage ("invalid %%v value");
8555 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
8556 (INT_LOWPART (x) >> 16) & 0xffff);
8560 /* Print `u' if this has an auto-increment or auto-decrement. */
8561 if (GET_CODE (x) == MEM
8562 && (GET_CODE (XEXP (x, 0)) == PRE_INC
8563 || GET_CODE (XEXP (x, 0)) == PRE_DEC))
8568 /* Print the trap code for this operand. */
8569 switch (GET_CODE (x))
8572 fputs ("eq", file); /* 4 */
8575 fputs ("ne", file); /* 24 */
8578 fputs ("lt", file); /* 16 */
8581 fputs ("le", file); /* 20 */
8584 fputs ("gt", file); /* 8 */
8587 fputs ("ge", file); /* 12 */
8590 fputs ("llt", file); /* 2 */
8593 fputs ("lle", file); /* 6 */
8596 fputs ("lgt", file); /* 1 */
8599 fputs ("lge", file); /* 5 */
8607 /* If constant, low-order 16 bits of constant, signed. Otherwise, write
8610 fprintf (file, HOST_WIDE_INT_PRINT_DEC,
8611 ((INT_LOWPART (x) & 0xffff) ^ 0x8000) - 0x8000);
8613 print_operand (file, x, 0);
8617 /* MB value for a PowerPC64 rldic operand. */
8618 val = (GET_CODE (x) == CONST_INT
8619 ? INTVAL (x) : CONST_DOUBLE_HIGH (x));
8624 for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
8625 if ((val <<= 1) < 0)
8628 #if HOST_BITS_PER_WIDE_INT == 32
8629 if (GET_CODE (x) == CONST_INT && i >= 0)
8630 i += 32; /* zero-extend high-part was all 0's */
8631 else if (GET_CODE (x) == CONST_DOUBLE && i == 32)
8633 val = CONST_DOUBLE_LOW (x);
8640 for ( ; i < 64; i++)
8641 if ((val <<= 1) < 0)
8646 fprintf (file, "%d", i + 1);
8650 if (GET_CODE (x) == MEM
8651 && legitimate_indexed_address_p (XEXP (x, 0), 0))
8656 /* Like 'L', for third word of TImode */
8657 if (GET_CODE (x) == REG)
8658 fprintf (file, "%s", reg_names[REGNO (x) + 2]);
8659 else if (GET_CODE (x) == MEM)
8661 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8662 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8663 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 8));
8665 output_address (XEXP (adjust_address_nv (x, SImode, 8), 0));
8666 if (small_data_operand (x, GET_MODE (x)))
8667 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8668 reg_names[SMALL_DATA_REG]);
8673 /* X is a SYMBOL_REF. Write out the name preceded by a
8674 period and without any trailing data in brackets. Used for function
8675 names. If we are configured for System V (or the embedded ABI) on
8676 the PowerPC, do not emit the period, since those systems do not use
8677 TOCs and the like. */
8678 if (GET_CODE (x) != SYMBOL_REF)
8681 if (XSTR (x, 0)[0] != '.')
8683 switch (DEFAULT_ABI)
8698 RS6000_OUTPUT_BASENAME (file, XSTR (x, 0));
8700 assemble_name (file, XSTR (x, 0));
8704 /* Like 'L', for last word of TImode. */
8705 if (GET_CODE (x) == REG)
8706 fprintf (file, "%s", reg_names[REGNO (x) + 3]);
8707 else if (GET_CODE (x) == MEM)
8709 if (GET_CODE (XEXP (x, 0)) == PRE_INC
8710 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
8711 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 12));
8713 output_address (XEXP (adjust_address_nv (x, SImode, 12), 0));
8714 if (small_data_operand (x, GET_MODE (x)))
8715 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8716 reg_names[SMALL_DATA_REG]);
8720 /* Print AltiVec or SPE memory operand. */
8725 if (GET_CODE (x) != MEM)
8733 if (GET_CODE (tmp) == REG)
8735 fprintf (file, "0(%s)", reg_names[REGNO (tmp)]);
8738 /* Handle [reg+UIMM]. */
8739 else if (GET_CODE (tmp) == PLUS &&
8740 GET_CODE (XEXP (tmp, 1)) == CONST_INT)
8744 if (GET_CODE (XEXP (tmp, 0)) != REG)
8747 x = INTVAL (XEXP (tmp, 1));
8748 fprintf (file, "%d(%s)", x, reg_names[REGNO (XEXP (tmp, 0))]);
8752 /* Fall through. Must be [reg+reg]. */
8754 if (GET_CODE (tmp) == REG)
8755 fprintf (file, "0,%s", reg_names[REGNO (tmp)]);
8756 else if (GET_CODE (tmp) == PLUS && GET_CODE (XEXP (tmp, 1)) == REG)
8758 if (REGNO (XEXP (tmp, 0)) == 0)
8759 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 1)) ],
8760 reg_names[ REGNO (XEXP (tmp, 0)) ]);
8762 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (tmp, 0)) ],
8763 reg_names[ REGNO (XEXP (tmp, 1)) ]);
8771 if (GET_CODE (x) == REG)
8772 fprintf (file, "%s", reg_names[REGNO (x)]);
8773 else if (GET_CODE (x) == MEM)
8775 /* We need to handle PRE_INC and PRE_DEC here, since we need to
8776 know the width from the mode. */
8777 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
8778 fprintf (file, "%d(%s)", GET_MODE_SIZE (GET_MODE (x)),
8779 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8780 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
8781 fprintf (file, "%d(%s)", - GET_MODE_SIZE (GET_MODE (x)),
8782 reg_names[REGNO (XEXP (XEXP (x, 0), 0))]);
8784 output_address (XEXP (x, 0));
8787 output_addr_const (file, x);
8791 assemble_name (file, rs6000_get_some_local_dynamic_name ());
8795 output_operand_lossage ("invalid %%xn code");
8799 /* Print the address of an operand. */
8802 print_operand_address (file, x)
8806 if (GET_CODE (x) == REG)
8807 fprintf (file, "0(%s)", reg_names[ REGNO (x) ]);
8808 else if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == CONST
8809 || GET_CODE (x) == LABEL_REF)
8811 output_addr_const (file, x);
8812 if (small_data_operand (x, GET_MODE (x)))
8813 fprintf (file, "@%s(%s)", SMALL_DATA_RELOC,
8814 reg_names[SMALL_DATA_REG]);
8815 else if (TARGET_TOC)
8818 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG)
8820 if (REGNO (XEXP (x, 0)) == 0)
8821 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 1)) ],
8822 reg_names[ REGNO (XEXP (x, 0)) ]);
8824 fprintf (file, "%s,%s", reg_names[ REGNO (XEXP (x, 0)) ],
8825 reg_names[ REGNO (XEXP (x, 1)) ]);
8827 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
8828 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
8829 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
8831 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8832 && CONSTANT_P (XEXP (x, 1)))
8834 output_addr_const (file, XEXP (x, 1));
8835 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8839 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
8840 && CONSTANT_P (XEXP (x, 1)))
8842 fprintf (file, "lo16(");
8843 output_addr_const (file, XEXP (x, 1));
8844 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
8847 else if (legitimate_constant_pool_address_p (x))
8849 if (TARGET_AIX && (!TARGET_ELF || !TARGET_MINIMAL_TOC))
8851 rtx contains_minus = XEXP (x, 1);
8855 /* Find the (minus (sym) (toc)) buried in X, and temporarily
8856 turn it into (sym) for output_addr_const. */
8857 while (GET_CODE (XEXP (contains_minus, 0)) != MINUS)
8858 contains_minus = XEXP (contains_minus, 0);
8860 minus = XEXP (contains_minus, 0);
8861 symref = XEXP (minus, 0);
8862 XEXP (contains_minus, 0) = symref;
8867 name = XSTR (symref, 0);
8868 newname = alloca (strlen (name) + sizeof ("@toc"));
8869 strcpy (newname, name);
8870 strcat (newname, "@toc");
8871 XSTR (symref, 0) = newname;
8873 output_addr_const (file, XEXP (x, 1));
8875 XSTR (symref, 0) = name;
8876 XEXP (contains_minus, 0) = minus;
8879 output_addr_const (file, XEXP (x, 1));
8881 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
8887 /* Target hook for assembling integer objects. The PowerPC version has
8888 to handle fixup entries for relocatable code if RELOCATABLE_NEEDS_FIXUP
8889 is defined. It also needs to handle DI-mode objects on 64-bit
8893 rs6000_assemble_integer (x, size, aligned_p)
8898 #ifdef RELOCATABLE_NEEDS_FIXUP
8899 /* Special handling for SI values. */
8900 if (size == 4 && aligned_p)
8902 extern int in_toc_section PARAMS ((void));
8903 static int recurse = 0;
8905 /* For -mrelocatable, we mark all addresses that need to be fixed up
8906 in the .fixup section. */
8907 if (TARGET_RELOCATABLE
8908 && !in_toc_section ()
8909 && !in_text_section ()
8911 && GET_CODE (x) != CONST_INT
8912 && GET_CODE (x) != CONST_DOUBLE
8918 ASM_GENERATE_INTERNAL_LABEL (buf, "LCP", fixuplabelno);
8920 ASM_OUTPUT_LABEL (asm_out_file, buf);
8921 fprintf (asm_out_file, "\t.long\t(");
8922 output_addr_const (asm_out_file, x);
8923 fprintf (asm_out_file, ")@fixup\n");
8924 fprintf (asm_out_file, "\t.section\t\".fixup\",\"aw\"\n");
8925 ASM_OUTPUT_ALIGN (asm_out_file, 2);
8926 fprintf (asm_out_file, "\t.long\t");
8927 assemble_name (asm_out_file, buf);
8928 fprintf (asm_out_file, "\n\t.previous\n");
8932 /* Remove initial .'s to turn a -mcall-aixdesc function
8933 address into the address of the descriptor, not the function
8935 else if (GET_CODE (x) == SYMBOL_REF
8936 && XSTR (x, 0)[0] == '.'
8937 && DEFAULT_ABI == ABI_AIX)
8939 const char *name = XSTR (x, 0);
8940 while (*name == '.')
8943 fprintf (asm_out_file, "\t.long\t%s\n", name);
8947 #endif /* RELOCATABLE_NEEDS_FIXUP */
8948 return default_assemble_integer (x, size, aligned_p);
8951 #ifdef HAVE_GAS_HIDDEN
8952 /* Emit an assembler directive to set symbol visibility for DECL to
8956 rs6000_assemble_visibility (decl, vis)
8960 /* Functions need to have their entry point symbol visibility set as
8961 well as their descriptor symbol visibility. */
8962 if (DEFAULT_ABI == ABI_AIX && TREE_CODE (decl) == FUNCTION_DECL)
8964 static const char * const visibility_types[] = {
8965 NULL, "internal", "hidden", "protected"
8968 const char *name, *type;
8970 name = ((* targetm.strip_name_encoding)
8971 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl))));
8972 type = visibility_types[vis];
8974 fprintf (asm_out_file, "\t.%s\t%s\n", type, name);
8975 fprintf (asm_out_file, "\t.%s\t.%s\n", type, name);
8978 default_assemble_visibility (decl, vis);
8983 rs6000_reverse_condition (mode, code)
8984 enum machine_mode mode;
8987 /* Reversal of FP compares takes care -- an ordered compare
8988 becomes an unordered compare and vice versa. */
8989 if (mode == CCFPmode
8990 && (!flag_finite_math_only
8991 || code == UNLT || code == UNLE || code == UNGT || code == UNGE
8992 || code == UNEQ || code == LTGT))
8993 return reverse_condition_maybe_unordered (code);
8995 return reverse_condition (code);
8998 /* Generate a compare for CODE. Return a brand-new rtx that
8999 represents the result of the compare. */
9002 rs6000_generate_compare (code)
9005 enum machine_mode comp_mode;
9008 if (rs6000_compare_fp_p)
9009 comp_mode = CCFPmode;
9010 else if (code == GTU || code == LTU
9011 || code == GEU || code == LEU)
9012 comp_mode = CCUNSmode;
9016 /* First, the compare. */
9017 compare_result = gen_reg_rtx (comp_mode);
9019 /* SPE FP compare instructions on the GPRs. Yuck! */
9020 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT)
9021 && rs6000_compare_fp_p)
9023 rtx cmp, or1, or2, or_result, compare_result2;
9031 cmp = flag_finite_math_only
9032 ? gen_tstsfeq_gpr (compare_result, rs6000_compare_op0,
9034 : gen_cmpsfeq_gpr (compare_result, rs6000_compare_op0,
9035 rs6000_compare_op1);
9043 cmp = flag_finite_math_only
9044 ? gen_tstsfgt_gpr (compare_result, rs6000_compare_op0,
9046 : gen_cmpsfgt_gpr (compare_result, rs6000_compare_op0,
9047 rs6000_compare_op1);
9055 cmp = flag_finite_math_only
9056 ? gen_tstsflt_gpr (compare_result, rs6000_compare_op0,
9058 : gen_cmpsflt_gpr (compare_result, rs6000_compare_op0,
9059 rs6000_compare_op1);
9065 /* Synthesize LE and GE from LT/GT || EQ. */
9066 if (code == LE || code == GE || code == LEU || code == GEU)
9068 /* Synthesize GE/LE frome GT/LT || EQ. */
9074 case LE: code = LT; break;
9075 case GE: code = GT; break;
9076 case LEU: code = LT; break;
9077 case GEU: code = GT; break;
9081 or1 = gen_reg_rtx (SImode);
9082 or2 = gen_reg_rtx (SImode);
9083 or_result = gen_reg_rtx (CCEQmode);
9084 compare_result2 = gen_reg_rtx (CCFPmode);
9087 cmp = flag_finite_math_only
9088 ? gen_tstsfeq_gpr (compare_result2, rs6000_compare_op0,
9090 : gen_cmpsfeq_gpr (compare_result2, rs6000_compare_op0,
9091 rs6000_compare_op1);
9094 /* The MC8540 FP compare instructions set the CR bits
9095 differently than other PPC compare instructions. For
9096 that matter, there is no generic test instruction, but a
9097 testgt, testlt, and testeq. For a true condition, bit 2
9098 is set (x1xx) in the CR. Following the traditional CR
9104 ... bit 2 would be a GT CR alias, so later on we
9105 look in the GT bits for the branch instructions.
9106 However, we must be careful to emit correct RTL in
9107 the meantime, so optimizations don't get confused. */
9109 or1 = gen_rtx (NE, SImode, compare_result, const0_rtx);
9110 or2 = gen_rtx (NE, SImode, compare_result2, const0_rtx);
9112 /* OR them together. */
9113 cmp = gen_rtx_SET (VOIDmode, or_result,
9114 gen_rtx_COMPARE (CCEQmode,
9115 gen_rtx_IOR (SImode, or1, or2),
9117 compare_result = or_result;
9122 /* We only care about 1 bit (x1xx), so map everything to NE to
9123 maintain rtl sanity. We'll get to the right bit (x1xx) at
9124 code output time. */
9125 if (code == NE || code == LTGT)
9126 /* Do the inverse here because we have no cmpne
9127 instruction. We use the cmpeq instruction and expect
9128 to get a 0 instead. */
9137 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
9138 gen_rtx_COMPARE (comp_mode,
9140 rs6000_compare_op1)));
9142 /* Some kinds of FP comparisons need an OR operation;
9143 under flag_finite_math_only we don't bother. */
9144 if (rs6000_compare_fp_p
9145 && ! flag_finite_math_only
9146 && ! (TARGET_HARD_FLOAT && TARGET_E500 && !TARGET_FPRS)
9147 && (code == LE || code == GE
9148 || code == UNEQ || code == LTGT
9149 || code == UNGT || code == UNLT))
9151 enum rtx_code or1, or2;
9152 rtx or1_rtx, or2_rtx, compare2_rtx;
9153 rtx or_result = gen_reg_rtx (CCEQmode);
9157 case LE: or1 = LT; or2 = EQ; break;
9158 case GE: or1 = GT; or2 = EQ; break;
9159 case UNEQ: or1 = UNORDERED; or2 = EQ; break;
9160 case LTGT: or1 = LT; or2 = GT; break;
9161 case UNGT: or1 = UNORDERED; or2 = GT; break;
9162 case UNLT: or1 = UNORDERED; or2 = LT; break;
9165 validate_condition_mode (or1, comp_mode);
9166 validate_condition_mode (or2, comp_mode);
9167 or1_rtx = gen_rtx (or1, SImode, compare_result, const0_rtx);
9168 or2_rtx = gen_rtx (or2, SImode, compare_result, const0_rtx);
9169 compare2_rtx = gen_rtx_COMPARE (CCEQmode,
9170 gen_rtx_IOR (SImode, or1_rtx, or2_rtx),
9172 emit_insn (gen_rtx_SET (VOIDmode, or_result, compare2_rtx));
9174 compare_result = or_result;
9178 validate_condition_mode (code, GET_MODE (compare_result));
9180 return gen_rtx (code, VOIDmode, compare_result, const0_rtx);
9184 /* Emit the RTL for an sCOND pattern. */
9187 rs6000_emit_sCOND (code, result)
9192 enum machine_mode op_mode;
9193 enum rtx_code cond_code;
9195 condition_rtx = rs6000_generate_compare (code);
9196 cond_code = GET_CODE (condition_rtx);
9199 || cond_code == GE || cond_code == LE
9200 || cond_code == GEU || cond_code == LEU
9201 || cond_code == ORDERED || cond_code == UNGE || cond_code == UNLE)
9203 rtx not_result = gen_reg_rtx (CCEQmode);
9204 rtx not_op, rev_cond_rtx;
9205 enum machine_mode cc_mode;
9207 cc_mode = GET_MODE (XEXP (condition_rtx, 0));
9209 rev_cond_rtx = gen_rtx (rs6000_reverse_condition (cc_mode, cond_code),
9210 SImode, XEXP (condition_rtx, 0), const0_rtx);
9211 not_op = gen_rtx_COMPARE (CCEQmode, rev_cond_rtx, const0_rtx);
9212 emit_insn (gen_rtx_SET (VOIDmode, not_result, not_op));
9213 condition_rtx = gen_rtx_EQ (VOIDmode, not_result, const0_rtx);
9216 op_mode = GET_MODE (rs6000_compare_op0);
9217 if (op_mode == VOIDmode)
9218 op_mode = GET_MODE (rs6000_compare_op1);
9220 if (TARGET_POWERPC64 && (op_mode == DImode || rs6000_compare_fp_p))
9222 PUT_MODE (condition_rtx, DImode);
9223 convert_move (result, condition_rtx, 0);
9227 PUT_MODE (condition_rtx, SImode);
9228 emit_insn (gen_rtx_SET (VOIDmode, result, condition_rtx));
9232 /* Emit a branch of kind CODE to location LOC. */
9235 rs6000_emit_cbranch (code, loc)
9239 rtx condition_rtx, loc_ref;
9241 condition_rtx = rs6000_generate_compare (code);
9242 loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
9243 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
9244 gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
9248 /* Return the string to output a conditional branch to LABEL, which is
9249 the operand number of the label, or -1 if the branch is really a
9252 OP is the conditional expression. XEXP (OP, 0) is assumed to be a
9253 condition code register and its mode specifies what kind of
9256 REVERSED is nonzero if we should reverse the sense of the comparison.
9258 INSN is the insn. */
9261 output_cbranch (op, label, reversed, insn)
9267 static char string[64];
9268 enum rtx_code code = GET_CODE (op);
9269 rtx cc_reg = XEXP (op, 0);
9270 enum machine_mode mode = GET_MODE (cc_reg);
9271 int cc_regno = REGNO (cc_reg) - CR0_REGNO;
9272 int need_longbranch = label != NULL && get_attr_length (insn) == 8;
9273 int really_reversed = reversed ^ need_longbranch;
9279 validate_condition_mode (code, mode);
9281 /* Work out which way this really branches. We could use
9282 reverse_condition_maybe_unordered here always but this
9283 makes the resulting assembler clearer. */
9284 if (really_reversed)
9286 /* Reversal of FP compares takes care -- an ordered compare
9287 becomes an unordered compare and vice versa. */
9288 if (mode == CCFPmode)
9289 code = reverse_condition_maybe_unordered (code);
9291 code = reverse_condition (code);
9294 if ((TARGET_E500 && !TARGET_FPRS && TARGET_HARD_FLOAT) && mode == CCFPmode)
9296 /* The efscmp/tst* instructions twiddle bit 2, which maps nicely
9299 /* Opposite of GT. */
9301 else if (code == NE)
9309 /* Not all of these are actually distinct opcodes, but
9310 we distinguish them for clarity of the resulting assembler. */
9312 ccode = "ne"; break;
9314 ccode = "eq"; break;
9316 ccode = "ge"; break;
9317 case GT: case GTU: case UNGT:
9318 ccode = "gt"; break;
9320 ccode = "le"; break;
9321 case LT: case LTU: case UNLT:
9322 ccode = "lt"; break;
9323 case UNORDERED: ccode = "un"; break;
9324 case ORDERED: ccode = "nu"; break;
9325 case UNGE: ccode = "nl"; break;
9326 case UNLE: ccode = "ng"; break;
9331 /* Maybe we have a guess as to how likely the branch is.
9332 The old mnemonics don't have a way to specify this information. */
9334 note = find_reg_note (insn, REG_BR_PROB, NULL_RTX);
9335 if (note != NULL_RTX)
9337 /* PROB is the difference from 50%. */
9338 int prob = INTVAL (XEXP (note, 0)) - REG_BR_PROB_BASE / 2;
9339 bool always_hint = rs6000_cpu != PROCESSOR_POWER4;
9341 /* Only hint for highly probable/improbable branches on newer
9342 cpus as static prediction overrides processor dynamic
9343 prediction. For older cpus we may as well always hint, but
9344 assume not taken for branches that are very close to 50% as a
9345 mispredicted taken branch is more expensive than a
9346 mispredicted not-taken branch. */
9348 || abs (prob) > REG_BR_PROB_BASE / 100 * 48)
9350 if (abs (prob) > REG_BR_PROB_BASE / 20
9351 && ((prob > 0) ^ need_longbranch))
9359 s += sprintf (s, "{b%sr|b%slr%s} ", ccode, ccode, pred);
9361 s += sprintf (s, "{b%s|b%s%s} ", ccode, ccode, pred);
9363 /* We need to escape any '%' characters in the reg_names string.
9364 Assume they'd only be the first character... */
9365 if (reg_names[cc_regno + CR0_REGNO][0] == '%')
9367 s += sprintf (s, "%s", reg_names[cc_regno + CR0_REGNO]);
9371 /* If the branch distance was too far, we may have to use an
9372 unconditional branch to go the distance. */
9373 if (need_longbranch)
9374 s += sprintf (s, ",$+8\n\tb %s", label);
9376 s += sprintf (s, ",%s", label);
9382 /* Emit a conditional move: move TRUE_COND to DEST if OP of the
9383 operands of the last comparison is nonzero/true, FALSE_COND if it
9384 is zero/false. Return 0 if the hardware has no such operation. */
9387 rs6000_emit_cmove (dest, op, true_cond, false_cond)
9393 enum rtx_code code = GET_CODE (op);
9394 rtx op0 = rs6000_compare_op0;
9395 rtx op1 = rs6000_compare_op1;
9397 enum machine_mode compare_mode = GET_MODE (op0);
9398 enum machine_mode result_mode = GET_MODE (dest);
9401 /* These modes should always match. */
9402 if (GET_MODE (op1) != compare_mode
9403 /* In the isel case however, we can use a compare immediate, so
9404 op1 may be a small constant. */
9405 && (!TARGET_ISEL || !short_cint_operand (op1, VOIDmode)))
9407 if (GET_MODE (true_cond) != result_mode)
9409 if (GET_MODE (false_cond) != result_mode)
9412 /* First, work out if the hardware can do this at all, or
9413 if it's too slow... */
9414 if (! rs6000_compare_fp_p)
9417 return rs6000_emit_int_cmove (dest, op, true_cond, false_cond);
9421 /* Eliminate half of the comparisons by switching operands, this
9422 makes the remaining code simpler. */
9423 if (code == UNLT || code == UNGT || code == UNORDERED || code == NE
9424 || code == LTGT || code == LT || code == UNLE)
9426 code = reverse_condition_maybe_unordered (code);
9428 true_cond = false_cond;
9432 /* UNEQ and LTGT take four instructions for a comparison with zero,
9433 it'll probably be faster to use a branch here too. */
9434 if (code == UNEQ && HONOR_NANS (compare_mode))
9437 if (GET_CODE (op1) == CONST_DOUBLE)
9438 REAL_VALUE_FROM_CONST_DOUBLE (c1, op1);
9440 /* We're going to try to implement comparisons by performing
9441 a subtract, then comparing against zero. Unfortunately,
9442 Inf - Inf is NaN which is not zero, and so if we don't
9443 know that the operand is finite and the comparison
9444 would treat EQ different to UNORDERED, we can't do it. */
9445 if (HONOR_INFINITIES (compare_mode)
9446 && code != GT && code != UNGE
9447 && (GET_CODE (op1) != CONST_DOUBLE || real_isinf (&c1))
9448 /* Constructs of the form (a OP b ? a : b) are safe. */
9449 && ((! rtx_equal_p (op0, false_cond) && ! rtx_equal_p (op1, false_cond))
9450 || (! rtx_equal_p (op0, true_cond)
9451 && ! rtx_equal_p (op1, true_cond))))
9453 /* At this point we know we can use fsel. */
9455 /* Reduce the comparison to a comparison against zero. */
9456 temp = gen_reg_rtx (compare_mode);
9457 emit_insn (gen_rtx_SET (VOIDmode, temp,
9458 gen_rtx_MINUS (compare_mode, op0, op1)));
9460 op1 = CONST0_RTX (compare_mode);
9462 /* If we don't care about NaNs we can reduce some of the comparisons
9463 down to faster ones. */
9464 if (! HONOR_NANS (compare_mode))
9470 true_cond = false_cond;
9483 /* Now, reduce everything down to a GE. */
9490 temp = gen_reg_rtx (compare_mode);
9491 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9496 temp = gen_reg_rtx (compare_mode);
9497 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_ABS (compare_mode, op0)));
9502 temp = gen_reg_rtx (compare_mode);
9503 emit_insn (gen_rtx_SET (VOIDmode, temp,
9504 gen_rtx_NEG (compare_mode,
9505 gen_rtx_ABS (compare_mode, op0))));
9510 /* a UNGE 0 <-> (a GE 0 || -a UNLT 0) */
9511 temp = gen_reg_rtx (result_mode);
9512 emit_insn (gen_rtx_SET (VOIDmode, temp,
9513 gen_rtx_IF_THEN_ELSE (result_mode,
9514 gen_rtx_GE (VOIDmode,
9516 true_cond, false_cond)));
9517 false_cond = true_cond;
9520 temp = gen_reg_rtx (compare_mode);
9521 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9526 /* a GT 0 <-> (a GE 0 && -a UNLT 0) */
9527 temp = gen_reg_rtx (result_mode);
9528 emit_insn (gen_rtx_SET (VOIDmode, temp,
9529 gen_rtx_IF_THEN_ELSE (result_mode,
9530 gen_rtx_GE (VOIDmode,
9532 true_cond, false_cond)));
9533 true_cond = false_cond;
9536 temp = gen_reg_rtx (compare_mode);
9537 emit_insn (gen_rtx_SET (VOIDmode, temp, gen_rtx_NEG (compare_mode, op0)));
9545 emit_insn (gen_rtx_SET (VOIDmode, dest,
9546 gen_rtx_IF_THEN_ELSE (result_mode,
9547 gen_rtx_GE (VOIDmode,
9549 true_cond, false_cond)));
9553 /* Same as above, but for ints (isel). */
9556 rs6000_emit_int_cmove (dest, op, true_cond, false_cond)
9562 rtx condition_rtx, cr;
9564 /* All isel implementations thus far are 32-bits. */
9565 if (GET_MODE (rs6000_compare_op0) != SImode)
9568 /* We still have to do the compare, because isel doesn't do a
9569 compare, it just looks at the CRx bits set by a previous compare
9571 condition_rtx = rs6000_generate_compare (GET_CODE (op));
9572 cr = XEXP (condition_rtx, 0);
9574 if (GET_MODE (cr) == CCmode)
9575 emit_insn (gen_isel_signed (dest, condition_rtx,
9576 true_cond, false_cond, cr));
9578 emit_insn (gen_isel_unsigned (dest, condition_rtx,
9579 true_cond, false_cond, cr));
9585 output_isel (operands)
9590 code = GET_CODE (operands[1]);
9591 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
9593 PUT_CODE (operands[1], reverse_condition (code));
9594 return "isel %0,%3,%2,%j1";
9597 return "isel %0,%2,%3,%j1";
9601 rs6000_emit_minmax (dest, code, op0, op1)
9607 enum machine_mode mode = GET_MODE (op0);
9611 if (code == SMAX || code == SMIN)
9616 if (code == SMAX || code == UMAX)
9617 target = emit_conditional_move (dest, c, op0, op1, mode,
9620 target = emit_conditional_move (dest, c, op0, op1, mode,
9622 if (target == NULL_RTX)
9625 emit_move_insn (dest, target);
9628 /* This page contains routines that are used to determine what the
9629 function prologue and epilogue code will do and write them out. */
9631 /* Return the first fixed-point register that is required to be
9632 saved. 32 if none. */
9635 first_reg_to_save ()
9639 /* Find lowest numbered live register. */
9640 for (first_reg = 13; first_reg <= 31; first_reg++)
9641 if (regs_ever_live[first_reg]
9642 && (! call_used_regs[first_reg]
9643 || (first_reg == RS6000_PIC_OFFSET_TABLE_REGNUM
9644 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
9645 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))))
9650 && current_function_uses_pic_offset_table
9651 && first_reg > RS6000_PIC_OFFSET_TABLE_REGNUM)
9652 return RS6000_PIC_OFFSET_TABLE_REGNUM;
9658 /* Similar, for FP regs. */
9661 first_fp_reg_to_save ()
9665 /* Find lowest numbered live register. */
9666 for (first_reg = 14 + 32; first_reg <= 63; first_reg++)
9667 if (regs_ever_live[first_reg])
9673 /* Similar, for AltiVec regs. */
9676 first_altivec_reg_to_save ()
9680 /* Stack frame remains as is unless we are in AltiVec ABI. */
9681 if (! TARGET_ALTIVEC_ABI)
9682 return LAST_ALTIVEC_REGNO + 1;
9684 /* Find lowest numbered live register. */
9685 for (i = FIRST_ALTIVEC_REGNO + 20; i <= LAST_ALTIVEC_REGNO; ++i)
9686 if (regs_ever_live[i])
9692 /* Return a 32-bit mask of the AltiVec registers we need to set in
9693 VRSAVE. Bit n of the return value is 1 if Vn is live. The MSB in
9694 the 32-bit word is 0. */
9697 compute_vrsave_mask ()
9699 unsigned int i, mask = 0;
9701 /* First, find out if we use _any_ altivec registers. */
9702 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
9703 if (regs_ever_live[i])
9704 mask |= ALTIVEC_REG_BIT (i);
9709 /* Next, remove the argument registers from the set. These must
9710 be in the VRSAVE mask set by the caller, so we don't need to add
9711 them in again. More importantly, the mask we compute here is
9712 used to generate CLOBBERs in the set_vrsave insn, and we do not
9713 wish the argument registers to die. */
9714 for (i = cfun->args_info.vregno; i >= ALTIVEC_ARG_MIN_REG; --i)
9715 mask &= ~ALTIVEC_REG_BIT (i);
9717 /* Similarly, remove the return value from the set. */
9720 diddle_return_value (is_altivec_return_reg, &yes);
9722 mask &= ~ALTIVEC_REG_BIT (ALTIVEC_ARG_RETURN);
9729 is_altivec_return_reg (reg, xyes)
9733 bool *yes = (bool *) xyes;
9734 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
9739 /* Calculate the stack information for the current function. This is
9740 complicated by having two separate calling sequences, the AIX calling
9741 sequence and the V.4 calling sequence.
9743 AIX (and Darwin/Mac OS X) stack frames look like:
9745 SP----> +---------------------------------------+
9746 | back chain to caller | 0 0
9747 +---------------------------------------+
9748 | saved CR | 4 8 (8-11)
9749 +---------------------------------------+
9751 +---------------------------------------+
9752 | reserved for compilers | 12 24
9753 +---------------------------------------+
9754 | reserved for binders | 16 32
9755 +---------------------------------------+
9756 | saved TOC pointer | 20 40
9757 +---------------------------------------+
9758 | Parameter save area (P) | 24 48
9759 +---------------------------------------+
9760 | Alloca space (A) | 24+P etc.
9761 +---------------------------------------+
9762 | Local variable space (L) | 24+P+A
9763 +---------------------------------------+
9764 | Float/int conversion temporary (X) | 24+P+A+L
9765 +---------------------------------------+
9766 | Save area for AltiVec registers (W) | 24+P+A+L+X
9767 +---------------------------------------+
9768 | AltiVec alignment padding (Y) | 24+P+A+L+X+W
9769 +---------------------------------------+
9770 | Save area for VRSAVE register (Z) | 24+P+A+L+X+W+Y
9771 +---------------------------------------+
9772 | Save area for GP registers (G) | 24+P+A+X+L+X+W+Y+Z
9773 +---------------------------------------+
9774 | Save area for FP registers (F) | 24+P+A+X+L+X+W+Y+Z+G
9775 +---------------------------------------+
9776 old SP->| back chain to caller's caller |
9777 +---------------------------------------+
9779 The required alignment for AIX configurations is two words (i.e., 8
9783 V.4 stack frames look like:
9785 SP----> +---------------------------------------+
9786 | back chain to caller | 0
9787 +---------------------------------------+
9788 | caller's saved LR | 4
9789 +---------------------------------------+
9790 | Parameter save area (P) | 8
9791 +---------------------------------------+
9792 | Alloca space (A) | 8+P
9793 +---------------------------------------+
9794 | Varargs save area (V) | 8+P+A
9795 +---------------------------------------+
9796 | Local variable space (L) | 8+P+A+V
9797 +---------------------------------------+
9798 | Float/int conversion temporary (X) | 8+P+A+V+L
9799 +---------------------------------------+
9800 | Save area for AltiVec registers (W) | 8+P+A+V+L+X
9801 +---------------------------------------+
9802 | AltiVec alignment padding (Y) | 8+P+A+V+L+X+W
9803 +---------------------------------------+
9804 | Save area for VRSAVE register (Z) | 8+P+A+V+L+X+W+Y
9805 +---------------------------------------+
9806 | SPE: area for 64-bit GP registers |
9807 +---------------------------------------+
9808 | SPE alignment padding |
9809 +---------------------------------------+
9810 | saved CR (C) | 8+P+A+V+L+X+W+Y+Z
9811 +---------------------------------------+
9812 | Save area for GP registers (G) | 8+P+A+V+L+X+W+Y+Z+C
9813 +---------------------------------------+
9814 | Save area for FP registers (F) | 8+P+A+V+L+X+W+Y+Z+C+G
9815 +---------------------------------------+
9816 old SP->| back chain to caller's caller |
9817 +---------------------------------------+
9819 The required alignment for V.4 is 16 bytes, or 8 bytes if -meabi is
9820 given. (But note below and in sysv4.h that we require only 8 and
9821 may round up the size of our stack frame anyways. The historical
9822 reason is early versions of powerpc-linux which didn't properly
9823 align the stack at program startup. A happy side-effect is that
9824 -mno-eabi libraries can be used with -meabi programs.)
9826 The EABI configuration defaults to the V.4 layout. However,
9827 the stack alignment requirements may differ. If -mno-eabi is not
9828 given, the required stack alignment is 8 bytes; if -mno-eabi is
9829 given, the required alignment is 16 bytes. (But see V.4 comment
9832 #ifndef ABI_STACK_BOUNDARY
9833 #define ABI_STACK_BOUNDARY STACK_BOUNDARY
9837 rs6000_stack_info ()
9839 static rs6000_stack_t info, zero_info;
9840 rs6000_stack_t *info_ptr = &info;
9841 int reg_size = TARGET_POWERPC64 ? 8 : 4;
9845 /* Zero all fields portably. */
9850 /* Cache value so we don't rescan instruction chain over and over. */
9851 if (cfun->machine->insn_chain_scanned_p == 0)
9853 cfun->machine->insn_chain_scanned_p = 1;
9854 info_ptr->spe_64bit_regs_used = (int) spe_func_has_64bit_regs_p ();
9858 /* Select which calling sequence. */
9859 info_ptr->abi = DEFAULT_ABI;
9861 /* Calculate which registers need to be saved & save area size. */
9862 info_ptr->first_gp_reg_save = first_reg_to_save ();
9863 /* Assume that we will have to save RS6000_PIC_OFFSET_TABLE_REGNUM,
9864 even if it currently looks like we won't. */
9865 if (((TARGET_TOC && TARGET_MINIMAL_TOC)
9866 || (flag_pic == 1 && DEFAULT_ABI == ABI_V4)
9867 || (flag_pic && DEFAULT_ABI == ABI_DARWIN))
9868 && info_ptr->first_gp_reg_save > RS6000_PIC_OFFSET_TABLE_REGNUM)
9869 info_ptr->gp_size = reg_size * (32 - RS6000_PIC_OFFSET_TABLE_REGNUM);
9871 info_ptr->gp_size = reg_size * (32 - info_ptr->first_gp_reg_save);
9873 /* For the SPE, we have an additional upper 32-bits on each GPR.
9874 Ideally we should save the entire 64-bits only when the upper
9875 half is used in SIMD instructions. Since we only record
9876 registers live (not the size they are used in), this proves
9877 difficult because we'd have to traverse the instruction chain at
9878 the right time, taking reload into account. This is a real pain,
9879 so we opt to save the GPRs in 64-bits always if but one register
9880 gets used in 64-bits. Otherwise, all the registers in the frame
9881 get saved in 32-bits.
9883 So... since when we save all GPRs (except the SP) in 64-bits, the
9884 traditional GP save area will be empty. */
9885 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9886 info_ptr->gp_size = 0;
9888 info_ptr->first_fp_reg_save = first_fp_reg_to_save ();
9889 info_ptr->fp_size = 8 * (64 - info_ptr->first_fp_reg_save);
9891 info_ptr->first_altivec_reg_save = first_altivec_reg_to_save ();
9892 info_ptr->altivec_size = 16 * (LAST_ALTIVEC_REGNO + 1
9893 - info_ptr->first_altivec_reg_save);
9895 /* Does this function call anything? */
9896 info_ptr->calls_p = (! current_function_is_leaf
9897 || cfun->machine->ra_needs_full_frame);
9899 /* Determine if we need to save the link register. */
9900 if (rs6000_ra_ever_killed ()
9901 || (DEFAULT_ABI == ABI_AIX
9902 && current_function_profile
9903 && !TARGET_PROFILE_KERNEL)
9904 #ifdef TARGET_RELOCATABLE
9905 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
9907 || (info_ptr->first_fp_reg_save != 64
9908 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
9909 || info_ptr->first_altivec_reg_save <= LAST_ALTIVEC_REGNO
9910 || (DEFAULT_ABI == ABI_V4 && current_function_calls_alloca)
9911 || (DEFAULT_ABI == ABI_DARWIN
9913 && current_function_uses_pic_offset_table)
9914 || info_ptr->calls_p)
9916 info_ptr->lr_save_p = 1;
9917 regs_ever_live[LINK_REGISTER_REGNUM] = 1;
9920 /* Determine if we need to save the condition code registers. */
9921 if (regs_ever_live[CR2_REGNO]
9922 || regs_ever_live[CR3_REGNO]
9923 || regs_ever_live[CR4_REGNO])
9925 info_ptr->cr_save_p = 1;
9926 if (DEFAULT_ABI == ABI_V4)
9927 info_ptr->cr_size = reg_size;
9930 /* If the current function calls __builtin_eh_return, then we need
9931 to allocate stack space for registers that will hold data for
9932 the exception handler. */
9933 if (current_function_calls_eh_return)
9936 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
9939 /* SPE saves EH registers in 64-bits. */
9940 ehrd_size = i * (TARGET_SPE_ABI
9941 && info_ptr->spe_64bit_regs_used != 0
9942 ? UNITS_PER_SPE_WORD : UNITS_PER_WORD);
9947 /* Determine various sizes. */
9948 info_ptr->reg_size = reg_size;
9949 info_ptr->fixed_size = RS6000_SAVE_AREA;
9950 info_ptr->varargs_size = RS6000_VARARGS_AREA;
9951 info_ptr->vars_size = RS6000_ALIGN (get_frame_size (), 8);
9952 info_ptr->parm_size = RS6000_ALIGN (current_function_outgoing_args_size,
9955 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
9956 info_ptr->spe_gp_size = 8 * (32 - info_ptr->first_gp_reg_save);
9958 info_ptr->spe_gp_size = 0;
9960 if (TARGET_ALTIVEC_ABI && TARGET_ALTIVEC_VRSAVE)
9962 info_ptr->vrsave_mask = compute_vrsave_mask ();
9963 info_ptr->vrsave_size = info_ptr->vrsave_mask ? 4 : 0;
9967 info_ptr->vrsave_mask = 0;
9968 info_ptr->vrsave_size = 0;
9971 /* Calculate the offsets. */
9972 switch (DEFAULT_ABI)
9980 info_ptr->fp_save_offset = - info_ptr->fp_size;
9981 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
9983 if (TARGET_ALTIVEC_ABI)
9985 info_ptr->vrsave_save_offset
9986 = info_ptr->gp_save_offset - info_ptr->vrsave_size;
9988 /* Align stack so vector save area is on a quadword boundary. */
9989 if (info_ptr->altivec_size != 0)
9990 info_ptr->altivec_padding_size
9991 = 16 - (-info_ptr->vrsave_save_offset % 16);
9993 info_ptr->altivec_padding_size = 0;
9995 info_ptr->altivec_save_offset
9996 = info_ptr->vrsave_save_offset
9997 - info_ptr->altivec_padding_size
9998 - info_ptr->altivec_size;
10000 /* Adjust for AltiVec case. */
10001 info_ptr->ehrd_offset = info_ptr->altivec_save_offset - ehrd_size;
10004 info_ptr->ehrd_offset = info_ptr->gp_save_offset - ehrd_size;
10005 info_ptr->cr_save_offset = reg_size; /* first word when 64-bit. */
10006 info_ptr->lr_save_offset = 2*reg_size;
10010 info_ptr->fp_save_offset = - info_ptr->fp_size;
10011 info_ptr->gp_save_offset = info_ptr->fp_save_offset - info_ptr->gp_size;
10012 info_ptr->cr_save_offset = info_ptr->gp_save_offset - info_ptr->cr_size;
10014 if (TARGET_SPE_ABI && info_ptr->spe_64bit_regs_used != 0)
10016 /* Align stack so SPE GPR save area is aligned on a
10017 double-word boundary. */
10018 if (info_ptr->spe_gp_size != 0)
10019 info_ptr->spe_padding_size
10020 = 8 - (-info_ptr->cr_save_offset % 8);
10022 info_ptr->spe_padding_size = 0;
10024 info_ptr->spe_gp_save_offset
10025 = info_ptr->cr_save_offset
10026 - info_ptr->spe_padding_size
10027 - info_ptr->spe_gp_size;
10029 /* Adjust for SPE case. */
10030 info_ptr->toc_save_offset
10031 = info_ptr->spe_gp_save_offset - info_ptr->toc_size;
10033 else if (TARGET_ALTIVEC_ABI)
10035 info_ptr->vrsave_save_offset
10036 = info_ptr->cr_save_offset - info_ptr->vrsave_size;
10038 /* Align stack so vector save area is on a quadword boundary. */
10039 if (info_ptr->altivec_size != 0)
10040 info_ptr->altivec_padding_size
10041 = 16 - (-info_ptr->vrsave_save_offset % 16);
10043 info_ptr->altivec_padding_size = 0;
10045 info_ptr->altivec_save_offset
10046 = info_ptr->vrsave_save_offset
10047 - info_ptr->altivec_padding_size
10048 - info_ptr->altivec_size;
10050 /* Adjust for AltiVec case. */
10051 info_ptr->toc_save_offset
10052 = info_ptr->altivec_save_offset - info_ptr->toc_size;
10055 info_ptr->toc_save_offset = info_ptr->cr_save_offset - info_ptr->toc_size;
10056 info_ptr->ehrd_offset = info_ptr->toc_save_offset - ehrd_size;
10057 info_ptr->lr_save_offset = reg_size;
10061 info_ptr->save_size = RS6000_ALIGN (info_ptr->fp_size
10062 + info_ptr->gp_size
10063 + info_ptr->altivec_size
10064 + info_ptr->altivec_padding_size
10065 + info_ptr->spe_gp_size
10066 + info_ptr->spe_padding_size
10068 + info_ptr->cr_size
10069 + info_ptr->lr_size
10070 + info_ptr->vrsave_size
10071 + info_ptr->toc_size,
10072 (TARGET_ALTIVEC_ABI || ABI_DARWIN)
10075 total_raw_size = (info_ptr->vars_size
10076 + info_ptr->parm_size
10077 + info_ptr->save_size
10078 + info_ptr->varargs_size
10079 + info_ptr->fixed_size);
10081 info_ptr->total_size =
10082 RS6000_ALIGN (total_raw_size, ABI_STACK_BOUNDARY / BITS_PER_UNIT);
10084 /* Determine if we need to allocate any stack frame:
10086 For AIX we need to push the stack if a frame pointer is needed
10087 (because the stack might be dynamically adjusted), if we are
10088 debugging, if we make calls, or if the sum of fp_save, gp_save,
10089 and local variables are more than the space needed to save all
10090 non-volatile registers: 32-bit: 18*8 + 19*4 = 220 or 64-bit: 18*8
10091 + 18*8 = 288 (GPR13 reserved).
10093 For V.4 we don't have the stack cushion that AIX uses, but assume
10094 that the debugger can handle stackless frames. */
10096 if (info_ptr->calls_p)
10097 info_ptr->push_p = 1;
10099 else if (DEFAULT_ABI == ABI_V4)
10100 info_ptr->push_p = total_raw_size > info_ptr->fixed_size;
10102 else if (frame_pointer_needed)
10103 info_ptr->push_p = 1;
10105 else if (TARGET_XCOFF && write_symbols != NO_DEBUG)
10106 info_ptr->push_p = 1;
10110 = total_raw_size - info_ptr->fixed_size > (TARGET_32BIT ? 220 : 288);
10112 /* Zero offsets if we're not saving those registers. */
10113 if (info_ptr->fp_size == 0)
10114 info_ptr->fp_save_offset = 0;
10116 if (info_ptr->gp_size == 0)
10117 info_ptr->gp_save_offset = 0;
10119 if (! TARGET_ALTIVEC_ABI || info_ptr->altivec_size == 0)
10120 info_ptr->altivec_save_offset = 0;
10122 if (! TARGET_ALTIVEC_ABI || info_ptr->vrsave_mask == 0)
10123 info_ptr->vrsave_save_offset = 0;
10125 if (! TARGET_SPE_ABI
10126 || info_ptr->spe_64bit_regs_used == 0
10127 || info_ptr->spe_gp_size == 0)
10128 info_ptr->spe_gp_save_offset = 0;
10130 if (! info_ptr->lr_save_p)
10131 info_ptr->lr_save_offset = 0;
10133 if (! info_ptr->cr_save_p)
10134 info_ptr->cr_save_offset = 0;
10136 if (! info_ptr->toc_save_p)
10137 info_ptr->toc_save_offset = 0;
10142 /* Return true if the current function uses any GPRs in 64-bit SIMD
10146 spe_func_has_64bit_regs_p ()
10150 /* Functions that save and restore all the call-saved registers will
10151 need to save/restore the registers in 64-bits. */
10152 if (current_function_calls_eh_return
10153 || current_function_calls_setjmp
10154 || current_function_has_nonlocal_goto)
10157 insns = get_insns ();
10159 for (insn = NEXT_INSN (insns); insn != NULL_RTX; insn = NEXT_INSN (insn))
10165 i = PATTERN (insn);
10166 if (GET_CODE (i) == SET
10167 && SPE_VECTOR_MODE (GET_MODE (SET_SRC (i))))
10176 debug_stack_info (info)
10177 rs6000_stack_t *info;
10179 const char *abi_string;
10182 info = rs6000_stack_info ();
10184 fprintf (stderr, "\nStack information for function %s:\n",
10185 ((current_function_decl && DECL_NAME (current_function_decl))
10186 ? IDENTIFIER_POINTER (DECL_NAME (current_function_decl))
10191 default: abi_string = "Unknown"; break;
10192 case ABI_NONE: abi_string = "NONE"; break;
10193 case ABI_AIX: abi_string = "AIX"; break;
10194 case ABI_DARWIN: abi_string = "Darwin"; break;
10195 case ABI_V4: abi_string = "V.4"; break;
10198 fprintf (stderr, "\tABI = %5s\n", abi_string);
10200 if (TARGET_ALTIVEC_ABI)
10201 fprintf (stderr, "\tALTIVEC ABI extensions enabled.\n");
10203 if (TARGET_SPE_ABI)
10204 fprintf (stderr, "\tSPE ABI extensions enabled.\n");
10206 if (info->first_gp_reg_save != 32)
10207 fprintf (stderr, "\tfirst_gp_reg_save = %5d\n", info->first_gp_reg_save);
10209 if (info->first_fp_reg_save != 64)
10210 fprintf (stderr, "\tfirst_fp_reg_save = %5d\n", info->first_fp_reg_save);
10212 if (info->first_altivec_reg_save <= LAST_ALTIVEC_REGNO)
10213 fprintf (stderr, "\tfirst_altivec_reg_save = %5d\n",
10214 info->first_altivec_reg_save);
10216 if (info->lr_save_p)
10217 fprintf (stderr, "\tlr_save_p = %5d\n", info->lr_save_p);
10219 if (info->cr_save_p)
10220 fprintf (stderr, "\tcr_save_p = %5d\n", info->cr_save_p);
10222 if (info->toc_save_p)
10223 fprintf (stderr, "\ttoc_save_p = %5d\n", info->toc_save_p);
10225 if (info->vrsave_mask)
10226 fprintf (stderr, "\tvrsave_mask = 0x%x\n", info->vrsave_mask);
10229 fprintf (stderr, "\tpush_p = %5d\n", info->push_p);
10232 fprintf (stderr, "\tcalls_p = %5d\n", info->calls_p);
10234 if (info->gp_save_offset)
10235 fprintf (stderr, "\tgp_save_offset = %5d\n", info->gp_save_offset);
10237 if (info->fp_save_offset)
10238 fprintf (stderr, "\tfp_save_offset = %5d\n", info->fp_save_offset);
10240 if (info->altivec_save_offset)
10241 fprintf (stderr, "\taltivec_save_offset = %5d\n",
10242 info->altivec_save_offset);
10244 if (info->spe_gp_save_offset)
10245 fprintf (stderr, "\tspe_gp_save_offset = %5d\n",
10246 info->spe_gp_save_offset);
10248 if (info->vrsave_save_offset)
10249 fprintf (stderr, "\tvrsave_save_offset = %5d\n",
10250 info->vrsave_save_offset);
10252 if (info->lr_save_offset)
10253 fprintf (stderr, "\tlr_save_offset = %5d\n", info->lr_save_offset);
10255 if (info->cr_save_offset)
10256 fprintf (stderr, "\tcr_save_offset = %5d\n", info->cr_save_offset);
10258 if (info->toc_save_offset)
10259 fprintf (stderr, "\ttoc_save_offset = %5d\n", info->toc_save_offset);
10261 if (info->varargs_save_offset)
10262 fprintf (stderr, "\tvarargs_save_offset = %5d\n", info->varargs_save_offset);
10264 if (info->total_size)
10265 fprintf (stderr, "\ttotal_size = %5d\n", info->total_size);
10267 if (info->varargs_size)
10268 fprintf (stderr, "\tvarargs_size = %5d\n", info->varargs_size);
10270 if (info->vars_size)
10271 fprintf (stderr, "\tvars_size = %5d\n", info->vars_size);
10273 if (info->parm_size)
10274 fprintf (stderr, "\tparm_size = %5d\n", info->parm_size);
10276 if (info->fixed_size)
10277 fprintf (stderr, "\tfixed_size = %5d\n", info->fixed_size);
10280 fprintf (stderr, "\tgp_size = %5d\n", info->gp_size);
10282 if (info->spe_gp_size)
10283 fprintf (stderr, "\tspe_gp_size = %5d\n", info->spe_gp_size);
10286 fprintf (stderr, "\tfp_size = %5d\n", info->fp_size);
10288 if (info->altivec_size)
10289 fprintf (stderr, "\taltivec_size = %5d\n", info->altivec_size);
10291 if (info->vrsave_size)
10292 fprintf (stderr, "\tvrsave_size = %5d\n", info->vrsave_size);
10294 if (info->altivec_padding_size)
10295 fprintf (stderr, "\taltivec_padding_size= %5d\n",
10296 info->altivec_padding_size);
10298 if (info->spe_padding_size)
10299 fprintf (stderr, "\tspe_padding_size = %5d\n",
10300 info->spe_padding_size);
10303 fprintf (stderr, "\tlr_size = %5d\n", info->lr_size);
10306 fprintf (stderr, "\tcr_size = %5d\n", info->cr_size);
10308 if (info->toc_size)
10309 fprintf (stderr, "\ttoc_size = %5d\n", info->toc_size);
10311 if (info->save_size)
10312 fprintf (stderr, "\tsave_size = %5d\n", info->save_size);
10314 if (info->reg_size != 4)
10315 fprintf (stderr, "\treg_size = %5d\n", info->reg_size);
10317 fprintf (stderr, "\n");
10321 rs6000_return_addr (count, frame)
10325 /* Currently we don't optimize very well between prolog and body
10326 code and for PIC code the code can be actually quite bad, so
10327 don't try to be too clever here. */
10328 if (count != 0 || (DEFAULT_ABI != ABI_AIX && flag_pic))
10330 cfun->machine->ra_needs_full_frame = 1;
10337 plus_constant (copy_to_reg
10338 (gen_rtx_MEM (Pmode,
10339 memory_address (Pmode, frame))),
10340 RETURN_ADDRESS_OFFSET)));
10343 return get_hard_reg_initial_val (Pmode, LINK_REGISTER_REGNUM);
10346 /* Say whether a function is a candidate for sibcall handling or not.
10347 We do not allow indirect calls to be optimized into sibling calls.
10348 Also, we can't do it if there are any vector parameters; there's
10349 nowhere to put the VRsave code so it works; note that functions with
10350 vector parameters are required to have a prototype, so the argument
10351 type info must be available here. (The tail recursion case can work
10352 with vector parameters, but there's no way to distinguish here.) */
10354 rs6000_function_ok_for_sibcall (decl, exp)
10356 tree exp ATTRIBUTE_UNUSED;
10361 if (TARGET_ALTIVEC_VRSAVE)
10363 for (type = TYPE_ARG_TYPES (TREE_TYPE (decl));
10364 type; type = TREE_CHAIN (type))
10366 if (TREE_CODE (TREE_VALUE (type)) == VECTOR_TYPE)
10370 if (DEFAULT_ABI == ABI_DARWIN
10371 || (*targetm.binds_local_p) (decl))
10373 tree attr_list = TYPE_ATTRIBUTES (TREE_TYPE (decl));
10375 if (!lookup_attribute ("longcall", attr_list)
10376 || lookup_attribute ("shortcall", attr_list))
10384 rs6000_ra_ever_killed ()
10390 /* Irritatingly, there are two kinds of thunks -- those created with
10391 TARGET_ASM_OUTPUT_MI_THUNK and those with DECL_THUNK_P that go
10392 through the regular part of the compiler. This is a very hacky
10393 way to tell them apart. */
10394 if (current_function_is_thunk && !no_new_pseudos)
10397 /* regs_ever_live has LR marked as used if any sibcalls are present,
10398 but this should not force saving and restoring in the
10399 pro/epilogue. Likewise, reg_set_between_p thinks a sibcall
10400 clobbers LR, so that is inappropriate. */
10402 /* Also, the prologue can generate a store into LR that
10403 doesn't really count, like this:
10406 bcl to set PIC register
10410 When we're called from the epilogue, we need to avoid counting
10411 this as a store. */
10413 push_topmost_sequence ();
10414 top = get_insns ();
10415 pop_topmost_sequence ();
10416 reg = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
10418 for (insn = NEXT_INSN (top); insn != NULL_RTX; insn = NEXT_INSN (insn))
10422 if (FIND_REG_INC_NOTE (insn, reg))
10424 else if (GET_CODE (insn) == CALL_INSN
10425 && !SIBLING_CALL_P (insn))
10427 else if (set_of (reg, insn) != NULL_RTX
10428 && !prologue_epilogue_contains (insn))
10435 /* Add a REG_MAYBE_DEAD note to the insn. */
10437 rs6000_maybe_dead (insn)
10440 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD,
10445 /* Emit instructions needed to load the TOC register.
10446 This is only needed when TARGET_TOC, TARGET_MINIMAL_TOC, and there is
10447 a constant pool; or for SVR4 -fpic. */
10450 rs6000_emit_load_toc_table (fromprolog)
10454 dest = gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM);
10456 if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
10458 rtx temp = (fromprolog
10459 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10460 : gen_reg_rtx (Pmode));
10461 insn = emit_insn (gen_load_toc_v4_pic_si (temp));
10463 rs6000_maybe_dead (insn);
10464 insn = emit_move_insn (dest, temp);
10466 rs6000_maybe_dead (insn);
10468 else if (TARGET_ELF && DEFAULT_ABI != ABI_AIX && flag_pic == 2)
10471 rtx tempLR = (fromprolog
10472 ? gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)
10473 : gen_reg_rtx (Pmode));
10474 rtx temp0 = (fromprolog
10475 ? gen_rtx_REG (Pmode, 0)
10476 : gen_reg_rtx (Pmode));
10479 /* possibly create the toc section */
10480 if (! toc_initialized)
10483 function_section (current_function_decl);
10490 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
10491 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10493 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
10494 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10496 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_1 (tempLR,
10498 rs6000_maybe_dead (emit_move_insn (dest, tempLR));
10499 rs6000_maybe_dead (emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest,
10506 static int reload_toc_labelno = 0;
10508 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
10510 ASM_GENERATE_INTERNAL_LABEL (buf, "LCG", reload_toc_labelno++);
10511 symF = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10513 emit_insn (gen_load_toc_v4_PIC_1b (tempLR, symF, tocsym));
10514 emit_move_insn (dest, tempLR);
10515 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
10517 insn = emit_insn (gen_addsi3 (dest, temp0, dest));
10519 rs6000_maybe_dead (insn);
10521 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
10523 /* This is for AIX code running in non-PIC ELF32. */
10526 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
10527 realsym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
10529 insn = emit_insn (gen_elf_high (dest, realsym));
10531 rs6000_maybe_dead (insn);
10532 insn = emit_insn (gen_elf_low (dest, dest, realsym));
10534 rs6000_maybe_dead (insn);
10536 else if (DEFAULT_ABI == ABI_AIX)
10539 insn = emit_insn (gen_load_toc_aix_si (dest));
10541 insn = emit_insn (gen_load_toc_aix_di (dest));
10543 rs6000_maybe_dead (insn);
10550 get_TOC_alias_set ()
10552 static int set = -1;
10554 set = new_alias_set ();
10558 /* This retuns nonzero if the current function uses the TOC. This is
10559 determined by the presence of (unspec ... UNSPEC_TOC) or
10560 use (unspec ... UNSPEC_TOC), which are generated by the various
10561 load_toc_* patterns. */
10568 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
10571 rtx pat = PATTERN (insn);
10574 if (GET_CODE (pat) == PARALLEL)
10575 for (i = 0; i < XVECLEN (pat, 0); i++)
10577 rtx sub = XVECEXP (pat, 0, i);
10578 if (GET_CODE (sub) == USE)
10580 sub = XEXP (sub, 0);
10581 if (GET_CODE (sub) == UNSPEC
10582 && XINT (sub, 1) == UNSPEC_TOC)
10591 create_TOC_reference (symbol)
10594 return gen_rtx_PLUS (Pmode,
10595 gen_rtx_REG (Pmode, TOC_REGISTER),
10596 gen_rtx_CONST (Pmode,
10597 gen_rtx_MINUS (Pmode, symbol,
10598 gen_rtx_SYMBOL_REF (Pmode, toc_label_name))));
10601 /* __throw will restore its own return address to be the same as the
10602 return address of the function that the throw is being made to.
10603 This is unfortunate, because we want to check the original
10604 return address to see if we need to restore the TOC.
10605 So we have to squirrel it away here.
10606 This is used only in compiling __throw and __rethrow.
10608 Most of this code should be removed by CSE. */
10609 static rtx insn_after_throw;
10611 /* This does the saving... */
10613 rs6000_aix_emit_builtin_unwind_init ()
10616 rtx stack_top = gen_reg_rtx (Pmode);
10617 rtx opcode_addr = gen_reg_rtx (Pmode);
10619 insn_after_throw = gen_reg_rtx (SImode);
10621 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10622 emit_move_insn (stack_top, mem);
10624 mem = gen_rtx_MEM (Pmode,
10625 gen_rtx_PLUS (Pmode, stack_top,
10626 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10627 emit_move_insn (opcode_addr, mem);
10628 emit_move_insn (insn_after_throw, gen_rtx_MEM (SImode, opcode_addr));
10631 /* Emit insns to _restore_ the TOC register, at runtime (specifically
10632 in _eh.o). Only used on AIX.
10634 The idea is that on AIX, function calls look like this:
10635 bl somefunction-trampoline
10639 somefunction-trampoline:
10641 ... load function address in the count register ...
10643 or like this, if the linker determines that this is not a cross-module call
10644 and so the TOC need not be restored:
10647 or like this, if the compiler could determine that this is not a
10650 now, the tricky bit here is that register 2 is saved and restored
10651 by the _linker_, so we can't readily generate debugging information
10652 for it. So we need to go back up the call chain looking at the
10653 insns at return addresses to see which calls saved the TOC register
10654 and so see where it gets restored from.
10656 Oh, and all this gets done in RTL inside the eh_epilogue pattern,
10657 just before the actual epilogue.
10659 On the bright side, this incurs no space or time overhead unless an
10660 exception is thrown, except for the extra code in libgcc.a.
10662 The parameter STACKSIZE is a register containing (at runtime)
10663 the amount to be popped off the stack in addition to the stack frame
10664 of this routine (which will be __throw or __rethrow, and so is
10665 guaranteed to have a stack frame). */
10668 rs6000_emit_eh_toc_restore (stacksize)
10672 rtx bottom_of_stack = gen_reg_rtx (Pmode);
10673 rtx tocompare = gen_reg_rtx (SImode);
10674 rtx opcode = gen_reg_rtx (SImode);
10675 rtx opcode_addr = gen_reg_rtx (Pmode);
10677 rtx loop_start = gen_label_rtx ();
10678 rtx no_toc_restore_needed = gen_label_rtx ();
10679 rtx loop_exit = gen_label_rtx ();
10681 mem = gen_rtx_MEM (Pmode, hard_frame_pointer_rtx);
10682 set_mem_alias_set (mem, rs6000_sr_alias_set);
10683 emit_move_insn (bottom_of_stack, mem);
10685 top_of_stack = expand_binop (Pmode, add_optab,
10686 bottom_of_stack, stacksize,
10687 NULL_RTX, 1, OPTAB_WIDEN);
10689 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
10690 : 0xE8410028, SImode));
10692 if (insn_after_throw == NULL_RTX)
10694 emit_move_insn (opcode, insn_after_throw);
10696 emit_note (NULL, NOTE_INSN_LOOP_BEG);
10697 emit_label (loop_start);
10699 do_compare_rtx_and_jump (opcode, tocompare, NE, 1,
10700 SImode, NULL_RTX, NULL_RTX,
10701 no_toc_restore_needed);
10703 mem = gen_rtx_MEM (Pmode,
10704 gen_rtx_PLUS (Pmode, bottom_of_stack,
10705 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
10706 emit_move_insn (gen_rtx_REG (Pmode, 2), mem);
10708 emit_label (no_toc_restore_needed);
10709 do_compare_rtx_and_jump (top_of_stack, bottom_of_stack, EQ, 1,
10710 Pmode, NULL_RTX, NULL_RTX,
10713 mem = gen_rtx_MEM (Pmode, bottom_of_stack);
10714 set_mem_alias_set (mem, rs6000_sr_alias_set);
10715 emit_move_insn (bottom_of_stack, mem);
10717 mem = gen_rtx_MEM (Pmode,
10718 gen_rtx_PLUS (Pmode, bottom_of_stack,
10719 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
10720 emit_move_insn (opcode_addr, mem);
10721 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
10723 emit_note (NULL, NOTE_INSN_LOOP_CONT);
10724 emit_jump (loop_start);
10725 emit_note (NULL, NOTE_INSN_LOOP_END);
10726 emit_label (loop_exit);
10729 /* This ties together stack memory (MEM with an alias set of
10730 rs6000_sr_alias_set) and the change to the stack pointer. */
10733 rs6000_emit_stack_tie ()
10735 rtx mem = gen_rtx_MEM (BLKmode, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM));
10737 set_mem_alias_set (mem, rs6000_sr_alias_set);
10738 emit_insn (gen_stack_tie (mem));
10741 /* Emit the correct code for allocating stack space, as insns.
10742 If COPY_R12, make sure a copy of the old frame is left in r12.
10743 The generated code may use hard register 0 as a temporary. */
10746 rs6000_emit_allocate_stack (size, copy_r12)
10747 HOST_WIDE_INT size;
10751 rtx stack_reg = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
10752 rtx tmp_reg = gen_rtx_REG (Pmode, 0);
10753 rtx todec = GEN_INT (-size);
10755 if (current_function_limit_stack)
10757 if (REG_P (stack_limit_rtx)
10758 && REGNO (stack_limit_rtx) > 1
10759 && REGNO (stack_limit_rtx) <= 31)
10761 emit_insn (TARGET_32BIT
10762 ? gen_addsi3 (tmp_reg,
10765 : gen_adddi3 (tmp_reg,
10769 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10772 else if (GET_CODE (stack_limit_rtx) == SYMBOL_REF
10774 && DEFAULT_ABI == ABI_V4)
10776 rtx toload = gen_rtx_CONST (VOIDmode,
10777 gen_rtx_PLUS (Pmode,
10781 emit_insn (gen_elf_high (tmp_reg, toload));
10782 emit_insn (gen_elf_low (tmp_reg, tmp_reg, toload));
10783 emit_insn (gen_cond_trap (LTU, stack_reg, tmp_reg,
10787 warning ("stack limit expression is not supported");
10790 if (copy_r12 || ! TARGET_UPDATE)
10791 emit_move_insn (gen_rtx_REG (Pmode, 12), stack_reg);
10797 /* Need a note here so that try_split doesn't get confused. */
10798 if (get_last_insn() == NULL_RTX)
10799 emit_note (0, NOTE_INSN_DELETED);
10800 insn = emit_move_insn (tmp_reg, todec);
10801 try_split (PATTERN (insn), insn, 0);
10805 insn = emit_insn (TARGET_32BIT
10806 ? gen_movsi_update (stack_reg, stack_reg,
10808 : gen_movdi_update (stack_reg, stack_reg,
10809 todec, stack_reg));
10813 insn = emit_insn (TARGET_32BIT
10814 ? gen_addsi3 (stack_reg, stack_reg, todec)
10815 : gen_adddi3 (stack_reg, stack_reg, todec));
10816 emit_move_insn (gen_rtx_MEM (Pmode, stack_reg),
10817 gen_rtx_REG (Pmode, 12));
10820 RTX_FRAME_RELATED_P (insn) = 1;
10822 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10823 gen_rtx_SET (VOIDmode, stack_reg,
10824 gen_rtx_PLUS (Pmode, stack_reg,
10829 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
10830 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
10831 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
10832 deduce these equivalences by itself so it wasn't necessary to hold
10833 its hand so much. */
10836 rs6000_frame_related (insn, reg, val, reg2, rreg)
10845 /* copy_rtx will not make unique copies of registers, so we need to
10846 ensure we don't have unwanted sharing here. */
10848 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10851 reg = gen_raw_REG (GET_MODE (reg), REGNO (reg));
10853 real = copy_rtx (PATTERN (insn));
10855 if (reg2 != NULL_RTX)
10856 real = replace_rtx (real, reg2, rreg);
10858 real = replace_rtx (real, reg,
10859 gen_rtx_PLUS (Pmode, gen_rtx_REG (Pmode,
10860 STACK_POINTER_REGNUM),
10863 /* We expect that 'real' is either a SET or a PARALLEL containing
10864 SETs (and possibly other stuff). In a PARALLEL, all the SETs
10865 are important so they all have to be marked RTX_FRAME_RELATED_P. */
10867 if (GET_CODE (real) == SET)
10871 temp = simplify_rtx (SET_SRC (set));
10873 SET_SRC (set) = temp;
10874 temp = simplify_rtx (SET_DEST (set));
10876 SET_DEST (set) = temp;
10877 if (GET_CODE (SET_DEST (set)) == MEM)
10879 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10881 XEXP (SET_DEST (set), 0) = temp;
10884 else if (GET_CODE (real) == PARALLEL)
10887 for (i = 0; i < XVECLEN (real, 0); i++)
10888 if (GET_CODE (XVECEXP (real, 0, i)) == SET)
10890 rtx set = XVECEXP (real, 0, i);
10892 temp = simplify_rtx (SET_SRC (set));
10894 SET_SRC (set) = temp;
10895 temp = simplify_rtx (SET_DEST (set));
10897 SET_DEST (set) = temp;
10898 if (GET_CODE (SET_DEST (set)) == MEM)
10900 temp = simplify_rtx (XEXP (SET_DEST (set), 0));
10902 XEXP (SET_DEST (set), 0) = temp;
10904 RTX_FRAME_RELATED_P (set) = 1;
10911 real = spe_synthesize_frame_save (real);
10913 RTX_FRAME_RELATED_P (insn) = 1;
10914 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
10919 /* Given an SPE frame note, return a PARALLEL of SETs with the
10920 original note, plus a synthetic register save. */
10923 spe_synthesize_frame_save (real)
10926 rtx synth, offset, reg, real2;
10928 if (GET_CODE (real) != SET
10929 || GET_MODE (SET_SRC (real)) != V2SImode)
10932 /* For the SPE, registers saved in 64-bits, get a PARALLEL for their
10933 frame related note. The parallel contains a set of the register
10934 being saved, and another set to a synthetic register (n+1200).
10935 This is so we can differentiate between 64-bit and 32-bit saves.
10936 Words cannot describe this nastiness. */
10938 if (GET_CODE (SET_DEST (real)) != MEM
10939 || GET_CODE (XEXP (SET_DEST (real), 0)) != PLUS
10940 || GET_CODE (SET_SRC (real)) != REG)
10944 (set (mem (plus (reg x) (const y)))
10947 (set (mem (plus (reg x) (const y+4)))
10951 real2 = copy_rtx (real);
10952 PUT_MODE (SET_DEST (real2), SImode);
10953 reg = SET_SRC (real2);
10954 real2 = replace_rtx (real2, reg, gen_rtx_REG (SImode, REGNO (reg)));
10955 synth = copy_rtx (real2);
10957 if (BYTES_BIG_ENDIAN)
10959 offset = XEXP (XEXP (SET_DEST (real2), 0), 1);
10960 real2 = replace_rtx (real2, offset, GEN_INT (INTVAL (offset) + 4));
10963 reg = SET_SRC (synth);
10965 synth = replace_rtx (synth, reg,
10966 gen_rtx_REG (SImode, REGNO (reg) + 1200));
10968 offset = XEXP (XEXP (SET_DEST (synth), 0), 1);
10969 synth = replace_rtx (synth, offset,
10970 GEN_INT (INTVAL (offset)
10971 + (BYTES_BIG_ENDIAN ? 0 : 4)));
10973 RTX_FRAME_RELATED_P (synth) = 1;
10974 RTX_FRAME_RELATED_P (real2) = 1;
10975 if (BYTES_BIG_ENDIAN)
10976 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, synth, real2));
10978 real = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, real2, synth));
10983 /* Returns an insn that has a vrsave set operation with the
10984 appropriate CLOBBERs. */
10987 generate_set_vrsave (reg, info, epiloguep)
10989 rs6000_stack_t *info;
10993 rtx insn, clobs[TOTAL_ALTIVEC_REGS + 1];
10994 rtx vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
10997 = gen_rtx_SET (VOIDmode,
10999 gen_rtx_UNSPEC_VOLATILE (SImode,
11000 gen_rtvec (2, reg, vrsave),
11005 /* We need to clobber the registers in the mask so the scheduler
11006 does not move sets to VRSAVE before sets of AltiVec registers.
11008 However, if the function receives nonlocal gotos, reload will set
11009 all call saved registers live. We will end up with:
11011 (set (reg 999) (mem))
11012 (parallel [ (set (reg vrsave) (unspec blah))
11013 (clobber (reg 999))])
11015 The clobber will cause the store into reg 999 to be dead, and
11016 flow will attempt to delete an epilogue insn. In this case, we
11017 need an unspec use/set of the register. */
11019 for (i = FIRST_ALTIVEC_REGNO; i <= LAST_ALTIVEC_REGNO; ++i)
11020 if (info->vrsave_mask != 0 && ALTIVEC_REG_BIT (i) != 0)
11022 if (!epiloguep || call_used_regs [i])
11023 clobs[nclobs++] = gen_rtx_CLOBBER (VOIDmode,
11024 gen_rtx_REG (V4SImode, i));
11027 rtx reg = gen_rtx_REG (V4SImode, i);
11030 = gen_rtx_SET (VOIDmode,
11032 gen_rtx_UNSPEC (V4SImode,
11033 gen_rtvec (1, reg), 27));
11037 insn = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nclobs));
11039 for (i = 0; i < nclobs; ++i)
11040 XVECEXP (insn, 0, i) = clobs[i];
11045 /* Save a register into the frame, and emit RTX_FRAME_RELATED_P notes.
11046 Save REGNO into [FRAME_REG + OFFSET] in mode MODE. */
11049 emit_frame_save (frame_reg, frame_ptr, mode, regno, offset, total_size)
11052 enum machine_mode mode;
11053 unsigned int regno;
11057 rtx reg, offset_rtx, insn, mem, addr, int_rtx;
11058 rtx replacea, replaceb;
11060 int_rtx = GEN_INT (offset);
11062 /* Some cases that need register indexed addressing. */
11063 if ((TARGET_ALTIVEC_ABI && ALTIVEC_VECTOR_MODE (mode))
11065 && SPE_VECTOR_MODE (mode)
11066 && !SPE_CONST_OFFSET_OK (offset)))
11068 /* Whomever calls us must make sure r11 is available in the
11069 flow path of instructions in the prologue. */
11070 offset_rtx = gen_rtx_REG (Pmode, 11);
11071 emit_move_insn (offset_rtx, int_rtx);
11073 replacea = offset_rtx;
11074 replaceb = int_rtx;
11078 offset_rtx = int_rtx;
11079 replacea = NULL_RTX;
11080 replaceb = NULL_RTX;
11083 reg = gen_rtx_REG (mode, regno);
11084 addr = gen_rtx_PLUS (Pmode, frame_reg, offset_rtx);
11085 mem = gen_rtx_MEM (mode, addr);
11086 set_mem_alias_set (mem, rs6000_sr_alias_set);
11088 insn = emit_move_insn (mem, reg);
11090 rs6000_frame_related (insn, frame_ptr, total_size, replacea, replaceb);
11093 /* Emit an offset memory reference suitable for a frame store, while
11094 converting to a valid addressing mode. */
11097 gen_frame_mem_offset (mode, reg, offset)
11098 enum machine_mode mode;
11102 rtx int_rtx, offset_rtx;
11104 int_rtx = GEN_INT (offset);
11106 if (TARGET_SPE_ABI && SPE_VECTOR_MODE (mode))
11108 offset_rtx = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11109 emit_move_insn (offset_rtx, int_rtx);
11112 offset_rtx = int_rtx;
11114 return gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, reg, offset_rtx));
11117 /* Emit function prologue as insns. */
11120 rs6000_emit_prologue ()
11122 rs6000_stack_t *info = rs6000_stack_info ();
11123 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11124 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11125 rtx sp_reg_rtx = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
11126 rtx frame_ptr_rtx = gen_rtx_REG (Pmode, 12);
11127 rtx frame_reg_rtx = sp_reg_rtx;
11128 rtx cr_save_rtx = NULL;
11130 int saving_FPRs_inline;
11131 int using_store_multiple;
11132 HOST_WIDE_INT sp_offset = 0;
11134 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11136 reg_mode = V2SImode;
11140 using_store_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11141 && (!TARGET_SPE_ABI
11142 || info->spe_64bit_regs_used == 0)
11143 && info->first_gp_reg_save < 31);
11144 saving_FPRs_inline = (info->first_fp_reg_save == 64
11145 || FP_SAVE_INLINE (info->first_fp_reg_save));
11147 /* For V.4, update stack before we do any saving and set back pointer. */
11148 if (info->push_p && DEFAULT_ABI == ABI_V4)
11150 if (info->total_size < 32767)
11151 sp_offset = info->total_size;
11153 frame_reg_rtx = frame_ptr_rtx;
11154 rs6000_emit_allocate_stack (info->total_size,
11155 (frame_reg_rtx != sp_reg_rtx
11156 && (info->cr_save_p
11158 || info->first_fp_reg_save < 64
11159 || info->first_gp_reg_save < 32
11161 if (frame_reg_rtx != sp_reg_rtx)
11162 rs6000_emit_stack_tie ();
11165 /* Save AltiVec registers if needed. */
11166 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11170 /* There should be a non inline version of this, for when we
11171 are saving lots of vector registers. */
11172 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11173 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11175 rtx areg, savereg, mem;
11178 offset = info->altivec_save_offset + sp_offset
11179 + 16 * (i - info->first_altivec_reg_save);
11181 savereg = gen_rtx_REG (V4SImode, i);
11183 areg = gen_rtx_REG (Pmode, 0);
11184 emit_move_insn (areg, GEN_INT (offset));
11186 /* AltiVec addressing mode is [reg+reg]. */
11187 mem = gen_rtx_MEM (V4SImode,
11188 gen_rtx_PLUS (Pmode, frame_reg_rtx, areg));
11190 set_mem_alias_set (mem, rs6000_sr_alias_set);
11192 insn = emit_move_insn (mem, savereg);
11194 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11195 areg, GEN_INT (offset));
11199 /* VRSAVE is a bit vector representing which AltiVec registers
11200 are used. The OS uses this to determine which vector
11201 registers to save on a context switch. We need to save
11202 VRSAVE on the stack frame, add whatever AltiVec registers we
11203 used in this function, and do the corresponding magic in the
11206 if (TARGET_ALTIVEC && info->vrsave_mask != 0)
11208 rtx reg, mem, vrsave;
11211 /* Get VRSAVE onto a GPR. */
11212 reg = gen_rtx_REG (SImode, 12);
11213 vrsave = gen_rtx_REG (SImode, VRSAVE_REGNO);
11215 emit_insn (gen_get_vrsave_internal (reg));
11217 emit_insn (gen_rtx_SET (VOIDmode, reg, vrsave));
11220 offset = info->vrsave_save_offset + sp_offset;
11222 = gen_rtx_MEM (SImode,
11223 gen_rtx_PLUS (Pmode, frame_reg_rtx, GEN_INT (offset)));
11224 set_mem_alias_set (mem, rs6000_sr_alias_set);
11225 insn = emit_move_insn (mem, reg);
11227 /* Include the registers in the mask. */
11228 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
11230 insn = emit_insn (generate_set_vrsave (reg, info, 0));
11233 /* If we use the link register, get it into r0. */
11234 if (info->lr_save_p)
11235 emit_move_insn (gen_rtx_REG (Pmode, 0),
11236 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11238 /* If we need to save CR, put it into r12. */
11239 if (info->cr_save_p && frame_reg_rtx != frame_ptr_rtx)
11241 cr_save_rtx = gen_rtx_REG (SImode, 12);
11242 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11245 /* Do any required saving of fpr's. If only one or two to save, do
11246 it ourselves. Otherwise, call function. */
11247 if (saving_FPRs_inline)
11250 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11251 if ((regs_ever_live[info->first_fp_reg_save+i]
11252 && ! call_used_regs[info->first_fp_reg_save+i]))
11253 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, DFmode,
11254 info->first_fp_reg_save + i,
11255 info->fp_save_offset + sp_offset + 8 * i,
11258 else if (info->first_fp_reg_save != 64)
11262 const char *alloc_rname;
11264 p = rtvec_alloc (2 + 64 - info->first_fp_reg_save);
11266 RTVEC_ELT (p, 0) = gen_rtx_CLOBBER (VOIDmode,
11267 gen_rtx_REG (Pmode,
11268 LINK_REGISTER_REGNUM));
11269 sprintf (rname, "%s%d%s", SAVE_FP_PREFIX,
11270 info->first_fp_reg_save - 32, SAVE_FP_SUFFIX);
11271 alloc_rname = ggc_strdup (rname);
11272 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11273 gen_rtx_SYMBOL_REF (Pmode,
11275 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11277 rtx addr, reg, mem;
11278 reg = gen_rtx_REG (DFmode, info->first_fp_reg_save + i);
11279 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11280 GEN_INT (info->fp_save_offset
11281 + sp_offset + 8*i));
11282 mem = gen_rtx_MEM (DFmode, addr);
11283 set_mem_alias_set (mem, rs6000_sr_alias_set);
11285 RTVEC_ELT (p, i + 2) = gen_rtx_SET (VOIDmode, mem, reg);
11287 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11288 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11289 NULL_RTX, NULL_RTX);
11292 /* Save GPRs. This is done as a PARALLEL if we are using
11293 the store-multiple instructions. */
11294 if (using_store_multiple)
11298 p = rtvec_alloc (32 - info->first_gp_reg_save);
11299 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11301 rtx addr, reg, mem;
11302 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11303 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11304 GEN_INT (info->gp_save_offset
11307 mem = gen_rtx_MEM (reg_mode, addr);
11308 set_mem_alias_set (mem, rs6000_sr_alias_set);
11310 RTVEC_ELT (p, i) = gen_rtx_SET (VOIDmode, mem, reg);
11312 insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11313 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11314 NULL_RTX, NULL_RTX);
11319 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11320 if ((regs_ever_live[info->first_gp_reg_save+i]
11321 && ! call_used_regs[info->first_gp_reg_save+i])
11322 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11323 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11324 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11326 rtx addr, reg, mem;
11327 reg = gen_rtx_REG (reg_mode, info->first_gp_reg_save + i);
11329 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11331 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11334 if (!SPE_CONST_OFFSET_OK (offset))
11336 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11337 emit_move_insn (b, GEN_INT (offset));
11340 b = GEN_INT (offset);
11342 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11343 mem = gen_rtx_MEM (V2SImode, addr);
11344 set_mem_alias_set (mem, rs6000_sr_alias_set);
11345 insn = emit_move_insn (mem, reg);
11347 if (GET_CODE (b) == CONST_INT)
11348 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11349 NULL_RTX, NULL_RTX);
11351 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11352 b, GEN_INT (offset));
11356 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11357 GEN_INT (info->gp_save_offset
11360 mem = gen_rtx_MEM (reg_mode, addr);
11361 set_mem_alias_set (mem, rs6000_sr_alias_set);
11363 insn = emit_move_insn (mem, reg);
11364 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11365 NULL_RTX, NULL_RTX);
11370 /* ??? There's no need to emit actual instructions here, but it's the
11371 easiest way to get the frame unwind information emitted. */
11372 if (current_function_calls_eh_return)
11374 unsigned int i, regno;
11378 regno = EH_RETURN_DATA_REGNO (i);
11379 if (regno == INVALID_REGNUM)
11382 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
11383 info->ehrd_offset + sp_offset
11384 + reg_size * (int) i,
11389 /* Save lr if we used it. */
11390 if (info->lr_save_p)
11392 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11393 GEN_INT (info->lr_save_offset + sp_offset));
11394 rtx reg = gen_rtx_REG (Pmode, 0);
11395 rtx mem = gen_rtx_MEM (Pmode, addr);
11396 /* This should not be of rs6000_sr_alias_set, because of
11397 __builtin_return_address. */
11399 insn = emit_move_insn (mem, reg);
11400 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11401 reg, gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM));
11404 /* Save CR if we use any that must be preserved. */
11405 if (info->cr_save_p)
11407 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11408 GEN_INT (info->cr_save_offset + sp_offset));
11409 rtx mem = gen_rtx_MEM (SImode, addr);
11411 set_mem_alias_set (mem, rs6000_sr_alias_set);
11413 /* If r12 was used to hold the original sp, copy cr into r0 now
11415 if (REGNO (frame_reg_rtx) == 12)
11417 cr_save_rtx = gen_rtx_REG (SImode, 0);
11418 emit_insn (gen_movesi_from_cr (cr_save_rtx));
11420 insn = emit_move_insn (mem, cr_save_rtx);
11422 /* Now, there's no way that dwarf2out_frame_debug_expr is going
11423 to understand '(unspec:SI [(reg:CC 68) ...] UNSPEC_MOVESI_FROM_CR)'.
11424 But that's OK. All we have to do is specify that _one_ condition
11425 code register is saved in this stack slot. The thrower's epilogue
11426 will then restore all the call-saved registers.
11427 We use CR2_REGNO (70) to be compatible with gcc-2.95 on Linux. */
11428 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
11429 cr_save_rtx, gen_rtx_REG (SImode, CR2_REGNO));
11432 /* Update stack and set back pointer unless this is V.4,
11433 for which it was done previously. */
11434 if (info->push_p && DEFAULT_ABI != ABI_V4)
11435 rs6000_emit_allocate_stack (info->total_size, FALSE);
11437 /* Set frame pointer, if needed. */
11438 if (frame_pointer_needed)
11440 insn = emit_move_insn (gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM),
11442 RTX_FRAME_RELATED_P (insn) = 1;
11445 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
11446 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
11447 || (DEFAULT_ABI == ABI_V4 && flag_pic == 1
11448 && regs_ever_live[RS6000_PIC_OFFSET_TABLE_REGNUM]))
11450 /* If emit_load_toc_table will use the link register, we need to save
11451 it. We use R12 for this purpose because emit_load_toc_table
11452 can use register 0. This allows us to use a plain 'blr' to return
11453 from the procedure more often. */
11454 int save_LR_around_toc_setup = (TARGET_ELF
11455 && DEFAULT_ABI != ABI_AIX
11457 && ! info->lr_save_p
11458 && EXIT_BLOCK_PTR->pred != NULL);
11459 if (save_LR_around_toc_setup)
11461 rtx lr = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11462 rs6000_maybe_dead (emit_move_insn (frame_ptr_rtx, lr));
11463 rs6000_emit_load_toc_table (TRUE);
11464 rs6000_maybe_dead (emit_move_insn (lr, frame_ptr_rtx));
11467 rs6000_emit_load_toc_table (TRUE);
11471 if (DEFAULT_ABI == ABI_DARWIN
11472 && flag_pic && current_function_uses_pic_offset_table)
11474 rtx dest = gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM);
11475 const char *picbase = machopic_function_base_name ();
11476 rtx src = gen_rtx_SYMBOL_REF (Pmode, picbase);
11478 rs6000_maybe_dead (emit_insn (gen_load_macho_picbase (dest, src)));
11480 rs6000_maybe_dead (
11481 emit_move_insn (gen_rtx_REG (Pmode, RS6000_PIC_OFFSET_TABLE_REGNUM),
11482 gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM)));
11487 /* Write function prologue. */
11490 rs6000_output_function_prologue (file, size)
11492 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11494 rs6000_stack_t *info = rs6000_stack_info ();
11496 if (TARGET_DEBUG_STACK)
11497 debug_stack_info (info);
11499 /* Write .extern for any function we will call to save and restore
11501 if (info->first_fp_reg_save < 64
11502 && !FP_SAVE_INLINE (info->first_fp_reg_save))
11503 fprintf (file, "\t.extern %s%d%s\n\t.extern %s%d%s\n",
11504 SAVE_FP_PREFIX, info->first_fp_reg_save - 32, SAVE_FP_SUFFIX,
11505 RESTORE_FP_PREFIX, info->first_fp_reg_save - 32,
11506 RESTORE_FP_SUFFIX);
11508 /* Write .extern for AIX common mode routines, if needed. */
11509 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
11511 fputs ("\t.extern __mulh\n", file);
11512 fputs ("\t.extern __mull\n", file);
11513 fputs ("\t.extern __divss\n", file);
11514 fputs ("\t.extern __divus\n", file);
11515 fputs ("\t.extern __quoss\n", file);
11516 fputs ("\t.extern __quous\n", file);
11517 common_mode_defined = 1;
11520 if (! HAVE_prologue)
11524 /* A NOTE_INSN_DELETED is supposed to be at the start and end of
11525 the "toplevel" insn chain. */
11526 emit_note (0, NOTE_INSN_DELETED);
11527 rs6000_emit_prologue ();
11528 emit_note (0, NOTE_INSN_DELETED);
11530 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11534 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11536 INSN_ADDRESSES_NEW (insn, addr);
11541 if (TARGET_DEBUG_STACK)
11542 debug_rtx_list (get_insns (), 100);
11543 final (get_insns (), file, FALSE, FALSE);
11547 rs6000_pic_labelno++;
11550 /* Emit function epilogue as insns.
11552 At present, dwarf2out_frame_debug_expr doesn't understand
11553 register restores, so we don't bother setting RTX_FRAME_RELATED_P
11554 anywhere in the epilogue. Most of the insns below would in any case
11555 need special notes to explain where r11 is in relation to the stack. */
11558 rs6000_emit_epilogue (sibcall)
11561 rs6000_stack_t *info;
11562 int restoring_FPRs_inline;
11563 int using_load_multiple;
11564 int using_mfcr_multiple;
11565 int use_backchain_to_restore_sp;
11567 rtx sp_reg_rtx = gen_rtx_REG (Pmode, 1);
11568 rtx frame_reg_rtx = sp_reg_rtx;
11569 enum machine_mode reg_mode = TARGET_POWERPC64 ? DImode : SImode;
11570 int reg_size = TARGET_POWERPC64 ? 8 : 4;
11573 info = rs6000_stack_info ();
11575 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11577 reg_mode = V2SImode;
11581 using_load_multiple = (TARGET_MULTIPLE && ! TARGET_POWERPC64
11582 && (!TARGET_SPE_ABI
11583 || info->spe_64bit_regs_used == 0)
11584 && info->first_gp_reg_save < 31);
11585 restoring_FPRs_inline = (sibcall
11586 || current_function_calls_eh_return
11587 || info->first_fp_reg_save == 64
11588 || FP_SAVE_INLINE (info->first_fp_reg_save));
11589 use_backchain_to_restore_sp = (frame_pointer_needed
11590 || current_function_calls_alloca
11591 || info->total_size > 32767);
11592 using_mfcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
11593 || rs6000_cpu == PROCESSOR_PPC603
11594 || rs6000_cpu == PROCESSOR_PPC750
11597 /* If we have a frame pointer, a call to alloca, or a large stack
11598 frame, restore the old stack pointer using the backchain. Otherwise,
11599 we know what size to update it with. */
11600 if (use_backchain_to_restore_sp)
11602 /* Under V.4, don't reset the stack pointer until after we're done
11603 loading the saved registers. */
11604 if (DEFAULT_ABI == ABI_V4)
11605 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
11607 emit_move_insn (frame_reg_rtx,
11608 gen_rtx_MEM (Pmode, sp_reg_rtx));
11611 else if (info->push_p)
11613 if (DEFAULT_ABI == ABI_V4)
11614 sp_offset = info->total_size;
11617 emit_insn (TARGET_32BIT
11618 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11619 GEN_INT (info->total_size))
11620 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11621 GEN_INT (info->total_size)));
11625 /* Restore AltiVec registers if needed. */
11626 if (TARGET_ALTIVEC_ABI && info->altivec_size != 0)
11630 for (i = info->first_altivec_reg_save; i <= LAST_ALTIVEC_REGNO; ++i)
11631 if (info->vrsave_mask & ALTIVEC_REG_BIT (i))
11633 rtx addr, areg, mem;
11635 areg = gen_rtx_REG (Pmode, 0);
11637 (areg, GEN_INT (info->altivec_save_offset
11639 + 16 * (i - info->first_altivec_reg_save)));
11641 /* AltiVec addressing mode is [reg+reg]. */
11642 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, areg);
11643 mem = gen_rtx_MEM (V4SImode, addr);
11644 set_mem_alias_set (mem, rs6000_sr_alias_set);
11646 emit_move_insn (gen_rtx_REG (V4SImode, i), mem);
11650 /* Restore VRSAVE if needed. */
11651 if (TARGET_ALTIVEC_ABI && info->vrsave_mask != 0)
11653 rtx addr, mem, reg;
11655 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11656 GEN_INT (info->vrsave_save_offset + sp_offset));
11657 mem = gen_rtx_MEM (SImode, addr);
11658 set_mem_alias_set (mem, rs6000_sr_alias_set);
11659 reg = gen_rtx_REG (SImode, 12);
11660 emit_move_insn (reg, mem);
11662 emit_insn (generate_set_vrsave (reg, info, 1));
11665 /* Get the old lr if we saved it. */
11666 if (info->lr_save_p)
11668 rtx mem = gen_frame_mem_offset (Pmode, frame_reg_rtx,
11669 info->lr_save_offset + sp_offset);
11671 set_mem_alias_set (mem, rs6000_sr_alias_set);
11673 emit_move_insn (gen_rtx_REG (Pmode, 0), mem);
11676 /* Get the old cr if we saved it. */
11677 if (info->cr_save_p)
11679 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11680 GEN_INT (info->cr_save_offset + sp_offset));
11681 rtx mem = gen_rtx_MEM (SImode, addr);
11683 set_mem_alias_set (mem, rs6000_sr_alias_set);
11685 emit_move_insn (gen_rtx_REG (SImode, 12), mem);
11688 /* Set LR here to try to overlap restores below. */
11689 if (info->lr_save_p)
11690 emit_move_insn (gen_rtx_REG (Pmode, LINK_REGISTER_REGNUM),
11691 gen_rtx_REG (Pmode, 0));
11693 /* Load exception handler data registers, if needed. */
11694 if (current_function_calls_eh_return)
11696 unsigned int i, regno;
11702 regno = EH_RETURN_DATA_REGNO (i);
11703 if (regno == INVALID_REGNUM)
11706 mem = gen_frame_mem_offset (reg_mode, frame_reg_rtx,
11707 info->ehrd_offset + sp_offset
11708 + reg_size * (int) i);
11709 set_mem_alias_set (mem, rs6000_sr_alias_set);
11711 emit_move_insn (gen_rtx_REG (reg_mode, regno), mem);
11715 /* Restore GPRs. This is done as a PARALLEL if we are using
11716 the load-multiple instructions. */
11717 if (using_load_multiple)
11720 p = rtvec_alloc (32 - info->first_gp_reg_save);
11721 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11723 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11724 GEN_INT (info->gp_save_offset
11727 rtx mem = gen_rtx_MEM (reg_mode, addr);
11729 set_mem_alias_set (mem, rs6000_sr_alias_set);
11732 gen_rtx_SET (VOIDmode,
11733 gen_rtx_REG (reg_mode, info->first_gp_reg_save + i),
11736 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11739 for (i = 0; i < 32 - info->first_gp_reg_save; i++)
11740 if ((regs_ever_live[info->first_gp_reg_save+i]
11741 && ! call_used_regs[info->first_gp_reg_save+i])
11742 || (i+info->first_gp_reg_save == RS6000_PIC_OFFSET_TABLE_REGNUM
11743 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
11744 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))))
11746 rtx addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11747 GEN_INT (info->gp_save_offset
11750 rtx mem = gen_rtx_MEM (reg_mode, addr);
11752 /* Restore 64-bit quantities for SPE. */
11753 if (TARGET_SPE_ABI && info->spe_64bit_regs_used != 0)
11755 int offset = info->spe_gp_save_offset + sp_offset + 8 * i;
11758 if (!SPE_CONST_OFFSET_OK (offset))
11760 b = gen_rtx_REG (Pmode, FIXED_SCRATCH);
11761 emit_move_insn (b, GEN_INT (offset));
11764 b = GEN_INT (offset);
11766 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, b);
11767 mem = gen_rtx_MEM (V2SImode, addr);
11770 set_mem_alias_set (mem, rs6000_sr_alias_set);
11772 emit_move_insn (gen_rtx_REG (reg_mode,
11773 info->first_gp_reg_save + i), mem);
11776 /* Restore fpr's if we need to do it without calling a function. */
11777 if (restoring_FPRs_inline)
11778 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11779 if ((regs_ever_live[info->first_fp_reg_save+i]
11780 && ! call_used_regs[info->first_fp_reg_save+i]))
11783 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
11784 GEN_INT (info->fp_save_offset
11787 mem = gen_rtx_MEM (DFmode, addr);
11788 set_mem_alias_set (mem, rs6000_sr_alias_set);
11790 emit_move_insn (gen_rtx_REG (DFmode,
11791 info->first_fp_reg_save + i),
11795 /* If we saved cr, restore it here. Just those that were used. */
11796 if (info->cr_save_p)
11798 rtx r12_rtx = gen_rtx_REG (SImode, 12);
11801 if (using_mfcr_multiple)
11803 for (i = 0; i < 8; i++)
11804 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11810 if (using_mfcr_multiple && count > 1)
11815 p = rtvec_alloc (count);
11818 for (i = 0; i < 8; i++)
11819 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11821 rtvec r = rtvec_alloc (2);
11822 RTVEC_ELT (r, 0) = r12_rtx;
11823 RTVEC_ELT (r, 1) = GEN_INT (1 << (7-i));
11824 RTVEC_ELT (p, ndx) =
11825 gen_rtx_SET (VOIDmode, gen_rtx_REG (CCmode, CR0_REGNO+i),
11826 gen_rtx_UNSPEC (CCmode, r, UNSPEC_MOVESI_TO_CR));
11829 emit_insn (gen_rtx_PARALLEL (VOIDmode, p));
11834 for (i = 0; i < 8; i++)
11835 if (regs_ever_live[CR0_REGNO+i] && ! call_used_regs[CR0_REGNO+i])
11837 emit_insn (gen_movsi_to_cr_one (gen_rtx_REG (CCmode,
11843 /* If this is V.4, unwind the stack pointer after all of the loads
11844 have been done. We need to emit a block here so that sched
11845 doesn't decide to move the sp change before the register restores
11846 (which may not have any obvious dependency on the stack). This
11847 doesn't hurt performance, because there is no scheduling that can
11848 be done after this point. */
11849 if (DEFAULT_ABI == ABI_V4)
11851 if (frame_reg_rtx != sp_reg_rtx)
11852 rs6000_emit_stack_tie ();
11854 if (use_backchain_to_restore_sp)
11856 emit_move_insn (sp_reg_rtx, frame_reg_rtx);
11858 else if (sp_offset != 0)
11860 emit_insn (TARGET_32BIT
11861 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx,
11862 GEN_INT (sp_offset))
11863 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx,
11864 GEN_INT (sp_offset)));
11868 if (current_function_calls_eh_return)
11870 rtx sa = EH_RETURN_STACKADJ_RTX;
11871 emit_insn (TARGET_32BIT
11872 ? gen_addsi3 (sp_reg_rtx, sp_reg_rtx, sa)
11873 : gen_adddi3 (sp_reg_rtx, sp_reg_rtx, sa));
11879 if (! restoring_FPRs_inline)
11880 p = rtvec_alloc (3 + 64 - info->first_fp_reg_save);
11882 p = rtvec_alloc (2);
11884 RTVEC_ELT (p, 0) = gen_rtx_RETURN (VOIDmode);
11885 RTVEC_ELT (p, 1) = gen_rtx_USE (VOIDmode,
11886 gen_rtx_REG (Pmode,
11887 LINK_REGISTER_REGNUM));
11889 /* If we have to restore more than two FP registers, branch to the
11890 restore function. It will return to our caller. */
11891 if (! restoring_FPRs_inline)
11895 const char *alloc_rname;
11897 sprintf (rname, "%s%d%s", RESTORE_FP_PREFIX,
11898 info->first_fp_reg_save - 32, RESTORE_FP_SUFFIX);
11899 alloc_rname = ggc_strdup (rname);
11900 RTVEC_ELT (p, 2) = gen_rtx_USE (VOIDmode,
11901 gen_rtx_SYMBOL_REF (Pmode,
11904 for (i = 0; i < 64 - info->first_fp_reg_save; i++)
11907 addr = gen_rtx_PLUS (Pmode, sp_reg_rtx,
11908 GEN_INT (info->fp_save_offset + 8*i));
11909 mem = gen_rtx_MEM (DFmode, addr);
11910 set_mem_alias_set (mem, rs6000_sr_alias_set);
11912 RTVEC_ELT (p, i+3) =
11913 gen_rtx_SET (VOIDmode,
11914 gen_rtx_REG (DFmode, info->first_fp_reg_save + i),
11919 emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, p));
11923 /* Write function epilogue. */
11926 rs6000_output_function_epilogue (file, size)
11928 HOST_WIDE_INT size ATTRIBUTE_UNUSED;
11930 rs6000_stack_t *info = rs6000_stack_info ();
11932 if (! HAVE_epilogue)
11934 rtx insn = get_last_insn ();
11935 /* If the last insn was a BARRIER, we don't have to write anything except
11936 the trace table. */
11937 if (GET_CODE (insn) == NOTE)
11938 insn = prev_nonnote_insn (insn);
11939 if (insn == 0 || GET_CODE (insn) != BARRIER)
11941 /* This is slightly ugly, but at least we don't have two
11942 copies of the epilogue-emitting code. */
11945 /* A NOTE_INSN_DELETED is supposed to be at the start
11946 and end of the "toplevel" insn chain. */
11947 emit_note (0, NOTE_INSN_DELETED);
11948 rs6000_emit_epilogue (FALSE);
11949 emit_note (0, NOTE_INSN_DELETED);
11951 /* Expand INSN_ADDRESSES so final() doesn't crash. */
11955 for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
11957 INSN_ADDRESSES_NEW (insn, addr);
11962 if (TARGET_DEBUG_STACK)
11963 debug_rtx_list (get_insns (), 100);
11964 final (get_insns (), file, FALSE, FALSE);
11969 /* Output a traceback table here. See /usr/include/sys/debug.h for info
11972 We don't output a traceback table if -finhibit-size-directive was
11973 used. The documentation for -finhibit-size-directive reads
11974 ``don't output a @code{.size} assembler directive, or anything
11975 else that would cause trouble if the function is split in the
11976 middle, and the two halves are placed at locations far apart in
11977 memory.'' The traceback table has this property, since it
11978 includes the offset from the start of the function to the
11979 traceback table itself.
11981 System V.4 Powerpc's (and the embedded ABI derived from it) use a
11982 different traceback table. */
11983 if (DEFAULT_ABI == ABI_AIX && ! flag_inhibit_size_directive
11984 && rs6000_traceback != traceback_none)
11986 const char *fname = NULL;
11987 const char *language_string = lang_hooks.name;
11988 int fixed_parms = 0, float_parms = 0, parm_info = 0;
11990 int optional_tbtab;
11992 if (rs6000_traceback == traceback_full)
11993 optional_tbtab = 1;
11994 else if (rs6000_traceback == traceback_part)
11995 optional_tbtab = 0;
11997 optional_tbtab = !optimize_size && !TARGET_ELF;
11999 if (optional_tbtab)
12001 fname = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
12002 while (*fname == '.') /* V.4 encodes . in the name */
12005 /* Need label immediately before tbtab, so we can compute
12006 its offset from the function start. */
12007 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12008 ASM_OUTPUT_LABEL (file, fname);
12011 /* The .tbtab pseudo-op can only be used for the first eight
12012 expressions, since it can't handle the possibly variable
12013 length fields that follow. However, if you omit the optional
12014 fields, the assembler outputs zeros for all optional fields
12015 anyways, giving each variable length field is minimum length
12016 (as defined in sys/debug.h). Thus we can not use the .tbtab
12017 pseudo-op at all. */
12019 /* An all-zero word flags the start of the tbtab, for debuggers
12020 that have to find it by searching forward from the entry
12021 point or from the current pc. */
12022 fputs ("\t.long 0\n", file);
12024 /* Tbtab format type. Use format type 0. */
12025 fputs ("\t.byte 0,", file);
12027 /* Language type. Unfortunately, there doesn't seem to be any
12028 official way to get this info, so we use language_string. C
12029 is 0. C++ is 9. No number defined for Obj-C, so use the
12030 value for C for now. There is no official value for Java,
12031 although IBM appears to be using 13. There is no official value
12032 for Chill, so we've chosen 44 pseudo-randomly. */
12033 if (! strcmp (language_string, "GNU C")
12034 || ! strcmp (language_string, "GNU Objective-C"))
12036 else if (! strcmp (language_string, "GNU F77"))
12038 else if (! strcmp (language_string, "GNU Ada"))
12040 else if (! strcmp (language_string, "GNU Pascal"))
12042 else if (! strcmp (language_string, "GNU C++"))
12044 else if (! strcmp (language_string, "GNU Java"))
12046 else if (! strcmp (language_string, "GNU CHILL"))
12050 fprintf (file, "%d,", i);
12052 /* 8 single bit fields: global linkage (not set for C extern linkage,
12053 apparently a PL/I convention?), out-of-line epilogue/prologue, offset
12054 from start of procedure stored in tbtab, internal function, function
12055 has controlled storage, function has no toc, function uses fp,
12056 function logs/aborts fp operations. */
12057 /* Assume that fp operations are used if any fp reg must be saved. */
12058 fprintf (file, "%d,",
12059 (optional_tbtab << 5) | ((info->first_fp_reg_save != 64) << 1));
12061 /* 6 bitfields: function is interrupt handler, name present in
12062 proc table, function calls alloca, on condition directives
12063 (controls stack walks, 3 bits), saves condition reg, saves
12065 /* The `function calls alloca' bit seems to be set whenever reg 31 is
12066 set up as a frame pointer, even when there is no alloca call. */
12067 fprintf (file, "%d,",
12068 ((optional_tbtab << 6)
12069 | ((optional_tbtab & frame_pointer_needed) << 5)
12070 | (info->cr_save_p << 1)
12071 | (info->lr_save_p)));
12073 /* 3 bitfields: saves backchain, fixup code, number of fpr saved
12075 fprintf (file, "%d,",
12076 (info->push_p << 7) | (64 - info->first_fp_reg_save));
12078 /* 2 bitfields: spare bits (2 bits), number of gpr saved (6 bits). */
12079 fprintf (file, "%d,", (32 - first_reg_to_save ()));
12081 if (optional_tbtab)
12083 /* Compute the parameter info from the function decl argument
12086 int next_parm_info_bit = 31;
12088 for (decl = DECL_ARGUMENTS (current_function_decl);
12089 decl; decl = TREE_CHAIN (decl))
12091 rtx parameter = DECL_INCOMING_RTL (decl);
12092 enum machine_mode mode = GET_MODE (parameter);
12094 if (GET_CODE (parameter) == REG)
12096 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
12102 if (mode == SFmode)
12104 else if (mode == DFmode || mode == TFmode)
12109 /* If only one bit will fit, don't or in this entry. */
12110 if (next_parm_info_bit > 0)
12111 parm_info |= (bits << (next_parm_info_bit - 1));
12112 next_parm_info_bit -= 2;
12116 fixed_parms += ((GET_MODE_SIZE (mode)
12117 + (UNITS_PER_WORD - 1))
12119 next_parm_info_bit -= 1;
12125 /* Number of fixed point parameters. */
12126 /* This is actually the number of words of fixed point parameters; thus
12127 an 8 byte struct counts as 2; and thus the maximum value is 8. */
12128 fprintf (file, "%d,", fixed_parms);
12130 /* 2 bitfields: number of floating point parameters (7 bits), parameters
12132 /* This is actually the number of fp registers that hold parameters;
12133 and thus the maximum value is 13. */
12134 /* Set parameters on stack bit if parameters are not in their original
12135 registers, regardless of whether they are on the stack? Xlc
12136 seems to set the bit when not optimizing. */
12137 fprintf (file, "%d\n", ((float_parms << 1) | (! optimize)));
12139 if (! optional_tbtab)
12142 /* Optional fields follow. Some are variable length. */
12144 /* Parameter types, left adjusted bit fields: 0 fixed, 10 single float,
12145 11 double float. */
12146 /* There is an entry for each parameter in a register, in the order that
12147 they occur in the parameter list. Any intervening arguments on the
12148 stack are ignored. If the list overflows a long (max possible length
12149 34 bits) then completely leave off all elements that don't fit. */
12150 /* Only emit this long if there was at least one parameter. */
12151 if (fixed_parms || float_parms)
12152 fprintf (file, "\t.long %d\n", parm_info);
12154 /* Offset from start of code to tb table. */
12155 fputs ("\t.long ", file);
12156 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
12158 RS6000_OUTPUT_BASENAME (file, fname);
12160 assemble_name (file, fname);
12162 fputs ("-.", file);
12164 RS6000_OUTPUT_BASENAME (file, fname);
12166 assemble_name (file, fname);
12170 /* Interrupt handler mask. */
12171 /* Omit this long, since we never set the interrupt handler bit
12174 /* Number of CTL (controlled storage) anchors. */
12175 /* Omit this long, since the has_ctl bit is never set above. */
12177 /* Displacement into stack of each CTL anchor. */
12178 /* Omit this list of longs, because there are no CTL anchors. */
12180 /* Length of function name. */
12183 fprintf (file, "\t.short %d\n", (int) strlen (fname));
12185 /* Function name. */
12186 assemble_string (fname, strlen (fname));
12188 /* Register for alloca automatic storage; this is always reg 31.
12189 Only emit this if the alloca bit was set above. */
12190 if (frame_pointer_needed)
12191 fputs ("\t.byte 31\n", file);
12193 fputs ("\t.align 2\n", file);
12197 /* A C compound statement that outputs the assembler code for a thunk
12198 function, used to implement C++ virtual function calls with
12199 multiple inheritance. The thunk acts as a wrapper around a virtual
12200 function, adjusting the implicit object parameter before handing
12201 control off to the real function.
12203 First, emit code to add the integer DELTA to the location that
12204 contains the incoming first argument. Assume that this argument
12205 contains a pointer, and is the one used to pass the `this' pointer
12206 in C++. This is the incoming argument *before* the function
12207 prologue, e.g. `%o0' on a sparc. The addition must preserve the
12208 values of all other incoming arguments.
12210 After the addition, emit code to jump to FUNCTION, which is a
12211 `FUNCTION_DECL'. This is a direct pure jump, not a call, and does
12212 not touch the return address. Hence returning from FUNCTION will
12213 return to whoever called the current `thunk'.
12215 The effect must be as if FUNCTION had been called directly with the
12216 adjusted first argument. This macro is responsible for emitting
12217 all of the code for a thunk function; output_function_prologue()
12218 and output_function_epilogue() are not invoked.
12220 The THUNK_FNDECL is redundant. (DELTA and FUNCTION have already
12221 been extracted from it.) It might possibly be useful on some
12222 targets, but probably not.
12224 If you do not define this macro, the target-independent code in the
12225 C++ frontend will generate a less efficient heavyweight thunk that
12226 calls FUNCTION instead of jumping to it. The generic approach does
12227 not support varargs. */
12230 rs6000_output_mi_thunk (file, thunk_fndecl, delta, vcall_offset, function)
12232 tree thunk_fndecl ATTRIBUTE_UNUSED;
12233 HOST_WIDE_INT delta;
12234 HOST_WIDE_INT vcall_offset;
12237 rtx this, insn, funexp;
12239 reload_completed = 1;
12240 epilogue_completed = 1;
12241 no_new_pseudos = 1;
12243 /* Mark the end of the (empty) prologue. */
12244 emit_note (NULL, NOTE_INSN_PROLOGUE_END);
12246 /* Find the "this" pointer. If the function returns a structure,
12247 the structure return pointer is in r3. */
12248 if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function))))
12249 this = gen_rtx_REG (Pmode, 4);
12251 this = gen_rtx_REG (Pmode, 3);
12253 /* Apply the constant offset, if required. */
12256 rtx delta_rtx = GEN_INT (delta);
12257 emit_insn (TARGET_32BIT
12258 ? gen_addsi3 (this, this, delta_rtx)
12259 : gen_adddi3 (this, this, delta_rtx));
12262 /* Apply the offset from the vtable, if required. */
12265 rtx vcall_offset_rtx = GEN_INT (vcall_offset);
12266 rtx tmp = gen_rtx_REG (Pmode, 12);
12268 emit_move_insn (tmp, gen_rtx_MEM (Pmode, this));
12269 emit_insn (TARGET_32BIT
12270 ? gen_addsi3 (tmp, tmp, vcall_offset_rtx)
12271 : gen_adddi3 (tmp, tmp, vcall_offset_rtx));
12272 emit_move_insn (tmp, gen_rtx_MEM (Pmode, tmp));
12273 emit_insn (TARGET_32BIT
12274 ? gen_addsi3 (this, this, tmp)
12275 : gen_adddi3 (this, this, tmp));
12278 /* Generate a tail call to the target function. */
12279 if (!TREE_USED (function))
12281 assemble_external (function);
12282 TREE_USED (function) = 1;
12284 funexp = XEXP (DECL_RTL (function), 0);
12285 SYMBOL_REF_FLAGS (funexp) &= ~SYMBOL_FLAG_LOCAL;
12286 funexp = gen_rtx_MEM (FUNCTION_MODE, funexp);
12289 if (MACHOPIC_INDIRECT)
12290 funexp = machopic_indirect_call_target (funexp);
12293 /* gen_sibcall expects reload to convert scratch pseudo to LR so we must
12294 generate sibcall RTL explicitly to avoid constraint abort. */
12295 insn = emit_call_insn (
12296 gen_rtx_PARALLEL (VOIDmode,
12298 gen_rtx_CALL (VOIDmode,
12299 funexp, const0_rtx),
12300 gen_rtx_USE (VOIDmode, const0_rtx),
12301 gen_rtx_USE (VOIDmode,
12302 gen_rtx_REG (SImode,
12303 LINK_REGISTER_REGNUM)),
12304 gen_rtx_RETURN (VOIDmode))));
12305 SIBLING_CALL_P (insn) = 1;
12308 /* Run just enough of rest_of_compilation to get the insns emitted.
12309 There's not really enough bulk here to make other passes such as
12310 instruction scheduling worth while. Note that use_thunk calls
12311 assemble_start_function and assemble_end_function. */
12312 insn = get_insns ();
12313 insn_locators_initialize ();
12314 shorten_branches (insn);
12315 final_start_function (insn, file, 1);
12316 final (insn, file, 1, 0);
12317 final_end_function ();
12319 reload_completed = 0;
12320 epilogue_completed = 0;
12321 no_new_pseudos = 0;
12324 /* A quick summary of the various types of 'constant-pool tables'
12327 Target Flags Name One table per
12328 AIX (none) AIX TOC object file
12329 AIX -mfull-toc AIX TOC object file
12330 AIX -mminimal-toc AIX minimal TOC translation unit
12331 SVR4/EABI (none) SVR4 SDATA object file
12332 SVR4/EABI -fpic SVR4 pic object file
12333 SVR4/EABI -fPIC SVR4 PIC translation unit
12334 SVR4/EABI -mrelocatable EABI TOC function
12335 SVR4/EABI -maix AIX TOC object file
12336 SVR4/EABI -maix -mminimal-toc
12337 AIX minimal TOC translation unit
12339 Name Reg. Set by entries contains:
12340 made by addrs? fp? sum?
12342 AIX TOC 2 crt0 as Y option option
12343 AIX minimal TOC 30 prolog gcc Y Y option
12344 SVR4 SDATA 13 crt0 gcc N Y N
12345 SVR4 pic 30 prolog ld Y not yet N
12346 SVR4 PIC 30 prolog gcc Y option option
12347 EABI TOC 30 prolog gcc Y option option
12351 /* Hash functions for the hash table. */
12354 rs6000_hash_constant (k)
12357 enum rtx_code code = GET_CODE (k);
12358 enum machine_mode mode = GET_MODE (k);
12359 unsigned result = (code << 3) ^ mode;
12360 const char *format;
12363 format = GET_RTX_FORMAT (code);
12364 flen = strlen (format);
12370 return result * 1231 + (unsigned) INSN_UID (XEXP (k, 0));
12373 if (mode != VOIDmode)
12374 return real_hash (CONST_DOUBLE_REAL_VALUE (k)) * result;
12386 for (; fidx < flen; fidx++)
12387 switch (format[fidx])
12392 const char *str = XSTR (k, fidx);
12393 len = strlen (str);
12394 result = result * 613 + len;
12395 for (i = 0; i < len; i++)
12396 result = result * 613 + (unsigned) str[i];
12401 result = result * 1231 + rs6000_hash_constant (XEXP (k, fidx));
12405 result = result * 613 + (unsigned) XINT (k, fidx);
12408 if (sizeof (unsigned) >= sizeof (HOST_WIDE_INT))
12409 result = result * 613 + (unsigned) XWINT (k, fidx);
12413 for (i = 0; i < sizeof(HOST_WIDE_INT)/sizeof(unsigned); i++)
12414 result = result * 613 + (unsigned) (XWINT (k, fidx)
12428 toc_hash_function (hash_entry)
12429 const void * hash_entry;
12431 const struct toc_hash_struct *thc =
12432 (const struct toc_hash_struct *) hash_entry;
12433 return rs6000_hash_constant (thc->key) ^ thc->key_mode;
12436 /* Compare H1 and H2 for equivalence. */
12439 toc_hash_eq (h1, h2)
12443 rtx r1 = ((const struct toc_hash_struct *) h1)->key;
12444 rtx r2 = ((const struct toc_hash_struct *) h2)->key;
12446 if (((const struct toc_hash_struct *) h1)->key_mode
12447 != ((const struct toc_hash_struct *) h2)->key_mode)
12450 return rtx_equal_p (r1, r2);
12453 /* These are the names given by the C++ front-end to vtables, and
12454 vtable-like objects. Ideally, this logic should not be here;
12455 instead, there should be some programmatic way of inquiring as
12456 to whether or not an object is a vtable. */
12458 #define VTABLE_NAME_P(NAME) \
12459 (strncmp ("_vt.", name, strlen("_vt.")) == 0 \
12460 || strncmp ("_ZTV", name, strlen ("_ZTV")) == 0 \
12461 || strncmp ("_ZTT", name, strlen ("_ZTT")) == 0 \
12462 || strncmp ("_ZTC", name, strlen ("_ZTC")) == 0)
12465 rs6000_output_symbol_ref (file, x)
12469 /* Currently C++ toc references to vtables can be emitted before it
12470 is decided whether the vtable is public or private. If this is
12471 the case, then the linker will eventually complain that there is
12472 a reference to an unknown section. Thus, for vtables only,
12473 we emit the TOC reference to reference the symbol and not the
12475 const char *name = XSTR (x, 0);
12477 if (VTABLE_NAME_P (name))
12479 RS6000_OUTPUT_BASENAME (file, name);
12482 assemble_name (file, name);
12485 /* Output a TOC entry. We derive the entry name from what is being
12489 output_toc (file, x, labelno, mode)
12493 enum machine_mode mode;
12496 const char *name = buf;
12497 const char *real_name;
12504 /* When the linker won't eliminate them, don't output duplicate
12505 TOC entries (this happens on AIX if there is any kind of TOC,
12506 and on SVR4 under -fPIC or -mrelocatable). Don't do this for
12508 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
12510 struct toc_hash_struct *h;
12513 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS
12514 time because GGC is not initialised at that point. */
12515 if (toc_hash_table == NULL)
12516 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
12517 toc_hash_eq, NULL);
12519 h = ggc_alloc (sizeof (*h));
12521 h->key_mode = mode;
12522 h->labelno = labelno;
12524 found = htab_find_slot (toc_hash_table, h, 1);
12525 if (*found == NULL)
12527 else /* This is indeed a duplicate.
12528 Set this label equal to that label. */
12530 fputs ("\t.set ", file);
12531 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12532 fprintf (file, "%d,", labelno);
12533 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LC");
12534 fprintf (file, "%d\n", ((*(const struct toc_hash_struct **)
12540 /* If we're going to put a double constant in the TOC, make sure it's
12541 aligned properly when strict alignment is on. */
12542 if (GET_CODE (x) == CONST_DOUBLE
12543 && STRICT_ALIGNMENT
12544 && GET_MODE_BITSIZE (mode) >= 64
12545 && ! (TARGET_NO_FP_IN_TOC && ! TARGET_MINIMAL_TOC)) {
12546 ASM_OUTPUT_ALIGN (file, 3);
12549 (*targetm.asm_out.internal_label) (file, "LC", labelno);
12551 /* Handle FP constants specially. Note that if we have a minimal
12552 TOC, things we put here aren't actually in the TOC, so we can allow
12554 if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == TFmode)
12556 REAL_VALUE_TYPE rv;
12559 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12560 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
12564 if (TARGET_MINIMAL_TOC)
12565 fputs (DOUBLE_INT_ASM_OP, file);
12567 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12568 k[0] & 0xffffffff, k[1] & 0xffffffff,
12569 k[2] & 0xffffffff, k[3] & 0xffffffff);
12570 fprintf (file, "0x%lx%08lx,0x%lx%08lx\n",
12571 k[0] & 0xffffffff, k[1] & 0xffffffff,
12572 k[2] & 0xffffffff, k[3] & 0xffffffff);
12577 if (TARGET_MINIMAL_TOC)
12578 fputs ("\t.long ", file);
12580 fprintf (file, "\t.tc FT_%lx_%lx_%lx_%lx[TC],",
12581 k[0] & 0xffffffff, k[1] & 0xffffffff,
12582 k[2] & 0xffffffff, k[3] & 0xffffffff);
12583 fprintf (file, "0x%lx,0x%lx,0x%lx,0x%lx\n",
12584 k[0] & 0xffffffff, k[1] & 0xffffffff,
12585 k[2] & 0xffffffff, k[3] & 0xffffffff);
12589 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
12591 REAL_VALUE_TYPE rv;
12594 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12595 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
12599 if (TARGET_MINIMAL_TOC)
12600 fputs (DOUBLE_INT_ASM_OP, file);
12602 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12603 k[0] & 0xffffffff, k[1] & 0xffffffff);
12604 fprintf (file, "0x%lx%08lx\n",
12605 k[0] & 0xffffffff, k[1] & 0xffffffff);
12610 if (TARGET_MINIMAL_TOC)
12611 fputs ("\t.long ", file);
12613 fprintf (file, "\t.tc FD_%lx_%lx[TC],",
12614 k[0] & 0xffffffff, k[1] & 0xffffffff);
12615 fprintf (file, "0x%lx,0x%lx\n",
12616 k[0] & 0xffffffff, k[1] & 0xffffffff);
12620 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
12622 REAL_VALUE_TYPE rv;
12625 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
12626 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
12630 if (TARGET_MINIMAL_TOC)
12631 fputs (DOUBLE_INT_ASM_OP, file);
12633 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12634 fprintf (file, "0x%lx00000000\n", l & 0xffffffff);
12639 if (TARGET_MINIMAL_TOC)
12640 fputs ("\t.long ", file);
12642 fprintf (file, "\t.tc FS_%lx[TC],", l & 0xffffffff);
12643 fprintf (file, "0x%lx\n", l & 0xffffffff);
12647 else if (GET_MODE (x) == VOIDmode
12648 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
12650 unsigned HOST_WIDE_INT low;
12651 HOST_WIDE_INT high;
12653 if (GET_CODE (x) == CONST_DOUBLE)
12655 low = CONST_DOUBLE_LOW (x);
12656 high = CONST_DOUBLE_HIGH (x);
12659 #if HOST_BITS_PER_WIDE_INT == 32
12662 high = (low & 0x80000000) ? ~0 : 0;
12666 low = INTVAL (x) & 0xffffffff;
12667 high = (HOST_WIDE_INT) INTVAL (x) >> 32;
12671 /* TOC entries are always Pmode-sized, but since this
12672 is a bigendian machine then if we're putting smaller
12673 integer constants in the TOC we have to pad them.
12674 (This is still a win over putting the constants in
12675 a separate constant pool, because then we'd have
12676 to have both a TOC entry _and_ the actual constant.)
12678 For a 32-bit target, CONST_INT values are loaded and shifted
12679 entirely within `low' and can be stored in one TOC entry. */
12681 if (TARGET_64BIT && POINTER_SIZE < GET_MODE_BITSIZE (mode))
12682 abort ();/* It would be easy to make this work, but it doesn't now. */
12684 if (POINTER_SIZE > GET_MODE_BITSIZE (mode))
12686 #if HOST_BITS_PER_WIDE_INT == 32
12687 lshift_double (low, high, POINTER_SIZE - GET_MODE_BITSIZE (mode),
12688 POINTER_SIZE, &low, &high, 0);
12691 low <<= POINTER_SIZE - GET_MODE_BITSIZE (mode);
12692 high = (HOST_WIDE_INT) low >> 32;
12699 if (TARGET_MINIMAL_TOC)
12700 fputs (DOUBLE_INT_ASM_OP, file);
12702 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12703 (long) high & 0xffffffff, (long) low & 0xffffffff);
12704 fprintf (file, "0x%lx%08lx\n",
12705 (long) high & 0xffffffff, (long) low & 0xffffffff);
12710 if (POINTER_SIZE < GET_MODE_BITSIZE (mode))
12712 if (TARGET_MINIMAL_TOC)
12713 fputs ("\t.long ", file);
12715 fprintf (file, "\t.tc ID_%lx_%lx[TC],",
12716 (long) high & 0xffffffff, (long) low & 0xffffffff);
12717 fprintf (file, "0x%lx,0x%lx\n",
12718 (long) high & 0xffffffff, (long) low & 0xffffffff);
12722 if (TARGET_MINIMAL_TOC)
12723 fputs ("\t.long ", file);
12725 fprintf (file, "\t.tc IS_%lx[TC],", (long) low & 0xffffffff);
12726 fprintf (file, "0x%lx\n", (long) low & 0xffffffff);
12732 if (GET_CODE (x) == CONST)
12734 if (GET_CODE (XEXP (x, 0)) != PLUS)
12737 base = XEXP (XEXP (x, 0), 0);
12738 offset = INTVAL (XEXP (XEXP (x, 0), 1));
12741 if (GET_CODE (base) == SYMBOL_REF)
12742 name = XSTR (base, 0);
12743 else if (GET_CODE (base) == LABEL_REF)
12744 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (XEXP (base, 0)));
12745 else if (GET_CODE (base) == CODE_LABEL)
12746 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (base));
12750 real_name = (*targetm.strip_name_encoding) (name);
12751 if (TARGET_MINIMAL_TOC)
12752 fputs (TARGET_32BIT ? "\t.long " : DOUBLE_INT_ASM_OP, file);
12755 fprintf (file, "\t.tc %s", real_name);
12758 fprintf (file, ".N%d", - offset);
12760 fprintf (file, ".P%d", offset);
12762 fputs ("[TC],", file);
12765 /* Currently C++ toc references to vtables can be emitted before it
12766 is decided whether the vtable is public or private. If this is
12767 the case, then the linker will eventually complain that there is
12768 a TOC reference to an unknown section. Thus, for vtables only,
12769 we emit the TOC reference to reference the symbol and not the
12771 if (VTABLE_NAME_P (name))
12773 RS6000_OUTPUT_BASENAME (file, name);
12775 fprintf (file, "%d", offset);
12776 else if (offset > 0)
12777 fprintf (file, "+%d", offset);
12780 output_addr_const (file, x);
12784 /* Output an assembler pseudo-op to write an ASCII string of N characters
12785 starting at P to FILE.
12787 On the RS/6000, we have to do this using the .byte operation and
12788 write out special characters outside the quoted string.
12789 Also, the assembler is broken; very long strings are truncated,
12790 so we must artificially break them up early. */
12793 output_ascii (file, p, n)
12799 int i, count_string;
12800 const char *for_string = "\t.byte \"";
12801 const char *for_decimal = "\t.byte ";
12802 const char *to_close = NULL;
12805 for (i = 0; i < n; i++)
12808 if (c >= ' ' && c < 0177)
12811 fputs (for_string, file);
12814 /* Write two quotes to get one. */
12822 for_decimal = "\"\n\t.byte ";
12826 if (count_string >= 512)
12828 fputs (to_close, file);
12830 for_string = "\t.byte \"";
12831 for_decimal = "\t.byte ";
12839 fputs (for_decimal, file);
12840 fprintf (file, "%d", c);
12842 for_string = "\n\t.byte \"";
12843 for_decimal = ", ";
12849 /* Now close the string if we have written one. Then end the line. */
12851 fputs (to_close, file);
12854 /* Generate a unique section name for FILENAME for a section type
12855 represented by SECTION_DESC. Output goes into BUF.
12857 SECTION_DESC can be any string, as long as it is different for each
12858 possible section type.
12860 We name the section in the same manner as xlc. The name begins with an
12861 underscore followed by the filename (after stripping any leading directory
12862 names) with the last period replaced by the string SECTION_DESC. If
12863 FILENAME does not contain a period, SECTION_DESC is appended to the end of
12867 rs6000_gen_section_name (buf, filename, section_desc)
12869 const char *filename;
12870 const char *section_desc;
12872 const char *q, *after_last_slash, *last_period = 0;
12876 after_last_slash = filename;
12877 for (q = filename; *q; q++)
12880 after_last_slash = q + 1;
12881 else if (*q == '.')
12885 len = strlen (after_last_slash) + strlen (section_desc) + 2;
12886 *buf = (char *) xmalloc (len);
12891 for (q = after_last_slash; *q; q++)
12893 if (q == last_period)
12895 strcpy (p, section_desc);
12896 p += strlen (section_desc);
12900 else if (ISALNUM (*q))
12904 if (last_period == 0)
12905 strcpy (p, section_desc);
12910 /* Emit profile function. */
12913 output_profile_hook (labelno)
12914 int labelno ATTRIBUTE_UNUSED;
12916 if (TARGET_PROFILE_KERNEL)
12919 if (DEFAULT_ABI == ABI_AIX)
12921 #ifndef NO_PROFILE_COUNTERS
12922 # define NO_PROFILE_COUNTERS 0
12924 if (NO_PROFILE_COUNTERS)
12925 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 0);
12929 const char *label_name;
12932 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12933 label_name = (*targetm.strip_name_encoding) (ggc_strdup (buf));
12934 fun = gen_rtx_SYMBOL_REF (Pmode, label_name);
12936 emit_library_call (init_one_libfunc (RS6000_MCOUNT), 0, VOIDmode, 1,
12940 else if (DEFAULT_ABI == ABI_DARWIN)
12942 const char *mcount_name = RS6000_MCOUNT;
12943 int caller_addr_regno = LINK_REGISTER_REGNUM;
12945 /* Be conservative and always set this, at least for now. */
12946 current_function_uses_pic_offset_table = 1;
12949 /* For PIC code, set up a stub and collect the caller's address
12950 from r0, which is where the prologue puts it. */
12951 if (MACHOPIC_INDIRECT)
12953 mcount_name = machopic_stub_name (mcount_name);
12954 if (current_function_uses_pic_offset_table)
12955 caller_addr_regno = 0;
12958 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, mcount_name),
12960 gen_rtx_REG (Pmode, caller_addr_regno), Pmode);
12964 /* Write function profiler code. */
12967 output_function_profiler (file, labelno)
12974 switch (DEFAULT_ABI)
12983 warning ("no profiling of 64-bit code for this ABI");
12986 ASM_GENERATE_INTERNAL_LABEL (buf, "LP", labelno);
12987 fprintf (file, "\tmflr %s\n", reg_names[0]);
12990 fputs ("\tbl _GLOBAL_OFFSET_TABLE_@local-4\n", file);
12991 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
12992 reg_names[0], save_lr, reg_names[1]);
12993 asm_fprintf (file, "\tmflr %s\n", reg_names[12]);
12994 asm_fprintf (file, "\t{l|lwz} %s,", reg_names[0]);
12995 assemble_name (file, buf);
12996 asm_fprintf (file, "@got(%s)\n", reg_names[12]);
12998 else if (flag_pic > 1)
13000 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13001 reg_names[0], save_lr, reg_names[1]);
13002 /* Now, we need to get the address of the label. */
13003 fputs ("\tbl 1f\n\t.long ", file);
13004 assemble_name (file, buf);
13005 fputs ("-.\n1:", file);
13006 asm_fprintf (file, "\tmflr %s\n", reg_names[11]);
13007 asm_fprintf (file, "\t{l|lwz} %s,0(%s)\n",
13008 reg_names[0], reg_names[11]);
13009 asm_fprintf (file, "\t{cax|add} %s,%s,%s\n",
13010 reg_names[0], reg_names[0], reg_names[11]);
13014 asm_fprintf (file, "\t{liu|lis} %s,", reg_names[12]);
13015 assemble_name (file, buf);
13016 fputs ("@ha\n", file);
13017 asm_fprintf (file, "\t{st|stw} %s,%d(%s)\n",
13018 reg_names[0], save_lr, reg_names[1]);
13019 asm_fprintf (file, "\t{cal|la} %s,", reg_names[0]);
13020 assemble_name (file, buf);
13021 asm_fprintf (file, "@l(%s)\n", reg_names[12]);
13024 /* ABI_V4 saves the static chain reg with ASM_OUTPUT_REG_PUSH. */
13025 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13030 if (!TARGET_PROFILE_KERNEL)
13032 /* Don't do anything, done in output_profile_hook (). */
13039 asm_fprintf (file, "\tmflr %s\n", reg_names[0]);
13040 asm_fprintf (file, "\tstd %s,16(%s)\n", reg_names[0], reg_names[1]);
13042 if (current_function_needs_context)
13044 asm_fprintf (file, "\tstd %s,24(%s)\n",
13045 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13046 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13047 asm_fprintf (file, "\tld %s,24(%s)\n",
13048 reg_names[STATIC_CHAIN_REGNUM], reg_names[1]);
13051 fprintf (file, "\tbl %s\n", RS6000_MCOUNT);
13059 rs6000_use_dfa_pipeline_interface ()
13064 /* Power4 load update and store update instructions are cracked into a
13065 load or store and an integer insn which are executed in the same cycle.
13066 Branches have their own dispatch slot which does not count against the
13067 GCC issue rate, but it changes the program flow so there are no other
13068 instructions to issue in this cycle. */
13071 rs6000_variable_issue (stream, verbose, insn, more)
13072 FILE *stream ATTRIBUTE_UNUSED;
13073 int verbose ATTRIBUTE_UNUSED;
13077 if (GET_CODE (PATTERN (insn)) == USE
13078 || GET_CODE (PATTERN (insn)) == CLOBBER)
13081 if (rs6000_cpu == PROCESSOR_POWER4)
13083 enum attr_type type = get_attr_type (insn);
13084 if (type == TYPE_LOAD_EXT_U || type == TYPE_LOAD_EXT_UX
13085 || type == TYPE_LOAD_UX || type == TYPE_STORE_UX)
13087 else if (type == TYPE_LOAD_U || type == TYPE_STORE_U
13088 || type == TYPE_FPLOAD_U || type == TYPE_FPSTORE_U
13089 || type == TYPE_FPLOAD_UX || type == TYPE_FPSTORE_UX
13090 || type == TYPE_LOAD_EXT || type == TYPE_DELAYED_CR
13091 || type == TYPE_COMPARE || type == TYPE_DELAYED_COMPARE
13092 || type == TYPE_IMUL_COMPARE || type == TYPE_LMUL_COMPARE
13093 || type == TYPE_IDIV || type == TYPE_LDIV
13094 || type == TYPE_INSERT_WORD)
13095 return more > 2 ? more - 2 : 0;
13101 /* Adjust the cost of a scheduling dependency. Return the new cost of
13102 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
13105 rs6000_adjust_cost (insn, link, dep_insn, cost)
13108 rtx dep_insn ATTRIBUTE_UNUSED;
13111 if (! recog_memoized (insn))
13114 if (REG_NOTE_KIND (link) != 0)
13117 if (REG_NOTE_KIND (link) == 0)
13119 /* Data dependency; DEP_INSN writes a register that INSN reads
13120 some cycles later. */
13121 switch (get_attr_type (insn))
13124 /* Tell the first scheduling pass about the latency between
13125 a mtctr and bctr (and mtlr and br/blr). The first
13126 scheduling pass will not know about this latency since
13127 the mtctr instruction, which has the latency associated
13128 to it, will be generated by reload. */
13129 return TARGET_POWER ? 5 : 4;
13131 /* Leave some extra cycles between a compare and its
13132 dependent branch, to inhibit expensive mispredicts. */
13133 if ((rs6000_cpu_attr == CPU_PPC603
13134 || rs6000_cpu_attr == CPU_PPC604
13135 || rs6000_cpu_attr == CPU_PPC604E
13136 || rs6000_cpu_attr == CPU_PPC620
13137 || rs6000_cpu_attr == CPU_PPC630
13138 || rs6000_cpu_attr == CPU_PPC750
13139 || rs6000_cpu_attr == CPU_PPC7400
13140 || rs6000_cpu_attr == CPU_PPC7450
13141 || rs6000_cpu_attr == CPU_POWER4)
13142 && recog_memoized (dep_insn)
13143 && (INSN_CODE (dep_insn) >= 0)
13144 && (get_attr_type (dep_insn) == TYPE_CMP
13145 || get_attr_type (dep_insn) == TYPE_COMPARE
13146 || get_attr_type (dep_insn) == TYPE_DELAYED_COMPARE
13147 || get_attr_type (dep_insn) == TYPE_IMUL_COMPARE
13148 || get_attr_type (dep_insn) == TYPE_LMUL_COMPARE
13149 || get_attr_type (dep_insn) == TYPE_FPCOMPARE
13150 || get_attr_type (dep_insn) == TYPE_CR_LOGICAL
13151 || get_attr_type (dep_insn) == TYPE_DELAYED_CR))
13156 /* Fall out to return default cost. */
13162 /* A C statement (sans semicolon) to update the integer scheduling
13163 priority INSN_PRIORITY (INSN). Reduce the priority to execute the
13164 INSN earlier, increase the priority to execute INSN later. Do not
13165 define this macro if you do not need to adjust the scheduling
13166 priorities of insns. */
13169 rs6000_adjust_priority (insn, priority)
13170 rtx insn ATTRIBUTE_UNUSED;
13173 /* On machines (like the 750) which have asymmetric integer units,
13174 where one integer unit can do multiply and divides and the other
13175 can't, reduce the priority of multiply/divide so it is scheduled
13176 before other integer operations. */
13179 if (! INSN_P (insn))
13182 if (GET_CODE (PATTERN (insn)) == USE)
13185 switch (rs6000_cpu_attr) {
13187 switch (get_attr_type (insn))
13194 fprintf (stderr, "priority was %#x (%d) before adjustment\n",
13195 priority, priority);
13196 if (priority >= 0 && priority < 0x01000000)
13206 /* Return how many instructions the machine can issue per cycle. */
13209 rs6000_issue_rate ()
13211 /* Use issue rate of 1 for first scheduling pass to decrease degradation. */
13212 if (!reload_completed)
13215 switch (rs6000_cpu_attr) {
13216 case CPU_RIOS1: /* ? */
13218 case CPU_PPC601: /* ? */
13239 /* Return how many instructions to look ahead for better insn
13243 rs6000_use_sched_lookahead ()
13245 if (rs6000_cpu_attr == CPU_PPC8540)
13251 /* Length in units of the trampoline for entering a nested function. */
13254 rs6000_trampoline_size ()
13258 switch (DEFAULT_ABI)
13264 ret = (TARGET_32BIT) ? 12 : 24;
13269 ret = (TARGET_32BIT) ? 40 : 48;
13276 /* Emit RTL insns to initialize the variable parts of a trampoline.
13277 FNADDR is an RTX for the address of the function's pure code.
13278 CXT is an RTX for the static chain value for the function. */
13281 rs6000_initialize_trampoline (addr, fnaddr, cxt)
13286 enum machine_mode pmode = Pmode;
13287 int regsize = (TARGET_32BIT) ? 4 : 8;
13288 rtx ctx_reg = force_reg (pmode, cxt);
13290 switch (DEFAULT_ABI)
13295 /* Macros to shorten the code expansions below. */
13296 #define MEM_DEREF(addr) gen_rtx_MEM (pmode, memory_address (pmode, addr))
13297 #define MEM_PLUS(addr,offset) \
13298 gen_rtx_MEM (pmode, memory_address (pmode, plus_constant (addr, offset)))
13300 /* Under AIX, just build the 3 word function descriptor */
13303 rtx fn_reg = gen_reg_rtx (pmode);
13304 rtx toc_reg = gen_reg_rtx (pmode);
13305 emit_move_insn (fn_reg, MEM_DEREF (fnaddr));
13306 emit_move_insn (toc_reg, MEM_PLUS (fnaddr, regsize));
13307 emit_move_insn (MEM_DEREF (addr), fn_reg);
13308 emit_move_insn (MEM_PLUS (addr, regsize), toc_reg);
13309 emit_move_insn (MEM_PLUS (addr, 2*regsize), ctx_reg);
13313 /* Under V.4/eabi/darwin, __trampoline_setup does the real work. */
13316 emit_library_call (gen_rtx_SYMBOL_REF (SImode, "__trampoline_setup"),
13317 FALSE, VOIDmode, 4,
13319 GEN_INT (rs6000_trampoline_size ()), SImode,
13329 /* Table of valid machine attributes. */
13331 const struct attribute_spec rs6000_attribute_table[] =
13333 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
13334 { "longcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13335 { "shortcall", 0, 0, false, true, true, rs6000_handle_longcall_attribute },
13336 { NULL, 0, 0, false, false, false, NULL }
13339 /* Handle a "longcall" or "shortcall" attribute; arguments as in
13340 struct attribute_spec.handler. */
13343 rs6000_handle_longcall_attribute (node, name, args, flags, no_add_attrs)
13346 tree args ATTRIBUTE_UNUSED;
13347 int flags ATTRIBUTE_UNUSED;
13348 bool *no_add_attrs;
13350 if (TREE_CODE (*node) != FUNCTION_TYPE
13351 && TREE_CODE (*node) != FIELD_DECL
13352 && TREE_CODE (*node) != TYPE_DECL)
13354 warning ("`%s' attribute only applies to functions",
13355 IDENTIFIER_POINTER (name));
13356 *no_add_attrs = true;
13362 /* Set longcall attributes on all functions declared when
13363 rs6000_default_long_calls is true. */
13365 rs6000_set_default_type_attributes (type)
13368 if (rs6000_default_long_calls
13369 && (TREE_CODE (type) == FUNCTION_TYPE
13370 || TREE_CODE (type) == METHOD_TYPE))
13371 TYPE_ATTRIBUTES (type) = tree_cons (get_identifier ("longcall"),
13373 TYPE_ATTRIBUTES (type));
13376 /* Return a reference suitable for calling a function with the
13377 longcall attribute. */
13380 rs6000_longcall_ref (call_ref)
13383 const char *call_name;
13386 if (GET_CODE (call_ref) != SYMBOL_REF)
13389 /* System V adds '.' to the internal name, so skip them. */
13390 call_name = XSTR (call_ref, 0);
13391 if (*call_name == '.')
13393 while (*call_name == '.')
13396 node = get_identifier (call_name);
13397 call_ref = gen_rtx_SYMBOL_REF (VOIDmode, IDENTIFIER_POINTER (node));
13400 return force_reg (Pmode, call_ref);
13403 #ifdef USING_ELFOS_H
13405 /* A C statement or statements to switch to the appropriate section
13406 for output of RTX in mode MODE. You can assume that RTX is some
13407 kind of constant in RTL. The argument MODE is redundant except in
13408 the case of a `const_int' rtx. Select the section by calling
13409 `text_section' or one of the alternatives for other sections.
13411 Do not define this macro if you put all constants in the read-only
13415 rs6000_elf_select_rtx_section (mode, x, align)
13416 enum machine_mode mode;
13418 unsigned HOST_WIDE_INT align;
13420 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
13423 default_elf_select_rtx_section (mode, x, align);
13426 /* A C statement or statements to switch to the appropriate
13427 section for output of DECL. DECL is either a `VAR_DECL' node
13428 or a constant of some sort. RELOC indicates whether forming
13429 the initial value of DECL requires link-time relocations. */
13432 rs6000_elf_select_section (decl, reloc, align)
13435 unsigned HOST_WIDE_INT align;
13437 /* Pretend that we're always building for a shared library when
13438 ABI_AIX, because otherwise we end up with dynamic relocations
13439 in read-only sections. This happens for function pointers,
13440 references to vtables in typeinfo, and probably other cases. */
13441 default_elf_select_section_1 (decl, reloc, align,
13442 flag_pic || DEFAULT_ABI == ABI_AIX);
13445 /* A C statement to build up a unique section name, expressed as a
13446 STRING_CST node, and assign it to DECL_SECTION_NAME (decl).
13447 RELOC indicates whether the initial value of EXP requires
13448 link-time relocations. If you do not define this macro, GCC will use
13449 the symbol name prefixed by `.' as the section name. Note - this
13450 macro can now be called for uninitialized data items as well as
13451 initialized data and functions. */
13454 rs6000_elf_unique_section (decl, reloc)
13458 /* As above, pretend that we're always building for a shared library
13459 when ABI_AIX, to avoid dynamic relocations in read-only sections. */
13460 default_unique_section_1 (decl, reloc,
13461 flag_pic || DEFAULT_ABI == ABI_AIX);
13464 /* For a SYMBOL_REF, set generic flags and then perform some
13465 target-specific processing.
13467 When the AIX ABI is requested on a non-AIX system, replace the
13468 function name with the real name (with a leading .) rather than the
13469 function descriptor name. This saves a lot of overriding code to
13470 read the prefixes. */
13473 rs6000_elf_encode_section_info (decl, rtl, first)
13478 default_encode_section_info (decl, rtl, first);
13481 && TREE_CODE (decl) == FUNCTION_DECL
13483 && DEFAULT_ABI == ABI_AIX)
13485 rtx sym_ref = XEXP (rtl, 0);
13486 size_t len = strlen (XSTR (sym_ref, 0));
13487 char *str = alloca (len + 2);
13489 memcpy (str + 1, XSTR (sym_ref, 0), len + 1);
13490 XSTR (sym_ref, 0) = ggc_alloc_string (str, len + 1);
13495 rs6000_elf_in_small_data_p (decl)
13498 if (rs6000_sdata == SDATA_NONE)
13501 if (TREE_CODE (decl) == VAR_DECL && DECL_SECTION_NAME (decl))
13503 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
13504 if (strcmp (section, ".sdata") == 0
13505 || strcmp (section, ".sdata2") == 0
13506 || strcmp (section, ".sbss") == 0
13507 || strcmp (section, ".sbss2") == 0
13508 || strcmp (section, ".PPC.EMB.sdata0") == 0
13509 || strcmp (section, ".PPC.EMB.sbss0") == 0)
13514 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
13517 && (unsigned HOST_WIDE_INT) size <= g_switch_value
13518 /* If it's not public, and we're not going to reference it there,
13519 there's no need to put it in the small data section. */
13520 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
13527 #endif /* USING_ELFOS_H */
13530 /* Return a REG that occurs in ADDR with coefficient 1.
13531 ADDR can be effectively incremented by incrementing REG.
13533 r0 is special and we must not select it as an address
13534 register by this routine since our caller will try to
13535 increment the returned register via an "la" instruction. */
13538 find_addr_reg (addr)
13541 while (GET_CODE (addr) == PLUS)
13543 if (GET_CODE (XEXP (addr, 0)) == REG
13544 && REGNO (XEXP (addr, 0)) != 0)
13545 addr = XEXP (addr, 0);
13546 else if (GET_CODE (XEXP (addr, 1)) == REG
13547 && REGNO (XEXP (addr, 1)) != 0)
13548 addr = XEXP (addr, 1);
13549 else if (CONSTANT_P (XEXP (addr, 0)))
13550 addr = XEXP (addr, 1);
13551 else if (CONSTANT_P (XEXP (addr, 1)))
13552 addr = XEXP (addr, 0);
13556 if (GET_CODE (addr) == REG && REGNO (addr) != 0)
13562 rs6000_fatal_bad_address (op)
13565 fatal_insn ("bad address", op);
13571 /* Returns 1 if OP is either a symbol reference or a sum of a symbol
13572 reference and a constant. */
13575 symbolic_operand (op)
13578 switch (GET_CODE (op))
13585 return (GET_CODE (op) == SYMBOL_REF ||
13586 (GET_CODE (XEXP (op, 0)) == SYMBOL_REF
13587 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
13588 && GET_CODE (XEXP (op, 1)) == CONST_INT);
13595 #ifdef RS6000_LONG_BRANCH
13597 static tree stub_list = 0;
13599 /* ADD_COMPILER_STUB adds the compiler generated stub for handling
13600 procedure calls to the linked list. */
13603 add_compiler_stub (label_name, function_name, line_number)
13605 tree function_name;
13608 tree stub = build_tree_list (function_name, label_name);
13609 TREE_TYPE (stub) = build_int_2 (line_number, 0);
13610 TREE_CHAIN (stub) = stub_list;
13614 #define STUB_LABEL_NAME(STUB) TREE_VALUE (STUB)
13615 #define STUB_FUNCTION_NAME(STUB) TREE_PURPOSE (STUB)
13616 #define STUB_LINE_NUMBER(STUB) TREE_INT_CST_LOW (TREE_TYPE (STUB))
13618 /* OUTPUT_COMPILER_STUB outputs the compiler generated stub for
13619 handling procedure calls from the linked list and initializes the
13623 output_compiler_stub ()
13626 char label_buf[256];
13630 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13632 fprintf (asm_out_file,
13633 "%s:\n", IDENTIFIER_POINTER(STUB_LABEL_NAME(stub)));
13635 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13636 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13637 fprintf (asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER(stub));
13638 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13640 if (IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))[0] == '*')
13642 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub))+1);
13645 label_buf[0] = '_';
13646 strcpy (label_buf+1,
13647 IDENTIFIER_POINTER (STUB_FUNCTION_NAME (stub)));
13650 strcpy (tmp_buf, "lis r12,hi16(");
13651 strcat (tmp_buf, label_buf);
13652 strcat (tmp_buf, ")\n\tori r12,r12,lo16(");
13653 strcat (tmp_buf, label_buf);
13654 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
13655 output_asm_insn (tmp_buf, 0);
13657 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
13658 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
13659 fprintf(asm_out_file, "\t.stabd 68,0,%d\n", STUB_LINE_NUMBER (stub));
13660 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
13666 /* NO_PREVIOUS_DEF checks in the link list whether the function name is
13667 already there or not. */
13670 no_previous_def (function_name)
13671 tree function_name;
13674 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13675 if (function_name == STUB_FUNCTION_NAME (stub))
13680 /* GET_PREV_LABEL gets the label name from the previous definition of
13684 get_prev_label (function_name)
13685 tree function_name;
13688 for (stub = stub_list; stub; stub = TREE_CHAIN (stub))
13689 if (function_name == STUB_FUNCTION_NAME (stub))
13690 return STUB_LABEL_NAME (stub);
13694 /* INSN is either a function call or a millicode call. It may have an
13695 unconditional jump in its delay slot.
13697 CALL_DEST is the routine we are calling. */
13700 output_call (insn, call_dest, operand_number)
13703 int operand_number;
13705 static char buf[256];
13706 if (GET_CODE (call_dest) == SYMBOL_REF && TARGET_LONG_BRANCH && !flag_pic)
13709 tree funname = get_identifier (XSTR (call_dest, 0));
13711 if (no_previous_def (funname))
13713 int line_number = 0;
13714 rtx label_rtx = gen_label_rtx ();
13715 char *label_buf, temp_buf[256];
13716 ASM_GENERATE_INTERNAL_LABEL (temp_buf, "L",
13717 CODE_LABEL_NUMBER (label_rtx));
13718 label_buf = temp_buf[0] == '*' ? temp_buf + 1 : temp_buf;
13719 labelname = get_identifier (label_buf);
13720 for (; insn && GET_CODE (insn) != NOTE; insn = PREV_INSN (insn));
13722 line_number = NOTE_LINE_NUMBER (insn);
13723 add_compiler_stub (labelname, funname, line_number);
13726 labelname = get_prev_label (funname);
13728 sprintf (buf, "jbsr %%z%d,%.246s",
13729 operand_number, IDENTIFIER_POINTER (labelname));
13734 sprintf (buf, "bl %%z%d", operand_number);
13739 #endif /* RS6000_LONG_BRANCH */
13741 #define GEN_LOCAL_LABEL_FOR_SYMBOL(BUF,SYMBOL,LENGTH,N) \
13743 const char *const symbol_ = (SYMBOL); \
13744 char *buffer_ = (BUF); \
13745 if (symbol_[0] == '"') \
13747 sprintf(buffer_, "\"L%d$%s", (N), symbol_+1); \
13749 else if (name_needs_quotes(symbol_)) \
13751 sprintf(buffer_, "\"L%d$%s\"", (N), symbol_); \
13755 sprintf(buffer_, "L%d$%s", (N), symbol_); \
13760 /* Generate PIC and indirect symbol stubs. */
13763 machopic_output_stub (file, symb, stub)
13765 const char *symb, *stub;
13767 unsigned int length;
13768 char *symbol_name, *lazy_ptr_name;
13769 char *local_label_0;
13770 static int label = 0;
13772 /* Lose our funky encoding stuff so it doesn't contaminate the stub. */
13773 symb = (*targetm.strip_name_encoding) (symb);
13777 length = strlen (symb);
13778 symbol_name = alloca (length + 32);
13779 GEN_SYMBOL_NAME_FOR_SYMBOL (symbol_name, symb, length);
13781 lazy_ptr_name = alloca (length + 32);
13782 GEN_LAZY_PTR_NAME_FOR_SYMBOL (lazy_ptr_name, symb, length);
13784 local_label_0 = alloca (length + 32);
13785 GEN_LOCAL_LABEL_FOR_SYMBOL (local_label_0, symb, length, 0);
13788 machopic_picsymbol_stub1_section ();
13790 machopic_symbol_stub1_section ();
13791 fprintf (file, "\t.align 2\n");
13793 fprintf (file, "%s:\n", stub);
13794 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13798 fprintf (file, "\tmflr r0\n");
13799 fprintf (file, "\tbcl 20,31,%s\n", local_label_0);
13800 fprintf (file, "%s:\n\tmflr r11\n", local_label_0);
13801 fprintf (file, "\taddis r11,r11,ha16(%s-%s)\n",
13802 lazy_ptr_name, local_label_0);
13803 fprintf (file, "\tmtlr r0\n");
13804 fprintf (file, "\tlwzu r12,lo16(%s-%s)(r11)\n",
13805 lazy_ptr_name, local_label_0);
13806 fprintf (file, "\tmtctr r12\n");
13807 fprintf (file, "\tbctr\n");
13811 fprintf (file, "\tlis r11,ha16(%s)\n", lazy_ptr_name);
13812 fprintf (file, "\tlwzu r12,lo16(%s)(r11)\n", lazy_ptr_name);
13813 fprintf (file, "\tmtctr r12\n");
13814 fprintf (file, "\tbctr\n");
13817 machopic_lazy_symbol_ptr_section ();
13818 fprintf (file, "%s:\n", lazy_ptr_name);
13819 fprintf (file, "\t.indirect_symbol %s\n", symbol_name);
13820 fprintf (file, "\t.long dyld_stub_binding_helper\n");
13823 /* Legitimize PIC addresses. If the address is already
13824 position-independent, we return ORIG. Newly generated
13825 position-independent addresses go into a reg. This is REG if non
13826 zero, otherwise we allocate register(s) as necessary. */
13828 #define SMALL_INT(X) ((unsigned) (INTVAL(X) + 0x8000) < 0x10000)
13831 rs6000_machopic_legitimize_pic_address (orig, mode, reg)
13833 enum machine_mode mode;
13838 if (reg == NULL && ! reload_in_progress && ! reload_completed)
13839 reg = gen_reg_rtx (Pmode);
13841 if (GET_CODE (orig) == CONST)
13843 if (GET_CODE (XEXP (orig, 0)) == PLUS
13844 && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
13847 if (GET_CODE (XEXP (orig, 0)) == PLUS)
13850 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 0),
13853 rs6000_machopic_legitimize_pic_address (XEXP (XEXP (orig, 0), 1),
13859 if (GET_CODE (offset) == CONST_INT)
13861 if (SMALL_INT (offset))
13862 return plus_constant (base, INTVAL (offset));
13863 else if (! reload_in_progress && ! reload_completed)
13864 offset = force_reg (Pmode, offset);
13867 rtx mem = force_const_mem (Pmode, orig);
13868 return machopic_legitimize_pic_address (mem, Pmode, reg);
13871 return gen_rtx (PLUS, Pmode, base, offset);
13874 /* Fall back on generic machopic code. */
13875 return machopic_legitimize_pic_address (orig, mode, reg);
13878 /* This is just a placeholder to make linking work without having to
13879 add this to the generic Darwin EXTRA_SECTIONS. If -mcall-aix is
13880 ever needed for Darwin (not too likely!) this would have to get a
13881 real definition. */
13888 #endif /* TARGET_MACHO */
13891 static unsigned int
13892 rs6000_elf_section_type_flags (decl, name, reloc)
13898 = default_section_type_flags_1 (decl, name, reloc,
13899 flag_pic || DEFAULT_ABI == ABI_AIX);
13901 if (TARGET_RELOCATABLE)
13902 flags |= SECTION_WRITE;
13907 /* Record an element in the table of global constructors. SYMBOL is
13908 a SYMBOL_REF of the function to be called; PRIORITY is a number
13909 between 0 and MAX_INIT_PRIORITY.
13911 This differs from default_named_section_asm_out_constructor in
13912 that we have special handling for -mrelocatable. */
13915 rs6000_elf_asm_out_constructor (symbol, priority)
13919 const char *section = ".ctors";
13922 if (priority != DEFAULT_INIT_PRIORITY)
13924 sprintf (buf, ".ctors.%.5u",
13925 /* Invert the numbering so the linker puts us in the proper
13926 order; constructors are run from right to left, and the
13927 linker sorts in increasing order. */
13928 MAX_INIT_PRIORITY - priority);
13932 named_section_flags (section, SECTION_WRITE);
13933 assemble_align (POINTER_SIZE);
13935 if (TARGET_RELOCATABLE)
13937 fputs ("\t.long (", asm_out_file);
13938 output_addr_const (asm_out_file, symbol);
13939 fputs (")@fixup\n", asm_out_file);
13942 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13946 rs6000_elf_asm_out_destructor (symbol, priority)
13950 const char *section = ".dtors";
13953 if (priority != DEFAULT_INIT_PRIORITY)
13955 sprintf (buf, ".dtors.%.5u",
13956 /* Invert the numbering so the linker puts us in the proper
13957 order; constructors are run from right to left, and the
13958 linker sorts in increasing order. */
13959 MAX_INIT_PRIORITY - priority);
13963 named_section_flags (section, SECTION_WRITE);
13964 assemble_align (POINTER_SIZE);
13966 if (TARGET_RELOCATABLE)
13968 fputs ("\t.long (", asm_out_file);
13969 output_addr_const (asm_out_file, symbol);
13970 fputs (")@fixup\n", asm_out_file);
13973 assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
13977 rs6000_elf_declare_function_name (file, name, decl)
13984 fputs ("\t.section\t\".opd\",\"aw\"\n\t.align 3\n", file);
13985 ASM_OUTPUT_LABEL (file, name);
13986 fputs (DOUBLE_INT_ASM_OP, file);
13988 assemble_name (file, name);
13989 fputs (",.TOC.@tocbase,0\n\t.previous\n\t.size\t", file);
13990 assemble_name (file, name);
13991 fputs (",24\n\t.type\t.", file);
13992 assemble_name (file, name);
13993 fputs (",@function\n", file);
13994 if (TREE_PUBLIC (decl) && ! DECL_WEAK (decl))
13996 fputs ("\t.globl\t.", file);
13997 assemble_name (file, name);
14000 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
14002 ASM_OUTPUT_LABEL (file, name);
14006 if (TARGET_RELOCATABLE
14007 && (get_pool_size () != 0 || current_function_profile)
14012 (*targetm.asm_out.internal_label) (file, "LCL", rs6000_pic_labelno);
14014 ASM_GENERATE_INTERNAL_LABEL (buf, "LCTOC", 1);
14015 fprintf (file, "\t.long ");
14016 assemble_name (file, buf);
14018 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno);
14019 assemble_name (file, buf);
14023 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
14024 ASM_DECLARE_RESULT (file, DECL_RESULT (decl));
14026 if (DEFAULT_ABI == ABI_AIX)
14028 const char *desc_name, *orig_name;
14030 orig_name = (*targetm.strip_name_encoding) (name);
14031 desc_name = orig_name;
14032 while (*desc_name == '.')
14035 if (TREE_PUBLIC (decl))
14036 fprintf (file, "\t.globl %s\n", desc_name);
14038 fprintf (file, "%s\n", MINIMAL_TOC_SECTION_ASM_OP);
14039 fprintf (file, "%s:\n", desc_name);
14040 fprintf (file, "\t.long %s\n", orig_name);
14041 fputs ("\t.long _GLOBAL_OFFSET_TABLE_\n", file);
14042 if (DEFAULT_ABI == ABI_AIX)
14043 fputs ("\t.long 0\n", file);
14044 fprintf (file, "\t.previous\n");
14046 ASM_OUTPUT_LABEL (file, name);
14052 rs6000_xcoff_asm_globalize_label (stream, name)
14056 fputs (GLOBAL_ASM_OP, stream);
14057 RS6000_OUTPUT_BASENAME (stream, name);
14058 putc ('\n', stream);
14062 rs6000_xcoff_asm_named_section (name, flags)
14064 unsigned int flags;
14067 static const char * const suffix[3] = { "PR", "RO", "RW" };
14069 if (flags & SECTION_CODE)
14071 else if (flags & SECTION_WRITE)
14076 fprintf (asm_out_file, "\t.csect %s%s[%s],%u\n",
14077 (flags & SECTION_CODE) ? "." : "",
14078 name, suffix[smclass], flags & SECTION_ENTSIZE);
14082 rs6000_xcoff_select_section (decl, reloc, align)
14085 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14087 if (decl_readonly_section_1 (decl, reloc, 1))
14089 if (TREE_PUBLIC (decl))
14090 read_only_data_section ();
14092 read_only_private_data_section ();
14096 if (TREE_PUBLIC (decl))
14099 private_data_section ();
14104 rs6000_xcoff_unique_section (decl, reloc)
14106 int reloc ATTRIBUTE_UNUSED;
14110 /* Use select_section for private and uninitialized data. */
14111 if (!TREE_PUBLIC (decl)
14112 || DECL_COMMON (decl)
14113 || DECL_INITIAL (decl) == NULL_TREE
14114 || DECL_INITIAL (decl) == error_mark_node
14115 || (flag_zero_initialized_in_bss
14116 && initializer_zerop (DECL_INITIAL (decl))))
14119 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
14120 name = (*targetm.strip_name_encoding) (name);
14121 DECL_SECTION_NAME (decl) = build_string (strlen (name), name);
14124 /* Select section for constant in constant pool.
14126 On RS/6000, all constants are in the private read-only data area.
14127 However, if this is being placed in the TOC it must be output as a
14131 rs6000_xcoff_select_rtx_section (mode, x, align)
14132 enum machine_mode mode;
14134 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED;
14136 if (ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (x, mode))
14139 read_only_private_data_section ();
14142 /* Remove any trailing [DS] or the like from the symbol name. */
14144 static const char *
14145 rs6000_xcoff_strip_name_encoding (name)
14151 len = strlen (name);
14152 if (name[len - 1] == ']')
14153 return ggc_alloc_string (name, len - 4);
14158 /* Section attributes. AIX is always PIC. */
14160 static unsigned int
14161 rs6000_xcoff_section_type_flags (decl, name, reloc)
14166 unsigned int align;
14167 unsigned int flags = default_section_type_flags_1 (decl, name, reloc, 1);
14169 /* Align to at least UNIT size. */
14170 if (flags & SECTION_CODE)
14171 align = MIN_UNITS_PER_WORD;
14173 /* Increase alignment of large objects if not already stricter. */
14174 align = MAX ((DECL_ALIGN (decl) / BITS_PER_UNIT),
14175 int_size_in_bytes (TREE_TYPE (decl)) > MIN_UNITS_PER_WORD
14176 ? UNITS_PER_FP_WORD : MIN_UNITS_PER_WORD);
14178 return flags | (exact_log2 (align) & SECTION_ENTSIZE);
14181 /* Output at end of assembler file.
14182 On the RS/6000, referencing data should automatically pull in text. */
14185 rs6000_xcoff_file_end ()
14188 fputs ("_section_.text:\n", asm_out_file);
14190 fputs (TARGET_32BIT
14191 ? "\t.long _section_.text\n" : "\t.llong _section_.text\n",
14194 #endif /* TARGET_XCOFF */
14197 /* Cross-module name binding. Darwin does not support overriding
14198 functions at dynamic-link time. */
14201 rs6000_binds_local_p (decl)
14204 return default_binds_local_p_1 (decl, 0);
14208 /* Compute a (partial) cost for rtx X. Return true if the complete
14209 cost has been computed, and false if subexpressions should be
14210 scanned. In either case, *TOTAL contains the cost result. */
14213 rs6000_rtx_costs (x, code, outer_code, total)
14215 int code, outer_code ATTRIBUTE_UNUSED;
14220 /* On the RS/6000, if it is valid in the insn, it is free.
14221 So this always returns 0. */
14232 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14233 && ((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 1))
14234 + 0x8000) >= 0x10000)
14235 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14236 ? COSTS_N_INSNS (2)
14237 : COSTS_N_INSNS (1));
14243 *total = ((GET_CODE (XEXP (x, 1)) == CONST_INT
14244 && (INTVAL (XEXP (x, 1)) & (~ (HOST_WIDE_INT) 0xffff)) != 0
14245 && ((INTVAL (XEXP (x, 1)) & 0xffff) != 0))
14246 ? COSTS_N_INSNS (2)
14247 : COSTS_N_INSNS (1));
14253 *total = COSTS_N_INSNS (2);
14256 switch (rs6000_cpu)
14258 case PROCESSOR_RIOS1:
14259 case PROCESSOR_PPC405:
14260 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14261 ? COSTS_N_INSNS (5)
14262 : (INTVAL (XEXP (x, 1)) >= -256
14263 && INTVAL (XEXP (x, 1)) <= 255)
14264 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14267 case PROCESSOR_PPC440:
14268 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14269 ? COSTS_N_INSNS (3)
14270 : COSTS_N_INSNS (2));
14273 case PROCESSOR_RS64A:
14274 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14275 ? GET_MODE (XEXP (x, 1)) != DImode
14276 ? COSTS_N_INSNS (20) : COSTS_N_INSNS (34)
14277 : (INTVAL (XEXP (x, 1)) >= -256
14278 && INTVAL (XEXP (x, 1)) <= 255)
14279 ? COSTS_N_INSNS (8) : COSTS_N_INSNS (12));
14282 case PROCESSOR_RIOS2:
14283 case PROCESSOR_MPCCORE:
14284 case PROCESSOR_PPC604e:
14285 *total = COSTS_N_INSNS (2);
14288 case PROCESSOR_PPC601:
14289 *total = COSTS_N_INSNS (5);
14292 case PROCESSOR_PPC603:
14293 case PROCESSOR_PPC7400:
14294 case PROCESSOR_PPC750:
14295 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14296 ? COSTS_N_INSNS (5)
14297 : (INTVAL (XEXP (x, 1)) >= -256
14298 && INTVAL (XEXP (x, 1)) <= 255)
14299 ? COSTS_N_INSNS (2) : COSTS_N_INSNS (3));
14302 case PROCESSOR_PPC7450:
14303 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14304 ? COSTS_N_INSNS (4)
14305 : COSTS_N_INSNS (3));
14308 case PROCESSOR_PPC403:
14309 case PROCESSOR_PPC604:
14310 case PROCESSOR_PPC8540:
14311 *total = COSTS_N_INSNS (4);
14314 case PROCESSOR_PPC620:
14315 case PROCESSOR_PPC630:
14316 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14317 ? GET_MODE (XEXP (x, 1)) != DImode
14318 ? COSTS_N_INSNS (5) : COSTS_N_INSNS (7)
14319 : (INTVAL (XEXP (x, 1)) >= -256
14320 && INTVAL (XEXP (x, 1)) <= 255)
14321 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4));
14324 case PROCESSOR_POWER4:
14325 *total = (GET_CODE (XEXP (x, 1)) != CONST_INT
14326 ? GET_MODE (XEXP (x, 1)) != DImode
14327 ? COSTS_N_INSNS (3) : COSTS_N_INSNS (4)
14328 : COSTS_N_INSNS (2));
14337 if (GET_CODE (XEXP (x, 1)) == CONST_INT
14338 && exact_log2 (INTVAL (XEXP (x, 1))) >= 0)
14340 *total = COSTS_N_INSNS (2);
14347 switch (rs6000_cpu)
14349 case PROCESSOR_RIOS1:
14350 *total = COSTS_N_INSNS (19);
14353 case PROCESSOR_RIOS2:
14354 *total = COSTS_N_INSNS (13);
14357 case PROCESSOR_RS64A:
14358 *total = (GET_MODE (XEXP (x, 1)) != DImode
14359 ? COSTS_N_INSNS (65)
14360 : COSTS_N_INSNS (67));
14363 case PROCESSOR_MPCCORE:
14364 *total = COSTS_N_INSNS (6);
14367 case PROCESSOR_PPC403:
14368 *total = COSTS_N_INSNS (33);
14371 case PROCESSOR_PPC405:
14372 *total = COSTS_N_INSNS (35);
14375 case PROCESSOR_PPC440:
14376 *total = COSTS_N_INSNS (34);
14379 case PROCESSOR_PPC601:
14380 *total = COSTS_N_INSNS (36);
14383 case PROCESSOR_PPC603:
14384 *total = COSTS_N_INSNS (37);
14387 case PROCESSOR_PPC604:
14388 case PROCESSOR_PPC604e:
14389 *total = COSTS_N_INSNS (20);
14392 case PROCESSOR_PPC620:
14393 case PROCESSOR_PPC630:
14394 *total = (GET_MODE (XEXP (x, 1)) != DImode
14395 ? COSTS_N_INSNS (21)
14396 : COSTS_N_INSNS (37));
14399 case PROCESSOR_PPC750:
14400 case PROCESSOR_PPC8540:
14401 case PROCESSOR_PPC7400:
14402 *total = COSTS_N_INSNS (19);
14405 case PROCESSOR_PPC7450:
14406 *total = COSTS_N_INSNS (23);
14409 case PROCESSOR_POWER4:
14410 *total = (GET_MODE (XEXP (x, 1)) != DImode
14411 ? COSTS_N_INSNS (18)
14412 : COSTS_N_INSNS (34));
14420 *total = COSTS_N_INSNS (4);
14424 /* MEM should be slightly more expensive than (plus (reg) (const)) */
14433 /* A C expression returning the cost of moving data from a register of class
14434 CLASS1 to one of CLASS2. */
14437 rs6000_register_move_cost (mode, from, to)
14438 enum machine_mode mode;
14439 enum reg_class from, to;
14441 /* Moves from/to GENERAL_REGS. */
14442 if (reg_classes_intersect_p (to, GENERAL_REGS)
14443 || reg_classes_intersect_p (from, GENERAL_REGS))
14445 if (! reg_classes_intersect_p (to, GENERAL_REGS))
14448 if (from == FLOAT_REGS || from == ALTIVEC_REGS)
14449 return (rs6000_memory_move_cost (mode, from, 0)
14450 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0));
14452 /* It's more expensive to move CR_REGS than CR0_REGS because of the shift...*/
14453 else if (from == CR_REGS)
14457 /* A move will cost one instruction per GPR moved. */
14458 return 2 * HARD_REGNO_NREGS (0, mode);
14461 /* Moving between two similar registers is just one instruction. */
14462 else if (reg_classes_intersect_p (to, from))
14463 return mode == TFmode ? 4 : 2;
14465 /* Everything else has to go through GENERAL_REGS. */
14467 return (rs6000_register_move_cost (mode, GENERAL_REGS, to)
14468 + rs6000_register_move_cost (mode, from, GENERAL_REGS));
14471 /* A C expressions returning the cost of moving data of MODE from a register to
14475 rs6000_memory_move_cost (mode, class, in)
14476 enum machine_mode mode;
14477 enum reg_class class;
14478 int in ATTRIBUTE_UNUSED;
14480 if (reg_classes_intersect_p (class, GENERAL_REGS))
14481 return 4 * HARD_REGNO_NREGS (0, mode);
14482 else if (reg_classes_intersect_p (class, FLOAT_REGS))
14483 return 4 * HARD_REGNO_NREGS (32, mode);
14484 else if (reg_classes_intersect_p (class, ALTIVEC_REGS))
14485 return 4 * HARD_REGNO_NREGS (FIRST_ALTIVEC_REGNO, mode);
14487 return 4 + rs6000_register_move_cost (mode, class, GENERAL_REGS);
14490 /* Return an RTX representing where to find the function value of a
14491 function returning MODE. */
14493 rs6000_complex_function_value (enum machine_mode mode)
14495 unsigned int regno;
14497 enum machine_mode inner = GET_MODE_INNER (mode);
14499 if (FLOAT_MODE_P (mode))
14500 regno = FP_ARG_RETURN;
14503 regno = GP_ARG_RETURN;
14505 /* 32-bit is OK since it'll go in r3/r4. */
14507 && GET_MODE_BITSIZE (inner) >= 32)
14508 return gen_rtx_REG (mode, regno);
14511 r1 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno),
14513 r2 = gen_rtx_EXPR_LIST (inner, gen_rtx_REG (inner, regno + 1),
14514 GEN_INT (GET_MODE_UNIT_SIZE (inner)));
14515 return gen_rtx_PARALLEL (mode, gen_rtvec (2, r1, r2));
14518 /* Define how to find the value returned by a function.
14519 VALTYPE is the data type of the value (as a tree).
14520 If the precise function being called is known, FUNC is its FUNCTION_DECL;
14521 otherwise, FUNC is 0.
14523 On the SPE, both FPs and vectors are returned in r3.
14525 On RS/6000 an integer value is in r3 and a floating-point value is in
14526 fp1, unless -msoft-float. */
14529 rs6000_function_value (tree valtype, tree func ATTRIBUTE_UNUSED)
14531 enum machine_mode mode;
14532 unsigned int regno;
14534 if ((INTEGRAL_TYPE_P (valtype)
14535 && TYPE_PRECISION (valtype) < BITS_PER_WORD)
14536 || POINTER_TYPE_P (valtype))
14539 mode = TYPE_MODE (valtype);
14541 if (TREE_CODE (valtype) == REAL_TYPE && TARGET_HARD_FLOAT && TARGET_FPRS)
14542 regno = FP_ARG_RETURN;
14543 else if (TREE_CODE (valtype) == COMPLEX_TYPE
14544 && TARGET_HARD_FLOAT
14545 && SPLIT_COMPLEX_ARGS)
14546 return rs6000_complex_function_value (mode);
14547 else if (TREE_CODE (valtype) == VECTOR_TYPE && TARGET_ALTIVEC)
14548 regno = ALTIVEC_ARG_RETURN;
14550 regno = GP_ARG_RETURN;
14552 return gen_rtx_REG (mode, regno);
14555 /* Define how to find the value returned by a library function
14556 assuming the value has mode MODE. */
14558 rs6000_libcall_value (enum machine_mode mode)
14560 unsigned int regno;
14562 if (GET_MODE_CLASS (mode) == MODE_FLOAT
14563 && TARGET_HARD_FLOAT && TARGET_FPRS)
14564 regno = FP_ARG_RETURN;
14565 else if (ALTIVEC_VECTOR_MODE (mode))
14566 regno = ALTIVEC_ARG_RETURN;
14567 else if (COMPLEX_MODE_P (mode) && SPLIT_COMPLEX_ARGS)
14568 return rs6000_complex_function_value (mode);
14570 regno = GP_ARG_RETURN;
14572 return gen_rtx_REG (mode, regno);
14575 /* Return true if TYPE is of type __ev64_opaque__. */
14578 is_ev64_opaque_type (type)
14582 && (type == opaque_V2SI_type_node
14583 || type == opaque_V2SF_type_node
14584 || type == opaque_p_V2SI_type_node
14585 || (TREE_CODE (type) == VECTOR_TYPE
14586 && TYPE_NAME (type)
14587 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14588 && DECL_NAME (TYPE_NAME (type))
14589 && strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
14590 "__ev64_opaque__") == 0)));
14594 rs6000_dwarf_register_span (reg)
14599 if (!TARGET_SPE || !SPE_VECTOR_MODE (GET_MODE (reg)))
14602 regno = REGNO (reg);
14604 /* The duality of the SPE register size wreaks all kinds of havoc.
14605 This is a way of distinguishing r0 in 32-bits from r0 in
14608 gen_rtx_PARALLEL (VOIDmode,
14611 gen_rtx_REG (SImode, regno + 1200),
14612 gen_rtx_REG (SImode, regno))
14614 gen_rtx_REG (SImode, regno),
14615 gen_rtx_REG (SImode, regno + 1200)));
14618 #include "gt-rs6000.h"